blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5b2e633438e6df952a07296adb3699013e3958b4
|
401a15e9196c2a9136982298b13394a87789bcb9
|
/sesion4.py
|
5f4fa9551479cda1846fbbb247d65fd5363a7d05
|
[] |
no_license
|
afuen5/clase
|
f4f6aed41a720cf989e0c834a34552edf4b0d781
|
816f914e5ca8dcd1c89afbcf8cbe0e5f003f99f7
|
refs/heads/main
| 2023-04-21T14:09:10.982178
| 2021-05-10T22:52:21
| 2021-05-10T22:52:21
| 364,003,641
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,328
|
py
|
### Research (Modules)
# 1: Using the tkinter module: create a basic GUI 600 px wide by 400 px tall,
#    with a black background and a button centered on the screen
# 2: File handling: read a file and print it to the console, then append a
#    line at the end of the document
# 3: Writing our own modules: put the Cash Register exercise (the
#    function/method that was written) into a module we can import
### EXAMPLE: reading a file line by line:
with open('ejemplo.txt', 'r') as lectura:
    # Read the first line
    linea = lectura.readline()
    while linea != '':  # EOF is the empty string; if a sentinel character is used, test for it instead
        # per-line processing would go here
        linea = lectura.readline()  # keep reading inside the while until the file is exhausted
### EXAMPLE: full read returning a list (array) of lines:
file_open = open('ejemplo.txt')
file_open.readlines()  # returns a list with one entry per line
# NOTE(review): the list returned above is discarded, and this handle is not
# opened via `with` — intentional for the demo, but not a pattern to copy.
file_open.close()
### Example of writing (APPEND) to a text file that already has content:
with open('ejemplo.txt', 'a') as escritura:
    escritura.write('\nSiguiente linea a insertar')
|
[
"afuen5"
] |
afuen5
|
28a5ec365a0f43f2dc31e01296e2913532096669
|
6d2bef91d3cb9bdd1e1b0728e2bb4e60781fdeee
|
/Regular Expression/1.py
|
bff9bfde8983f0250212f8c492015e63e17fa0b6
|
[] |
no_license
|
AnuragT04/Python
|
e0f69ce098a772d8096926daee8f5a551eb0da71
|
946d6aef3073c667eb0aa4edbe6026c1600784f7
|
refs/heads/master
| 2022-10-17T17:37:39.887660
| 2020-06-12T11:04:10
| 2020-06-12T11:04:10
| 271,777,061
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
"""Read a file name from stdin and print the sum of all integers in the file.

Every maximal run of digits (e.g. the "12" in "ab12cd") counts as one
integer; lines without digits contribute nothing.
"""
import re

# Fixes over the original: `file` and `sum` shadowed builtins, the file
# handle was never closed, the regex was not a raw string, and the
# `if len(y) == 0: continue` guard was redundant (an empty findall result
# simply yields an empty inner loop).
fname = input()
total = 0
with open(fname) as fh:
    for line in fh:
        for digits in re.findall(r'[0-9]+', line):
            total += int(digits)
print(total)
|
[
"tanurag0@gmail.com"
] |
tanurag0@gmail.com
|
df1132978b10c9d167cca545c9bcc2be89f811ca
|
7e729ea05a6a4e297bb832b77720a18cd0227805
|
/Projects/Online Workouts/w3resource/Basic - Part-II/program-6.py
|
5eee6ff572c22738bec57bd8df0328a6ebf7c813
|
[
"MIT"
] |
permissive
|
ivenpoker/Python-Projects
|
943d127ae900df52b43aac07c395e9d717196115
|
2975e1bd687ec8dbcc7a4842c13466cb86292679
|
refs/heads/master
| 2022-12-18T16:36:37.954835
| 2020-09-14T19:42:46
| 2020-09-14T19:43:09
| 180,323,469
| 1
| 0
|
MIT
| 2022-12-08T01:05:35
| 2019-04-09T08:42:40
|
Python
|
UTF-8
|
Python
| false
| false
| 5,493
|
py
|
#!/usr/bin/env python3
############################################################################################
# #
# Program purpose: Print a long text, convert the string to a list and print all the #
# words and their frequencies. #
# Program Author : Happi Yvan <ivensteinpoker@gmail.com> #
# Creation Date : September 4, 2019 #
# #
############################################################################################
def process_data(text=""):
    """Count word frequencies in *text*.

    Words are produced by splitting on single spaces; at most one trailing
    punctuation character (? . ! ') is stripped from each word before it
    is counted. Empty tokens (from leading/trailing/double spaces) are
    ignored.

    :param text: input string (default "").
    :return: dict mapping word -> number of occurrences.
    """
    counts = {}
    punc = ('?', '.', '!', "'")
    for raw in text.split(" "):
        word = raw.strip()
        if not word:
            continue
        # drop a single trailing punctuation mark, mirroring the original
        if word[-1] in punc:
            word = word[:-1]
        counts[word] = counts.get(word, 0) + 1
    return counts
# Demo driver: feed a long excerpt about the Declaration of Independence to
# process_data and print the resulting word -> frequency dictionary.
if __name__ == "__main__":
    string_words = '''United States Declaration of Independence
From Wikipedia, the free encyclopedia
The United States Declaration of Independence is the statement
adopted by the Second Continental Congress meeting at the Pennsylvania State
House (Independence Hall) in Philadelphia on July 4, 1776, which announced
that the thirteen American colonies, then at war with the Kingdom of Great
Britain, regarded themselves as thirteen independent sovereign states, no longer
under British rule. These states would found a new nation – the United States of
America. John Adams was a leader in pushing for independence, which was passed
on July 2 with no opposing vote cast. A committee of five had already drafted the
formal declaration, to be ready when Congress voted on independence.
John Adams persuaded the committee to select Thomas Jefferson to compose the original
draft of the document, which Congress would edit to produce the final version.
The Declaration was ultimately a formal explanation of why Congress had voted on July
2 to declare independence from Great Britain, more than a year after the outbreak of
the American Revolutionary War. The next day, Adams wrote to his wife Abigail: "The
Second Day of July 1776, will be the most memorable Epocha, in the History of America."
But Independence Day is actually celebrated on July 4, the date that the Declaration of
Independence was approved.
After ratifying the text on July 4, Congress issued the Declaration of Independence in
several forms. It was initially published as the printed Dunlap broadside that was widely
distributed and read to the public. The source copy used for this printing has been lost,
and may have been a copy in Thomas Jefferson's hand.[5] Jefferson's original draft, complete
with changes made by John Adams and Benjamin Franklin, and Jefferson's notes of changes made
by Congress, are preserved at the Library of Congress. The best-known version of the Declaration
is a signed copy that is displayed at the National Archives in Washington, D.C., and which is
popularly regarded as the official document. This engrossed copy was ordered by Congress on
July 19 and signed primarily on August 2.
The sources and interpretation of the Declaration have been the subject of much scholarly inquiry.
The Declaration justified the independence of the United States by listing colonial grievances against
King George III, and by asserting certain natural and legal rights, including a right of revolution.
Having served its original purpose in announcing independence, references to the text of the
Declaration were few in the following years. Abraham Lincoln made it the centerpiece of his rhetoric
(as in the Gettysburg Address of 1863) and his policies. Since then, it has become a well-known statement
on human rights, particularly its second sentence:
We hold these truths to be self-evident, that all men are created equal, that they are endowed by their
Creator with certain unalienable Rights, that among these are Life, Liberty and the pursuit of Happiness.
This has been called "one of the best-known sentences in the English language", containing "the most potent
and consequential words in American history". The passage came to represent a moral standard to which
the United States should strive. This view was notably promoted by Abraham Lincoln, who considered the
Declaration to be the foundation of his political philosophy and argued that it is a statement of principles
through which the United States Constitution should be interpreted.
The U.S. Declaration of Independence inspired many other similar documents in other countries, the first
being the 1789 Declaration of Flanders issued during the Brabant Revolution in the Austrian Netherlands
(modern-day Belgium). It also served as the primary model for numerous declarations of independence across
Europe and Latin America, as well as Africa (Liberia) and Oceania (New Zealand) during the first half of the
19th century.'''
    dict_data = process_data(text=string_words)
    print(f"Dictionary obtained:\n{dict_data}")
|
[
"nwaforhappiyvan@gmail.com"
] |
nwaforhappiyvan@gmail.com
|
36beb244070ec712de47cbc8a3a96244deecd9f8
|
e3ed1561febf37999177dcd1721498361b7bc1ef
|
/programFlowChallenge_v2.py
|
29eb25360296c38800d6f63285463e5484296580
|
[] |
no_license
|
ReginaGates/ControlFlowChalleng_CompletePythonMasterclass
|
f5820c88d57fb3721860793c5be5fe409b40e504
|
b9b911811f5583cb1c3a1ddf8c8f259e46164d9b
|
refs/heads/master
| 2021-06-30T12:11:35.921482
| 2017-09-16T23:09:38
| 2017-09-16T23:09:38
| 103,775,185
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,487
|
py
|
#Complete Python Masterclass - Tim Buchalka & Jean-Paul Roberts
#Challenge - Program Flow
#Regina Gates - First efforts with many edits, but without checking, and without correcting for all cases, like the IP
# address starting with '.'
# Even though this code does not account for every error, including adding a + sign, which signals a segment count,
# I'm going to submit to github, before I watch the solution video. -RG
# Ask for an IP-like string, count its '.'-separated segments, then report
# the digit count of each segment. The author's header comments note this
# does not validate all malformed inputs (e.g. a leading '.').
# NOTE(review): indentation below is reconstructed from the logic — confirm
# against the original file before relying on exact branch grouping.
name = input("What is your name? ")
user_IP_address = input("Hello, {}, what is your IP address? ".format(name))
seg_count = 0
new_string = ''
if user_IP_address == '':
    # empty input: re-prompt once (no further validation afterwards)
    user_IP_address = input("Please enter a valid IP address? ")
elif user_IP_address[len(user_IP_address) - 1] == '.':
    # trailing dot present: every non-digit is treated as a segment separator
    for char in user_IP_address:
        if char not in '0123456789':
            seg_count += 1
    if user_IP_address[0] == '.':
        # a leading dot would otherwise be counted as an extra segment
        seg_count -= 1
elif user_IP_address[len(user_IP_address) - 1] != '.':
    # no trailing dot: count separators, add one for the final segment and
    # append a dot so the printing loop below also emits the last segment
    for char in user_IP_address:
        if char not in '0123456789':
            seg_count += 1
    seg_count += 1
    user_IP_address += '.'
    if user_IP_address[0] =='.':
        seg_count -= 1
print("This IP address has {} segments.".format(seg_count))
seg_count = 0
for char in user_IP_address:
    if char in '0123456789':
        new_string += char  # accumulate digits of the current segment
    elif char == '.':
        # segment boundary: report the accumulated digits and reset
        seg_count += 1
        print("Segment {0} has {1} digits.".format(seg_count, len(new_string)))
        new_string = ''
|
[
"noreply@github.com"
] |
ReginaGates.noreply@github.com
|
0c0ac05186a84c2057b58b3d16491f2eb46e9c6e
|
781e2692049e87a4256320c76e82a19be257a05d
|
/all_data/exercism_data/python/anagram/063a2fcf6ae348c3a27d6d06c1cbfcb7.py
|
3b33fb0cd8f1cc2a526c7688c9e0f069636d306c
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038
| 2016-03-16T03:18:00
| 2016-03-16T03:18:42
| 59,454,921
| 4
| 0
| null | 2016-05-23T05:40:56
| 2016-05-23T05:40:56
| null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
def detect_anagrams(word, choices):
    """Return the entries of *choices* that are anagrams of *word*.

    Comparison is case-insensitive. A candidate that is the same word
    (identical letters in identical order, ignoring case) does not count
    as an anagram of itself.
    """
    target = word.lower()
    signature = sorted(target)
    return [
        candidate
        for candidate in choices
        if candidate.lower() != target and sorted(candidate.lower()) == signature
    ]
|
[
"rrc@berkeley.edu"
] |
rrc@berkeley.edu
|
8d1cafb9fe59b6c383d923cdbbbb61bb7ec8646c
|
213a777b28766e9b0519d29429d603a5dd24d212
|
/utils.py
|
9249bc42700791d0ae16b17682d2efebc558f918
|
[] |
no_license
|
DanyGLewin/EDV_calculator
|
4611e9cae5ae3f679986828b29b7fd45939fa4af
|
88d62117e3ed3059fb14cf7c10f45f10882e23c4
|
refs/heads/master
| 2020-06-04T15:33:42.941484
| 2019-12-08T06:51:11
| 2019-12-08T06:51:11
| 192,084,100
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
# Regex for dice-roll expressions such as "2d6+1d4-3": a leading NdM term,
# further +NdM / -NdM terms, and +K / -K flat modifiers (the (?!d) stops a
# modifier from eating the count of a following dice term).
# Raw strings fix the invalid escape sequences ("\d", "\+", "\-", "\/") that
# the original non-raw literals produced (a DeprecationWarning/SyntaxWarning
# on modern Python).
ROLL_PATTERN = r'(^\d+d\d+)|([\+\-]\d+d\d+)|([\+\-]\d+(?!d))'
# Regex for an optionally signed integer bonus, either at the start of the
# string or immediately following a '/' separator.
BONUS_PATTERN = r'(^[+-]?\d+)|((?<=\/)[+-]?\d+)*'
def concat_lists(list_of_iters):
    """Flatten one level: return a single list containing every item of
    every iterable in *list_of_iters*, preserving order."""
    return [item for iterable in list_of_iters for item in iterable]
def remove_nulls(origin_list):
    """Return *origin_list* with every falsy entry (None, 0, '', empty
    containers, False) removed."""
    return list(filter(None, origin_list))
def clean_float(f):
    """Return *f* as an int when it is a whole number; otherwise return it
    rounded to the smallest number of decimal places (checking 2-4) at
    which rounding loses no information, minus one place; fall back to
    four places."""
    whole = int(f)
    if f == whole:
        return whole
    for places in (2, 3, 4):
        if round(f, places) == f:
            # value is already exact at this precision; report one place less
            return round(f, places - 1)
    return round(f, 4)
|
[
"danyglewin@gmail.com"
] |
danyglewin@gmail.com
|
35c2a7c8d0c931f45a0dd7d9628d753e03d26eeb
|
d5995f397c12fae062c9feb08912c8b3a109ead8
|
/src/src/settings.py
|
bde7335bd642f8bbd06b02735bd43ee1a73c7c90
|
[] |
no_license
|
waffle-iron/eAccount
|
70cd1499da25cff4882885e239824498c57fb83b
|
e453164be127b1d957ceebd0d649a269d4dbce94
|
refs/heads/master
| 2021-01-18T11:15:15.558356
| 2016-06-17T16:23:39
| 2016-06-17T16:23:39
| 61,370,935
| 0
| 0
| null | 2016-06-17T12:06:51
| 2016-06-17T12:06:51
| null |
UTF-8
|
Python
| false
| false
| 3,554
|
py
|
"""
Django settings for src project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%g38=9v4u55yo_-!8fb#ci=kqcwmwjq9m_$=d$j^r-^%(5gw_9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'v0',
'rest_framework',
'corsheaders'
]
REST_FRAMEWORK_DOCS = {
'HIDE_DOCS': False # Default: False
}
CORS_ORIGIN_ALLOW_ALL = True
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'src.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'src.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.AllowAny',
),
}
|
[
"yugio33@yahoo.com"
] |
yugio33@yahoo.com
|
2d55ad40d2015db0c2f791d3071e3206f93564f8
|
673f9f418a6c951bcca783cfece870a5d7420a14
|
/crocoite/browser.py
|
35187897acde3e293c4dddb9640a61699082096f
|
[
"MIT"
] |
permissive
|
backwardn/crocoite
|
0253f15ca8375ce3a13db2c4c8e0234e341e5c7c
|
d93e59456e432562e6ceb7a275af47682cc30aa9
|
refs/heads/master
| 2020-07-02T10:35:36.834675
| 2019-12-30T11:02:03
| 2019-12-30T11:02:03
| 201,501,043
| 0
| 0
| null | 2019-08-09T16:08:48
| 2019-08-09T16:08:48
| null |
UTF-8
|
Python
| false
| false
| 20,130
|
py
|
# Copyright (c) 2017 crocoite contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Chrome browser interactions.
"""
import asyncio
from base64 import b64decode, b64encode
from datetime import datetime, timedelta
from http.server import BaseHTTPRequestHandler
from yarl import URL
from multidict import CIMultiDict
from .logger import Level
from .devtools import Browser, TabException
# These two classes’ only purpose is so we can later tell whether a body was
# base64-encoded or a unicode string
class Base64Body (bytes):
    """Response body that arrived base64-encoded from Chrome.

    The value is decoded on construction; the subclass itself only marks
    the origin, so the original encoding can be told apart from unicode
    bodies later on.
    """

    def __new__ (cls, value):
        decoded = b64decode (value)
        return bytes.__new__ (cls, decoded)

    @classmethod
    def fromBytes (cls, b):
        """ Build an instance from raw bytes (testing helper). """
        return cls (b64encode (b))
class UnicodeBody (bytes):
    """Response body that arrived as a unicode string, stored UTF-8 encoded.

    The subclass marks the origin so it can be distinguished from
    base64-encoded bodies.
    """

    def __new__ (cls, value):
        # exact type check on purpose: only plain str is acceptable here
        if type (value) is not str:
            raise TypeError ('expecting unicode string')
        encoded = value.encode ('utf-8')
        return bytes.__new__ (cls, encoded)
class Request:
    """ One HTTP request as reported by Chrome's network domain. """

    __slots__ = ('headers', 'body', 'initiator', 'hasPostData', 'method', 'timestamp')

    def __init__ (self, method=None, headers=None, body=None):
        # HTTP verb
        self.method = method
        self.headers = headers
        self.body = body
        # True when the POST body must be fetched separately
        self.hasPostData = False
        self.initiator = None
        self.timestamp = None

    def __repr__ (self):
        return f'Request({self.method!r}, {self.headers!r}, {self.body!r})'

    def __eq__ (self, b):
        if b is None:
            return False
        if not isinstance (b, Request):
            raise TypeError ('Can only compare equality with Request.')
        # hasPostData (fetch bookkeeping) and timestamp (wall clock) are
        # deliberately excluded from the comparison
        return (self.headers == b.headers
                and self.body == b.body
                and self.initiator == b.initiator
                and self.method == b.method)
class Response:
    """ One HTTP response as reported by Chrome's network domain. """

    __slots__ = ('status', 'statusText', 'headers', 'body', 'bytesReceived',
            'timestamp', 'mimeType')

    def __init__ (self, status=None, statusText=None, headers=None, body=None, mimeType=None):
        self.status = status
        self.statusText = statusText
        self.headers = headers
        self.body = body
        self.mimeType = mimeType
        # bytes that went over the network — not the body size
        self.bytesReceived = 0
        self.timestamp = None

    def __repr__ (self):
        return f'Response({self.status!r}, {self.statusText!r}, {self.headers!r}, {self.body!r}, {self.mimeType!r})'

    def __eq__ (self, b):
        if b is None:
            return False
        if not isinstance (b, Response):
            raise TypeError ('Can only compare equality with Response.')
        # bytesReceived and timestamp vary between runs and are ignored here
        return (self.status == b.status
                and self.statusText == b.statusText
                and self.headers == b.headers
                and self.body == b.body
                and self.mimeType == b.mimeType)
class ReferenceTimestamp:
    """ Map relative (monotonic) timestamps to absolute datetimes, given one
    reference pair of (relative seconds, absolute unix time). """

    def __init__ (self, relative, absolute):
        self.relative = timedelta (seconds=relative)
        self.absolute = datetime.utcfromtimestamp (absolute)

    def __call__ (self, relative):
        """ Return the absolute datetime corresponding to *relative*
        (a number of seconds or a timedelta). """
        if not isinstance (relative, timedelta):
            relative = timedelta (seconds=relative)
        offset = relative - self.relative
        return self.absolute + offset
class RequestResponsePair:
    """ One request/response exchange recorded from Chrome network events.

    Filled incrementally: fromRequestWillBeSent first, then fromResponse*/
    fromLoading* as the matching events arrive; bodies are fetched lazily
    via the prefetch* coroutines.
    """

    __slots__ = ('request', 'response', 'id', 'url', 'remoteIpAddress',
            'protocol', 'resourceType', '_time')

    def __init__ (self, id=None, url=None, request=None, response=None):
        self.request = request
        self.response = response
        self.id = id
        self.url = url
        self.remoteIpAddress = None
        self.protocol = None
        self.resourceType = None
        # ReferenceTimestamp translating Chrome's relative timestamps
        self._time = None

    def __repr__ (self):
        return f'RequestResponsePair({self.id!r}, {self.url!r}, {self.request!r}, {self.response!r})'

    def __eq__ (self, b):
        if not isinstance (b, RequestResponsePair):
            raise TypeError (f'Can only compare with {self.__class__.__name__}')
        # do not compare id and _time. These depend on external factors and do
        # not influence the request/response *content*
        return self.request == b.request and \
                self.response == b.response and \
                self.url == b.url and \
                self.remoteIpAddress == b.remoteIpAddress and \
                self.protocol == b.protocol and \
                self.resourceType == b.resourceType

    def fromRequestWillBeSent (self, req):
        """ Set request data from Chrome Network.requestWillBeSent event """
        r = req['request']
        self.id = req['requestId']
        self.url = URL (r['url'])
        self.resourceType = req.get ('type')
        self._time = ReferenceTimestamp (req['timestamp'], req['wallTime'])
        # each pair must only ever be initialised once
        assert self.request is None, req
        self.request = Request ()
        self.request.initiator = req['initiator']
        self.request.headers = CIMultiDict (self._unfoldHeaders (r['headers']))
        self.request.hasPostData = r.get ('hasPostData', False)
        self.request.method = r['method']
        self.request.timestamp = self._time (req['timestamp'])
        if self.request.hasPostData:
            # small bodies are inlined in the event; larger ones must be
            # fetched later via prefetchRequestBody
            postData = r.get ('postData')
            if postData is not None:
                self.request.body = UnicodeBody (postData)

    def fromResponse (self, r, timestamp=None, resourceType=None):
        """
        Set response data from Chrome’s Response object.

        Request must exist. Updates if response was set before. Sometimes
        fromResponseReceived is triggered twice by Chrome. No idea why.
        """
        assert self.request is not None, (self.request, r)
        if not timestamp:
            timestamp = self.request.timestamp
        self.remoteIpAddress = r.get ('remoteIPAddress')
        self.protocol = r.get ('protocol')
        if resourceType:
            self.resourceType = resourceType
        # a response may contain updated request headers (i.e. those actually
        # sent over the wire)
        if 'requestHeaders' in r:
            self.request.headers = CIMultiDict (self._unfoldHeaders (r['requestHeaders']))
        self.response = Response ()
        self.response.headers = CIMultiDict (self._unfoldHeaders (r['headers']))
        self.response.status = r['status']
        self.response.statusText = r['statusText']
        self.response.timestamp = timestamp
        self.response.mimeType = r['mimeType']

    def fromResponseReceived (self, resp):
        """ Set response data from Chrome Network.responseReceived """
        return self.fromResponse (resp['response'],
                self._time (resp['timestamp']), resp['type'])

    def fromLoadingFinished (self, data):
        # record the total bytes that went over the wire for this item
        self.response.bytesReceived = data['encodedDataLength']

    def fromLoadingFailed (self, data):
        # a failed item carries no usable response
        self.response = None

    @staticmethod
    def _unfoldHeaders (headers):
        """
        A host may send multiple headers using the same key, which Chrome folds
        into the same item. Separate those.
        """
        items = []
        for k in headers.keys ():
            for v in headers[k].split ('\n'):
                items.append ((k, v))
        return items

    async def prefetchRequestBody (self, tab):
        # POST data is not part of requestWillBeSent; fetch it on demand
        if self.request.hasPostData and self.request.body is None:
            try:
                postData = await tab.Network.getRequestPostData (requestId=self.id)
                self.request.body = UnicodeBody (postData['postData'])
            except TabException:
                # body unavailable (e.g. tab gone); leave it unset
                self.request.body = None

    async def prefetchResponseBody (self, tab):
        """ Fetch response body """
        try:
            body = await tab.Network.getResponseBody (requestId=self.id)
            if body['base64Encoded']:
                self.response.body = Base64Body (body['body'])
            else:
                self.response.body = UnicodeBody (body['body'])
        except TabException:
            self.response.body = None
class NavigateError (IOError):
    """ Raised when Page.navigate reports an errorText (see SiteLoader.navigate). """
    pass
class PageIdle:
    """ Page idle event """

    __slots__ = ('idle', )

    def __init__ (self, idle):
        # False when a frame starts loading, True once no frame is loading
        self.idle = idle

    def __bool__ (self):
        return self.idle
class FrameNavigated:
    """ Event: a frame committed navigation to a new URL. """

    __slots__ = ('id', 'url', 'mimeType')

    def __init__ (self, id, url, mimeType):
        self.id = id
        # coerce to a URL object for uniform handling downstream
        self.url = URL (url)
        self.mimeType = mimeType
class SiteLoader:
"""
Load site in Chrome and monitor network requests
XXX: track popup windows/new tabs and close them
"""
__slots__ = ('requests', 'browser', 'logger', 'tab', '_iterRunning',
'_framesLoading', '_rootFrame')
allowedSchemes = {'http', 'https'}
def __init__ (self, browser, logger):
self.requests = {}
self.browser = Browser (url=browser)
self.logger = logger.bind (context=type (self).__name__)
self._iterRunning = []
self._framesLoading = set ()
self._rootFrame = None
async def __aenter__ (self):
tab = self.tab = await self.browser.__aenter__ ()
# enable events
await asyncio.gather (*[
tab.Log.enable (),
tab.Network.enable(),
tab.Page.enable (),
tab.Inspector.enable (),
tab.Network.clearBrowserCache (),
tab.Network.clearBrowserCookies (),
])
return self
async def __aexit__ (self, exc_type, exc_value, traceback):
for task in self._iterRunning:
# ignore any results from stuff we did not end up using anyway
if not task.done ():
task.cancel ()
self._iterRunning = []
await self.browser.__aexit__ (exc_type, exc_value, traceback)
self.tab = None
return False
def __len__ (self):
return len (self.requests)
async def __aiter__ (self):
""" Retrieve network items """
tab = self.tab
assert tab is not None
handler = {
tab.Network.requestWillBeSent: self._requestWillBeSent,
tab.Network.responseReceived: self._responseReceived,
tab.Network.loadingFinished: self._loadingFinished,
tab.Network.loadingFailed: self._loadingFailed,
tab.Log.entryAdded: self._entryAdded,
tab.Page.javascriptDialogOpening: self._javascriptDialogOpening,
tab.Page.frameStartedLoading: self._frameStartedLoading,
tab.Page.frameStoppedLoading: self._frameStoppedLoading,
tab.Page.frameNavigated: self._frameNavigated,
}
# The implementation is a little advanced. Why? The goal here is to
# process events from the tab as quickly as possible (i.e.
# asynchronously). We need to make sure that JavaScript dialogs are
# handled immediately for instance. Otherwise they stall every
# other request. Also, we don’t want to use an unbounded queue,
# since the items yielded can get quite big (response body). Thus
# we need to block (yield) for every item completed, but not
# handled by the consumer (caller).
running = self._iterRunning
tabGetTask = asyncio.ensure_future (self.tab.get ())
running.append (tabGetTask)
while True:
done, pending = await asyncio.wait (running, return_when=asyncio.FIRST_COMPLETED)
for t in done:
result = t.result ()
if result is None:
pass
elif t == tabGetTask:
method, data = result
f = handler.get (method, None)
if f is not None:
task = asyncio.ensure_future (f (**data))
pending.add (task)
tabGetTask = asyncio.ensure_future (self.tab.get ())
pending.add (tabGetTask)
else:
yield result
running = pending
self._iterRunning = running
async def navigate (self, url):
ret = await self.tab.Page.navigate(url=url)
self.logger.debug ('navigate',
uuid='9d47ded2-951f-4e09-86ee-fd4151e20666', result=ret)
if 'errorText' in ret:
raise NavigateError (ret['errorText'])
self._rootFrame = ret['frameId']
# internal chrome callbacks
async def _requestWillBeSent (self, **kwargs):
self.logger.debug ('requestWillBeSent',
uuid='b828d75a-650d-42d2-8c66-14f4547512da', args=kwargs)
reqId = kwargs['requestId']
req = kwargs['request']
url = URL (req['url'])
logger = self.logger.bind (reqId=reqId, reqUrl=url)
if url.scheme not in self.allowedSchemes:
return
ret = None
item = self.requests.get (reqId)
if item:
# redirects never “finish” loading, but yield another requestWillBeSent with this key set
redirectResp = kwargs.get ('redirectResponse')
if redirectResp:
if item.url != url:
# this happens for unknown reasons. the docs simply state
# it can differ in case of a redirect. Fix it and move on.
logger.warning ('redirect url differs',
uuid='558a7df7-2258-4fe4-b16d-22b6019cc163',
expected=item.url)
redirectResp['url'] = str (item.url)
item.fromResponse (redirectResp)
logger.info ('redirect', uuid='85eaec41-e2a9-49c2-9445-6f19690278b8', target=url)
# XXX: queue this? no need to wait for it
await item.prefetchRequestBody (self.tab)
# cannot fetch response body due to race condition (item id reused)
ret = item
else:
logger.warning ('request exists', uuid='2c989142-ba00-4791-bb03-c2a14e91a56b')
item = RequestResponsePair ()
item.fromRequestWillBeSent (kwargs)
self.requests[reqId] = item
return ret
async def _responseReceived (self, **kwargs):
self.logger.debug ('responseReceived',
uuid='ecd67e69-401a-41cb-b4ec-eeb1f1ec6abb', args=kwargs)
reqId = kwargs['requestId']
item = self.requests.get (reqId)
if item is None:
return
resp = kwargs['response']
url = URL (resp['url'])
logger = self.logger.bind (reqId=reqId, respUrl=url)
if item.url != url:
logger.error ('url mismatch', uuid='7385f45f-0b06-4cbc-81f9-67bcd72ee7d0', respUrl=url)
if url.scheme in self.allowedSchemes:
item.fromResponseReceived (kwargs)
else:
logger.warning ('scheme forbidden', uuid='2ea6e5d7-dd3b-4881-b9de-156c1751c666')
async def _loadingFinished (self, **kwargs):
"""
Item was fully loaded. For some items the request body is not available
when responseReceived is fired, thus move everything here.
"""
self.logger.debug ('loadingFinished',
uuid='35479405-a5b5-4395-8c33-d3601d1796b9', args=kwargs)
reqId = kwargs['requestId']
item = self.requests.pop (reqId, None)
if item is None:
# we never recorded this request (blacklisted scheme, for example)
return
if not item.response:
# chrome failed to send us a responseReceived event for this item,
# so we can’t record it (missing request/response headers)
self.logger.error ('response missing',
uuid='fac3ab96-3f9b-4c5a-95c7-f83b675cdcb9', requestId=item.id)
return
req = item.request
if item.url.scheme in self.allowedSchemes:
item.fromLoadingFinished (kwargs)
# XXX queue both
await asyncio.gather (item.prefetchRequestBody (self.tab), item.prefetchResponseBody (self.tab))
return item
async def _loadingFailed (self, **kwargs):
self.logger.info ('loadingFailed',
uuid='4a944e85-5fae-4aa6-9e7c-e578b29392e4', args=kwargs)
reqId = kwargs['requestId']
logger = self.logger.bind (reqId=reqId)
item = self.requests.pop (reqId, None)
if item is not None:
item.fromLoadingFailed (kwargs)
return item
async def _entryAdded (self, **kwargs):
""" Log entry added """
entry = kwargs['entry']
level = {'verbose': Level.DEBUG, 'info': Level.INFO,
'warning': Level.WARNING,
'error': Level.ERROR}.get (entry.pop ('level'), Level.INFO)
entry['uuid'] = 'e62ffb5a-0521-459c-a3d9-1124551934d2'
self.logger (level, 'console', **entry)
async def _javascriptDialogOpening (self, **kwargs):
t = kwargs.get ('type')
if t in {'alert', 'confirm', 'prompt'}:
self.logger.info ('js dialog',
uuid='d6f07ce2-648e-493b-a1df-f353bed27c84',
action='cancel', type=t, message=kwargs.get ('message'))
await self.tab.Page.handleJavaScriptDialog (accept=False)
elif t == 'beforeunload':
# we must accept this one, otherwise the page will not unload/close
self.logger.info ('js dialog',
uuid='96399b99-9834-4c8f-bd93-cb9fa2225abd',
action='proceed', type=t, message=kwargs.get ('message'))
await self.tab.Page.handleJavaScriptDialog (accept=True)
else: # pragma: no cover
self.logger.warning ('js dialog unknown',
uuid='3ef7292e-8595-4e89-b834-0cc6bc40ee38', **kwargs)
async def _frameStartedLoading (self, **kwargs):
    """ A frame began loading: track it and report the page as busy. """
    self.logger.debug ('frameStartedLoading',
            uuid='bbeb39c0-3304-4221-918e-f26bd443c566', args=kwargs)
    frameId = kwargs['frameId']
    self._framesLoading.add (frameId)
    return PageIdle (False)
async def _frameStoppedLoading (self, **kwargs):
    """ A frame finished loading; once no frames remain in flight, report
    the page as idle. """
    self.logger.debug ('frameStoppedLoading',
            uuid='fcbe8110-511c-4cbb-ac2b-f61a5782c5a0', args=kwargs)
    frameId = kwargs['frameId']
    self._framesLoading.remove (frameId)
    if not self._framesLoading:
        return PageIdle (True)
async def _frameNavigated (self, **kwargs):
    """ Emit FrameNavigated for the root frame only; subframe navigations
    are ignored (implicitly return None). """
    self.logger.debug ('frameNavigated',
            uuid='0e876f7d-7129-4612-8632-686f42ac6e1f', args=kwargs)
    frame = kwargs['frame']
    frameId = frame['id']
    if frameId != self._rootFrame:
        return
    assert frame.get ('parentId', None) is None, "root frame must not have a parent"
    return FrameNavigated (frameId, frame['url'], frame['mimeType'])
|
[
"lars@6xq.net"
] |
lars@6xq.net
|
7e7ef5d3b401e5fb75250bfbcaee25f66f613d00
|
8e3486b4a58f13f46d306b0aca8ff57f7de83fb2
|
/tennisGame/tennis.py
|
c5429138d195f2ff1ed85a8d86f1bec5265b26a8
|
[] |
no_license
|
Minato007/python-dev
|
18f8c5af1da3877a64625f5c95546fd3fe34d752
|
ccd1904df3dccf959a2b5d7806ffd89e55f5a487
|
refs/heads/master
| 2020-04-09T08:43:55.498204
| 2018-12-10T06:03:06
| 2018-12-10T06:03:06
| 160,206,127
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,490
|
py
|
import pygame
import random
state = 'gamestart'
class Player:
    """A paddle: position, size, current vertical speed and score."""

    def __init__(self, x, y, width, height):
        self.x, self.y = x, y
        self.width, self.height = width, height
        self.dy = 0      # vertical velocity, driven by the key handlers
        self.score = 0   # points won so far
class Ball:
    """The ball: position, size, a random initial velocity and its sprite."""

    def __init__(self, x, y, size):
        self.x = x
        self.y = y
        self.size = size
        # re-roll until neither component is zero, so the ball never
        # travels purely horizontally or vertically
        self.dx = self.dy = 0
        while self.dx == 0 or self.dy == 0:
            self.dx = random.randint(-3, 3)
            self.dy = random.randint(-3, 3)
        self.img = pygame.transform.scale(
            pygame.image.load('ball.png'), (self.size, self.size))
        self.move = False   # parked until released by the space key / timer
# --- game objects and pygame setup ---
size = (540, 500)   # window (width, height)
player1 = Player(5, 100, 20, 100)   # left paddle (w/s keys)
player2 = Player(size[0]-5-20, 100, 20, 100)   # right paddle (arrow keys)
balls = []
ball = Ball(size[0]/2-20, size[1]/2-20, 40)   # first ball, centred
balls.append(ball)
pygame.init()
pygame.font.init() # you have to call this at the start,
# if you want to use this module.
myfont = pygame.font.SysFont('Comic Sans MS', 30)
myfont1 = pygame.font.SysFont('Calibri', 65, True, False)
text_game_over = myfont1.render("Game Over", True, (255,0,0))
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
done = False
c = 0   # background flash intensity; reset to 0 whenever someone scores
# NOTE(review): `clock` is created twice; this second assignment is redundant.
clock = pygame.time.Clock()
counter = 5   # seconds until the next extra ball spawns
text = '5'    # countdown text shown on screen
pygame.time.set_timer(pygame.USEREVENT, 1000)   # 1-second countdown tick
font = pygame.font.SysFont('Consolas', 30)
# --- main loop ---
# FIX: both scoring passes below removed balls from `balls` while iterating
# over it, which makes Python skip the element following each removal; they
# now iterate over a snapshot (list(balls)).
while not done:
    for event in pygame.event.get():
        if event.type == pygame.USEREVENT:
            # once-a-second tick: count down, then spawn an extra ball
            counter -= 1
            if counter > 0:
                text = str(counter)
            else:
                # (dead `text = 'new ball'` assignment removed: it was
                # immediately overwritten by `text = '5'` below)
                ball = Ball(size[0]/2-20, size[1]/2-20, 40)
                ball.move = True
                balls.append(ball)
                counter = 5
                text = '5'
        if event.type == pygame.QUIT:
            done = True
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                ball.move = True   # release the most recently created ball
            if event.key == pygame.K_s:
                player1.dy = 5
            if event.key == pygame.K_w:
                player1.dy = -5
            if event.key == pygame.K_DOWN:
                player2.dy = 5
            if event.key == pygame.K_UP:
                player2.dy = -5
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_s:
                player1.dy = 0
            if event.key == pygame.K_w:
                player1.dy = 0
            if event.key == pygame.K_DOWN:
                player2.dy = 0
            if event.key == pygame.K_UP:
                player2.dy = 0
    if state == "gamestart":
        # move the paddles and clamp them to the window
        player1.y += player1.dy
        player2.y += player2.dy
        if player1.y < 1:
            player1.y = 1
        if player2.y < 1:
            player2.y = 1
        if player1.y > size[1] - 1 - player1.height:
            player1.y = size[1] - 1 - player1.height
        if player2.y > size[1] - 1 - player2.height:
            player2.y = size[1] - 1 - player2.height
        for ball in balls:
            if ball.move:
                ball.x += ball.dx
                ball.y += ball.dy
        # right side: bounce off player2's paddle (speeding up 10%) or
        # remove the ball and score for player1
        for ball in list(balls):
            if ball.x > size[0] - ball.size - player2.width:
                if (ball.y + ball.size/2 > player2.y) and (ball.y + ball.size/2 < player2.y + player2.height):
                    ball.dx = -abs(ball.dx)
                    ball.dx = ball.dx * 1.1
                    ball.dy = ball.dy * 1.1
                else:
                    balls.remove(ball)
                    player1.score += 1
                    c = 0
        for ball in balls:
            if ball.y > size[1] - ball.size:
                ball.dy = -abs(ball.dy)
        # left side: bounce off player1's paddle or score for player2
        for ball in list(balls):
            if ball.x < player1.x + player1.width:
                if (ball.y + ball.size/2 > player1.y) and (ball.y + ball.size/2 < player1.y + player1.height):
                    ball.dx = abs(ball.dx)
                    ball.dx = ball.dx * 1.1
                    ball.dy = ball.dy * 1.1
                else:
                    balls.remove(ball)
                    player2.score += 1
                    c = 0
        for ball in balls:
            if ball.y < 1:
                ball.dy = abs(ball.dy)
        c += 10
        if c > 255:
            c = 255
    if state == "gameOver":
        c = 255
    # draw background, paddles, balls, scores and countdown
    screen.fill((255,c,c))
    pygame.draw.rect(screen, (0, 128, 0), [
        player1.x, player1.y, player1.width, player1.height
    ], 0)
    pygame.draw.rect(screen, (0, 128, 0), [
        player2.x, player2.y, player2.width, player2.height
    ], 0)
    for ball in balls:
        screen.blit(ball.img, (ball.x, ball.y))
    text_surface = myfont.render(str(player1.score),False,(20,20,250))
    screen.blit(text_surface, (size[0]/3, 10))
    text_surface = myfont.render(str(player2.score),False,(20,20,250))
    screen.blit(text_surface, (size[0]*2/3, 10))
    screen.blit(font.render(text, True, (0, 0, 0)), (250, 10)) # draw the countdown
    if (player1.score == 3) or (player2.score == 3):
        state = 'gameOver'
    if state == 'gameOver':
        screen.blit(text_game_over, [10, 200])
    pygame.display.flip()
    clock.tick(80)
pygame.quit()
|
[
"yellow_sama@mail.ru"
] |
yellow_sama@mail.ru
|
82104f52be98ba50d1033ca4bd55be1cbe90cd11
|
02e116cd7ae672d3ab999c6f389288c7aa1028ec
|
/ttt.py
|
d7da943ae9ce3a15f1f6c5a87697df020c978f6a
|
[] |
no_license
|
tanxiumei/interfaceWushui
|
6abcd8ccb8948d2b596b654bac471d8d40abf868
|
ffa2ee71dbcbfed1f0153943eda9640bfc29e31e
|
refs/heads/master
| 2022-12-20T06:31:18.629197
| 2020-10-13T03:06:23
| 2020-10-13T03:06:23
| 303,572,596
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 607
|
py
|
import datetime
from xlrd import open_workbook
from xlutils.copy import copy
import os
from test_excel import ParamFactory
# Example of appending one row to an existing Excel workbook:
# r_xls = open_workbook('equipmentid_param.xls')  # open the workbook
# row = r_xls.sheets()[0].nrows  # number of rows already present
# excel = copy(r_xls)  # convert the xlrd object into an xlwt one
# table = excel.get_sheet(0)  # sheet to operate on
#
# # append one cell: (row, column, content)
# table.write(5, 10, 'content1')
#
# excel.save('equipmentid_param.xls')  # save, overwriting the file

# BUG FIX: `string` was undefined here, so this line raised NameError.
# Parse an explicit example timestamp; substitute real input as needed.
string = '2020-10-13 03:06:23'
parsed = datetime.datetime.strptime(string, '%Y-%m-%d %H:%M:%S')
|
[
"757560315@qq.com"
] |
757560315@qq.com
|
b1e80f1195383a0d0027d1835ba47e97c66c74ae
|
97aa853c4e05fdade3938205e4e63b66ebb430b1
|
/Ex3.4.4.2-2.py
|
b0fdf4e6a6f6f2bb1e40cef84131016a95093aba
|
[] |
no_license
|
EllenHoffmann/BasicTrack3
|
30fd397e5e60837c3e0c0fcf3b93f9e51ffc0a4e
|
95b57f5dfd0360288449f9eafb2e3cb1d51421a8
|
refs/heads/master
| 2022-12-23T17:20:27.656751
| 2020-09-30T15:58:02
| 2020-09-30T15:58:02
| 296,393,060
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 226
|
py
|
# Print one line for each month of the year.
months = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
for month in months:
    print("One of the months of the year is " + month)
|
[
"70917850+EllenHoffmann@users.noreply.github.com"
] |
70917850+EllenHoffmann@users.noreply.github.com
|
15cc7da59b5b7e081c4812327c849244bde141ca
|
8e6c2c8fc95551416fe17b1f423f8ed80f44bce4
|
/example_scripts/getusers.py
|
fae0871815a9bd73f808424ad0e73120bad1a0ff
|
[
"MIT"
] |
permissive
|
Tethik/eves-ornate-lockbox
|
b1b512aa0b7c4a32f5597478b7115f2d5cdd01a2
|
9eb9acbb19b193d0f852c70d6c626f62780f4f4e
|
refs/heads/master
| 2021-01-02T23:07:46.725732
| 2014-03-13T02:08:32
| 2014-03-13T02:08:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
#!/usr/bin/python
# Python 2 example client: GET the /users endpoint of a local service
# with HTTP Basic auth and pretty-print the decoded JSON response.
import httplib
import base64
import json
# Service location and credentials.
# NOTE(review): Basic auth with an empty password ("secret:"); confirm
# this matches the server's example configuration.
addr = "127.0.0.1:5000"
endpoint = "/users"
auth = "secret:"
use_ssl = False
headers = {"Content-type": "application/json", "Accept": "text/plain", "Authorization": "Basic " + base64.b64encode(auth)}
h1 = httplib.HTTPConnection(addr)
if use_ssl:
    h1 = httplib.HTTPSConnection(addr)
h1.request("GET", endpoint, headers=headers)
resp = h1.getresponse().read()
decoded_resp = json.loads(resp)
print decoded_resp
h1.close()
|
[
"joakim@uddholm.com"
] |
joakim@uddholm.com
|
317510238069e3293f597cd045c2e062ec2bbe4a
|
a4ee3873ccd4b09a26b9febff9cd1a678dd90cc2
|
/solved/swea5650.py
|
760e244f3f34133ef82a8671c933fefe7f975c8d
|
[] |
no_license
|
young2141/PS_Codes
|
d37d97d9b92931d27cefcef052a7f3f897ef8e1c
|
856fe7646d133cfb7e107b05ffe8d03ab8901e2d
|
refs/heads/master
| 2023-02-25T19:10:41.890785
| 2023-02-14T04:16:36
| 2023-02-14T04:16:36
| 191,506,351
| 0
| 0
| null | 2019-06-14T04:06:42
| 2019-06-12T05:53:16
|
C++
|
UTF-8
|
Python
| false
| false
| 1,659
|
py
|
#sw expert academy 5650 dfs
from pprint import pprint
# Direction codes used throughout: 0 up, 1 right, 2 down, 3 left.
dir = {'UP':0,'RIGHT':1,'DOWN':2,'LEFT':3}
# bounce[d][cell]: new travel direction after hitting block type `cell`
# (1..5) while moving in direction d.  Columns 0 and 6..10 are fillers.
bounce = [[2,2,1,3,2,2,0,0,0,0,0],
        [3,3,3,2,0,3,1,1,1,1,1],
        [0,1,0,0,3,0,2,2,2,2,2],
        [1,0,2,1,1,1,3,3,3,3,3],]
# Row/column deltas for directions 0..3.
dy = [-1,0,1,0]
dx = [0,1,0,-1]
def find(w,n,ii,jj,holes,d):
    """Simulate one pinball shot on the n x n board `w`, starting from the
    empty cell (ii, jj) moving in direction d (0 up, 1 right, 2 down, 3 left).

    Cell values: 0 empty, 1..5 deflecting blocks (see `bounce`), 6..10
    wormhole pairs (coordinates in `holes`), -1 black hole.
    Returns the number of block deflections scored.  Leaving the board
    returns count*2 + 1 (apparently modelling the rim as a wall that sends
    the ball back along its path -- verify against the problem statement);
    reaching the start cell or a black hole ends the shot with `count`.
    """
    count = 0
    y,x = ii,jj
    while True:
        # advance one cell in the current direction
        y,x = y + dy[d], x + dx[d]
        if y <0 or y >=n or x<0 or x >= n:
            return count*2 + 1
        elif y == ii and x == jj:
            return count
        elif w[y][x] == 0:
            pass
        elif 1 <= w[y][x] <= 5:
            # deflecting block: new direction comes from the bounce table
            d = bounce[d][w[y][x]]
            count += 1
        elif 6 <= w[y][x] <= 10:
            # wormhole: jump to the partner cell carrying the same number
            if holes[w[y][x]][0] == [y,x]:
                y,x = holes[w[y][x]][1]
            else :
                y,x = holes[w[y][x]][0]
        elif w[y][x] == -1 :
            # black hole swallows the ball
            return count
# Read T test cases; for each board, try a shot from every empty cell in
# every direction and report the best score.
tc = int(input())
for tc in range(1,tc+1):
    n = int(input())
    w = [[int(x) for x in input().split()] for _ in range(n)]
    answer = 0
    # collect both coordinates of each wormhole pair (numbers 6..10)
    holes = [[] for _ in range(11)]
    for i in range(n):
        for j in range(n):
            if 6 <= w[i][j] <= 10:
                holes[w[i][j]].append([i,j])
    for i in range(n):
        for j in range(n):
            if w[i][j] == 0:
                for d in dir.values():
                    answer = max(answer,find(w,n,i,j,holes,d))
    print('#{} {}'.format(tc, answer))
'''
1
10
0 1 0 3 0 0 0 0 7 0
0 0 0 0 -1 0 5 0 0 0
0 4 0 0 0 3 0 0 2 2
1 0 0 0 1 0 0 3 0 0
0 0 3 0 0 0 0 0 6 0
3 0 0 0 2 0 0 1 0 0
0 0 0 0 0 1 0 0 4 0
0 5 0 4 1 0 7 0 0 5
0 0 0 0 0 1 0 0 0 0
2 0 6 0 0 4 0 0 0 4
'''
|
[
"noreply@github.com"
] |
young2141.noreply@github.com
|
840826d546985b8d4f5d68fd09f358fd07b5351b
|
887f89e6e86b76a6fd128e76e3d198731fc3761d
|
/class_circle.py
|
a24894e0651641f8d0f76188d86baf3e67418cf8
|
[] |
no_license
|
geovanne97/python_learn
|
5c38a26cf7a81ec28de400acee11d89320dfcfc9
|
5914d1bc4ee91244f0ef95537031fec25ffc0f97
|
refs/heads/master
| 2021-10-24T18:39:19.351875
| 2019-03-27T17:46:03
| 2019-03-27T17:46:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
class Circle():
    """A circle described by its radius (default 1)."""

    pi = 3.14  # class-wide approximation of pi

    def __init__(self, radius=1):
        self.radius = radius

    def area(self):
        """Return the area, pi * r^2."""
        return self.radius * self.radius * Circle.pi

    def set_radius(self, new_r):
        """Replace the radius with *new_r*."""
        self.radius = new_r
# Demo: build a circle, resize it, and print its area.
myc = Circle(3)
myc.set_radius(100)
print(myc.area())
|
[
"geovannessaraiva97@gmail.com"
] |
geovannessaraiva97@gmail.com
|
57f7d154697ab944d89794d7699105e1d49cb04e
|
165d45b38f681f80c4f98372415835a14befb9dd
|
/bin/doc-format
|
3467037e15117c305cdbd5cc091647486e2f09ad
|
[] |
no_license
|
Mark-Seaman/My-Book-Online
|
4f9c5c9c2b6bd21ee6e2105f6def29ce744d3366
|
0a280ef23b31dc26ab1522aac147f887314b4786
|
refs/heads/master
| 2016-08-12T17:21:15.336778
| 2014-08-16T04:04:47
| 2014-08-16T04:04:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 97
|
#!/usr/bin/env python
# Wiki text formatter
# Python 2 CLI entry point: delegate to the project's doc_format() helper
# and print the formatted result to stdout.
from util.doc import doc_format
print doc_format()
|
[
"mark.seaman@shrinking-world.com"
] |
mark.seaman@shrinking-world.com
|
|
e3750481e5642f2fbcf23111a5675c34b4bd7ebc
|
20d651e38f44b89da6bd0e0d0e0bd95d47a9fa59
|
/Offer_letter_Sender3.py
|
797743b2d5934d77ad7cf0dcc84c7089c373e6ee
|
[] |
no_license
|
Lokesh2703/Offer_letter_Sender
|
1af3a1872781e6955f1476d59cd9d61a8a0b10f7
|
3d8028502300ffabfa5ffdf78850e1e6d09c3cf8
|
refs/heads/master
| 2020-07-02T18:09:51.994535
| 2019-12-08T17:52:45
| 2019-12-08T17:52:45
| 201,617,426
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,824
|
py
|
import docx
import pandas as pd
from docx.shared import Pt
import os.path
from os import chdir, getcwd, listdir, path
from time import strftime
from win32com import client
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
from tkinter import *
def run_sender(file_nm):
    """Read (name, email) rows from CSV *file_nm*, generate a personalised
    offer letter (.docx) per name, convert them all to PDF via MS Word
    automation, and mail each PDF through Gmail SMTP.

    BUG FIXES vs. the original:
      * ``except (Exception, e):`` referenced an undefined name ``e`` and
        would itself raise NameError -> now ``except Exception as e:``.
      * ``word.Quit()`` in ``finally`` crashed with NameError whenever
        ``DispatchEx`` itself failed -> ``word`` is pre-initialised to None.
      * the attachment file handle was never closed -> opened with ``with``.
    """
    print(file_nm)
    file = pd.read_csv(file_nm)
    names = file.iloc[:, 0]
    emails = file.iloc[:, 1]

    # Fill each recipient's name into run 1 of paragraph 6 of the template
    # and save one .docx per person.
    doc = docx.Document('Offer Letter - Campus Ambassador Program _ Aparoksha.docx')
    doc.paragraphs[6].text = 'Dear '
    doc.paragraphs[6].add_run()
    doc.paragraphs[6].runs[1].bold = True
    name = doc.paragraphs[6].runs[1]
    font = name.font
    font.size = Pt(14)
    for name in names:
        print(name)
        doc.paragraphs[6].runs[1].text = name + ','
        doc.save(os.path.join("E:\\Projects\\OfferLetter_sender\\pdfs", (name + '.docx')))

    def count_files(filetype):
        # Number of files in `folder` whose name ends with *filetype*.
        count_files = 0
        for files in listdir(folder):
            if files.endswith(filetype):
                count_files += 1
        return count_files

    def check_path(prompt):
        # Keep prompting until the user enters an existing path.
        # NOTE(review): currently unused; kept for parity with the original.
        abs_path = input(prompt)
        while path.exists(abs_path) != True:
            print("\nThe specified path does not exist.\n")
            abs_path = input(prompt)
        return abs_path

    print("\n")
    folder = "E:\\Projects\\OfferLetter_sender\\pdfs"
    chdir(folder)
    num_docx = count_files(".docx")
    num_doc = count_files(".doc")
    if num_docx + num_doc == 0:
        print("\nThe specified folder does not contain docx or docs files.\n")
        exit()
    else:
        print("\nNumber of doc and docx files: ", num_docx + num_doc, "\n")
    print("\n\nStarting to convert files ...\n")

    # Convert every .doc/.docx in the folder to PDF via Word automation.
    word = None
    try:
        word = client.DispatchEx("Word.Application")
        for files in listdir(getcwd()):
            match = 0
            if files.endswith(".doc"): s, match = "doc", 1
            elif files.endswith(".docx"): s, match = "docx", 1
            if match:
                new_name = files.replace("." + s, r".pdf")
                in_file = path.abspath(folder + "\\" + files)
                new_file = path.abspath(folder + "\\" + new_name)
                doc = word.Documents.Open(in_file)
                print('Conversion Completed (from .docx to .pdf) ', path.relpath(new_file))
                doc.SaveAs(new_file, FileFormat=17)  # 17 = wdFormatPDF
                doc.Close()
    except Exception as e:
        print(e)
    finally:
        if word is not None:
            word.Quit()
    print("\n", "Finished converting files to pdf format!!!")
    print("Starting to send email!!!")

    # Mail each generated PDF to the matching address.
    # NOTE(review): credentials are hard-coded placeholders; the 'TO'
    # header is static while the actual envelope recipient is emails[j] --
    # confirm that is intended.
    os.chdir('E:\\Projects\\OfferLetter_sender\\pdfs')
    j = 0
    for name in names:
        filename = name + '.pdf'
        msg = MIMEMultipart()
        msg['TO'] = "TO_EMAIL_ADDRESS@gmail.com"
        msg['Subject'] = 'Hi This is an pdf for ' + name
        body = "Hello"
        msg.attach(MIMEText(body, 'plain'))
        with open(str(filename), 'rb') as image:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(image.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment;filename=' + filename)
        msg.attach(part)
        smtp = smtplib.SMTP('smtp.gmail.com', 587)
        smtp.ehlo()
        smtp.starttls()
        smtp.ehlo()
        smtp.login('FROM_EMAIL@gmail.com', 'PASSWORD')
        smtp.sendmail('FROM_EMAIL@gmail.com', emails[j], msg.as_string())
        smtp.close()
        print('DONE ' + 'for ' + name)
        j += 1
    filename = None
# Entry point: process names.csv.  NOTE(review): runs at import time;
# consider guarding with `if __name__ == '__main__':`.
run_sender('names.csv')
# (Disabled) sketch of a Tkinter front-end for choosing the CSV file:
# window = Tk()
# file_info_shower = Label(window,text="Filename : ")
# file_nm_entered = Entry(window)
# file_nm = file_nm_entered.get()
# print(file_nm)
# btn = Button(window,text= "Submit",command =lambda: '')#run_sender(file_nm))
# file_info_shower.grid(row=0,column = 0)
# file_nm_entered.grid(row=0,column = 1)
# btn.grid(row = 1,column = 1)
# window.mainloop()
|
[
"lokesh27dinu@gmail.com"
] |
lokesh27dinu@gmail.com
|
2020450ea58f4a4a9171ab7759091f459163df60
|
f8d1f30ac8a84da3c2cce90b84877da0a319915c
|
/auctioning_platform/processes/processes/tests/test_repository.py
|
a15637ac306b094374f217a70d905913d0c0eb44
|
[
"MIT"
] |
permissive
|
adarshkhanna/clean-architecture
|
422f688369ca4c903030ca6f1f4db8ecd98eec9c
|
ade099a0d0bcec94f40354f168b2d544fa5c963d
|
refs/heads/master
| 2022-07-18T01:05:28.784222
| 2020-03-24T10:33:07
| 2020-03-24T10:33:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,612
|
py
|
from datetime import datetime
import json
from uuid import UUID, uuid4
import pytest
from sqlalchemy.engine import Connection, Engine
from foundation.value_objects.factories import get_dollars
from db_infrastructure import Base
from processes.paying_for_won_item import PayingForWonItemSagaData
from processes.paying_for_won_item.saga import SagaState
from processes.repository import SagaDataRepo, saga_data_table
# Fixed timestamp shared by the parametrized cases below.
EXAMPLE_DATETIME = datetime(2019, 5, 24, 15, 20, 0, 12)
@pytest.fixture(scope="session")
def sqlalchemy_connect_url() -> str:
    """Database URL for the test session: in-memory SQLite."""
    return "sqlite:///:memory:"
@pytest.fixture(scope="session", autouse=True)
def setup_teardown_tables(engine: Engine) -> None:
    """Create all mapped tables once per session (autouse: runs implicitly)."""
    Base.metadata.create_all(engine)
@pytest.fixture()
def repo(connection: Connection) -> SagaDataRepo:
    """Fresh repository bound to the per-test connection."""
    return SagaDataRepo(connection)
# Two cases: an empty saga (all-None JSON fields) and a fully populated one.
@pytest.mark.parametrize(
    "data, json_repr",
    [
        (
            PayingForWonItemSagaData(UUID("331831f1-3d7c-48c2-9433-955c1cf8deb6")),
            {
                "saga_uuid": "331831f1-3d7c-48c2-9433-955c1cf8deb6",
                "state": None,
                "timeout_at": None,
                "winning_bid": None,
                "auction_title": None,
                "auction_id": None,
                "winner_id": None,
            },
        ),
        (
            PayingForWonItemSagaData(
                UUID("d1526bb4-cee4-4b63-9029-802abc0f7593"),
                SagaState.PAYMENT_STARTED,
                EXAMPLE_DATETIME,
                get_dollars("15.99"),
                "Irrelevant",
                1,
                2,
            ),
            {
                "saga_uuid": "d1526bb4-cee4-4b63-9029-802abc0f7593",
                "state": SagaState.PAYMENT_STARTED.value,
                "timeout_at": EXAMPLE_DATETIME.isoformat(),
                "winning_bid": {"amount": "15.99", "currency": "USD"},
                "auction_title": "Irrelevant",
                "auction_id": 1,
                "winner_id": 2,
            },
        ),
    ],
)
def test_saving_and_reading(
    repo: SagaDataRepo, connection: Connection, data: PayingForWonItemSagaData, json_repr: dict
) -> None:
    """Round-trip: a manually stored JSON row deserializes to `data`, and
    saving `data` through the repo serializes back to exactly `json_repr`."""
    saga_uuid = uuid4()
    # seed a raw row, then read it back through the repository
    connection.execute(saga_data_table.insert(values={"uuid": saga_uuid, "json": json.dumps(json_repr)}))
    assert repo.get(saga_uuid, type(data)) == data
    connection.execute(saga_data_table.delete().where(saga_data_table.c.uuid == saga_uuid))
    # now write through the repository and inspect the raw stored JSON
    repo.save(saga_uuid, data)
    row = connection.execute(saga_data_table.select(saga_data_table.c.uuid == saga_uuid)).first()
    assert json.loads(row.json) == json_repr
|
[
"nnplaya@gmail.com"
] |
nnplaya@gmail.com
|
27e7ead9ca8d68e2688dcaec1a4636af3e1847e3
|
636b06785f30842d06d0d46cdcb1962f4ba06f80
|
/TAREA3NOV.py
|
0b1b436d29728be3dfa1fe07fee2c38de56c5f8c
|
[] |
no_license
|
danhertz/1729448MC
|
42513f9eb5ccbcc40ab15fac7a2fd47451bcd00d
|
f4a72c66d06d8e3673895cbb04e9a01c4ba6dfe3
|
refs/heads/master
| 2021-01-16T00:42:40.154663
| 2017-11-26T17:08:24
| 2017-11-26T17:08:24
| 99,973,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,479
|
py
|
from heapq import heappop,heappush
from copy import deepcopy
import random
import time
def permutation(lst):
    """Return every permutation of lst as a list of lists.

    Note: an empty input yields [] (not [[]]), matching the original
    recursive base case.
    """
    if len(lst) == 0:
        return []
    if len(lst) == 1:
        return [lst]
    return [
        [lst[i]] + resto
        for i in range(len(lst))
        for resto in permutation(lst[:i] + lst[i + 1:])
    ]
class Fila:
    """FIFO queue on a plain list: enqueue at the front, dequeue the end."""

    def __init__(self):
        self.fila = []

    def obtener(self):
        """Dequeue and return the oldest element."""
        return self.fila.pop()

    def meter(self, e):
        """Enqueue *e*; returns the new queue length."""
        self.fila.insert(0, e)
        return len(self.fila)

    @property
    def longitud(self):
        """Number of queued elements."""
        return len(self.fila)
class Pila:
    """LIFO stack on a plain list: push and pop at the end."""

    def __init__(self):
        self.pila = []

    def obtener(self):
        """Pop and return the most recently pushed element."""
        return self.pila.pop()

    def meter(self, e):
        """Push *e*; returns the new stack height."""
        self.pila.append(e)
        return len(self.pila)

    @property
    def longitud(self):
        """Current stack height."""
        return len(self.pila)
def flatten(L):
    """Yield the elements of a nested pair-chain ((a, (b, (c, ())))...)."""
    actual = L
    while len(actual) > 0:
        yield actual[0]
        actual = actual[1]
class Grafo:
    """Undirected weighted graph with BFS/DFS, Dijkstra shortest paths,
    Kruskal MST and a greedy nearest-neighbour tour.

    BUG FIXES vs. the original:
      * BFS/DFS appended every popped node to the visited list without
        checking, so cyclic graphs produced duplicate entries; now guarded.
      * vecinoMasCercano referenced the non-existent attribute ``self.v``,
        never terminated (``while len(lv) >= 0``) and mixed edge weights
        into the node list; rewritten as a standard greedy tour.
    """

    def __init__(self):
        self.V = set()          # vertices
        self.E = dict()         # (u, v) -> weight, stored in both orders
        self.vecinos = dict()   # vertex -> set of adjacent vertices

    def agrega(self, v):
        """Add vertex v (idempotent)."""
        self.V.add(v)
        if not v in self.vecinos:
            self.vecinos[v] = set()

    def conecta(self, v, u, peso=1):
        """Add the undirected edge (v, u) with the given weight."""
        self.agrega(v)
        self.agrega(u)
        self.E[(v, u)] = self.E[(u, v)] = peso   # both directions
        self.vecinos[v].add(u)
        self.vecinos[u].add(v)

    def complemento(self):
        """Return the complement graph (all missing edges, weight 1)."""
        comp = Grafo()
        for v in self.V:
            for w in self.V:
                if v != w and (v, w) not in self.E:
                    comp.conecta(v, w, 1)
        return comp

    def BFS(self, ni):
        """Breadth-first traversal from ni; returns the visit order."""
        visitados = []
        f = Fila()
        f.meter(ni)
        while (f.longitud > 0):
            na = f.obtener()
            if na in visitados:   # may have been queued twice (cycles)
                continue
            visitados.append(na)
            ln = self.vecinos[na]
            for nodo in ln:
                if nodo not in visitados:
                    f.meter(nodo)
        return visitados

    def DFS(self, ni):
        """Depth-first traversal from ni; returns the visit order."""
        visitados = []
        f = Pila()
        f.meter(ni)
        while (f.longitud > 0):
            na = f.obtener()
            if na in visitados:   # may have been pushed twice (cycles)
                continue
            visitados.append(na)
            ln = self.vecinos[na]
            for nodo in ln:
                if nodo not in visitados:
                    f.meter(nodo)
        return visitados

    def shortests(self, v):
        """Dijkstra from v: returns {vertex: (distance, vertex, path)}."""
        q = [(0, v, ())]          # heap of (distance, node, reversed path)
        dist = dict()
        visited = set()
        while len(q) > 0:
            (l, u, p) = heappop(q)        # closest pending node
            if u not in visited:
                visited.add(u)
                dist[u] = (l, u, list(flatten(p))[::-1] + [u])
                p = (u, p)                # extend the path chain
                for n in self.vecinos[u]:
                    if n not in visited:
                        el = self.E[(u, n)]
                        heappush(q, (l + el, n, p))
        return dist

    def kruskal(self):
        """Kruskal's minimum spanning tree.  Prints the MST weight and
        edge map, then returns the tree as a new Grafo."""
        e = deepcopy(self.E)
        arbol = Grafo()
        peso = 0
        comp = dict()             # vertex -> set of its current component
        # sorted descending so pop() always yields the lightest edge
        t = sorted(e.keys(), key=lambda k: e[k], reverse=True)
        nuevo = set()
        while len(t) > 0 and len(nuevo) < len(self.V):
            arista = t.pop()
            w = e[arista]
            del e[arista]
            (u, v) = arista
            c = comp.get(v, {v})
            if u not in c:        # edge joins two different components
                arbol.conecta(u, v, w)
                peso += w
                nuevo = c.union(comp.get(u, {u}))
                for i in nuevo:
                    comp[i] = nuevo
        print('MST con peso', peso, ':', nuevo, '\n', arbol.E)
        return arbol

    def vecinoMasCercano(self):
        """Greedy nearest-neighbour tour from a random start vertex.

        Repeatedly moves to the cheapest edge leading to an unvisited
        neighbour; stops early if no such neighbour exists.
        """
        pendientes = set(self.V)
        if not pendientes:
            return []
        actual = random.choice(list(pendientes))
        pendientes.discard(actual)
        recorrido = [actual]
        while pendientes:
            candidatos = [n for n in self.vecinos[actual] if n in pendientes]
            if not candidatos:
                break   # dead end: no edge to an unvisited vertex
            siguiente = min(candidatos, key=lambda n: self.E[(actual, n)])
            recorrido.append(siguiente)
            pendientes.discard(siguiente)
            actual = siguiente
        return recorrido
# Complete graph over 10 cities 'a'..'j'; weights are pairwise distances.
g=Grafo()
g.conecta('a','b', 381)
g.conecta('a','c', 2789)
g.conecta('a','d', 2015)
g.conecta('a','e', 2733)
g.conecta('a','f', 2655)
g.conecta('a','g', 1352)
g.conecta('a','h', 1377)
g.conecta('a','i', 373)
g.conecta('a','j', 2071)
g.conecta('b','c', 2905)
g.conecta('b','d', 2131)
g.conecta('b','e', 3113)
g.conecta('b','f', 2818)
g.conecta('b','g', 1733)
g.conecta('b','h', 1758)
g.conecta('b','i', 753)
g.conecta('b','j', 2275)
g.conecta('c','d', 789)
g.conecta('c','e', 1284)
g.conecta('c','f', 192)
g.conecta('c','g', 1823)
g.conecta('c','h', 1743)
g.conecta('c','i', 2408)
g.conecta('c','j', 709)
g.conecta('d','e', 1377)
g.conecta('d','f', 702)
g.conecta('d','g', 1240)
g.conecta('d','h', 1161)
g.conecta('d','i', 1753)
g.conecta('d','j', 181)
g.conecta('e','f', 1098)
g.conecta('e','g', 1383)
g.conecta('e','h', 1352)
g.conecta('e','i', 2360)
g.conecta('e','j', 1197)
g.conecta('f','g', 1640)
g.conecta('f','h', 1560)
g.conecta('f','i', 2293)
g.conecta('f','j', 594)
g.conecta('g','h', 79)
g.conecta('g','i', 981)
g.conecta('g','j', 1172)
g.conecta('h','i', 1066)
g.conecta('h','j', 1094)
g.conecta('i','j', 1703)
# NOTE(review): kruskal() is invoked twice (it prints its MST each time),
# and print(g) shows only the default object repr.
print(g.kruskal())
print(g.shortests('c'))
print(g)
k=g.kruskal()
# Dump the MST's edge map (the inner prints return None, so a list of
# Nones is printed afterwards as well).
print([print(x,k.E[x]) for x in k.E])
# Price 10 tours: each is the DFS order of the MST from a random start,
# closed back to the start, costed on the full graph (TSP approximation).
for r in range(10):
    ni = random.choice(list(k.V))
    dfs = k.DFS(ni)
    c = 0
    for f in range(len(dfs)-1):
        c += g.E[(dfs[f],dfs[f+1])]
        print(dfs[f],dfs[f+1],g.E[(dfs[f],dfs[f+1])])
    c += g.E[(dfs[-1],dfs[0])]   # close the cycle
    print(dfs[-1],dfs[0],g.E[(dfs[-1],dfs[0])])
    print('costo',c)

# Time the brute-force enumeration of all 10! orderings.
# BUG FIX: time.clock() was deprecated in 3.3 and removed in Python 3.8;
# time.perf_counter() is the documented replacement.
data = list('abcdefghij')
tim = time.perf_counter()
per = permutation(data)
print(time.perf_counter() - tim)
|
[
"noreply@github.com"
] |
danhertz.noreply@github.com
|
e3f4a0d2c64a1cb5c48b01ab7fc2fac0d9a935db
|
cc9db03d28772a3c9eba55653520edb060214b6d
|
/client.py
|
e06f59adeefddf1e7300a62c0c3c4f9c7eab617b
|
[
"Apache-2.0"
] |
permissive
|
absalon-james/slareporting
|
c9d4451577bca4db0c1a459d7f23f8061074890b
|
e5d40300980704a3127e051385df506fd8696e8a
|
refs/heads/master
| 2021-05-06T08:32:06.911198
| 2017-12-12T19:06:35
| 2017-12-12T19:06:35
| 114,028,504
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 726
|
py
|
import conf
from rackspace_monitoring.providers import get_driver
from rackspace_monitoring.types import Provider
_DRIVER_INSTANCE = None
def get_instance():
"""Get instance of the rackspace cloud monitoring driver.
:returns: Driver instance
:rtype: rackspace_monitoring.drivers.rackspace.RackspaceMonitoringDriver
"""
global _DRIVER_INSTANCE
if _DRIVER_INSTANCE is None:
driver = get_driver(Provider.RACKSPACE)
rax_conf = conf.get_raxrc()
_DRIVER_INSTANCE = driver(
rax_conf.get('credentials', 'username'),
rax_conf.get('credentials', 'api_key'),
ex_force_auth_url=rax_conf.get('auth_api', 'url')
)
return _DRIVER_INSTANCE
|
[
"james.absalon@rackspace.com"
] |
james.absalon@rackspace.com
|
ae6030a7ee1665e7560ad306041b5a4173b4cd97
|
c9d0f92ac66c5a3985561644af95104e280989ff
|
/shortpath_플로이드.py
|
01b64269d84e3ba59c2f1c83c75ed7d3387f761b
|
[] |
no_license
|
yuheunk/practice_codes
|
e0dcafd9c0a9cadef65ac08608502e92123b37b5
|
4a32b89bc970d1a8fecd69246fa9a8564bd25a60
|
refs/heads/main
| 2023-06-13T08:22:41.164562
| 2021-07-06T16:24:42
| 2021-07-06T16:24:42
| 359,150,260
| 0
| 0
| null | 2021-06-22T10:49:15
| 2021-04-18T13:25:58
|
Python
|
UTF-8
|
Python
| false
| false
| 591
|
py
|
# Floyd-Warshall all-pairs shortest paths over n cities and m bus routes.
n = int(input()) # number of cities
m = int(input()) # number of bus routes
INF = int(1e9)
# 1-indexed distance matrix; INF marks "no direct route", 0 on the diagonal
graph = [[INF] * (n+1) for _ in range(n+1)]
for i in range(1, n+1):
    for j in range(1, n+1):
        if i == j:
            graph[i][j] = 0
for _ in range(m):
    a, b, cost = map(int, input().split())
    # keep only the cheapest of parallel routes between the same pair
    graph[a][b] = min(graph[a][b], cost)
# relax every pair through each intermediate city c
for c in range(1, n+1):
    for a in range(1, n+1):
        for b in range(1, n+1):
            graph[a][b] = min(graph[a][b], graph[a][c]+graph[c][b])
for i in range(1, n+1):
    for j in range(1, n+1):
        print(graph[i][j], end=' ')
    print()
|
[
"noreply@github.com"
] |
yuheunk.noreply@github.com
|
d2bdc5b50d1079d0023a7c74cdfee7ecc573f16e
|
0f5a1b4c8abeab2c745c74f5a25e41a76b7502b0
|
/playerset1.py
|
520ac5594e85d6b9194f5a74206d9a3350c80c04
|
[] |
no_license
|
sujatha-2/test
|
d0c7af8a11061ce45acd19e86c3e7cfc50f43179
|
9a6338fabd4168ae327cdc61e790dddfda8bba9d
|
refs/heads/master
| 2021-07-19T13:28:46.356761
| 2019-01-09T09:19:10
| 2019-01-09T09:19:10
| 148,427,865
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 86
|
py
|
def reverse(test):
    """Return the string *test* reversed.

    BUG FIX: the original was a SyntaxError (missing ':' after the for
    header) and appended bare indices (``x += [i]``) instead of the
    characters ``test[i]``.
    """
    n = len(test)
    x = ""
    for i in range(n - 1, -1, -1):
        x += test[i]
    return x
|
[
"noreply@github.com"
] |
sujatha-2.noreply@github.com
|
99e8b72197f24e59ac04683b6008b7960dd77155
|
27531f9c4387fe06315cf9f7c6112075b697a287
|
/eshop/shop/urls.py
|
33fb75a029f8118556358362e709754f9a3dbd12
|
[] |
no_license
|
HommerSimson/456
|
3a6e41354d9ebf64b8b1d9ffdf23e24db62dd6ca
|
14a324f32a068364928635de872cb4b9989900b3
|
refs/heads/master
| 2020-09-19T10:30:08.158039
| 2020-02-18T02:26:57
| 2020-02-18T02:26:57
| 224,223,215
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 278
|
py
|
from django.urls import path
from . import views as v
# URL routes for the shop app.
# NOTE(review): three of the four routes point at the same view (v.product)
# and two share the name 'product'; confirm this duplication is intended.
urlpatterns = [
    path('', v.startpage, name='base'),
    path('product.html', v.product, name='product'),
    path('product/<str:name>', v.product, name='product'),
    path('tool1.html', v.product, name='index.html')
]
|
[
"nurgazy-b@mail.ru"
] |
nurgazy-b@mail.ru
|
101a5696b8aad8c09f8916cead8f4f19cde869ac
|
2e1d9a5c08fc7a2206aeefc5e26f21bc8aae8cc0
|
/net/dev/experiments/ryu/simpleApp.py
|
002876a6f654f84c04f90fcfa1bff73b84a0a8f3
|
[
"Apache-2.0"
] |
permissive
|
yuanzy97/DSSnet
|
7c0022b09287ada14c68d9bd7ba82560853c8100
|
32cda144e8671a81764f43a9f0136c0774194934
|
refs/heads/master
| 2021-06-13T19:51:59.087539
| 2017-03-05T23:30:12
| 2017-03-05T23:30:12
| 257,770,623
| 1
| 0
|
NOASSERTION
| 2020-04-22T02:31:33
| 2020-04-22T02:31:33
| null |
UTF-8
|
Python
| false
| false
| 762
|
py
|
#!/bin/python
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
class L2Switch(app_manager.RyuApp):
    """Minimal Ryu OpenFlow app: every packet-in is flooded out all ports."""
    def __init__(self, *args, **kwargs):
        super(L2Switch, self).__init__(*args, **kwargs)
    @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
    def packet_in_handler(self, ev):
        """Handle a packet-in by instructing the switch to flood it
        (OFPP_FLOOD), echoing back the original buffer id and in_port."""
        msg = ev.msg
        dp = msg.datapath
        ofp = dp.ofproto
        ofp_parser = dp.ofproto_parser
        actions = [ofp_parser.OFPActionOutput(ofp.OFPP_FLOOD)]
        out = ofp_parser.OFPPacketOut(
            datapath=dp, buffer_id=msg.buffer_id, in_port=msg.in_port,
            actions=actions)
        dp.send_msg(out)
|
[
"channon@hawk.iit.edu"
] |
channon@hawk.iit.edu
|
38787be35c06d807255871c320e51e22f5c49f5c
|
6a8988d59f26cef3eeeedd9a79fb7e3423938dc7
|
/src/notifier.py
|
d5596bf7e817b75859756074c1c91e94f9ea60d5
|
[] |
no_license
|
romanz/thesis
|
12734288ce0d65b48991ef17ec2f5c180c9b4219
|
cc322dacbb68f7ceb0df5e12b8cc35acbe97ee83
|
refs/heads/master
| 2023-01-27T17:08:44.809715
| 2023-01-07T18:41:23
| 2023-01-07T18:41:23
| 1,760,138
| 7
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 136
|
py
|
# Python 2 helper: show a desktop notification (pynotify/libnotify) using
# the title and message given as the two command-line arguments.
import pynotify
import sys
title, msg = sys.argv[1:]
# NOTE(review): using assert for init-failure handling is stripped under -O.
assert( pynotify.init('MATLAB') )
n = pynotify.Notification(title, msg)
n.show()
|
[
"roman.zeyde@gmail.com"
] |
roman.zeyde@gmail.com
|
54ca65e1b54cd60842f6a27d6366cd637cc0b26a
|
c83e356d265a1d294733885c373d0a4c258c2d5e
|
/mayan/apps/redactions/permissions.py
|
760a850481312c00d610618552a6b5d7d38df2f4
|
[
"Apache-2.0"
] |
permissive
|
TrellixVulnTeam/fall-2021-hw2-451-unavailable-for-legal-reasons_6YX3
|
4160809d2c96707a196b8c94ea9e4df1a119d96a
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
refs/heads/master
| 2023-08-21T23:36:41.230179
| 2021-10-02T03:51:12
| 2021-10-02T03:51:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 793
|
py
|
from django.utils.translation import ugettext_lazy as _
from mayan.apps.permissions import PermissionNamespace
# Permission namespace and CRUD-style permissions for the redactions app.
namespace = PermissionNamespace(label=_('Redactions'), name='redactions')
permission_redaction_create = namespace.add_permission(
    label=_('Create new redactions'), name='redaction_create'
)
permission_redaction_delete = namespace.add_permission(
    label=_('Delete redactions'), name='redaction_delete'
)
permission_redaction_edit = namespace.add_permission(
    label=_('Edit redactions'), name='redaction_edit'
)
permission_redaction_exclude = namespace.add_permission(
    label=_('Exclude redactions'), name='redaction_exclude'
)
permission_redaction_view = namespace.add_permission(
    label=_('View existing redactions'), name='redaction_view'
)
|
[
"79801878+Meng87@users.noreply.github.com"
] |
79801878+Meng87@users.noreply.github.com
|
8facef555e2fce74f3f4349268342956fbbb35d1
|
833073527929332580a77d6df477da7deb861b31
|
/harsh.py
|
ebcbb0749f0ee85dd46f755779d3e8407c3d1572
|
[] |
no_license
|
harshjain1212/harsh.py
|
5d153afbe7fcfef0330db076a7ec1737aa30215f
|
e239fe5499ddd30c4fa2e5853f4b7c2633fbf8c5
|
refs/heads/main
| 2023-02-19T02:34:21.979175
| 2021-01-20T07:11:06
| 2021-01-20T07:11:06
| 331,222,385
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20
|
py
|
print("harsh jain")  # placeholder script: prints the author's name
|
[
"noreply@github.com"
] |
harshjain1212.noreply@github.com
|
d084a05d64b5f1e53f4e8f957b543df711070822
|
f61b523ed1fe05dbd851b385584581aa92da92ab
|
/sigaram/portaladmin/forms/AdminForm.py
|
baa0a4927f94acae40400481653f69fc1618e7c3
|
[] |
no_license
|
vjega/python-tutorial
|
1357318f14b86df1e82ec09249f7d6717bf20f3f
|
7387605298964db82a05b2da8202a520d12b4265
|
refs/heads/master
| 2016-09-06T03:13:15.708801
| 2015-07-12T12:04:18
| 2015-07-12T12:04:18
| 26,359,976
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,692
|
py
|
from django.utils.translation import (ugettext as _,)
from django import forms
from crispy_forms.helper import FormHelper
#from crispy_forms.layout import Submit
class AdminForm(forms.Form):
    """Admin creation form: credentials, name, e-mail and a hidden photo field.

    Crispy-forms rendering is configured in ``__init__`` (horizontal layout,
    4/8 Bootstrap column split, form id ``add-admin``).
    """
    username = forms.CharField(
        label = _("User Name"),
        max_length = 100,
        required = True,
        widget = forms.TextInput({ "placeholder": _("User Name")})
    )
    password = forms.CharField(
        label = _("Password"),
        max_length = 100,
        required = True,
        widget = forms.PasswordInput({ "placeholder": _("Password")})
    )
    firstname = forms.CharField(
        label = "%s %s"%(_("First"),_("Name")),
        max_length = 100,
        required = True,
        widget = forms.TextInput({ "placeholder": "%s %s"%(_("First"),_("Name"))})
    )
    lastname = forms.CharField(
        # Fixed capitalization: label said _("last"), inconsistent with its
        # own placeholder and every other field label.
        label = "%s %s"%(_("Last"),_("Name")),
        max_length = 100,
        required = True,
        widget = forms.TextInput({ "placeholder": "%s %s"%(_("Last"),_("Name"))})
    )
    emailid = forms.CharField(
        label = _("Email Id"),
        max_length = 100,
        required = True,
        widget = forms.TextInput({ "placeholder": _("Email Id")})
    )
    image = forms.CharField(
        label = _("Photo"),
        max_length = 100,
        required = True,
        # Placeholder fixed: it said "Email Id" (copy-paste from the field
        # above); a placeholder is unused on a HiddenInput anyway.
        widget = forms.HiddenInput({ "placeholder": _("Photo")})
    )

    def __init__(self, *args, **kwargs):
        super(AdminForm, self).__init__(*args, **kwargs)
        # Crispy-forms layout configuration for template rendering.
        self.helper = FormHelper()
        self.helper.form_id = 'add-admin'
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-sm-4'
        self.helper.field_class = 'col-sm-8'
|
[
"karthik@jega.in"
] |
karthik@jega.in
|
c7a6d5dd8c41683db707f5e807851a5f4cf4e8bd
|
686810fe4ae24622dcd2dd191ddb97141232f191
|
/DepositionComponents/deposition_components/generic/ChamberCryoPump.py
|
9ad392564889361a2d763ccce5198ee3d4aede49
|
[] |
no_license
|
JPHammonds/DepositionComponents
|
22f904f7d0c08af21a819affa5ec708771e080a9
|
600b74b8f2f35662597286af5d39ed462a16669d
|
refs/heads/master
| 2020-04-18T04:43:50.216740
| 2019-04-18T20:56:05
| 2019-04-18T20:56:05
| 167,250,224
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,551
|
py
|
'''
Created on Jan 23, 2019
@author: hammonds
'''
from deposition_components.DepositionListDevice import DepositionListDevice
from ophyd import (Component as Cpt, DynamicDeviceComponent as DDC,
FormattedComponent as FC)
from ophyd.signal import EpicsSignal
class ChamberCryoPump(DepositionListDevice):
    """Ophyd device wrapping a deposition-chamber cryo pump.

    Every EPICS PV name is assembled from the device *prefix* plus a
    per-signal suffix passed to ``__init__``; FormattedComponent (FC)
    interpolates the ``{self.*_pv_suffix}`` placeholders at instantiation.
    """
    # Pump on/off: distinct read and write PVs.
    power_on = FC(EpicsSignal, '{self.prefix}{self.power_on_read_pv_suffix}',
                  write_pv='{self.prefix}{self.power_on_write_pv_suffix}',
                  name='power_on')
    # Exhaust-to-VP1 valve control (read PV + separate write PV).
    exhaust_to_vp1 = FC(EpicsSignal,
                        '{self.prefix}{self.exhaust_read_pv_suffix}',
                        write_pv='{self.prefix}{self.exhaust_write_pv_suffix}',
                        name='exhaust_to_vp1')
    # Read-only pressure signal.
    pressure = FC(EpicsSignal, "{self.prefix}{self.pressure_read_pv_suffix}",
                  name='pressure')
    # Read-only temperature status signal.
    temperature_status = FC(EpicsSignal,
                            "{self.prefix}{self.temp_status_read_pv_suffix}",
                            name='temperature_status')
    # Nitrogen purge control (read PV + separate write PV).
    n2_purge = FC(EpicsSignal, "{self.prefix}{self.n2_purge_read_pv_suffix}",
                  write_pv="{self.prefix}{self.n2_purge_write_pv_suffix}",
                  name='n2_purge')

    def __init__(self, prefix,
                 power_on_read_pv_suffix,
                 power_on_write_pv_suffix,
                 exhaust_read_pv_suffix,
                 exhaust_write_pv_suffix,
                 pressure_read_pv_suffix,
                 temp_status_read_pv_suffix,
                 n2_purge_read_pv_suffix,
                 n2_purge_write_pv_suffix,
                 **kwargs):
        """Store each PV suffix *before* super().__init__ so the
        FormattedComponents above can interpolate them."""
        self.power_on_read_pv_suffix = power_on_read_pv_suffix
        self.power_on_write_pv_suffix = power_on_write_pv_suffix
        self.exhaust_read_pv_suffix = exhaust_read_pv_suffix
        self.exhaust_write_pv_suffix = exhaust_write_pv_suffix
        self.pressure_read_pv_suffix = pressure_read_pv_suffix
        self.temp_status_read_pv_suffix = temp_status_read_pv_suffix
        self.n2_purge_read_pv_suffix = n2_purge_read_pv_suffix
        self.n2_purge_write_pv_suffix = n2_purge_write_pv_suffix
        super(ChamberCryoPump, self).__init__(prefix, **kwargs)

    def is_cryo_on(self):
        """True when the pump's power-on readback equals 1."""
        return self.power_on.get() == 1

    def is_cryo_exhausting_to_vp1(self):
        """True when the exhaust-to-VP1 readback equals 1."""
        return self.exhaust_to_vp1.get() == 1

    # def set(self):
    #     '''
    #     Turn the cryo pump on, but make sure that it is ready before turning
    #     it on and make sure that it is on before completion
    #     '''
[
"JPHammonds@anl.gov"
] |
JPHammonds@anl.gov
|
0af37c323dddce4b21f380367cbf84bb87457ff2
|
38915942f3719baea08396f819707c885b9315ce
|
/smartsheet/staircase/views/foup_wafer/wafer.py
|
9418871f86877c85c3bf6ca911d5c8e40e5c834e
|
[] |
no_license
|
leopardary/smartsheet
|
a2c2b351dd86b904c110c458ffa587d0458158c4
|
4b51a672d6b13ec018a80fa0f88b7d22a5bb9d70
|
refs/heads/python3a
| 2022-11-28T19:23:32.072813
| 2019-04-21T23:23:41
| 2019-04-21T23:23:41
| 89,207,891
| 0
| 1
| null | 2022-11-19T19:21:30
| 2017-04-24T07:00:55
|
Python
|
UTF-8
|
Python
| false
| false
| 2,249
|
py
|
from ...models import Group,Foup
from django.shortcuts import render
from django.http import HttpResponseRedirect
def reclaim_wafers(request, foup_name):
    """Render the reclaim-wafers page for the FOUP named *foup_name*."""
    staircase = Group.objects.filter(group_name='Staircase')
    owned_foups = Foup.objects.filter(owner__group=staircase[0])
    foup = Foup.objects.filter(foupname=foup_name)[0]
    return render(
        request,
        'staircase/foup_wafer/foup_details/reclaim_wafers.html',
        {
            'foup': foup,
            'slot_list': foup.foup_slot_set.all(),
            'foup_list': owned_foups,
        },
    )
def reclaim_execute(request, foup_name):
    """Reclaim the wafers in the POSTed 'occupied_slot' slots of a FOUP.

    On POST, each selected slot is reclaimed and the client is redirected
    to the FOUP detail page. On any other method we also redirect there:
    the original code called render() with a URL as the template name while
    *foup* was still unbound, raising NameError on every GET.
    """
    if request.method == 'POST':
        foup = Foup.objects.filter(foupname=foup_name)[0]
        slot_list = request.POST.getlist('occupied_slot')
        for slot in slot_list:
            foup_slot = foup.foup_slot_set.filter(slot=int(slot))[0]
            foup_slot.reclaim_wafer()
        return HttpResponseRedirect('/staircase/foups/%s'%foup.foupname)
    # Non-POST: bounce back to the FOUP detail page.
    return HttpResponseRedirect('/staircase/foups/%s'%foup_name)
def load_execute(request, foup_name):
    """Load new wafers of the POSTed 'wafer_type' into the selected slots.

    On POST, each 'available_slot' receives a new wafer and the client is
    redirected to the FOUP detail page. On any other method we also
    redirect there: the original code called render() with a URL as the
    template name while *foup* was unbound, raising NameError on GET.
    """
    if request.method == 'POST':
        foup = Foup.objects.filter(foupname=foup_name)[0]
        wafer_type = request.POST['wafer_type']
        slot_list = request.POST.getlist('available_slot')
        for slot in slot_list:
            foup_slot = foup.foup_slot_set.filter(slot=int(slot))[0]
            foup_slot.load_new_wafers(str(wafer_type))
        return HttpResponseRedirect('/staircase/foups/%s'%foup.foupname)
    # Non-POST: bounce back to the FOUP detail page.
    return HttpResponseRedirect('/staircase/foups/%s'%foup_name)
def load_wafers(request, foup_name):
    """Render the load-wafers page for the FOUP named *foup_name*."""
    staircase = Group.objects.filter(group_name='Staircase')
    owned_foups = Foup.objects.filter(owner__group=staircase[0])
    foup = Foup.objects.filter(foupname=foup_name)[0]
    page_context = {
        'foup': foup,
        'slot_list': foup.foup_slot_set.all(),
        'foup_list': owned_foups,
    }
    return render(request, 'staircase/foup_wafer/foup_details/load_wafers.html', page_context)
|
[
"wenjiao.wang1@gmail.com"
] |
wenjiao.wang1@gmail.com
|
d0f2be29afccd729b75626d77f3edb71dca4e661
|
c9c6c77995923d50e93da62b95a21455f812aec1
|
/src/bumblebot/__main__.py
|
151bb2def08d243400a1af16196915300644d117
|
[] |
no_license
|
Salade2chats/PyBumbleBot
|
cf463a1fda0a02c6c9de25b42648bb31452c88e4
|
c8db36a4ea1f6eb20dcd5351aede5760dd82506c
|
refs/heads/master
| 2020-03-26T13:35:29.019065
| 2018-08-16T06:32:05
| 2018-08-16T06:32:05
| 144,946,674
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,050
|
py
|
import os
from pathlib import Path
import click
from aiohttp import web
from dotenv import load_dotenv
from bumblebot.controllers import Route
from bumblebot.services.google import GoogleClient
from .__about__ import __version__
from .services.logger import Logger
@click.group(context_settings={'help_option_names': ['-h', '--help']})
@click.version_option(__version__)
@click.option('-q', '--quiet', is_flag=True, help="no output")
@click.option('-v', '--verbose', count=True, help="verbosity level")
def main(quiet, verbose):
    """Bumblebot is ur salve. Slap it. 🐟"""
    # -q silences logging entirely (level 1000); otherwise each -v lowers
    # the threshold by 10 starting from 50 (ERROR-ish).
    Logger.prepare('main', 1000 if quiet else 50 - verbose * 10)
    # configure app
    # Environment variables are loaded from ./.env for every subcommand.
    load_dotenv(dotenv_path=Path('.') / '.env')
@main.command()
def run():
    # Build the aiohttp application, register all routes, and serve
    # (blocking call). The Google client reads GOOGLE_API_KEY, which the
    # group callback loaded from .env.
    app = web.Application()
    routes = Route(
        google=GoogleClient(api_key=os.getenv('GOOGLE_API_KEY'))
    ).all()
    app.add_routes(routes)
    # app.add_routes([web.get('/', handle),
    #                 web.get('/{name}', handle)])
    web.run_app(app)

if __name__ == '__main__':
    main()
|
[
"dams_terdam@hotmail.fr"
] |
dams_terdam@hotmail.fr
|
ff2509fe6271ef698b97954ab63a3aee5c0e98f2
|
1e7e56dbc226bddf380eec960588a393e98181cc
|
/CGI/scripts/genTable_F.py
|
be3c2271ad0452fddf498ad79ea7cc5d10da3052
|
[] |
no_license
|
pratik-pato/SE01_UI_Database-Programming
|
a01926c2b915063529838d29c6102059f302b672
|
1ee16eb53c28acbeaa91e35430b295390915dca1
|
refs/heads/master
| 2020-05-20T06:03:11.571129
| 2016-06-20T16:37:37
| 2016-06-20T16:37:37
| 51,758,221
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,645
|
py
|
#!/usr/bin/python
# Python 2 CGI script: renders an editable HTML table for a DB table.
import genJS, psycopg2, sys, cgi
form = cgi.FieldStorage()
# NOTE(review): database credentials are hard-coded; move to config/env.
sDbname = 'SE01'
sUser = 'pratik_SE'
sHost = 'localhost'
sPass = 'easports'
# Table name comes straight from the HTTP request.
# NOTE(review): it is string-interpolated into SQL below — SQL injection
# risk; validate against information_schema or use parameterized queries.
sTablename = form["tabName"].value
conn = psycopg2.connect("dbname={} user={} host={} password={}".format(sDbname, sUser, sHost, sPass))
conn.autocommit = True
cursor = conn.cursor()
sql = []
# sql[0]: the table's column names; sql[1]: the table's row count.
sql.append("select column_name from information_schema.columns where table_name = '%s';"%(sTablename))
sql.append("select count(*) from %s"%(sTablename))
def executeQuery(query):
resList = []
try:
cursor.execute(query)
results = cursor.fetchall()
for row in results:
resList.append(row[0])
except Exception as e:
print e
return resList
conn.close()
'''
try:
    jsFile = open('phase3.html','w+')
except IOError as e:
    print "I/O error({0}): {1}".format(e.errno, e.strerror)
except ValueError:
    print "Could not convert data to an integer."
except:
    print "Unexpected error:", sys.exc_info()[0]
    raise
'''
# The CGI response header must precede any body output.
print("Content-Type: text/html")
print ""
#print(""" """)
# Opening HTML plus a jQuery include, emitted by the genJS helpers.
genJS.genOpenTag()
genJS.javaScriptType("jquery/jquery.js", "", "")
# countRows: single-element list holding the table's row count.
countRows = executeQuery(sql[1])
countRows = map(int, countRows)
# colList: ordered column names of the requested table.
colList = executeQuery(sql[0])
#print colList
colListStr = ','.join(colList)
# One empty placeholder per existing row, consumed by the generated JS.
emptyLst = []
for i in range(countRows[0]):
    emptyLst.append("")
print("<script type=\"text/javascript\">\n")
print("var count = %d;var table = %s;function getValues(event, n) {if(event.which == \"9\") {var focusedElement = $(\":focus\");var ele = Number(focusedElement.attr(\"id\").slice(4));var val = focusedElement.attr(\"value\");var temp = ele/n;var tempDiff = temp%%1;temp = temp - tempDiff;var temp1 = ele%%n;table[temp][temp1] = val;}else if(event.which == \"13\") {var value = \"\";var i;var rowEntry = [];for(i = (count * n); i < ((count + 1) * n); i++) {var temp;temp = document.getElementById(\"attr\" + i);if(temp != null) {value = temp.value;if(value == \'\') {alert(\"enter attribute \" + (i + 1));break;}else {rowEntry.push(value);}}}table.push(rowEntry);count++;var newTableRow = $(document.createElement(\'tr\')).attr(\"id\", \'TableRow\' + count);newTableRow.after().html("%(int(countRows[0]), str(emptyLst)))
genJS.genColumnTag(int(countRows[0]), len(colList))
print(");newTableRow.appendTo(\"#idTable\");document.getElementById(\"attr\" + (count * n)).focus();}else if(event.which == \"37\") {var focusedElement = $(\":focus\");var ele = Number(focusedElement.attr(\"id\").slice(4));if(ele > 0) {var input = document.getElementById(\"attr\" + (ele - 1));input.focus();var val = input.value;input.value = \'\';input.value = val;}else {var input = document.getElementById(\"attr\" + ele);input.focus();var val = input.value;input.value = \'\';input.value = val;}}else if(event.which == \"38\") {var focusedElement = $(\":focus\");var ele = Number(focusedElement.attr(\"id\").slice(4));if(ele >= n) {var input = document.getElementById(\"attr\" + (ele - n));input.focus();var val = input.value;input.value = \'\';input.value = val;}}else if(event.which == \"39\") {var focusedElement = $(\":focus\");var ele = Number(focusedElement.attr(\"id\").slice(4));if(ele < (count * n) + %d) {var input = document.getElementById(\"attr\" + (ele + 1));input.focus();var val = input.value;input.value = \'\';input.value = val;}}else if(event.which == \"40\") {var focusedElement = $(\":focus\");var ele = Number(focusedElement.attr(\"id\").slice(4));if(ele < (count * n)) {var input = document.getElementById(\"attr\" + (ele + n));input.focus();var val = input.value;input.value = \'\';input.value = val;}}}function submitTable(n) {var i;var rowEntry = [];for(i = 0; i < n; i++)rowEntry.push(document.getElementById(\"attr\" + ((count * n) + i)).value);console.log(count);table.push(rowEntry);for(i = 0; i <= count; i++)console.log(table[i]);}</script>"%(len(colList)-1))
print("\n<body>\n<table id=\"idTable\" border=\"1\" style=\"width:100%\">\n<tr id=\"tableHead\">\n")
# Header row listing the column names.
genJS.genAttrNames(colList)
print("</tr>")
# Fetch the full table contents, transposed into one list per column.
tableDataQuery = "select %s from %s;"%(colListStr, sTablename)
tableData = []
for i in colList:
    tableData.append([])
try:
    cursor.execute(tableDataQuery)
    results = cursor.fetchall()
    for row in results:
        for i in range(len(row)):
            tableData[i].append(row[i])
    #print "tableData"
    #print tableData
except:
    # NOTE(review): bare except hides the real error; consider logging it.
    print "Error: unable to fecth data"
conn.close()
# Emit the data rows, then a submit button wired to the generated
# submitTable() JavaScript.
genJS.genRow( tableData)
print("</table><input type = \"button\" value = \"Submit\" onkeydown=\"submitTable(%d)\" onclick=\"submitTable(%d)\"></body></html>"%(int(countRows[0]), int(countRows[0])))
|
[
"charwad.pratik@gmail.com"
] |
charwad.pratik@gmail.com
|
c710e670770a317ba2296df1ec8941a218f68280
|
d1aef0e74af3ed2e4f040ff0812ed13dd754db36
|
/ScrapyTieba/ScrapyTieba/items.py
|
660599df0aded2624f713ced087fa7cd2093aa64
|
[] |
no_license
|
Sora-Shiro/ScrapyLearn
|
d5533c98c02b0b809051bee942a9580ed2b9a067
|
74e30f05e4ba1ff550647f09e4f05c9afd9fddb8
|
refs/heads/master
| 2021-03-16T05:17:21.005992
| 2017-10-24T11:58:20
| 2017-10-24T11:58:20
| 102,423,813
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,009
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ScrapyTiebaItem(scrapy.Item):
    """Default item generated by `scrapy startproject`; currently unused."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
class AskScoreToUniversityItem(scrapy.Item):
    """A forum thread asking about admission scores for a university."""
    title = scrapy.Field()       # thread title
    href = scrapy.Field()        # thread link
    province = scrapy.Field()    # province of the university
    university = scrapy.Field()  # university name
    author = scrapy.Field()      # thread author
    date = scrapy.Field()        # post date
class TiebaPostItem(scrapy.Item):
    """A single reply inside a Tieba (Baidu forum) thread."""
    # User ID
    name_user = scrapy.Field()
    # Tieba (forum) name
    name_tieba = scrapy.Field()
    # Thread title
    title = scrapy.Field()
    # Thread URL
    url = scrapy.Field()
    # Reply content
    content = scrapy.Field()
    # Floor number the reply sits on
    level = scrapy.Field()
    # Sub-floor number; -1 when the reply is not inside a sub-floor
    level_in_level = scrapy.Field()
    # Reply timestamp
    time = scrapy.Field()
|
[
"sora95shiro@gmail.com"
] |
sora95shiro@gmail.com
|
8c7586d5846fb0da1f4c78ceef07a2846e702539
|
b3699724907850fd26cbce4509fec83a33b89760
|
/python/ray/tune/tests/tutorial.py
|
2a11f12a0a30ba85b8c40724372c07d9ccd6238f
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
BonsaiAI/ray
|
5e2f26a81d865a795261d11f9182aca7f07c7b97
|
941d30f082fe879ea30618af14327c25b5a21a74
|
refs/heads/master
| 2023-06-12T05:15:29.370188
| 2021-05-06T07:03:53
| 2021-05-06T07:03:53
| 233,708,687
| 3
| 5
|
Apache-2.0
| 2023-05-27T08:06:37
| 2020-01-13T22:41:47
|
Python
|
UTF-8
|
Python
| false
| false
| 5,578
|
py
|
# flake8: noqa
# Original Code: https://github.com/pytorch/examples/blob/master/mnist/main.py
# yapf: disable
# __tutorial_imports_begin__
import numpy as np
import torch
import torch.optim as optim
import torch.nn as nn
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import torch.nn.functional as F
from ray import tune
from ray.tune.schedulers import ASHAScheduler
# __tutorial_imports_end__
# yapf: enable
# yapf: disable
# __model_def_begin__
class ConvNet(nn.Module):
    """Minimal MNIST classifier: one 3x3 conv, 3x3 max-pool, linear head.

    The architecture is deliberately tiny — the tutorial tunes only the
    optimizer hyperparameters, not the model.
    """

    def __init__(self):
        super(ConvNet, self).__init__()
        self.conv1 = nn.Conv2d(1, 3, kernel_size=3)
        self.fc = nn.Linear(192, 10)

    def forward(self, x):
        # Pool-then-ReLU is equivalent to ReLU-then-pool (both monotone).
        features = F.max_pool2d(self.conv1(x), 3)
        features = F.relu(features)
        flat = features.view(-1, 192)
        return F.log_softmax(self.fc(flat), dim=1)
# __model_def_end__
# yapf: enable
# yapf: disable
# __train_def_begin__
# Caps on how many samples one train/test pass touches — tweak these to make
# the tutorial run quicker or slower.
EPOCH_SIZE = 512
TEST_SIZE = 256

def train(model, optimizer, train_loader):
    """Run one (sample-capped) training pass of *model* over *train_loader*."""
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.train()
    for batch_idx, (data, target) in enumerate(train_loader):
        # Stop early so the example stays fast.
        if batch_idx * len(data) > EPOCH_SIZE:
            return
        data = data.to(device)
        target = target.to(device)
        optimizer.zero_grad()
        loss = F.nll_loss(model(data), target)
        loss.backward()
        optimizer.step()
def test(model, data_loader):
    """Evaluate *model* on up to ~TEST_SIZE samples; return the accuracy."""
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():
        for batch_idx, (data, target) in enumerate(data_loader):
            # Cap the evaluation so the example stays fast.
            if batch_idx * len(data) > TEST_SIZE:
                break
            data = data.to(device)
            target = target.to(device)
            predicted = model(data).data.max(1)[1]
            correct += (predicted == target).sum().item()
            total += target.size(0)
    return correct / total
# __train_def_end__
# __train_func_begin__
def train_mnist(config):
    """Tune trial function: train ConvNet on MNIST for 10 epochs using
    config['lr'] and config['momentum'], reporting test accuracy to Tune
    after every epoch and checkpointing every 5th epoch.
    """
    # Data Setup
    mnist_transforms = transforms.Compose(
        [transforms.ToTensor(),
         transforms.Normalize((0.1307, ), (0.3081, ))])
    # Downloads MNIST into ~/data on first use.
    train_loader = DataLoader(
        datasets.MNIST("~/data", train=True, download=True, transform=mnist_transforms),
        batch_size=64,
        shuffle=True)
    test_loader = DataLoader(
        datasets.MNIST("~/data", train=False, transform=mnist_transforms),
        batch_size=64,
        shuffle=True)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = ConvNet()
    model.to(device)
    optimizer = optim.SGD(
        model.parameters(), lr=config["lr"], momentum=config["momentum"])
    for i in range(10):
        train(model, optimizer, train_loader)
        acc = test(model, test_loader)
        # Send the current training result back to Tune
        tune.report(mean_accuracy=acc)
        if i % 5 == 0:
            # This saves the model to the trial directory
            torch.save(model.state_dict(), "./model.pth")
# __train_func_end__
# yapf: enable
# __eval_func_begin__
# lr sampled log-uniformly over [1e-10, 1); momentum uniformly over [0.1, 0.9).
search_space = {
    "lr": tune.sample_from(lambda spec: 10**(-10 * np.random.rand())),
    "momentum": tune.uniform(0.1, 0.9)
}
# Uncomment this to enable distributed execution
# `ray.init(address="auto")`
# Download the dataset first
datasets.MNIST("~/data", train=True, download=True)
# One trial with a single random sample from the search space.
analysis = tune.run(train_mnist, config=search_space)
# __eval_func_end__
#__plot_begin__
# One dataframe per trial; plot each trial's accuracy-over-epoch curve.
dfs = analysis.trial_dataframes
[d.mean_accuracy.plot() for d in dfs.values()]
#__plot_end__
# __run_scheduler_begin__
# ASHA terminates under-performing trials early across 20 samples.
analysis = tune.run(
    train_mnist,
    num_samples=20,
    scheduler=ASHAScheduler(metric="mean_accuracy", mode="max"),
    config=search_space)
# Obtain a trial dataframe from all run trials of this `tune.run` call.
dfs = analysis.trial_dataframes
# __run_scheduler_end__
# yapf: disable
# __plot_scheduler_begin__
# Plot by epoch
ax = None  # This plots everything on the same plot
for d in dfs.values():
    ax = d.mean_accuracy.plot(ax=ax, legend=False)
# __plot_scheduler_end__
# yapf: enable
# __run_searchalg_begin__
from hyperopt import hp
from ray.tune.suggest.hyperopt import HyperOptSearch
space = {
    "lr": hp.loguniform("lr", 1e-10, 0.1),
    "momentum": hp.uniform("momentum", 0.1, 0.9),
}
# HyperOpt proposes hyperparameters instead of pure random sampling.
hyperopt_search = HyperOptSearch(space, metric="mean_accuracy", mode="max")
analysis = tune.run(train_mnist, num_samples=10, search_alg=hyperopt_search)
# To enable GPUs, use this instead:
# analysis = tune.run(
#     train_mnist, config=search_space, resources_per_trial={'gpu': 1})
# __run_searchalg_end__
# __run_analysis_begin__
import os
# Reload the best checkpoint found across trials.
df = analysis.results_df
logdir = analysis.get_best_logdir("mean_accuracy", mode="max")
state_dict = torch.load(os.path.join(logdir, "model.pth"))
model = ConvNet()
model.load_state_dict(state_dict)
# __run_analysis_end__
from ray.tune.examples.mnist_pytorch_trainable import TrainMNIST
# __trainable_run_begin__
search_space = {
    "lr": tune.sample_from(lambda spec: 10**(-10 * np.random.rand())),
    "momentum": tune.uniform(0.1, 0.9)
}
# Class-based (Trainable) API variant of the same experiment.
analysis = tune.run(
    TrainMNIST, config=search_space, stop={"training_iteration": 10})
# __trainable_run_end__
|
[
"noreply@github.com"
] |
BonsaiAI.noreply@github.com
|
9cdc72971265fd7c826adf688d020d149cc8c294
|
bebf27238fa188fef8543734073dd4751ad55571
|
/novel_site/utils/chapterParser.py
|
48ed0aa4322faa7e8afed93565bec11b0c6e2d0f
|
[] |
no_license
|
gzgdouru/novel_site_beta
|
0a37ec24deb5b6eafd865fb9c3776173d1861aaa
|
746add0e932ffebb67f8b8ec7e6d232c0db31a53
|
refs/heads/master
| 2020-04-10T04:34:30.438838
| 2019-03-08T09:36:55
| 2019-03-08T09:36:55
| 160,802,317
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
import requests
from scrapy.selector import Selector
def _fetch_chapter_content(chapter_url):
    """Download *chapter_url* (GBK-encoded page) and return the raw HTML of
    its ``#content`` element, or None when the element is absent."""
    response = requests.get(url=chapter_url, timeout=30)
    response.encoding = "gbk"
    selector = Selector(text=response.text)
    return selector.css("#content").extract_first()

def biquge(chapter_url):
    """Chapter-content parser for the biquge site."""
    return _fetch_chapter_content(chapter_url)

def dingdian(chapter_url):
    """Chapter-content parser for the dingdian site.

    Identical to biquge today; kept as a separate entry point so per-site
    tweaks stay local. (The two originals were byte-for-byte duplicates.)
    """
    return _fetch_chapter_content(chapter_url)
|
[
"18719091650@163.com"
] |
18719091650@163.com
|
6414e7086c1d84b2116fe4e08bbe5218f727d644
|
781e2692049e87a4256320c76e82a19be257a05d
|
/all_data/exercism_data/python/bob/bce5dc09fb584ddbbf75a78974fd9b10.py
|
f42a5b206c0d0919b4d9754bfdb324e6e44c9797
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038
| 2016-03-16T03:18:00
| 2016-03-16T03:18:42
| 59,454,921
| 4
| 0
| null | 2016-05-23T05:40:56
| 2016-05-23T05:40:56
| null |
UTF-8
|
Python
| false
| false
| 421
|
py
|
def isyelling(some_string):
    """True when the message is shouted (all cased characters upper case)."""
    return some_string.isupper()

def issilent(some_string):
    """True when the message is empty or whitespace only."""
    return not some_string.strip()

def isquestion(some_string):
    """True when the message ends with '?' (ignoring trailing blanks)."""
    return some_string.rstrip().endswith('?')

def hey(some_string):
    """Bob's reply — silence, yelling, question, anything else, in that order."""
    if issilent(some_string):
        return 'Fine. Be that way!'
    if isyelling(some_string):
        return 'Whoa, chill out!'
    if isquestion(some_string):
        return 'Sure.'
    return 'Whatever.'
|
[
"rrc@berkeley.edu"
] |
rrc@berkeley.edu
|
d6f05fb498b4f1dd44b70803aa3d8600c9859ee7
|
c6f2c9a5b69364f7547872ca3c3e5d74aa7efffb
|
/PythonCode/ReadingAndWritingFile.py
|
667812fa59250c19ada2b1a6bc70a24a6beaf9d8
|
[
"MIT"
] |
permissive
|
aredshaw/CodePractice
|
87886378d974da5e5f12379a6ee9f57b539fa1c8
|
1f5267d5cfc83b803a8b0da3cacc9b306492555e
|
refs/heads/master
| 2020-07-26T02:02:36.993207
| 2020-03-06T23:57:38
| 2020-03-06T23:57:38
| 208,496,198
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 662
|
py
|
# Write a sample text file, then demonstrate the different read methods.
# Context managers guarantee the file is closed even if a read/write fails
# (the original opened and closed the file by hand).
with open("list.txt", "w") as test_file:
    test_file.write("This is a test file. It is also a text file.\nI wonder how it will look altogether.\nWhen I write, I write a lot, \nso there is no telling where I will end up \nwith all the thoughts in my mind emptied out onto \nthe paper, or in this case, the screen.")

with open("list.txt", "r") as test_file:
    print(test_file.read(20))       # the first 20 characters
    print(test_file.readlines(1))   # the next line
    print(test_file.read(30))       # another 30 characters; ends mid-word
    print(test_file.read())         # the rest of the text
|
[
"akredshaw@gmail.com"
] |
akredshaw@gmail.com
|
039f6e3ac3e74a9d72e9a33b99502e945ff5acce
|
fe9b12cb370cc4f1682a04cb36e49132d3831bbe
|
/main.py
|
53361d009b4c04354c54122b30c23814e746e827
|
[] |
no_license
|
nellyloh/capstone-dashboard
|
268bab57608f1528c4d69d9dba774729c16ee2f6
|
8a61cba81d3bfb9851661e2e8259c3e3d5e6814c
|
refs/heads/master
| 2023-08-22T09:12:21.715988
| 2021-10-29T20:27:18
| 2021-10-29T20:27:18
| 422,670,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
import individual_query
import webscraper_confidence_score
import sentiment_model
def run(name=None, nationality=None, gender=None, dob=None):
    # Pipeline: normalize the individual's details into a dict, scrape
    # related articles, then score them with the sentiment model.
    # Returns (individual_dict, model_output).
    individual_dict = individual_query.preprocess_input_to_dict(name, nationality=nationality, gender=gender, dob=dob)
    print(individual_dict)
    # Up to 10 articles, no additional search keywords.
    articles = webscraper_confidence_score.search_articles_on_individual(individual_dict, no_of_articles=10, additional_keywords=None)
    print(articles)
    model_output = sentiment_model.sentiment_model(articles)
    print(model_output)
    return individual_dict, model_output
|
[
"nellylohhj@gmail.com"
] |
nellylohhj@gmail.com
|
afcf3e94534926410bf355eff01591a124e75f29
|
86c6012e7ff8b19db7db21606f7747cc6b9f3a01
|
/2018/11/2.py
|
1a5f89fedbd57a67f3c2ca221b66a91b442a49b3
|
[] |
no_license
|
philipdexter/aoc
|
6af67f2e1bb7e073fc503c9299a7c02f63b85949
|
7300e8313f40e4e7011d13445bdad047a9cc3349
|
refs/heads/master
| 2023-07-21T15:28:32.106390
| 2023-07-19T16:29:50
| 2023-07-19T16:29:50
| 224,866,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,223
|
py
|
# Puzzle input: the grid serial number (AoC 2018 day 11).
grid_serial = 7347

def power_level(x, y):
    """Return the power level of fuel cell (x, y).

    Rules: rack id = x + 10; start with rack_id * y, add the serial,
    multiply by rack id, keep only the hundreds digit, subtract 5.
    """
    rack_id = x + 10
    power_level = rack_id * y
    power_level += grid_serial
    power_level *= rack_id
    # Hundreds digit, extracted arithmetically: the original string slice
    # (str(v)[len(str(v)) - 3]) raises IndexError for values below 100 and
    # is slower; for non-negative values both agree.
    power_level = (power_level // 100) % 10
    power_level -= 5
    return power_level
# Precompute the power level of every cell of the 300x300 grid.
grid = {
    (x, y): power_level(x, y)
    for x in range(1, 300 + 1)
    for y in range(1, 300 + 1)
}
def calc(x, y, size):
    """Total power of the size x size square whose top-left cell is (x, y).

    Reuses the memoized (size-1) square at the same corner when available,
    adding only the new bottom row and right column; otherwise falls back
    to a full scan. Uses ``is not None`` (not truthiness) so a memoized
    total of exactly 0 is still reused — the original discarded it and
    rescanned. Leftover debug printing for (243, 17) has been removed
    (it also indexed grid[(x, y)] on every iteration instead of the
    per-cell value).
    """
    agg = 0
    smaller = group_power_levels.get((x, y, size - 1))
    if smaller is not None:
        # New bottom row of the enlarged square.
        for a in range(x, x + size):
            agg += grid[(a, y + size - 1)]
        # New right column (excluding the corner already counted above).
        for b in range(y, y + size - 1):
            agg += grid[(x + size - 1, b)]
        agg += smaller
    else:
        for a in range(x, x + size):
            for b in range(y, y + size):
                agg += grid[(a, b)]
    return agg
group_power_levels = {}
# Grow square sizes one at a time so calc() can reuse the (size-1) result
# memoized for the same top-left corner.
for size in range(1, 300+1):
    print(f'size {size}')
    for x in range(1,300+1-size):
        for y in range(1,300+1-size):
            group_power_levels[(x,y,size)] = calc(x,y,size)
# Answer: the (x, y, size) key with the maximum total power.
# NOTE(review): the result is printed twice — looks like a leftover duplicate.
print(max(group_power_levels.items(), key=lambda x: x[1]))
print(max(group_power_levels.items(), key=lambda x: x[1]))
|
[
"philip.dexter@gmail.com"
] |
philip.dexter@gmail.com
|
ecb11a70f2b662b8e13c6273c3e6d58537f6ba8f
|
241d5598e142044cbbf3419e13ff34647fe3ecf7
|
/rplugin/python3/denite/modules/models/helper_file.py
|
273cb80f2273bb05fd3b6074f0bdb92c6da62541
|
[
"MIT"
] |
permissive
|
5t111111/denite-rails
|
9c97b1bde19849b4bf972f02fc426b7721bbee9f
|
92051d48161b8b8405d841cd9afac568f4b6d3b7
|
refs/heads/master
| 2020-02-26T14:20:33.989571
| 2018-03-02T01:30:45
| 2018-03-02T01:30:45
| 83,693,724
| 16
| 4
|
MIT
| 2017-12-22T01:10:46
| 2017-03-02T15:33:30
|
Python
|
UTF-8
|
Python
| false
| false
| 217
|
py
|
import re
import os
from file_base import FileBase
class HelperFile(FileBase):
    """File model for Rails helper files located under app/helpers/."""

    def remove_base_directory(self, filename, root_path):
        """Return *filename* with the '<root_path>/app/helpers/' prefix removed.

        The prefix is regex-escaped before substitution: the original
        passed the raw path straight to re.sub, so regex metacharacters in
        the path (e.g. '.', '+') were silently treated as patterns.
        """
        prefix = os.path.join(root_path, 'app/helpers/')
        return re.sub(re.escape(prefix), '', filename)
|
[
"baenej@gmail.com"
] |
baenej@gmail.com
|
836b1542757d6be323c0dad4e47b22b765668668
|
7bededcada9271d92f34da6dae7088f3faf61c02
|
/pypureclient/flasharray/FA_2_19/models/protection_group_performance_array_response.py
|
caf0612d16c7e59f33e6bc5ecee79508d9843abf
|
[
"BSD-2-Clause"
] |
permissive
|
PureStorage-OpenConnect/py-pure-client
|
a5348c6a153f8c809d6e3cf734d95d6946c5f659
|
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
|
refs/heads/master
| 2023-09-04T10:59:03.009972
| 2023-08-25T07:40:41
| 2023-08-25T07:40:41
| 160,391,444
| 18
| 29
|
BSD-2-Clause
| 2023-09-08T09:08:30
| 2018-12-04T17:02:51
|
Python
|
UTF-8
|
Python
| false
| false
| 5,730
|
py
|
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.19
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_19 import models
class ProtectionGroupPerformanceArrayResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'more_items_remaining': 'bool',
'total_item_count': 'int',
'continuation_token': 'str',
'items': 'list[ProtectionGroupPerformanceArray]'
}
attribute_map = {
'more_items_remaining': 'more_items_remaining',
'total_item_count': 'total_item_count',
'continuation_token': 'continuation_token',
'items': 'items'
}
required_args = {
}
def __init__(
self,
more_items_remaining=None, # type: bool
total_item_count=None, # type: int
continuation_token=None, # type: str
items=None, # type: List[models.ProtectionGroupPerformanceArray]
):
"""
Keyword args:
more_items_remaining (bool): Returns a value of `true` if subsequent items can be retrieved.
total_item_count (int): The total number of records after applying all filter query parameters. The `total_item_count` will be calculated if and only if the corresponding query parameter `total_item_count` is set to `true`. If this query parameter is not set or set to `false`, a value of `null` will be returned.
continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the continuation token to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The continuation token is generated if the limit is less than the remaining number of items, and the default sort is used (no sort is specified).
items (list[ProtectionGroupPerformanceArray]): List performance data, broken down by array.
"""
if more_items_remaining is not None:
self.more_items_remaining = more_items_remaining
if total_item_count is not None:
self.total_item_count = total_item_count
if continuation_token is not None:
self.continuation_token = continuation_token
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ProtectionGroupPerformanceArrayResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ProtectionGroupPerformanceArrayResponse`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ProtectionGroupPerformanceArrayResponse`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ProtectionGroupPerformanceArrayResponse`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ProtectionGroupPerformanceArrayResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ProtectionGroupPerformanceArrayResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Returns true if both objects are not equal"""
    # Defined in terms of __eq__ so the two can never disagree.
    return not self == other
|
[
"noreply@github.com"
] |
PureStorage-OpenConnect.noreply@github.com
|
4b86573d1ab2cc4435f396b62b8121820bdf8b2e
|
7c1eeff9d7f8d560a1185bfd08eb5e72ce3d16c8
|
/app/controllers/default.py
|
d01b1b1a469a9e8bd597e60aaa35da8beef03739
|
[] |
no_license
|
ThuBitencourtt/Robotica
|
2dc47b1d298e6b6c96efbd26c6bbee45b4e0e300
|
a00dac34e388f1f514eb04c67c8beaae0053114f
|
refs/heads/master
| 2022-10-16T22:02:02.415991
| 2019-11-30T15:03:56
| 2019-11-30T15:03:56
| 225,048,615
| 0
| 1
| null | 2022-09-30T18:38:29
| 2019-11-30T17:45:23
|
Python
|
UTF-8
|
Python
| false
| false
| 3,679
|
py
|
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user
from app import app, db, lm
from app.models.tables import User, Equipe,Pessoa,Robo,Evento
from app.models.forms import LoginForm, CadastroForm,EquipeForm,PessoaForm,RoboForm,EventoForm
@lm.user_loader
def load_user(id):
    """Flask-Login hook: reload the User for a stored session id (or None)."""
    return User.query.filter_by(id=id).first()
@app.route("/index")
@app.route("/")
def index():
    """Render the landing page (served at both "/" and "/index")."""
    return render_template('index.html')
@app.route("/login", methods=["GET","POST"])
def login():
    """Authenticate the submitted credentials and start a session.

    On success redirects to the index; on failure flashes an error and
    re-renders the login form.
    """
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        # NOTE(review): passwords appear to be stored and compared in plain
        # text — confirm, and prefer a salted hash (werkzeug.security).
        if user and user.password == form.password.data:
            login_user(user)
            flash("Login in")
            return redirect(url_for("index"))
        else:
            flash("Invalid Login")
    return render_template('login.html',
                           form=form)
@app.route("/logout")
def logout():
    """End the current session and return to the login page."""
    logout_user()
    flash ("Logged out")
    return redirect(url_for("login"))
@app.route("/cadastro", methods=["GET","POST"])
def cadastro():
    """Create a login User from the registration form, then re-render it.

    Fix: the stray duplicate ``@app.route("/")`` decorator was removed — "/"
    is already served by ``index``; the extra registration was a copy-paste
    slip repeated across all cadastro_* views.
    """
    form = CadastroForm()
    # Any POSTed form data triggers creation; GET renders an empty form.
    if request.form:
        user = User(username=request.form.get("username"),
                    password=request.form.get("password"))
        db.session.add(user)
        db.session.commit()
    return render_template('cadastro.html',
                           form=form)
@app.route("/cadastro_equipe", methods=["GET","POST"])
def cadastro_equipe():
    """Create an Equipe (team) record from the submitted form.

    Fix: removed the stray duplicate ``@app.route("/")`` decorator ("/" is
    already served by ``index``).
    """
    form = EquipeForm()
    if request.form:
        equipe = Equipe(firstname=request.form.get("firstname"),
                        lastname=request.form.get("lastname"),
                        slogan=request.form.get("slogan"),
                        email=request.form.get("email"),
                        site=request.form.get("site"),
                        país=request.form.get("país"),
                        estado=request.form.get("estado"),
                        cidade=request.form.get("cidade"),
                        instituicao=request.form.get("instituicao"),
                        capitao=request.form.get("capitao"))
        db.session.add(equipe)
        db.session.commit()
    return render_template('cadastro_equipe.html',
                           form=form)
@app.route("/cadastro_pessoa", methods=["GET","POST"])
def cadastro_pessoa():
    """Create a Pessoa (person) record from the submitted form.

    Fix: removed the stray duplicate ``@app.route("/")`` decorator ("/" is
    already served by ``index``).
    """
    form = PessoaForm()
    if request.form:
        pessoa = Pessoa(name=request.form.get("name"),
                        email=request.form.get("email"),
                        RG=request.form.get("RG"),
                        CPF=request.form.get("CPF"),
                        telefone=request.form.get("telefone"),
                        idade=request.form.get("idade"))
        db.session.add(pessoa)
        db.session.commit()
    return render_template('cadastro_pessoa.html',
                           form=form)
@app.route("/cadastro_robo", methods=["GET","POST"])
def cadastro_robo():
    """Create a Robo (robot) record from the submitted form.

    Fixes:
    - Removed the stray duplicate ``@app.route("/")`` decorator.
    - The constructor keywords were copy-pasted from the Pessoa view and
      mismatched the form fields (``email=categoria``, ``RG=peso``,
      ``CPF=responsavel``); they now use the Robo field names matching
      the values read from the form. TODO confirm against the Robo model
      definition in app.models.tables.
    """
    form = RoboForm()
    if request.form:
        robo = Robo(name=request.form.get("name"),
                    categoria=request.form.get("categoria"),
                    peso=request.form.get("peso"),
                    responsavel=request.form.get("responsavel"))
        db.session.add(robo)
        db.session.commit()
    return render_template('cadastro_robo.html',
                           form=form)
@app.route("/cadastro_evento", methods=["GET","POST"])
def cadastro_evento():
    """Create an Evento (event) record from the submitted form.

    Fix: removed the stray duplicate ``@app.route("/")`` decorator ("/" is
    already served by ``index``).
    """
    form = EventoForm()
    if request.form:
        evento = Evento(name=request.form.get("name"),
                        endereco=request.form.get("endereco"),
                        email=request.form.get("email"),
                        site=request.form.get("site"),
                        país=request.form.get("país"),
                        estado=request.form.get("estado"),
                        cidade=request.form.get("cidade"))
        db.session.add(evento)
        db.session.commit()
    return render_template('cadastro_evento.html',
                           form=form)
|
[
"thubittencourt@gmail.com"
] |
thubittencourt@gmail.com
|
5bb26fe6d3ac1cb96a4721f35a25d9b559af030d
|
d125c002a6447c3f14022b786b07712a7f5b4974
|
/tests/bugs/core_4342_test.py
|
97096578735a1274aa965853094b2e0e8017292a
|
[
"MIT"
] |
permissive
|
FirebirdSQL/firebird-qa
|
89d5b0035071f9f69d1c869997afff60c005fca9
|
cae18186f8c31511a7f68248b20f03be2f0b97c6
|
refs/heads/master
| 2023-08-03T02:14:36.302876
| 2023-07-31T23:02:56
| 2023-07-31T23:02:56
| 295,681,819
| 3
| 2
|
MIT
| 2023-06-16T10:05:55
| 2020-09-15T09:41:22
|
Python
|
UTF-8
|
Python
| false
| false
| 3,787
|
py
|
#coding:utf-8
"""
ID: issue-4664
ISSUE: 4664
TITLE: Non-privileged user can delete records from RDB$SECURITY_CLASSES table
DESCRIPTION:
JIRA: CORE-4342
FBTEST: bugs.core_4342
"""
import pytest
from firebird.qa import *
db = db_factory()
user_boss = user_factory('db', name='boss', password='123')
user_mngr = user_factory('db', name='mngr', password='456')
test_script = """
-- Add these DDL privileges in order to have some rows in
-- rdb$security_classes table for user BOSS:
grant create table to boss;
grant alter any table to boss;
grant drop any table to boss;
commit;
set list on;
select current_user,count(*) acl_count from rdb$security_classes where rdb$acl containing 'boss';
select 1 from rdb$security_classes where rdb$acl containing 'boss' with lock;
update rdb$security_classes set rdb$security_class = rdb$security_class where rdb$acl containing 'boss';
delete from rdb$security_classes where rdb$acl containing 'boss';
commit;
connect '$(DSN)' user 'MNGR' password '456';
select current_user,count(*) acl_count from rdb$security_classes where rdb$acl containing 'boss';
select 1 from rdb$security_classes where rdb$acl containing 'boss' with lock;
update rdb$security_classes set rdb$security_class = rdb$security_class where rdb$acl containing 'boss';
delete from rdb$security_classes where rdb$acl containing 'boss';
commit;
"""
expected_stdout = """
USER SYSDBA
ACL_COUNT 1
USER MNGR
ACL_COUNT 1
"""
# version: 3.0
act = isql_act('db', test_script)
expected_stderr_1 = """
Statement failed, SQLSTATE = HY000
Cannot select system table RDB$SECURITY_CLASSES for update WITH LOCK
Statement failed, SQLSTATE = 42000
UPDATE operation is not allowed for system table RDB$SECURITY_CLASSES
Statement failed, SQLSTATE = 42000
DELETE operation is not allowed for system table RDB$SECURITY_CLASSES
Statement failed, SQLSTATE = HY000
Cannot select system table RDB$SECURITY_CLASSES for update WITH LOCK
Statement failed, SQLSTATE = 28000
no permission for UPDATE access to TABLE RDB$SECURITY_CLASSES
Statement failed, SQLSTATE = 28000
no permission for DELETE access to TABLE RDB$SECURITY_CLASSES
"""
@pytest.mark.version('>=3.0,<4.0')
def test_1(act: Action, user_boss: User, user_mngr: User):
    """FB3: RDB$SECURITY_CLASSES must reject DML from SYSDBA and MNGR alike."""
    act.expected_stdout = expected_stdout
    act.expected_stderr = expected_stderr_1
    act.execute()
    stdout_ok = act.clean_stdout == act.clean_expected_stdout
    stderr_ok = act.clean_stderr == act.clean_expected_stderr
    assert stderr_ok and stdout_ok
# version: 4.0
expected_stderr_2 = """
Statement failed, SQLSTATE = HY000
Cannot select system table RDB$SECURITY_CLASSES for update WITH LOCK
Statement failed, SQLSTATE = 42000
UPDATE operation is not allowed for system table RDB$SECURITY_CLASSES
Statement failed, SQLSTATE = 42000
DELETE operation is not allowed for system table RDB$SECURITY_CLASSES
Statement failed, SQLSTATE = HY000
Cannot select system table RDB$SECURITY_CLASSES for update WITH LOCK
Statement failed, SQLSTATE = 28000
no permission for UPDATE access to TABLE RDB$SECURITY_CLASSES
-Effective user is MNGR
Statement failed, SQLSTATE = 28000
no permission for DELETE access to TABLE RDB$SECURITY_CLASSES
-Effective user is MNGR
"""
@pytest.mark.version('>=4.0')
def test_2(act: Action, user_boss: User, user_mngr: User):
    """FB4+: same checks as test_1, with the extra effective-user detail."""
    act.expected_stdout = expected_stdout
    act.expected_stderr = expected_stderr_2
    act.execute()
    stdout_ok = act.clean_stdout == act.clean_expected_stdout
    stderr_ok = act.clean_stderr == act.clean_expected_stderr
    assert stderr_ok and stdout_ok
|
[
"pcisar@ibphoenix.cz"
] |
pcisar@ibphoenix.cz
|
5974c8ec865d8d87eb8b82c094fe53a8eb6590e5
|
f0e9ca3b430df6dab9fd751c303f13e4960f4525
|
/02_steamer.py
|
b4e1abba1f2cc9992b26c244d324149c54bc4235
|
[] |
no_license
|
williampsena/python-text-mining
|
358c9584a3d6d2c04062c35936baa0f112c5498a
|
6965a79c1b56bd3c79ebaddc957a9d8a4a980a15
|
refs/heads/master
| 2020-07-04T16:35:54.996064
| 2019-08-21T12:08:57
| 2019-08-21T12:08:57
| 202,341,963
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,277
|
py
|
import pprint
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from nltk.stem import RSLPStemmer

# Demo: Portuguese stemming + stop-word filtering over labelled quotes.
pp = pprint.PrettyPrinter(width=41, compact=True)

# (quote, sentiment-label) pairs used as the toy corpus.
vegeta_quotes = [
    ('voce nao e derrotado quando perde, mais sim quando voce desiste.', 'confiante'),
    ('O melhor guerreiro nao e aquele que sempre ganha, mas o que mantem o seu orgulho mesmo na derrota', 'orgulhoso'),
    ('Enquanto o inimigo estiver na minha frente, eu lutarei.', 'confiante'),
    ('Eu sou calmo e tenho o coracao puro... mas e pura maldade.', 'sincero'),
    ('Meu coracao e puro... pura maldade!', 'sincero'),
    ('Verme insolente nao entre na frente.', 'bravo'),
    ('O miseravel e um genio.', 'feliz')
]

# Portuguese stop-word list extended with a few extra tokens to drop.
stop_words = set(stopwords.words('portuguese') + [
    ',',
    'eu',
    '!'
])

# Steamming words, derrota or derrotar or derrotei == derrot
stemmer = RSLPStemmer()
# NOTE(review): stem() is applied to the WHOLE lowercased sentence, so only
# the final word of each quote is effectively stemmed (see 'derrot' in the
# sample output below) — confirm this is intended; stemming per token after
# word_tokenize would stem every word.
quotes = [(stemmer.stem(q.lower()), f) for (q, f) in vegeta_quotes]

# Tokenize each stemmed quote and keep only alphabetic, non-stop-word tokens,
# preserving the sentiment label.
filtered = []
for (quote, felling) in quotes:
    filtered.append(
        (
            [w for w in word_tokenize(quote)
             if not w in stop_words and w.isalpha()],
            felling
        )
    )

pp.pprint(filtered)
# [..., ['melhor', 'guerreiro', 'nao', 'sempre', 'ganha', 'mantem', 'orgulho', 'derrot'], ... ]
[
"william.sena@skyhub.com.br"
] |
william.sena@skyhub.com.br
|
b7fe085ff8dce1ab31fe203403a7b410fa8a0c12
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_154/ch36_2019_03_31_18_21_54_470469.py
|
90561c9807d62d34458835e2a8c956f92b4a3b79
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 145
|
py
|
def eh_primo(n):
    """Return True when ``n`` is a prime number, False otherwise.

    Fixes the original, which reported 0, 1 and every negative number as
    prime: the trial-division loop never executed for n <= 2, so control
    fell through to ``return True``. Also stops the trial divisors at
    sqrt(n) instead of n, an O(sqrt n) check instead of O(n).
    """
    if n < 2:
        # 0, 1 and negatives are not prime by definition.
        return False
    test = 2
    while test * test <= n:
        if n % test == 0:
            return False
        test = test + 1
    return True
|
[
"you@example.com"
] |
you@example.com
|
aaef38487542dbdce3bb3d81b11cc3488e8bccb9
|
8bf986158c409f5ca6178625aae6da9ed3c1f220
|
/doc/sphinxext/altair_ext/altairplot.py
|
d5b3558f6c97d806fda09d5387a97580f060b36b
|
[
"BSD-3-Clause"
] |
permissive
|
Sawon1234/altair
|
48585efcab53257f45333fad6cb7e6987ddf0f98
|
5f243e07fa1c29f76105c99aeb177e00f02ec87d
|
refs/heads/master
| 2020-05-21T06:06:11.950552
| 2017-02-27T16:26:14
| 2017-02-27T16:26:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,391
|
py
|
"""
Altair Plot Sphinx Extension
============================
This extension provides a means of inserting live-rendered Altair plots within
sphinx documentation. There are two directives defined: ``altair-setup`` and
``altiar-plot``. ``altair-setup`` code is used to set-up various options
prior to running the plot code. For example::
.. altair-setup::
from altair import *
import pandas as pd
data = pd.DataFrame({'a': list('CCCDDDEEE'),
'b': [2, 7, 4, 1, 2, 6, 8, 4, 7]})
.. altair-plot::
Chart(data).mark_point().encode(
x='a',
y='b'
)
In the case of the ``altair-plot`` code, the *last statement* of the code-block
should contain the chart object you wish to be rendered.
Options
-------
The directives have the following options::
.. altair-setup::
:show: # if set, then show the setup code as a code block
pass
.. altair-plot::
:hide-code: # if set, then hide the code and only show the plot
:code-below: # if set, then code is below rather than above the figure
:alt: text # Alternate text when plot cannot be rendered
:links: editor source export # specify one or more of these options
Chart()
Additionally, this extension introduces a global configuration
``altair_plot_links``, set in your ``conf.py`` which is a dictionary
of links that will appear below plots, unless the ``:links:`` option
again overrides it. It should look something like this::
# conf.py
# ...
altair_plot_links = {'editor': True, 'source': True, 'export': True}
# ...
If this configuration is not specified, all are set to True.
"""
import os
import json
import warnings
import jinja2
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.parsers.rst.directives import flag, unchanged
from sphinx.locale import _
from sphinx import addnodes, directives
from sphinx.util.nodes import set_source_info
from altair.api import TopLevelMixin
from .utils import exec_then_eval
VGL_TEMPLATE = jinja2.Template("""
<div id="{{ div_id }}">
<script>
vg.embed("#{{ div_id }}", "{{ filename }}", function(error, result) {});
</script>
</div>
""")
class altair_plot(nodes.General, nodes.Element):
    """Doctree placeholder node, rendered later by html_visit_altair_plot."""
    pass
class AltairSetupDirective(Directive):
    """``.. altair-setup::`` — cache setup code to run before later plots.

    The code is stored on the build environment (env.altair_plot_setup)
    keyed by document; it is executed by html_visit_altair_plot before each
    plot in the same document. With the ``:show:`` flag the code is also
    rendered as a literal block.
    """
    has_content = True
    option_spec = {'show': flag}
    def run(self):
        env = self.state.document.settings.env
        # Anchor node so the cached entry can be referenced later.
        targetid = "altair-plot-{0}".format(env.new_serialno('altair-plot'))
        targetnode = nodes.target('', '', ids=[targetid])
        code = '\n'.join(self.content)
        # Here we cache the code for use in later setup
        if not hasattr(env, 'altair_plot_setup'):
            env.altair_plot_setup = []
        env.altair_plot_setup.append({
            'docname': env.docname,
            'lineno': self.lineno,
            'code': code,
            'target': targetnode,
        })
        result = [targetnode]
        if 'show' in self.options:
            source_literal = nodes.literal_block(code, code)
            source_literal['language'] = 'python'
            result.append(source_literal)
        return result
def purge_altair_plot_setup(app, env, docname):
    """env-purge-doc hook: drop cached setup entries belonging to *docname*."""
    if not hasattr(env, 'altair_plot_setup'):
        return
    env.altair_plot_setup = [entry for entry in env.altair_plot_setup
                             if entry['docname'] != docname]
# Links shown under each plot unless overridden via :links: or conf.py.
DEFAULT_LINKS = {'editor': True, 'source': True, 'export': True}

def validate_links(links):
    """Parse a ``:links:`` option value into a {link_name: bool} mapping.

    ``none`` (case-insensitive) disables every link. Otherwise the value is
    a whitespace-separated subset of DEFAULT_LINKS; any unknown name raises
    ValueError.
    """
    if links.strip().lower() == 'none':
        return {}
    requested = links.strip().split()
    unknown = set(requested) - set(DEFAULT_LINKS.keys())
    if unknown:
        raise ValueError("Following links are invalid: {0}".format(list(unknown)))
    return dict((name, name in requested) for name in DEFAULT_LINKS)
class AltairPlotDirective(Directive):
    """``.. altair-plot::`` — render the block's final expression as a chart.

    Emits an altair_plot placeholder node (rendered to HTML later by
    html_visit_altair_plot) plus, unless ``:hide-code:`` is given, a literal
    block of the source, above or below the plot per ``:code-below:``.
    """
    has_content = True
    option_spec = {'hide-code': flag,
                   'code-below': flag,
                   'alt': unchanged,
                   'links': validate_links}
    def run(self):
        env = self.state.document.settings.env
        app = env.app
        show_code = 'hide-code' not in self.options
        code_below = 'code-below' in self.options
        # Concatenate every altair-setup block cached for this document.
        setupcode = '\n'.join(item['code']
                              for item in getattr(env, 'altair_plot_setup', [])
                              if item['docname'] == env.docname)
        code = '\n'.join(self.content)
        if show_code:
            source_literal = nodes.literal_block(code, code)
            source_literal['language'] = 'python'
        #get the name of the source file we are currently processing
        rst_source = self.state_machine.document['source']
        rst_dir = os.path.dirname(rst_source)
        rst_filename = os.path.basename(rst_source)
        # use the source file name to construct a friendly target_id
        serialno = env.new_serialno('altair-plot')
        rst_base = rst_filename.replace('.', '-')
        div_id = "{0}-altair-plot-{1}".format(rst_base, serialno)
        target_id = "{0}-altair-source-{1}".format(rst_base, serialno)
        target_node = nodes.target('', '', ids=[target_id])
        # create the node in which the plot will appear;
        # this will be processed by html_visit_altair_plot
        plot_node = altair_plot()
        plot_node['target_id'] = target_id
        plot_node['div_id'] = div_id
        plot_node['code'] = code
        plot_node['setupcode'] = setupcode
        plot_node['relpath'] = os.path.relpath(rst_dir, env.srcdir)
        plot_node['rst_source'] = rst_source
        plot_node['rst_lineno'] = self.lineno
        # Per-directive :links: wins over the conf.py-wide setting.
        default_links = app.builder.config.altair_plot_links
        plot_node['links'] = self.options.get('links', default_links)
        if 'alt' in self.options:
            plot_node['alt'] = self.options['alt']
        result = [target_node]
        if code_below:
            result += [plot_node]
        if show_code:
            result += [source_literal]
        if not code_below:
            result += [plot_node]
        return result
def html_visit_altair_plot(self, node):
    """HTML visitor: execute the cached setup + plot code and embed the chart.

    Runs the setup code, evaluates the block's last expression, writes its
    Vega-Lite spec to a ``<div_id>.vl.json`` file next to the output page,
    and appends the vg.embed snippet. Skips the node (with a warning) when
    execution fails or the result is not a chart.
    """
    # Execute the setup code, saving the global & local state
    namespace = {}
    if node['setupcode']:
        exec(node['setupcode'], namespace)
    # Execute the plot code in this context, evaluating the last line
    try:
        chart = exec_then_eval(node['code'], namespace)
    except Exception as e:
        warnings.warn("altair-plot: {0}:{1} Code Execution failed:"
                      "{2}: {3}".format(node['rst_source'], node['rst_lineno'],
                                        e.__class__.__name__, str(e)))
        raise nodes.SkipNode
    if isinstance(chart, TopLevelMixin):
        # Last line should be a chart; convert to spec dict
        spec = chart.to_dict()
        # Create the vega-lite spec to embed
        embed_spec = json.dumps({'mode': 'vega-lite',
                                 'actions': node['links'],
                                 'spec': spec})
        # Prevent http/https request errors by doing this
        embed_spec = embed_spec.replace('http://', '//')
        embed_spec = embed_spec.replace('https://', '//')
        # Write embed_spec to a *.vl.json file
        dest_dir = os.path.join(self.builder.outdir, node['relpath'])
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        filename = "{0}.vl.json".format(node['div_id'])
        dest_path = os.path.join(dest_dir, filename)
        with open(dest_path, 'w') as f:
            f.write(embed_spec)
        # Pass relevant info into the template and append to the output
        html = VGL_TEMPLATE.render(div_id=node['div_id'],
                                   filename=filename)
        self.body.append(html)
    else:
        warnings.warn('altair-plot: {0}:{1} Malformed block. Last line of '
                      'code block should define a valid altair Chart object.'
                      ''.format(node['rst_source'], node['rst_lineno']))
        raise nodes.SkipNode
def generic_visit_altair_plot(self, node):
    """Non-HTML builders: emit a textual placeholder instead of a live plot."""
    # TODO: generate PNGs and insert them here
    if 'alt' in node.attributes:
        placeholder = _('[ graph: %s ]') % node['alt']
    else:
        placeholder = _('[ graph ]')
    self.body.append(placeholder)
    raise nodes.SkipNode
def setup(app):
    """Sphinx entry point: register assets, the node, and both directives."""
    setup.app = app
    setup.config = app.config
    setup.confdir = app.confdir
    # Client-side assets needed by vg.embed (D3, Vega, Vega-Lite, vega-embed).
    app.add_stylesheet('altair-plot.css')
    app.add_javascript("https://d3js.org/d3.v3.min.js")
    app.add_javascript("https://vega.github.io/vega/vega.js")
    app.add_javascript("https://vega.github.io/vega-lite/vega-lite.js")
    app.add_javascript("https://vega.github.io/vega-editor/vendor/vega-embed.js")
    app.add_config_value('altair_plot_links', DEFAULT_LINKS, 'env')
    # Only the HTML builder renders a live plot; every other builder gets
    # the textual placeholder.
    app.add_node(altair_plot,
                 html=(html_visit_altair_plot, None),
                 latex=(generic_visit_altair_plot, None),
                 texinfo=(generic_visit_altair_plot, None),
                 text=(generic_visit_altair_plot, None),
                 man=(generic_visit_altair_plot, None))
    app.add_directive('altair-plot', AltairPlotDirective)
    app.add_directive('altair-setup', AltairSetupDirective)
    # Forget cached setup code when a document is re-read.
    app.connect('env-purge-doc', purge_altair_plot_setup)
    return {'version': '0.1'}
|
[
"jakevdp@gmail.com"
] |
jakevdp@gmail.com
|
efaf48f8b7e9fa2ee10924f45b9ae140e3122820
|
09b8a76c8ae621fc761904823ab2cdc70f347432
|
/src/user_interface.py
|
1a5705afc43a1412e026818bef13e3ab4551bd67
|
[
"MIT"
] |
permissive
|
dat-adi/alarm-clock
|
d99c94789a0b2f9e25f8deda8fec43d782998e73
|
2d877506fe518197fe5f36eab10f09843db42a83
|
refs/heads/master
| 2022-07-23T17:51:16.538454
| 2020-05-21T17:58:47
| 2020-05-21T17:58:47
| 264,455,545
| 1
| 0
|
MIT
| 2020-05-21T07:01:41
| 2020-05-16T14:31:11
|
Python
|
UTF-8
|
Python
| false
| false
| 563
|
py
|
from tkinter import Frame, Canvas, Tk, Text, LEFT, INSERT, END, messagebox, Button, X
from alarming_service import time_diff
def GUI_present():
    """Show the alarm-clock window: a welcome banner and a single button
    that hands off to alarming_service.time_diff when clicked."""
    window = Tk()
    drawing_area = Canvas(window)
    drawing_area.pack()
    container = Frame(drawing_area)
    container.pack()
    banner = Text(container)
    banner.insert(INSERT, "Welcome to the Simple Alarming Service")
    banner.pack()
    deploy_button = Button(container, text="Set and Deploy Alarm",
                           command=time_diff)
    deploy_button.pack(fill=X)
    window.mainloop()

if __name__ == "__main__":
    GUI_present()
|
[
"naruita201@gmail.com"
] |
naruita201@gmail.com
|
e091044347fb78ed6d76d5bc87eacd18312f6d61
|
7f3c7a65d3723d0f48beea963fa9a7477cf6627f
|
/PSFModel.py
|
7a9ca0f6ee4be2d770b4e50171b8f389c9b02071
|
[] |
no_license
|
chrisglass/yapsfm
|
a123ff453f0d443b12abb222c10a95b8b3f2d148
|
9f98236e9eed6bfec53b391a5a9b735311556f1a
|
refs/heads/master
| 2021-01-14T10:36:57.568234
| 2015-04-02T23:21:03
| 2015-04-02T23:21:03
| 33,686,906
| 0
| 0
| null | 2015-04-09T19:00:26
| 2015-04-09T19:00:26
| null |
UTF-8
|
Python
| false
| false
| 13,206
|
py
|
#! /usr/bin/env python
import glob,os,sys
import numpy as np
import png
import matplotlib.pyplot as plt
from scipy.misc import imread
# image handling
import scipy.ndimage.interpolation
# fits file handling and creation
import pyfits
from datetime import datetime as dt
"""
PSF modeling script:
the pupil function P(x,y) = A(x,y) exp(2 \pi i OPD(x,y) / \lambda)
the PSF = | FFT(P(x,y)) |**2
+ convolution with a gaussian for jitter and imprecision
--------------------
The script can load a grayscale image with imageToAperture(image)
or create a circular aperture using aperture(size)
Feb 25th: PSF computed as perfect theoretical Airy Disk: OPD=0.
Mar 3rd: computing the OPD in cartesian space is problematic because of the polar nature of Zernike modes. Can't get a good representation in cartesian space right away. Decided to compute it in polar coords, interpolate it to a continuous function and map it into cartesian space.
Mar 6th: OPD computation is working. Linear combination of Zernike mode done.
Mar 9th: PSF scaling to detector space (0.005''/pixel).
Mar 10th: Code cleaned and polar2cart: theta fixed.
Mar 11th: .fits creation with header information corresponding to Tiny Tim's. Possibility to change pixel scale, defaulted as constant with wavelength.
"""
#--------------------------------------------------
# Methods definition
#--------------------------------------------------
def aperture(size=101,ap=None):
    """creates the aperture function as a matrix where 0 = obstructed and 1 = unobstructed
    # for HST:
    # secondary obstruction: 0.330
    # spider width: 0.022
    # pads: (v3(x), v2(y), radius) x,y were arbitrarily chosen
    # 1: 0.8921  0.0000  0.065
    # 2: -0.4615  0.7555  0.065
    # 3: -0.4564  -0.7606  0.065
    # ----------
    # if nothing specified: circular aperture is used"""
    # NOTE: this file is Python 2 — size/2 below is integer division.
    A=np.zeros((size,size))
    center=[size/2,size/2] #center of the image
    # Obstruction dimensions scaled from fractions of the pupil radius.
    secMir=0.330*size/2
    spWidth=0.022*size/2
    pad1=[0.8921*size/2,0.0000,0.065*size/2] #x,y,radius
    pad2=[-0.4615*size/2,0.7555*size/2,0.065*size/2]
    pad3=[-0.4564*size/2,-0.7606*size/2,0.065*size/2]
    for y in range(size):
        for x in range(size):
            # main aperture (including secondary obstruction)
            radPos=np.sqrt((x-center[0])**2+(y-center[1])**2)
            if ap=='HST':
                if radPos<=size/2 and radPos>secMir:
                    A[y][x]=1.
                #Obstructions:
                # spiders
                if center[0]-spWidth/2.<=x<=center[0]+spWidth/2:
                    A[y][x]=0.
                # NOTE(review): the y-spider test mixes center[0] and
                # center[1]; harmless while both are size/2, but confirm.
                if center[0]-spWidth/2<=y<=center[1]+spWidth/2:
                    A[y][x]=0.
                # pads
                if np.sqrt((x-center[0]-pad1[0])**2+(y-center[1]-pad1[1])**2)<=pad1[2]:
                    A[y][x]=0.
                if np.sqrt((x-center[0]-pad2[0])**2+(y-center[1]-pad2[1])**2)<=pad2[2]:
                    A[y][x]=0.
                if np.sqrt((x-center[0]-pad3[0])**2+(y-center[1]-pad3[1])**2)<=pad3[2]:
                    A[y][x]=0.
            else:
                # Default: plain filled circular aperture.
                if radPos<=size/2:
                    A[y][x]=1.
    print 'Aperture image size: (%s,%s)'%(len(A), len(A[0]))
    # Side effect: also writes the aperture mask as a 1-bit PNG.
    png.from_array(A,mode='L;1').save('analyticAp.png')
    print 'Aperture created'
    return A
#--------------------------------------------------
def imageToAperture(image):
    """transforms a black and white image into an aperture array, where
    0 = obstructed, 1 = unobstructed"""
    # Normalize 8-bit grayscale values to [0, 1].
    im=imread(image).astype(np.float32)
    im/=255.
    if len(np.shape(im))>2:
        #only take the R component of RGB img (supposedly grayscale so R=G=B)
        image_2d=im[:,:,0]
    else:
        # if the image has only 1 plane
        image_2d=im
    return image_2d
#--------------------------------------------------
def psf(A,L=.76,scaleFactor=5,dist=[0,0,0,0,0,0,0,0]):
    """fft=complex numbers: amplitude AND phase
    We take the modulus square --> distrib. of light
    L is the wavelength, same units as OPD (microns)
    np.fft.fft2 manages zero-padding on each axis with s[0],s[1] corresponding to both x and y axis.
    ====================
    with a zero-padding factor of 5,
    the pixel scale of the PSF image is 0.0088''/pixel.
    Since:
    6 pixel @ .76um = 1.22*.76um/(2.4*10**6um) = 0.0797'' ,
    1 pixel @ .76um = 0.0797'' / 6 pixels = 0.133''/px
    """
    # NOTE(review): `dist` is a mutable default argument; benign here since
    # it is never mutated, but dist=None with an in-body default is safer.
    P=pupil(A,L,dist)
    size=np.shape(P)
    # Zero-pad each axis by scaleFactor to refine the PSF sampling.
    scaled=[size[i]*scaleFactor for i in range(len(size))]
    print 'Starting FFT with zero-padding factor of %s...'%(scaleFactor)
    tmp=np.fft.fft2(P,s=[scaled[0],scaled[1]]) # padding with 0s
    #switch quadrant to place the origin in the middle of the array
    tmp=np.fft.fftshift(tmp)
    #modulus square of the complex matrices
    print '... done'
    PSF=np.real(np.multiply(tmp,np.conjugate(tmp)))
    print '----------\nPSF image size: (%s,%s)'%(np.shape(PSF)[0],np.shape(PSF)[1])
    print 'lambda = %s'%(L)
    print "pixel size = 0.110''/px"
    #print "Pixel size at .76mu: %.4f''"%(1.22*(7.6*10**(-7))/2.4*206264.81)  # for Lambda=1. 1 rad = 206264.81 arcsec
    #print "PSF size: 5 pixels"
    print '----------\nPSF computed'
    return PSF
#--------------------------------------------------
def pupil(A,L=.76,dist=[0,0,0,0,0,0,0,0]):
    """P = A exp(2pi i OPD / L), L=lambda"""
    # A is the (real) aperture mask; the OPD supplies the complex phase term.
    print 'Computing pupil...'
    size=np.shape(A)[0]
    OPD=pathDiff(size,L,dist)
    P=np.multiply(A,np.exp(np.divide(2j*np.pi*OPD,L)))
    print '... done'
    return P
#--------------------------------------------------
def pathDiff(size=101,L=.76,dist=[0.,0.,0.,0.,0.,0.,0.,0.]):
    """
    ==================================================
    Optical Path Differences for pupil characterization
    ==================================================
    from Noll (1976):
    If phi(r,theta) is an arbitrary function, its expansion over a circle of radius R is:
    phi(R*rho, theta) = Sum_j a_j Z_j(rho,theta), rho=r/R
    --------------------
    OPD(R*rho, theta) is therefore equals to phi(R*rho, theta)
    which is, in cartesian space: OPD(sqrt(x**2+y**2), arctan(y/x))
    the wavelength dependence is hidden in a_j. -> each Zernike mode
    has a different coefficient, depending on the wavelength.
    ==================================================
    Method:
    Compute the Zernike mode(s) and multiply each of them by its coefficient (from Zmax file).
    The matrix of the Zernike values is in polar coordinates. Has to be
    transformed back to cartesian.
    ==================================================
    1 micron of despace induces 6.1nm of rms wave
    Zernike coefficient values are given in microns RMS of wave at 547 microns
    """
    # dist maps onto Zernike modes Z4..Z11:
    # defocus,z5,z6,z7,z8,z9,z10,z11
    #Aj=[-0.0068802,0.016,-0.006,-0.003,-0.003,0.011,0.02,-0.0348] #microns
    #Aj=[-0.001,0.002,-0.002,-0.001,0.0002,0.,0.001,-0.002]
    #Aj=[0.0026,0.0089,0.0222,-0.0018,0.0113,0.,0.,0.]
    # (n, m) index pairs for Z4..Z11.
    Znm=[(2,0),(2,-2),(2,2),(3,-1),(3,1),(3,-3),(3,3),(4,0)]
    # Creation of matrices
    rhorange= np.linspace(0,1,size)
    thetarange=np.linspace(0,2*np.pi,size)
    rho,theta=np.meshgrid(rhorange,thetarange)
    # OPD = Sum aj Zj
    Ztot=np.zeros((size,size))
    for i in range(len(dist)):
        #aj=dist[i]
        # Rescale the coefficient from its 547nm reference to wavelength L.
        aj=dist[i]*.547/L #Zern coef at wavelength L. L and .547 in microns
        print 'Computing Z%s with aj=%s'%(4+i,aj)
        n,m=Znm[i][0],Znm[i][1]
        if m<0.:
            Z=Zodd(n,-m,rho,theta)
        else:
            Z=Zeven(n,m,rho,theta)
        #Z*=aj
        Ztot+=np.multiply(Z,aj)
    print 'Saving Ztot'
    #print type(Ztot[50][50])
    # Diagnostic images of the polar and cartesian OPD maps.
    plt.imshow(Ztot)
    plt.savefig('Ztot.png')
    # Remap the polar-coordinate OPD onto a cartesian grid.
    cartesian_Z=scipy.ndimage.interpolation.geometric_transform(Ztot,polar2cart,extra_arguments=(size,))
    print 'Saving cartesian_Z'
    plt.imshow(cartesian_Z)
    plt.savefig('cartesian_Z.png')
    return cartesian_Z
#--------------------------------------------------
def Rnm(n,m,r):
    """computes the radial part R_n^m(r) for r a meshgrid object"""
    # Python 2: (n-m)/2 and (n+m)/2 are integer divisions, as the Zernike
    # radial polynomial definition requires (n-m even).
    R = 0
    for s in range((n-m)/2+1):
        R += (((-1)**s*np.math.factorial(n-s))/(np.math.factorial(s)*np.math.factorial((n+m)/2-s)*np.math.factorial((n-m)/2-s)))*r**(n-2*s)
    return R
#--------------------------------------------------
def Zeven(n,m,r,theta):
    """computes the even Zernike Polynomials
    r,theta are meshgrid objects"""
    # Noll normalization: sqrt(n+1) * R_n^m * sqrt(2) * cos(m*theta), m > 0.
    Z = np.sqrt(n+1)*Rnm(n,m,r)*np.sqrt(2)*np.cos(m*theta)
    return Z
#--------------------------------------------------
def Zodd(n,m,r,theta):
    """calculates the odd Zernike Polynomials
    r,theta are meshgrid objects"""
    # Same as Zeven but with the sine azimuthal term (callers pass m = |m|).
    Z = np.sqrt(n+1)*Rnm(n,m,r)*np.sqrt(2)*np.sin(m*theta)
    return Z
#--------------------------------------------------
def polar2cart(coords,size=101):
    """conversion to be used in geometric_transform(input,mapping) as mapping"""
    #origin back at the center of the image
    x=(coords[1]-size//2.)/(size//2.)
    y=(coords[0]-size//2.)/(size//2.)
    #compute -1<r<1 and 0<theta<2pi
    r=np.sqrt(x*x+y*y)
    theta=np.arctan2(y,x)
    # arctan2 returns (-pi, pi]; shift negatives into [0, 2pi).
    # (the old `cond and a or b` idiom is safe here: theta+2*pi is never 0)
    theta=theta<0 and theta+2*np.pi or theta
    #bin r,theta back to pixel space (101,101)
    r*=size-1
    theta*=(size-1)/(2*np.pi)
    # geometric_transform expects (row, col) = (theta-index, r-index).
    return (theta,r)
#--------------------------------------------------
def jitter(PSF,jitterSize):
    """Compute the Optical Transfer Function (OTF) and multiply it by the gaussian jitter function
    WORK IN PROGRESS"""
    # Stub: `jitter` is hard-coded to 0, so the OTF is zeroed out and the
    # function returns None — not usable yet (jitterSize is ignored).
    jitter=0.
    OTF=np.fft.fft2(PSF)*jitter
    OTF=np.fft.ifft2(OTF)
    return
#--------------------------------------------------
def bin2detector(coords,L,size,detectorScale):
    """rebin the Optical Transfer Function to the detector's scale.
    ---Used in resizePSF()---
    The OTF can be the PSF, or if jitter is specified, its convolution with a gaussian.
    """
    # scale the PSF to the desired size (0.106 arcsec)
    # Native PSF sampling at 5x zero-padding, proportional to wavelength L.
    scaleFactor=0.0797/6.*(L/0.76) #at 5x zero-padding
    #PSF=0.0797'' in sky space, has to be 0.106'' in detector
    #factor of 0.106/0.0797 = 1.32998
    #scaleFactor=0.751887*L/.76
    #scaleFactor=0.132998*L/.76
    # pixel scale taken from Tiny Tim psf.fits' header
    #detectorScale=0.0251 # ''/pixel
    # Map an output (detector) pixel back to its input (sky) pixel,
    # keeping the array center fixed.
    x=(coords[1]-size//2.)*detectorScale/scaleFactor+(size//2.)
    y=(coords[0]-size//2.)*detectorScale/scaleFactor+(size//2.)
    return (y,x)
#--------------------------------------------------
def resizePSF(PSF,L=.76,size=505,scale=0.110):
    """Resize the PSF to match pixel size and resolution of instrument (0.12'') at .76um"""
    print "resizing PSF to match detector pixel size of %s''/px..."%(scale)
    # Resample through bin2detector, then crop a 64x64 cutout around center.
    newPSF=scipy.ndimage.interpolation.geometric_transform(PSF,bin2detector,extra_arguments=(L,size,scale))
    newPSF=newPSF[size//2.-32:size//2.+32,size//2.-32:size//2.+32]
    print '... done'
    return newPSF
#--------------------------------------------------
def createFits(PSF,disto=[0,0,0,0,0,0,0,0],pixelScale=0.0251,wavelength=0.76):
    """Creates a .fits file containing the PSF image
    with header informations:
    Created, Instrument,
    Focus, Astigmatism (0,45), Coma (x,y), Trefoil (x,y), Spherical
    Pixel scale and Wavelength"""
    # Output name encodes the wavelength, e.g. psf_0.76.fits.
    name='psf_%s.fits'%(wavelength)
    print 'Writting psf to file %s...'%(name)
    hdu=pyfits.PrimaryHDU(PSF)
    header=hdu.header
    now=dt.now()
    # Header layout mirrors Tiny Tim's PSF FITS output.
    header['CREATED']=('%s %s %s %s %s'%(dt.strftime(now,'%a'),dt.strftime(now,'%b'),dt.strftime(now,'%d'),dt.strftime(now,'%X'),dt.strftime(now,'%Y')),'Time and date file was created')
    header['INSTRUME']=('WFIRST_WFI','Simulated instrument')
    header['FOCUS']=(disto[0],'PSF RMS focus (waves @ 547 nm)')
    header['X_ASTIG']=(disto[1],'PSF RMS 0d astig (waves @ 547 nm)')
    header['Y_ASTIG']=(disto[2],'PSF RMS 45d astig (waves @ 547 nm)')
    header['X_COMA']=(disto[3],'PSF RMS X-coma (waves @ 547 nm)')
    header['Y_COMA']=(disto[4],'PSF RMS Y-coma (waves @ 547 nm)')
    header['X_CLOVER']=(disto[5],'PSF RMS X-clover (waves @ 547 nm)')
    header['Y_CLOVER']=(disto[6],'PSF RMS Y-clover (waves @ 547 nm)')
    # NOTE(review): 'SPHEICL' looks like a typo for a spherical-aberration
    # keyword; left unchanged because downstream readers may match it.
    header['SPHEICL']=(disto[7],'PSF RMS spherical (waves @ 547 nm)')
    header['PIXSCALE']=(round(pixelScale,4),'Pixel scale in arcseconds')
    header['WAVELNTH']=(wavelength,'PSF wavelength in microns')
    # clobber=True overwrites any existing file of the same name.
    hdu.writeto('%s'%(name),clobber=True)
    print '... done'
    return
#==================================================
def main():
    """Interactive driver: build an HST-like aperture, compute the PSF at the
    requested wavelength, rebin it to the detector scale and write a FITS."""
    L=float(raw_input('Lambda? (0.76-2.00 microns) ') or .76)
    A=aperture(101,'HST')
    # Zernike coefficients for distortions Z4 to Z11 (defocus to spherical)
    dist=[0.0026,0.0089,0.0222,-0.0018,0.0113,0.,0.,0.]
    PSF=psf(A,L,5,dist)
    size=np.shape(PSF)[0] # size of the array
    pixelScale=0.110 # WFI pixel scale
    #with constant pixelScale, the size of the PSF will vary with the wavelength
    #and its sampling too.
    newPSF=resizePSF(PSF,L,size,pixelScale)
    #plt.imshow(newPSF,origin='lower',interpolation='nearest')
    #plt.show()
    createFits(newPSF,pixelScale=pixelScale,wavelength=L,disto=dist)
    return
#==================================================
# Run the interactive PSF pipeline only when executed as a script.
if __name__=='__main__':
    main()
|
[
"glass.florian@gmail.com"
] |
glass.florian@gmail.com
|
8a29d8ec0053b40edbbece96e608f56686f2d9a6
|
9ba95ea195c81fe30bffb198e79dc4714721a5b8
|
/mydjangoapp/settings.py
|
40aaadc3e922116dad4e2e3c69bec4f46a2b3384
|
[] |
no_license
|
vincedgy/myDjangoApp
|
11b01d5f153a8b098191c4a88c5f4211c2f70592
|
e6e7c72eb1e867d18c86cff7d1841f33961a2d9c
|
refs/heads/master
| 2020-12-30T15:54:07.179984
| 2017-05-28T15:49:13
| 2017-05-28T15:49:13
| 91,178,863
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,375
|
py
|
"""
Django settings for mydjangoapp project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and
# load it from the environment before any production deployment.
SECRET_KEY = '98i(j+en+)c276&q0gaafh1k496zok#@djbah^j!s7$bv+01&_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# AWS config for ElasticBeanstalk
# Hosts allowed to serve this app; leading-dot entries match any subdomain.
ALLOWED_HOSTS = [
    '127.0.0.1',
    'localhost',
    '.compute-1.amazonaws.com', # allows viewing of instances directly
    '.elasticbeanstalk.com'
]
# Application definition
INSTALLED_APPS = [
    'polls.apps.PollsConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mydjangoapp.urls'
# Project-level templates live in <repo>/templates; app templates are
# discovered automatically via APP_DIRS.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mydjangoapp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# SQLite file stored next to manage.py; fine for dev, not for Beanstalk
# instances with ephemeral disks.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
# Target directory for `collectstatic` (relative to the project root).
STATIC_ROOT = 'static'
|
[
"vincent.dagoury@gmail.com"
] |
vincent.dagoury@gmail.com
|
76b47bd8eeca351c4f100b539b893b28f6a792cc
|
fa1ad0497672ac45496874fd126a5183a4adb732
|
/packages/fetchai/skills/http_echo/__init__.py
|
d3af87d5c5ab4695215fd225e1584a455b2c0a49
|
[
"Apache-2.0"
] |
permissive
|
mandyohhh/agents-aea
|
ccb2733fb29b176e49cfe4e51c9c9b298d3f0e33
|
2aaddb7c21e1867c4d27ac9ebc24813bb0ebc383
|
refs/heads/master
| 2022-12-29T16:04:47.900424
| 2020-10-22T10:48:56
| 2020-10-22T10:48:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 971
|
py
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the implementation of the http echo skill."""
from aea.configurations.base import PublicId
# Package-wide identifier of this skill, parsed from "author/name:version".
PUBLIC_ID = PublicId.from_str("fetchai/http_echo:0.9.0")
|
[
"david.minarsch@googlemail.com"
] |
david.minarsch@googlemail.com
|
17e2f6c60b01961da10ac63d2ac67f8fa340512e
|
d556f1fa146449372e936a949596491b96806103
|
/app/__init__.py
|
22be826c96db32727162b13681b36634865339c6
|
[
"Apache-2.0"
] |
permissive
|
JoeCare/flask_geolocation_api
|
ab787f20671005826ac1e1002eed36f40516729d
|
ad9ea0d22b738a7af8421cc57c972bd0e0fa80da
|
refs/heads/main
| 2023-04-12T17:41:54.966314
| 2021-04-22T10:20:01
| 2021-04-22T10:20:01
| 345,092,700
| 1
| 0
| null | 2021-04-22T10:20:02
| 2021-03-06T12:50:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,195
|
py
|
import connexion, os
from connexion.resolver import RestyResolver
from flask import json
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
# Globally accessible libraries
db = SQLAlchemy()
mm = Marshmallow()
def init_app():
    """Initialize the Connexion application.

    Builds a Connexion FlaskApp that serves ``openapi.yaml`` from the
    directory above this package, resolves operations against the ``run``
    module via RestyResolver, loads the production config and binds the
    SQLAlchemy and Marshmallow extensions.

    Returns the underlying Flask application (not the Connexion wrapper).
    """
    BASE_DIR = os.path.abspath(os.path.dirname(__file__))
    # The OpenAPI spec lives one level above this package.
    openapi_path = os.path.join(BASE_DIR, "../")
    conn_app = connexion.FlaskApp(
        __name__, specification_dir=openapi_path, options={
            "swagger_ui": True,
            "serve_spec": True
        }
    )
    # strict_validation rejects requests with undeclared parameters.
    conn_app.add_api("openapi.yaml", resolver=RestyResolver('run'),
                     strict_validation=True)
    # Flask app and getting into app_context
    app = conn_app.app
    # Load application config
    app.config.from_object('config.ProdConfig')
    app.json_encoder = json.JSONEncoder
    # Initialize Plugins
    db.init_app(app)
    mm.init_app(app)
    with app.app_context():
        # Include our Routes/views; imported here so view code can use the
        # initialized extensions without circular imports.
        import run
        # Register Blueprints
        # app.register_blueprint(auth.auth_bp)
        # app.register_blueprint(admin.admin_bp)
        return app
|
[
"g.j.werpachowski@gmail.com"
] |
g.j.werpachowski@gmail.com
|
4469a205271be71211792e4e410a31ceb73c97a3
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03039/s380617982.py
|
05c63b4c723fb34981688390e758d41368de3c07
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 934
|
py
|
facs = []
M = 1000000007
def modulo(n):
    """Populate the global table `facs` with i! mod M for i in [0, n]."""
    global facs
    facs = [1] * (n + 1)
    running = 1
    for i in range(1, n + 1):
        running = facs[i] = (running * i) % M
def mulmod(x, y, p):
    """Product of x and y reduced modulo p."""
    return (x * y) % p
def divmod(x,y,p):
    # Modular division x / y (mod p) via Fermat's little theorem: y**(p-2)
    # is the inverse of y mod p, so p must be prime (here M = 1e9+7).
    # NOTE(review): this shadows the built-in divmod() in this module; the
    # name is kept because ncr() calls it.
    return mulmod(x,powmod(y,p-2,p),p)
def ncr(n, r):
    """Binomial coefficient C(n, r) modulo M using the global `facs` table.

    Requires modulo() to have been called with a bound >= n beforehand.
    """
    if n < r:
        return 0
    if n == r:
        return 1
    numerator = facs[n]
    numerator = divmod(numerator, facs[r], M)
    return divmod(numerator, facs[n - r], M)
def powmod(x, y, p):
    """Return x**y mod p for y >= 0.

    Delegates to the built-in three-argument pow(), which performs modular
    exponentiation in C, instead of the original hand-rolled recursion.
    The y == 0 guard preserves the original convention that x**0 is 1 even
    when p == 1 (built-in pow(x, 0, 1) would return 0).
    """
    if y == 0:
        return 1
    return pow(x, y, p)
def resolve():
    """Read n, m, k from stdin and print the answer modulo M."""
    n,m,k = map(int,input().split())
    # Precompute factorials up to 2e5 + 5 for the binomial coefficient below.
    modulo(2*10**5+5)
    # Ways to place the remaining k-2 pieces once a pair of cells is fixed.
    kumi=ncr(n*m-2,k-2)
    ans = 0
    # These loops appear to accumulate the sum of index gaps over all cell
    # pairs: i*(n-i) row pairs at gap i, times m*m column combinations
    # (and symmetrically for columns) -- consistent with a sum of
    # |dx| + |dy| over all pairs.
    for i in range(1,n):
        ans = ans + ((i*(n-i)*m*m)%M)
    for i in range(1,m):
        ans = ans + ((i*(m-i)*n*n)%M)
    print(ans*kumi%M)
# Entry point when executed as a script (e.g. by the judge).
if __name__ == "__main__":
    resolve()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
1dd2b3b4713bed074603e0875f7ea483ca5f70bd
|
a49ca01336d39f190f79260a42ed03767d5dcb0c
|
/utilities/videoReaderSanityCheck.py
|
a13fd1f288328b6e7b8e1e57256d246552fa6477
|
[
"MIT"
] |
permissive
|
chuanzhidong/VehicleTracking
|
63db5b86edf6cc0badde1ba7bb0b40d8c9763213
|
99d5c284ee1dc5765e23ad13a87d82e12e7d576b
|
refs/heads/master
| 2021-06-07T12:17:14.289700
| 2016-10-24T14:59:34
| 2016-10-24T14:59:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,297
|
py
|
#opencv videoreader test
import cv2
import os
import sys
import pdb
import glob as glob
import matplotlib.pyplot as plt
from DataPathclass import *
DataPathobj = DataPath(dataSource,VideoIndex)
from parameterClass import *
Parameterobj = parameter(dataSource,VideoIndex)
def readVideo(cap, subSampRate):
    """Return the next kept frame, discarding subSampRate-1 frames after it.

    Note: the returned status comes from the LAST read performed (i.e. the
    final skipped frame when subSampRate > 1), matching the original code.
    """
    status, frame = cap.read()
    for _ in range(subSampRate - 1):
        status, _skipped = cap.read()
    return frame, status
def readBuffer(startOffset, cap):
    """Advance `cap` by startOffset frames (all discarded) and return it."""
    for _ in range(startOffset):
        _ret, _frame = cap.read()
    return cap
# --- script: sanity-check frame counting for the configured video ---
cap = cv2.VideoCapture(DataPathobj.video)
# NOTE(review): `np` and `DataPathobj.cap` are not defined in this module;
# they are presumably provided by the star-imports from DataPathclass /
# parameterClass above -- confirm before running standalone.
print 'fps=', np.int(DataPathobj.cap.get(cv2.cv.CV_CAP_PROP_FPS)) ## this is not reliable
print 'whole frame count=', np.int(DataPathobj.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)) ## this is not reliable, either
startOffset = 0
cap = readBuffer(startOffset, cap)
frameInd = 0
subSampRate = 1
status = True
# Read every frame (subSampRate == 1) until the capture is exhausted,
# printing the source index of each frame actually consumed.
while status:
    frame,status = readVideo(cap, subSampRate)
    # cv2.imshow('vis', frame)
    # cv2.waitKey(5)
    # print 'current frame loc=', DataPathobj.cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES)
    frameInd+=1
    print startOffset+(frameInd-1)*subSampRate
print 'is the last ', startOffset+(frameInd-1)*subSampRate, '= 54552?'
|
[
"lichenge0223@gmail.com"
] |
lichenge0223@gmail.com
|
f67f859a7e2900c934553cd65d4ef27df1d6ee01
|
d1ea0e695930dd4cfc3128c0da467e4bdceeabd9
|
/select_clinic.sikuli/select_clinic.py
|
c9a61cf8df20831634b720cf598f18232a644abb
|
[
"Apache-2.0"
] |
permissive
|
openmash/medsphere-test
|
0f21ea89fed349a91598eb43e19814ed4dc14d52
|
2d5e1270b4109b8b8151e24fd3531ea02fcbfb8a
|
refs/heads/master
| 2016-09-06T05:53:09.605267
| 2013-06-13T15:45:45
| 2013-06-13T15:45:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 174
|
py
|
# Sikuli UI-automation steps (run inside the Sikuli runtime, which provides
# doubleClick/click/find/Pattern as globals).
# Double-click the captured screen pattern; similarity lowered to 0.82 to
# tolerate minor rendering differences.
doubleClick(Pattern("1370982387429.png").similar(0.82))
# Open the clinic selector dialog.
click("SelectaClinic.png")
click(Pattern("open.png").similar(0.74))
# Block until the day/week schedule view is visible on screen.
find(Pattern("day_week.png").similar(0.63))
|
[
"brian.lampe@medsphere.com"
] |
brian.lampe@medsphere.com
|
fe179eb42dd6808abd571b1e19a57611dd1ede2f
|
571d36f865b545c0a72134e586fbcddd6953a68b
|
/eng/exp.py
|
73da614e6fd63870fbafd08e7aeab587133b8490
|
[] |
no_license
|
andrew-turner/Ditto
|
a5a79faaf31cc44d08ac5f70fa2ac51e51d1b60f
|
72841fc503c716ac3b524e42f2311cbd9d18a092
|
refs/heads/master
| 2020-12-24T14:19:01.164846
| 2015-05-20T08:42:26
| 2015-05-20T08:42:26
| 35,935,568
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
# Experience formulas as given by Bulbapedia.
GEN5_FORMULA = True
def getExpGain(victor, defeated, isTrainer=False):
    """Experience points awarded to `victor` for defeating `defeated`.

    Uses the Generation V formula when GEN5_FORMULA is set; otherwise
    returns 1.  The trade, lucky egg, exp share and exp point power
    multipliers are currently fixed at their neutral value of 1; trainer
    battles apply a 1.5x bonus.
    """
    if not GEN5_FORMULA:
        return 1
    trainer_bonus = 1.5 if isTrainer else 1.0
    traded = 1
    lucky_egg = 1
    exp_share = 1
    exp_power = 1
    winner_level = victor.level
    loser_level = defeated.level
    base = defeated.baseExp
    # Scaled base yield, divided among exp-share recipients.
    scaled = (trainer_bonus * base * loser_level) / (5 * exp_share)
    # Level-ratio correction introduced in Gen V.
    level_ratio = (((2 * loser_level) + 10) ** 2.5) / ((loser_level + winner_level + 10) ** 2.5)
    total = (scaled * level_ratio) + 1
    return int(total * traded * lucky_egg * exp_power)
|
[
"andrew.turner@merton.ox.ac.uk"
] |
andrew.turner@merton.ox.ac.uk
|
b6af0a9db82f3be76d56551c91e3297834915960
|
fc5f91b253900aabb35b2f79efa40088581f29ec
|
/My_face_recognition/mtcnn_model.py
|
5f0f9421dffa4c1398526066765ec822acb45d6a
|
[] |
no_license
|
lksshub/deeplearning
|
c153218698359bdfa32b1b7fbb77bd293dbd4932
|
9e812cbc6ef4a97742e9d3648748cfe60c85fac9
|
refs/heads/master
| 2020-05-04T08:16:56.752501
| 2019-05-09T15:28:50
| 2019-05-09T15:28:50
| 179,043,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,887
|
py
|
#coding:utf-8
import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.tensorboard.plugins import projector
import numpy as np
num_keep_radio = 0.7
#define prelu
def prelu(inputs):
    """Parametric ReLU with a learned per-channel slope (initialized 0.25)."""
    alphas = tf.get_variable("alphas", shape=inputs.get_shape()[-1], dtype=tf.float32,
                             initializer=tf.constant_initializer(0.25))
    positive = tf.nn.relu(inputs)
    # (x - |x|) / 2 is x for negative inputs and 0 otherwise.
    negative = alphas * (inputs - abs(inputs)) * 0.5
    return positive + negative
def dense_to_one_hot(labels_dense, num_classes):
    """Convert a 1-D integer label array into a (num_labels, num_classes)
    one-hot float array."""
    num_labels = labels_dense.shape[0]
    one_hot = np.zeros((num_labels, num_classes))
    # Flat index of each sample's own class within the 2-D table.
    flat_positions = np.arange(num_labels) * num_classes + labels_dense.ravel()
    one_hot.flat[flat_positions] = 1
    return one_hot
#cls_prob:batch*2
#label:batch
def cls_ohem(cls_prob, label):
    """Face/non-face cross-entropy with online hard example mining (OHEM).

    Only pos (label 1) and neg (label 0) samples contribute; part (-1) and
    landmark (-2) samples are masked out.  Of the valid samples, only the
    top `num_keep_radio` fraction with the largest losses is averaged.
    """
    zeros = tf.zeros_like(label)
    #label=-1 --> label=0net_factory
    #pos -> 1, neg -> 0, others -> 0
    label_filter_invalid = tf.where(tf.less(label,0), zeros, label)
    num_cls_prob = tf.size(cls_prob)
    cls_prob_reshape = tf.reshape(cls_prob,[num_cls_prob,-1])
    label_int = tf.cast(label_filter_invalid,tf.int32)
    # get the number of rows of class_prob
    num_row = tf.to_int32(cls_prob.get_shape()[0])
    #row = [0,2,4.....]
    row = tf.range(num_row)*2
    # Flat index of each sample's probability for its own class.
    indices_ = row + label_int
    label_prob = tf.squeeze(tf.gather(cls_prob_reshape, indices_))
    # Cross-entropy of the true class; epsilon guards against log(0).
    loss = -tf.log(label_prob+1e-10)
    zeros = tf.zeros_like(label_prob, dtype=tf.float32)
    ones = tf.ones_like(label_prob,dtype=tf.float32)
    # set pos and neg to be 1, rest to be 0
    valid_inds = tf.where(label < zeros,zeros,ones)
    # get the number of POS and NEG examples
    num_valid = tf.reduce_sum(valid_inds)
    keep_num = tf.cast(num_valid*num_keep_radio,dtype=tf.int32)
    #FILTER OUT PART AND LANDMARK DATA
    loss = loss * valid_inds
    # Hard example mining: keep only the keep_num largest losses.
    loss,_ = tf.nn.top_k(loss, k=keep_num)
    return tf.reduce_mean(loss)
def bbox_ohem_smooth_L1_loss(bbox_pred,bbox_target,label):
    """Smooth-L1 bounding-box loss with online hard example mining.

    Samples with label 0 (negatives) carry no box target and are masked
    out; the top `num_keep_radio` fraction of the largest per-sample
    losses is averaged.
    """
    sigma = tf.constant(1.0)
    threshold = 1.0/(sigma**2)
    zeros_index = tf.zeros_like(label, dtype=tf.float32)
    # BUGFIX: `label != zeros_index` relied on Python's `!=`, which TF1
    # does not overload element-wise (same defect flagged by the "there is
    # a bug" note in bbox_ohem_orginal).  tf.not_equal builds the intended
    # per-sample validity mask (nonzero label -> 1, neg -> 0).
    valid_inds = tf.where(tf.not_equal(label, zeros_index),tf.ones_like(label,dtype=tf.float32),zeros_index)
    abs_error = tf.abs(bbox_pred-bbox_target)
    # Piecewise smooth-L1: quadratic below the threshold, linear above.
    loss_smaller = 0.5*((abs_error*sigma)**2)
    loss_larger = abs_error-0.5/(sigma**2)
    smooth_loss = tf.reduce_sum(tf.where(abs_error<threshold,loss_smaller,loss_larger),axis=1)
    keep_num = tf.cast(tf.reduce_sum(valid_inds)*num_keep_radio,dtype=tf.int32)
    smooth_loss = smooth_loss*valid_inds
    _, k_index = tf.nn.top_k(smooth_loss, k=keep_num)
    smooth_loss_picked = tf.gather(smooth_loss, k_index)
    return tf.reduce_mean(smooth_loss_picked)
def bbox_ohem_orginal(bbox_pred,bbox_target,label):
    """Squared-error bbox loss with OHEM (legacy variant).

    Name kept as-is ('orginal', sic) for compatibility with callers.
    """
    zeros_index = tf.zeros_like(label, dtype=tf.float32)
    # BUGFIX (the old comment here read "there is a bug!!!!"): `label !=
    # zeros_index` used Python's `!=`, which TF1 does not overload
    # element-wise; tf.not_equal produces the intended per-sample mask
    # (pos/part samples -> 1, neg samples -> 0), matching the corrected
    # bbox_ohem below which uses tf.equal.
    valid_inds = tf.where(tf.not_equal(label, zeros_index),tf.ones_like(label,dtype=tf.float32),zeros_index)
    #(batch,)
    square_error = tf.reduce_sum(tf.square(bbox_pred-bbox_target),axis=1)
    #keep_num scalar
    keep_num = tf.cast(tf.reduce_sum(valid_inds)*num_keep_radio,dtype=tf.int32)
    #keep valid index square_error
    square_error = square_error*valid_inds
    _, k_index = tf.nn.top_k(square_error, k=keep_num)
    square_error = tf.gather(square_error, k_index)
    return tf.reduce_mean(square_error)
#label=1 or label=-1 then do regression
def bbox_ohem(bbox_pred,bbox_target,label):
    '''
    :param bbox_pred: predicted box regressions, one row per sample
    :param bbox_target: ground-truth box regressions, same shape
    :param label: class label; pos (1) and part (-1) samples carry a box
    :return: mean euclidean loss for all the pos and part examples
    '''
    zeros_index = tf.zeros_like(label, dtype=tf.float32)
    ones_index = tf.ones_like(label,dtype=tf.float32)
    # keep pos and part examples
    valid_inds = tf.where(tf.equal(tf.abs(label), 1),ones_index,zeros_index)
    #(batch,)
    #calculate square sum
    square_error = tf.square(bbox_pred-bbox_target)
    square_error = tf.reduce_sum(square_error,axis=1)
    #keep_num scalar
    num_valid = tf.reduce_sum(valid_inds)
    #keep_num = tf.cast(num_valid*num_keep_radio,dtype=tf.int32)
    # count the number of pos and part examples
    # Unlike cls_ohem, ALL valid samples are kept here (no hard-mining
    # ratio); top_k with k == num_valid just filters out the masked zeros.
    keep_num = tf.cast(num_valid, dtype=tf.int32)
    #keep valid index square_error
    square_error = square_error*valid_inds
    # keep top k examples, k equals to the number of positive examples
    _, k_index = tf.nn.top_k(square_error, k=keep_num)
    square_error = tf.gather(square_error, k_index)
    return tf.reduce_mean(square_error)
def landmark_ohem(landmark_pred, landmark_target, label):
    """Mean euclidean landmark loss over landmark samples (label == -2).

    All landmark samples are kept; top_k with k equal to the number of
    valid samples simply drops the masked-out zeros before averaging.
    """
    is_landmark = tf.where(tf.equal(label, -2),
                           tf.ones_like(label, dtype=tf.float32),
                           tf.zeros_like(label, dtype=tf.float32))
    per_sample = tf.reduce_sum(tf.square(landmark_pred - landmark_target), axis=1)
    keep_num = tf.cast(tf.reduce_sum(is_landmark), dtype=tf.int32)
    per_sample = per_sample * is_landmark
    _, top_idx = tf.nn.top_k(per_sample, k=keep_num)
    return tf.reduce_mean(tf.gather(per_sample, top_idx))
def cal_accuracy(cls_prob,label):
    '''
    :param cls_prob: per-sample class probabilities (argmax -> prediction)
    :param label: ground-truth labels; only >= 0 (pos/neg) are scored
    :return:calculate classification accuracy for pos and neg examples only
    '''
    # get the index of maximum value along axis one from cls_prob
    # 0 for negative 1 for positive
    pred = tf.argmax(cls_prob,axis=1)
    label_int = tf.cast(label,tf.int64)
    # return the index of pos and neg examples
    cond = tf.where(tf.greater_equal(label_int,0))
    picked = tf.squeeze(cond)
    # gather the label of pos and neg examples
    label_picked = tf.gather(label_int,picked)
    pred_picked = tf.gather(pred,picked)
    #calculate the mean value of a vector contains 1 and 0, 1 for correct classification, 0 for incorrect
    # accuracy = correct predictions / (num pos + num neg examples)
    accuracy_op = tf.reduce_mean(tf.cast(tf.equal(label_picked,pred_picked),tf.float32))
    return accuracy_op
def _activation_summary(x):
    """Attach a TensorBoard histogram summary of `x`'s activations.

    :param x: Tensor
    """
    name = x.op.name
    print('load summary for : ', name)
    tf.summary.histogram(name + '/activations', x)
#construct Pnet
#label:batch
def P_Net(inputs,label=None,bbox_target=None,landmark_target=None,training=True):
    """Proposal network (first MTCNN stage), fully convolutional.

    Three 1x1 heads on top of a small conv stack: 2-way face softmax,
    4-d bbox regression, 10-d landmark regression.  When training, the
    spatial dims are squeezed and the OHEM losses plus accuracy are
    returned; when testing (batch size 1) the squeezed prediction tensors
    are returned instead.
    """
    #define common param
    with slim.arg_scope([slim.conv2d],
                        activation_fn=prelu,
                        weights_initializer=slim.xavier_initializer(),
                        biases_initializer=tf.zeros_initializer(),
                        weights_regularizer=slim.l2_regularizer(0.0005),
                        padding='valid'):
        print(inputs.get_shape())
        net = slim.conv2d(inputs, 10, 3, stride=1,scope='conv1')
        _activation_summary(net)
        print(net.get_shape())
        net = slim.max_pool2d(net, kernel_size=[2,2], stride=2, scope='pool1', padding='SAME')
        _activation_summary(net)
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=16,kernel_size=[3,3],stride=1,scope='conv2')
        _activation_summary(net)
        print(net.get_shape())
        #
        net = slim.conv2d(net,num_outputs=32,kernel_size=[3,3],stride=1,scope='conv3')
        _activation_summary(net)
        print(net.get_shape())
        #batch*H*W*2
        conv4_1 = slim.conv2d(net,num_outputs=2,kernel_size=[1,1],stride=1,scope='conv4_1',activation_fn=tf.nn.softmax)
        _activation_summary(conv4_1)
        #conv4_1 = slim.conv2d(net,num_outputs=1,kernel_size=[1,1],stride=1,scope='conv4_1',activation_fn=tf.nn.sigmoid)
        print (conv4_1.get_shape())
        #batch*H*W*4
        bbox_pred = slim.conv2d(net,num_outputs=4,kernel_size=[1,1],stride=1,scope='conv4_2',activation_fn=None)
        _activation_summary(bbox_pred)
        print (bbox_pred.get_shape())
        #batch*H*W*10
        landmark_pred = slim.conv2d(net,num_outputs=10,kernel_size=[1,1],stride=1,scope='conv4_3',activation_fn=None)
        _activation_summary(landmark_pred)
        print (landmark_pred.get_shape())
        # add projectors for visualization
        #cls_prob_original = conv4_1
        #bbox_pred_original = bbox_pred
        if training:
            #batch*2
            # calculate classification loss
            cls_prob = tf.squeeze(conv4_1,[1,2],name='cls_prob')
            cls_loss = cls_ohem(cls_prob,label)
            #batch
            # cal bounding box error, squared sum error
            bbox_pred = tf.squeeze(bbox_pred,[1,2],name='bbox_pred')
            bbox_loss = bbox_ohem(bbox_pred,bbox_target,label)
            #batch*10
            landmark_pred = tf.squeeze(landmark_pred,[1,2],name="landmark_pred")
            landmark_loss = landmark_ohem(landmark_pred,landmark_target,label)
            accuracy = cal_accuracy(cls_prob,label)
            L2_loss = tf.add_n(slim.losses.get_regularization_losses())
            return cls_loss,bbox_loss,landmark_loss,L2_loss,accuracy
        #test
        else:
            #when test,batch_size = 1
            cls_pro_test = tf.squeeze(conv4_1, axis=0, name='cls_prob')
            bbox_pred_test = tf.squeeze(bbox_pred,axis=0, name='bbox_pred')
            landmark_pred_test = tf.squeeze(landmark_pred,axis=0, name='landmark_pred')
            return cls_pro_test,bbox_pred_test,landmark_pred_test
def R_Net(inputs,label=None,bbox_target=None,landmark_target=None,training=True):
    """Refinement network (second MTCNN stage).

    Conv stack flattened into a 128-d fully connected layer feeding three
    heads: 2-way face softmax, 4-d bbox regression, 10-d landmark
    regression.  Returns (cls_loss, bbox_loss, landmark_loss, L2_loss,
    accuracy) when training, otherwise the named prediction tensors.
    """
    with slim.arg_scope([slim.conv2d],
                        activation_fn = prelu,
                        weights_initializer=slim.xavier_initializer(),
                        biases_initializer=tf.zeros_initializer(),
                        weights_regularizer=slim.l2_regularizer(0.0005),
                        padding='valid'):
        print (inputs.get_shape())
        net = slim.conv2d(inputs, num_outputs=28, kernel_size=[3,3], stride=1, scope="conv1")
        print (net.get_shape())
        net = slim.max_pool2d(net, kernel_size=[3, 3], stride=2, scope="pool1", padding='SAME')
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=48,kernel_size=[3,3],stride=1,scope="conv2")
        print(net.get_shape())
        net = slim.max_pool2d(net,kernel_size=[3,3],stride=2,scope="pool2")
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=64,kernel_size=[2,2],stride=1,scope="conv3")
        print(net.get_shape())
        fc_flatten = slim.flatten(net)
        print(fc_flatten.get_shape())
        fc1 = slim.fully_connected(fc_flatten, num_outputs=128,scope="fc1")
        print(fc1.get_shape())
        #batch*2
        cls_prob = slim.fully_connected(fc1,num_outputs=2,scope="cls_fc",activation_fn=tf.nn.softmax)
        print(cls_prob.get_shape())
        #batch*4
        bbox_pred = slim.fully_connected(fc1,num_outputs=4,scope="bbox_fc",activation_fn=None)
        print(bbox_pred.get_shape())
        #batch*10
        landmark_pred = slim.fully_connected(fc1,num_outputs=10,scope="landmark_fc",activation_fn=None)
        print(landmark_pred.get_shape())
        #train
        if training:
            cls_loss = cls_ohem(cls_prob,label)
            bbox_loss = bbox_ohem(bbox_pred,bbox_target,label)
            accuracy = cal_accuracy(cls_prob,label)
            landmark_loss = landmark_ohem(landmark_pred,landmark_target,label)
            L2_loss = tf.add_n(slim.losses.get_regularization_losses())
            return cls_loss,bbox_loss,landmark_loss,L2_loss,accuracy
        else:
            # Name the outputs so they can be fetched after graph freezing.
            cls_prob = tf.identity(cls_prob, name='cls_prob')
            bbox_pred = tf.identity(bbox_pred, name='bbox_pred')
            landmark_pred = tf.identity(landmark_pred, name='landmark_pred')
            return cls_prob,bbox_pred,landmark_pred
def O_Net(inputs,label=None,bbox_target=None,landmark_target=None,training=True):
    """Output network (third MTCNN stage), the deepest of the three.

    Four conv layers plus a 256-d fully connected layer feeding the same
    three heads as R_Net (face softmax, bbox, landmarks).  Returns the
    OHEM losses plus accuracy when training, otherwise the named
    prediction tensors.
    """
    with slim.arg_scope([slim.conv2d],
                        activation_fn = prelu,
                        weights_initializer=slim.xavier_initializer(),
                        biases_initializer=tf.zeros_initializer(),
                        weights_regularizer=slim.l2_regularizer(0.0005),
                        padding='valid'):
        print(inputs.get_shape())
        net = slim.conv2d(inputs, num_outputs=32, kernel_size=[3,3], stride=1, scope="conv1")
        print(net.get_shape())
        net = slim.max_pool2d(net, kernel_size=[3, 3], stride=2, scope="pool1", padding='SAME')
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=64,kernel_size=[3,3],stride=1,scope="conv2")
        print(net.get_shape())
        net = slim.max_pool2d(net, kernel_size=[3, 3], stride=2, scope="pool2")
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=64,kernel_size=[3,3],stride=1,scope="conv3")
        print(net.get_shape())
        net = slim.max_pool2d(net, kernel_size=[2, 2], stride=2, scope="pool3", padding='SAME')
        print(net.get_shape())
        net = slim.conv2d(net,num_outputs=128,kernel_size=[2,2],stride=1,scope="conv4")
        print(net.get_shape())
        fc_flatten = slim.flatten(net)
        print(fc_flatten.get_shape())
        fc1 = slim.fully_connected(fc_flatten, num_outputs=256,scope="fc1")
        print(fc1.get_shape())
        #batch*2
        cls_prob = slim.fully_connected(fc1,num_outputs=2,scope="cls_fc",activation_fn=tf.nn.softmax)
        print(cls_prob.get_shape())
        #batch*4
        bbox_pred = slim.fully_connected(fc1,num_outputs=4,scope="bbox_fc",activation_fn=None)
        print(bbox_pred.get_shape())
        #batch*10
        landmark_pred = slim.fully_connected(fc1,num_outputs=10,scope="landmark_fc",activation_fn=None)
        print(landmark_pred.get_shape())
        #train
        if training:
            cls_loss = cls_ohem(cls_prob,label)
            bbox_loss = bbox_ohem(bbox_pred,bbox_target,label)
            accuracy = cal_accuracy(cls_prob,label)
            landmark_loss = landmark_ohem(landmark_pred, landmark_target,label)
            L2_loss = tf.add_n(slim.losses.get_regularization_losses())
            return cls_loss,bbox_loss,landmark_loss,L2_loss,accuracy
        else:
            # Name the outputs so they can be fetched after graph freezing.
            cls_prob = tf.identity(cls_prob, name='cls_prob')
            bbox_pred = tf.identity(bbox_pred, name='bbox_pred')
            landmark_pred = tf.identity(landmark_pred, name='landmark_pred')
            return cls_prob,bbox_pred,landmark_pred
|
[
"980294373@qq.com"
] |
980294373@qq.com
|
d714174bbebc40b2b5cb7665cf0e3f7820e877ef
|
471af9c94770ee1e977f72f9f6bd2164db71096e
|
/ntuple/cmssw.py
|
6e7738e20c0318aec1656e2808016d99313737d4
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
kreczko/ntuple
|
670e4625413121cff96fe6577954a2d88b5a04e7
|
6a2cebe3a416db5d7130056921e0c7c32caa1736
|
refs/heads/master
| 2016-08-10T08:37:08.966445
| 2016-02-16T11:33:55
| 2016-02-16T11:33:55
| 50,350,545
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 476
|
py
|
'''
Converter for the CMS SoftWare (CMSSW).
It requires a fully set up CMSSW area
'''
class CMSSWConverter():
    """Converts file paths using a locally set-up CMSSW environment."""
    def __init__(self):
        # A sourced CMSSW environment exports CMSSW_BASE; refuse to run
        # without it.
        import os
        # BUGFIX: the environment variable is CMSSW_BASE (upper case, as
        # the original comment itself said); the old check for 'CMSSW_base'
        # could never match and therefore always aborted.
        is_cmssw_set_up = 'CMSSW_BASE' in os.environ
        if not is_cmssw_set_up:
            import sys
            sys.exit('CMSSW does not seem to be set up, aborting...')
    def convert_file_path(self, file_path):
        """Placeholder: will shell out to `edmFileUtil -d file_path`."""
        # call edmFileUtil -d file_path
        pass
|
[
"lkreczko@googlemail.com"
] |
lkreczko@googlemail.com
|
672f3ed658454c5a1185392cddf2dbd60fabaa86
|
c6247cdc17bd1d1d88255d21e6f09c838fda2bf4
|
/tsp_utils.py
|
2dfe162783aab4fba0d3ceaa8ea4c132e159be9a
|
[] |
no_license
|
maxgold/gcn
|
7925f86ff8b37eef9cea4101f5e15036a84bcf45
|
75f42e616d8fd76b0fea4e00f87b19d041eb6f02
|
refs/heads/master
| 2021-04-26T16:52:33.247542
| 2017-10-13T20:23:45
| 2017-10-13T20:23:45
| 106,869,800
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,791
|
py
|
## Utility functions to help generate TSP instances
import numpy as np
import pickle
from copy import copy
import numpy.linalg as nlg
import json
import random
#from tsp_utils import *
class create_adj(object):
    """Random symmetric weighted adjacency matrix for a TSP instance.

    Weights are uniform in [0, 1], rounded to 3 decimals and scaled by
    1000, then symmetrized so Distance(a, b) == Distance(b, a); the
    diagonal is zero.
    """
    def __init__(self, tsp_size):
        self.scale = 1000
        raw = np.random.rand(tsp_size, tsp_size)
        raw = np.round(raw, 3) * self.scale
        np.fill_diagonal(raw, 0)
        self.matrix = (raw + raw.T) / 2
    def Distance(self, from_node, to_node):
        """Edge weight between the two node indices."""
        return self.matrix[from_node][to_node]
def edges_from_mat(matrix):
    """Return the list of (row, col) index pairs where matrix > 0.

    Materialized as a list rather than a lazy zip: under Python 3, zip is
    a one-shot iterator, and callers (create_adj_cycle) test membership
    against the result and pass it to np.array, both of which break on an
    exhausted iterator.
    """
    rows, cols = np.where(matrix > 0)
    return list(zip(rows, cols))
class create_adj_cycle(object):
    """TSP instance guaranteed to contain one planted Hamiltonian cycle.

    Builds the cycle 0-1-...-(n-1)-0 with random weights, then adds up to
    `num_new_edges` random extra edges.  Absent edges get weight 1e6 so a
    solver effectively cannot use them.  `cycle_cost` records the
    (unscaled) cost of the planted cycle.
    """
    def __init__(self, tsp_size, num_new_edges = None):
        if num_new_edges == None:
            num_new_edges = tsp_size
        self.scale = 1000
        cycle_weights = np.random.rand(tsp_size)
        cycle_cost = np.sum(cycle_weights)
        self.matrix = np.zeros((tsp_size, tsp_size))
        # Lay down the undirected planted cycle.
        for i in range(tsp_size):
            self.matrix[i, (i+1)%tsp_size] = cycle_weights[i]
            self.matrix[(i+1)%tsp_size, i] = cycle_weights[i]
        cycle_edges = edges_from_mat(self.matrix)
        # Candidate extras: all off-diagonal upper-triangle pairs not on
        # the cycle.
        t = np.ones((tsp_size, tsp_size))
        np.fill_diagonal(t, 0)
        # NOTE(review): if edges_from_mat returns a lazy zip (Python 3
        # behavior), np.array() here and the membership test below both
        # misbehave -- verify it yields a sequence.
        all_edges = np.array(edges_from_mat(t))
        all_edges = all_edges[all_edges[:,0] < all_edges[:,1]]
        all_edges = list(map(tuple, all_edges))
        new_edges = [x for x in all_edges if x not in cycle_edges]
        random.shuffle(new_edges)
        new_edges = np.array(new_edges)
        num_new_edges = min(len(new_edges), num_new_edges)
        for i in range(num_new_edges):
            val = np.random.rand(1)
            self.matrix[new_edges[i,0],new_edges[i,1]] = val
            self.matrix[new_edges[i,1],new_edges[i,0]] = val
        self.matrix = np.round(self.matrix, 3) * self.scale
        # Edges never added are made prohibitively expensive.
        self.matrix[self.matrix==0] = 1e6
        self.cycle_cost = cycle_cost
        np.fill_diagonal(self.matrix, 0)
    def Distance(self, from_node, to_node):
        # Edge weight between the two node indices.
        return(self.matrix[from_node][to_node])
def distance(x1, y1, x2, y2):
    """Euclidean distance between points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return np.sqrt(dx ** 2 + dy ** 2)
def construct_cycle_feature_route_cw(num_nodes, start_node, goal_node):
    """Clockwise (increasing-index) tour features/targets on a cycle graph.

    Returns (features, route): features is (num_nodes, 6, num_nodes), one
    6-column frame per step (position one-hot, goal one-hot, visited mask,
    3-column goal feature); route holds the next node id at each step.
    """
    # pos_mat is computed but unused -- only the commented-out feature
    # variant below referenced it.
    pos_mat = np.eye(num_nodes)
    pos_mat[start_node, :] = 1
    pos_vec = np.zeros(num_nodes)
    pos_vec[start_node] = 1
    goal_vec = np.zeros(num_nodes)
    goal_vec[goal_node] = 1
    visited_vec = np.zeros(num_nodes)
    visited_vec[start_node] = 1
    # Static 3-column goal feature: start flagged in cols 0-1, col 2 ones.
    goal_feature = np.zeros([num_nodes, 3])
    goal_feature[start_node, 0] = 1
    goal_feature[start_node, 1] = 1
    goal_feature[:, 2] = 1
    #feature_tour = np.c_[pos_mat, goal_vec, visited_vec, goal_feature][:,:,None]
    feature_tour = np.c_[pos_vec, goal_vec, visited_vec, goal_feature][:,:,None]
    route0 = np.zeros([num_nodes, 1])
    route0[0] = (start_node + 1)%num_nodes
    for i in range(1, num_nodes):
        cur_node = (start_node + i) % num_nodes
        pos_mat = np.eye(num_nodes)
        pos_mat[cur_node, :] = 1
        pos_vec = np.zeros(num_nodes)
        pos_vec[cur_node] = 1
        # NOTE(review): from the second frame on, the goal column marks
        # start_node rather than goal_node (the first frame marks
        # goal_node) -- confirm this asymmetry is intended.
        goal_vec = np.zeros(num_nodes)
        goal_vec[start_node] = 1
        visited_vec[cur_node] = 1
        #feature0 = np.c_[pos_mat, goal_vec, visited_vec, goal_feature][:,:,None]
        feature0 = np.c_[pos_vec, goal_vec, visited_vec, goal_feature][:,:,None]
        feature_tour = np.concatenate((feature_tour, feature0), axis=2)
        route0[i] = (cur_node + 1) % num_nodes
    return(feature_tour, route0)
def construct_cycle_feature_route_ccw(num_nodes, start_node, goal_node):
    """Counter-clockwise tour features/targets on a cycle graph.

    Mirrors construct_cycle_feature_route_cw but walks from start_node in
    the decreasing-index direction.  Returns (features, route): features
    is (num_nodes, 6, num_nodes) -- one frame per step with columns
    [position, goal, visited, goal_feature(3)] -- and route holds the next
    node id at each step.
    """
    visited = np.zeros(num_nodes)
    visited[start_node] = 1
    # Static 3-column goal feature: start flagged in cols 0-1, col 2 ones.
    goal_cols = np.zeros([num_nodes, 3])
    goal_cols[start_node, 0] = 1
    goal_cols[start_node, 1] = 1
    goal_cols[:, 2] = 1
    position = np.zeros(num_nodes)
    position[start_node] = 1
    goal_mark = np.zeros(num_nodes)
    goal_mark[goal_node] = 1
    frames = np.c_[position, goal_mark, visited, goal_cols][:, :, None]
    targets = np.zeros([num_nodes, 1])
    targets[0] = (start_node - 1) % num_nodes
    for step in range(1, num_nodes):
        node = (start_node - step) % num_nodes
        position = np.zeros(num_nodes)
        position[node] = 1
        # From the second frame on, the goal column marks start_node
        # (matching the original implementation), not goal_node.
        goal_mark = np.zeros(num_nodes)
        goal_mark[start_node] = 1
        visited[node] = 1
        frame = np.c_[position, goal_mark, visited, goal_cols][:, :, None]
        frames = np.concatenate((frames, frame), axis=2)
        targets[step] = (node - 1) % num_nodes
    return (frames, targets)
def construct_cycle_adj(num_nodes):
    """Symmetric 0/1 adjacency matrix of an undirected ring on num_nodes nodes."""
    ring = np.zeros([num_nodes, num_nodes])
    for node in range(num_nodes):
        nxt = (node + 1) % num_nodes
        ring[node, nxt] = 1
        ring[nxt, node] = 1
    return ring
def construct_cycle_feature_missing(num_nodes, start_node, goal_node, start_missing, end_missing):
    """Features/targets for a cycle with a missing arc [start_missing, end_missing].

    Walks backwards from start_node to start_missing, then forwards from
    start_missing to end_missing, emitting one (num_nodes, 6) frame per
    step -- columns [position, goal, visited, goal_feature(3)], with the
    goal feature's third column zeroed over the missing arc.  Returns
    (features, routes) where routes lists the next node id at each step.
    """
    visited = np.zeros(num_nodes)
    visited[start_node] = 1
    goal = np.zeros([num_nodes, 3])
    goal[goal_node, 0] = 1
    goal[goal_node, 1] = 1
    goal[:, 2] = 1
    # Zero the availability column over the missing arc, handling wrap-around.
    if start_missing <= end_missing:
        if end_missing - num_nodes != -1:
            goal[(end_missing-num_nodes+1)%num_nodes:,2] = 0
        goal[:start_missing, 2] = 0
    else:
        goal[(end_missing+1)%num_nodes:start_missing,2] = 0
    features = np.zeros([num_nodes,6,0])
    routes = []
    #routes.append((start_node - 1)%num_nodes)
    cur_node = start_node
    # Phase 1: step backwards (decreasing index) from start_node until
    # reaching start_missing.
    for i in range(0, (start_node - start_missing + num_nodes)%num_nodes):
        cur_node = (start_node - i)%num_nodes
        state0 = np.zeros([num_nodes, 3])
        state0[cur_node, 0] = 1
        state0[goal_node, 1] = 1
        visited[cur_node] = 1
        state0[:, 2] = visited
        feature0 = np.c_[state0, goal][:,:,None]
        features = np.concatenate((features,feature0), axis=2)
        routes.append((cur_node - 1)%num_nodes)
    # Phase 2: step forwards (increasing index) across the missing arc.
    for i in range(0, (end_missing - start_missing + num_nodes)%num_nodes):
        cur_node = (start_missing + i)%num_nodes
        state0 = np.zeros([num_nodes, 3])
        state0[cur_node, 0] = 1
        state0[goal_node, 1] = 1
        visited[cur_node] = 1
        state0[:, 2] = visited
        feature0 = np.c_[state0, goal][:,:,None]
        features = np.concatenate((features,feature0), axis=2)
        routes.append((cur_node + 1)%num_nodes)
    return(features, routes)
def construct_cycle_adj_missing_fieldsize(num_nodes, field_size, num_layers, start_node, start_missing, end_missing):
    """Per-layer matrix powers of a cycle adjacency matrix with a missing segment.

    Builds the cycle adjacency, zeroes out the rows/edges outside the
    usable arc [start_missing, end_missing], then returns a dict mapping
    layer index -> adj ** field_size[layer].  start_node is unused here.
    """
    adj = np.zeros([num_nodes, num_nodes])
    for i in range(num_nodes):
        adj[i, (i+1)%num_nodes] = 1
        adj[(i+1)%num_nodes, i] = 1
    if end_missing < start_missing:
        # missing nodes sit strictly between end_missing and start_missing
        adj[end_missing+1:start_missing] = 0
        adj[end_missing, (end_missing+1)%num_nodes] = 0
        adj[start_missing, start_missing-1] = 0
    else:
        # missing segment wraps: zero everything after end_missing and before start_missing
        adj[end_missing+1:] = 0
        adj[end_missing, (end_missing+1)%num_nodes] = 0
        adj[:start_missing] = 0
        adj[start_missing, (start_missing-1)%num_nodes] = 0
    res = {}
    for i in range(num_layers):
        # entries count walks of length field_size[i] restricted to the arc
        t = nlg.matrix_power(adj, field_size[i])
        #t[t>0] = 1
        res[i] = t
    return res
def construct_cycle_adj_missing_fieldsize_beta(num_nodes, field_size, num_layers, start_node, start_missing, end_missing):
    """Variant of construct_cycle_adj_missing_fieldsize that also tracks, per
    node pair, the first field size at which the pair became reachable.

    Returns a dict mapping layer -> (num_nodes, num_nodes, 2) array whose
    channel 0 is adj ** field_size[layer] and channel 1 is the "counter"
    of first-reaching field sizes.  start_node is unused here.
    """
    adj = np.zeros([num_nodes, num_nodes])
    for i in range(num_nodes):
        adj[i, (i+1)%num_nodes] = 1
        adj[(i+1)%num_nodes, i] = 1
    if end_missing < start_missing:
        adj[end_missing+1:start_missing, :] = 0
        adj[end_missing, (end_missing+1)%num_nodes] = 0
        adj[start_missing, start_missing-1] = 0
    else:
        adj[end_missing+1:, :] = 0
        adj[end_missing, (end_missing+1)%num_nodes] = 0
        adj[:start_missing, :] = 0
        adj[start_missing, (start_missing-1)%num_nodes] = 0
    ### a lot of choices down here
    power_tracker = np.zeros([num_nodes, num_nodes])
    # counter is seeded with the first layer's power; pairs already reachable
    # there keep that value, others get stamped by the first layer that reaches them
    counter = nlg.matrix_power(adj, field_size[0])
    res = {}
    for i in range(num_layers):
        t1 = nlg.matrix_power(adj, field_size[i])
        #t1[power_tracker > 0] = 0
        power_tracker = power_tracker + t1
        t2 = np.zeros([num_nodes,num_nodes])
        t2[t1 > 0] = field_size[i]
        # only stamp pairs newly reachable at this layer (counter still zero)
        mask = (t2 > 0) & (counter == 0)
        counter[mask] = np.maximum(counter[mask], t2[mask])
        t = np.concatenate((t1[:,:,None],counter[:,:,None]), axis=2)
        #t[t>0] = 1
        res[i] = t
    return res
def construct_cycle_adj_missing(num_nodes, start_node, start_missing, end_missing):
    """Cycle adjacency matrix with all edges outside the arc
    [start_missing, end_missing] removed.  start_node is unused."""
    A = np.zeros([num_nodes, num_nodes])
    for node in range(num_nodes):
        nxt = (node + 1) % num_nodes
        A[node, nxt] = 1
        A[nxt, node] = 1
    if end_missing < start_missing:
        # missing nodes lie strictly between end_missing and start_missing
        A[end_missing + 1:start_missing] = 0
        A[end_missing, (end_missing + 1) % num_nodes] = 0
        A[start_missing, start_missing - 1] = 0
    else:
        # missing segment wraps around index 0
        A[end_missing + 1:] = 0
        A[end_missing, (end_missing + 1) % num_nodes] = 0
        A[:start_missing] = 0
        A[start_missing, (start_missing - 1) % num_nodes] = 0
    return A
def construct_cycle_weight(num_nodes, max_weight = 10):
    """Random symmetric weight matrix for a cycle graph.

    Each cycle edge (i, i+1 mod n) gets an integer weight drawn
    uniformly from [0, max_weight); non-edges stay zero.
    """
    W = np.zeros([num_nodes, num_nodes])
    for node in range(num_nodes):
        nxt = (node + 1) % num_nodes
        w = np.random.randint(max_weight)
        W[node, nxt] = w
        W[nxt, node] = w
    return W
def nn_mats_from_adj(A):
    """Build edge-indexed selection matrices from an adjacency matrix.

    Returns (P1, P2, A_nn): P1/P2 are (num_edges, num_nodes) one-hot
    selectors of each edge's source / target node, and A_nn is the
    (num_nodes, num_edges) node-to-edge incidence for the source end.
    """
    num_nodes = A.shape[0]
    num_edges = int(np.sum(A > 0))
    # scan order matches the original double loop: first index over A.shape[1],
    # second over A.shape[0]; matrices here are square so this is row-major
    pairs = [(u, v)
             for u in range(A.shape[1])
             for v in range(A.shape[0])
             if A[u, v] > 0]
    edges = np.array(pairs, dtype=int).reshape(-1, 2)
    P1 = np.zeros([num_edges, num_nodes])
    P2 = np.zeros([num_edges, num_nodes])
    rows = np.arange(num_edges)
    P1[rows, edges[:, 0]] = 1
    P2[rows, edges[:, 1]] = 1
    A_nn = np.zeros([num_nodes, num_edges])
    for e, (u, _) in enumerate(pairs):
        A_nn[u, e] = 1  # this should be set to W_ij
    return (P1, P2, A_nn)
def nn_mats_from_adj_fieldsize(adj, field_size, num_layers):
    """Per-layer edge matrices from a dict of (possibly powered) adjacency matrices.

    adj maps layer -> (num_nodes, num_nodes) matrix; for each layer this
    builds the edge selectors P1/P2, the source incidence A_nn, and an
    edge feature column F holding the matrix entry normalised by num_nodes.
    Returns four dicts keyed by layer.
    """
    P1d = {}
    P2d = {}
    A_nnd = {}
    Fd = {}
    num_nodes = adj[0].shape[0]
    for layer in range(num_layers):
        dist = field_size[layer]  # NOTE(review): unused in this variant
        A = adj[layer]
        num_edges = np.sum(A > 0)
        num_nodes = A.shape[0]
        # collect (i, j) index pairs of nonzero entries, scanning i over columns-count
        # then j over rows-count (matrices here are square)
        edges = np.zeros([0,2])
        R = A.shape[0]
        C = A.shape[1]
        for i in range(C):
            for j in range(R):
                if A[i,j] > 0:
                    edges = np.r_[edges, np.array([[i, j]])]
        edges = edges.astype(int)
        P1 = np.zeros([edges.shape[0], num_nodes])
        P2 = np.zeros([edges.shape[0], num_nodes])
        F = np.zeros([edges.shape[0], 1])
        P1[np.arange(edges.shape[0]).astype(int), edges[:,0]] = 1
        P2[np.arange(edges.shape[0]).astype(int), edges[:,1]] = 1
        A_nn = np.zeros([num_nodes, num_edges])
        c = 0
        for edge in edges:
            F[c, 0] = A[edge[0], edge[1]]
            #F[c, 1] = A[edge[0], edge[1]] #should be field_size[layer]
            #F[c, 2] = A[edge[0], edge[1], 2] #should be entry of the weight power matrix
            A_nn[edge[0], c] = 1 # this should be set to W_ij
            c += 1
        F = F/num_nodes  # normalise edge features by graph size
        P1d[layer] = P1
        P2d[layer] = P2
        A_nnd[layer] = A_nn
        Fd[layer] = F
    return(P1d, P2d, A_nnd, Fd)
def nn_mats_from_adj_fieldsize_beta(adj, field_size, num_layers):
    """Like nn_mats_from_adj_fieldsize, but for 3-channel adjacency stacks.

    adj maps layer -> (num_nodes, num_nodes, >=2) array (as produced by
    construct_cycle_adj_missing_fieldsize_beta); edges are taken from
    channel 0, and the edge features F carry both channels 0 and 1
    normalised by num_nodes.  Returns four dicts keyed by layer.
    """
    P1d = {}
    P2d = {}
    A_nnd = {}
    Fd = {}
    num_nodes = adj[0].shape[0]
    for layer in range(num_layers):
        dist = field_size[layer]  # NOTE(review): unused in this variant
        A = adj[layer]
        num_edges = np.sum(A[:,:,0] > 0)
        num_nodes = A.shape[0]
        edges = np.zeros([0,2])
        R = A.shape[0]
        C = A.shape[1]
        for i in range(C):
            for j in range(R):
                if A[i,j, 0] > 0:
                    edges = np.r_[edges, np.array([[i, j]])]
        edges = edges.astype(int)
        P1 = np.zeros([edges.shape[0], num_nodes])
        P2 = np.zeros([edges.shape[0], num_nodes])
        F = np.zeros([edges.shape[0], 2])
        ## TODO try replacing this with the weight
        P1[np.arange(edges.shape[0]).astype(int), edges[:,0]] = 1
        P2[np.arange(edges.shape[0]).astype(int), edges[:,1]] = 1
        A_nn = np.zeros([num_nodes, num_edges])
        c = 0
        for edge in edges:
            F[c, 0] = A[edge[0], edge[1], 0]
            F[c, 1] = A[edge[0], edge[1], 1] #should be field_size[layer]
            #F[c, 2] = A[edge[0], edge[1], 2] #should be entry of the weight power matrix
            A_nn[edge[0], c] = 1 # this should be set to W_ij
            c += 1
        F = F/num_nodes  # normalise edge features by graph size
        P1d[layer] = P1
        P2d[layer] = P2
        A_nnd[layer] = A_nn
        Fd[layer] = F
    return(P1d, P2d, A_nnd, Fd)
def gen_cycle_data(num_nodes, trials = 100):
    """Generate full-tour training data on a weighted cycle graph.

    For every start node, builds one clockwise and one counter-clockwise
    tour (features + route labels) plus the shared adjacency, random
    weights, and edge-selector matrices, and stacks them along the last
    axis.  NOTE(review): the `trials` parameter is currently unused; the
    loop runs once per node.
    Returns (features, weights, adj, routes, P1, P2, A_nn).
    """
    #features = np.zeros([num_nodes, num_nodes + 5, num_nodes, 0])
    features = np.zeros([num_nodes, 6, num_nodes, 0])
    weights = np.zeros([num_nodes, num_nodes, 0])
    adj = np.zeros([num_nodes, num_nodes, 0])
    routes = np.zeros([num_nodes, 0])
    # a cycle on n nodes has 2n directed edges, hence the 2*num_nodes dimension
    P1 = np.zeros([2*num_nodes, num_nodes, 0])
    P2 = np.zeros([2*num_nodes, num_nodes, 0])
    A_nn = np.zeros([num_nodes, 2*num_nodes, 0])
    for i in range(num_nodes):
        start_node = i
        goal_node = i  # full tour: start and goal coincide
        feature_tour1, route1 = construct_cycle_feature_route_cw(num_nodes, start_node, goal_node)
        feature_tour2, route2 = construct_cycle_feature_route_ccw(num_nodes, start_node, goal_node)
        adj0 = construct_cycle_adj(num_nodes)
        weight0 = construct_cycle_weight(num_nodes)
        P1_0, P2_0, A_nn0 = nn_mats_from_adj(adj0)
        # append both tour directions; graph-level arrays are duplicated so
        # every sample index has matching adjacency/weights
        features = np.concatenate((features, feature_tour1[:,:,:,None]), axis=3)
        features = np.concatenate((features, feature_tour2[:,:,:,None]), axis=3)
        routes = np.concatenate((routes, route1), axis=1)
        routes = np.concatenate((routes, route2), axis=1)
        adj = np.concatenate((adj, adj0[:,:,None]), axis=2)
        adj = np.concatenate((adj, adj0[:,:,None]), axis=2)
        weights = np.concatenate((weights, weight0[:,:,None]), axis=2)
        weights = np.concatenate((weights, weight0[:,:,None]), axis=2)
        P1 = np.concatenate((P1, P1_0[:,:,None]), axis=2)
        P2 = np.concatenate((P2, P2_0[:,:,None]), axis=2)
        A_nn = np.concatenate((A_nn, A_nn0[:,:,None]), axis=2)
    return(features, weights, adj, routes, P1, P2, A_nn)
def gen_cycle_data_missing(num_nodes, field_size, num_layers):
    """Generate training data over every valid missing-segment configuration.

    Iterates over all (start_missing, end_missing) pairs that leave a
    usable arc of length > 1, and over every start node on that arc, with
    the goal fixed to end_missing.  Results are dicts keyed by the tuple
    (start_missing, end_missing, start_node).
    Returns (features, adj, routes, P1, P2, A_nn, F).
    """
    features = {}
    adj = {}
    routes = {}
    P1 = {}
    P2 = {}
    A_nn = {}
    F = {}
    for start_missing in range(num_nodes):
        for end_missing in range(num_nodes):
            # skip degenerate arcs: identical endpoints or adjacent ones
            if (start_missing != end_missing) & (abs((start_missing - end_missing )%num_nodes)!=1):
                if start_missing < end_missing:
                    start_vals = np.arange(start_missing,end_missing+1)
                else:
                    # arc wraps past index 0
                    start_vals = np.arange(start_missing, end_missing + num_nodes + 1) % num_nodes
                for i in start_vals:
                    start_node = i
                    goal_node = end_missing
                    feature0, route0 = construct_cycle_feature_missing(num_nodes, start_node, goal_node, start_missing, end_missing)
                    adj0 = construct_cycle_adj_missing_fieldsize_beta(num_nodes, field_size, num_layers, start_node, start_missing, end_missing)
                    P1_0, P2_0, A_nn0, F_0 = nn_mats_from_adj_fieldsize_beta(adj0, field_size, num_layers)
                    features[(start_missing, end_missing, i)] = feature0
                    routes[(start_missing, end_missing, i)] = route0
                    adj[(start_missing, end_missing, i)] = adj0
                    P1[(start_missing, end_missing, i)] = P1_0
                    P2[(start_missing, end_missing, i)] = P2_0
                    A_nn[(start_missing, end_missing, i)] = A_nn0
                    F[(start_missing, end_missing, i)] = F_0
    return(features, adj, routes, P1, P2, A_nn, F)
def construct_adj_fieldsize(A, W, field_size, num_layers):
    """Per-layer stacked matrix powers of the weight (W) and adjacency (A) matrices.

    Returns a dict: layer -> array of shape (n, n, 2) whose channel 0 is
    W ** field_size[layer] and channel 1 is A ** field_size[layer].
    """
    powers = {}
    for layer in range(num_layers):
        k = field_size[layer]
        w_pow = nlg.matrix_power(W, k)
        a_pow = nlg.matrix_power(A, k)
        powers[layer] = np.concatenate((w_pow[:, :, None], a_pow[:, :, None]), axis=2)
    return powers
def feature_from_assignment(routing, assignment, num_nodes):
    """Convert a solved routing assignment into feature frames and a route.

    `routing`/`assignment` look like OR-tools RoutingModel objects
    (Start/NextVar/Value API) — TODO confirm.  Walks the tour from the
    depot, emitting one (num_nodes, 6) frame per step: columns are
    [position, goal, visited | goal one-hots, all-ones mask].
    Returns (features, route) where route lists the visited nodes in
    order, closed with the start node.
    """
    route = []
    features = np.zeros([num_nodes,6,0])
    index = routing.Start(0)
    cur_node = index % num_nodes
    start_node = cur_node
    goal_node = cur_node  # tour: goal is the start node
    visited = np.zeros(num_nodes)
    visited[start_node] = 1
    goal = np.zeros([num_nodes, 3])
    goal[goal_node, 0] = 1
    goal[goal_node, 1] = 1
    goal[:, 2] = 1
    # initial frame at the start node
    state0 = np.zeros([num_nodes, 3])
    state0[cur_node, 0] = 1
    state0[goal_node, 1] = 1
    visited[cur_node] = 1
    state0[:, 2] = visited
    feature0 = np.c_[state0, goal][:,:,None]
    features = np.concatenate((features,feature0), axis=2)
    # follow the assignment until every node has been visited
    while np.sum(visited)!=num_nodes:
        index = assignment.Value(routing.NextVar(cur_node))
        cur_node = index % num_nodes
        route.append(cur_node)
        state0 = np.zeros([num_nodes, 3])
        state0[cur_node, 0] = 1
        state0[goal_node, 1] = 1
        visited[cur_node] = 1
        state0[:, 2] = visited
        feature0 = np.c_[state0, goal][:,:,None]
        features = np.concatenate((features,feature0), axis=2)
    route.append(start_node)  # close the tour
    return(features, route)
def feature_from_route(route, num_nodes):
    """Build feature frames directly from a node-order route list.

    Same frame layout as feature_from_assignment.  Returns (features,
    route') where route' is the input shifted by one and closed with the
    start node (labels align with frames).
    NOTE(review): the original author flagged this as broken ("doesn't
    work for some unknown reason"); if `route` revisits nodes or is
    shorter than num_nodes distinct entries, the while loop can index
    past the end of `route` — confirm before relying on it.
    """
    # this doesn't work for some unknown reason...w
    features = np.zeros([num_nodes,6,0])
    cur_node = route[0]
    i = 1
    start_node = cur_node
    goal_node = cur_node
    visited = np.zeros(num_nodes)
    visited[start_node] = 1
    goal = np.zeros([num_nodes, 3])
    goal[goal_node, 0] = 1
    goal[goal_node, 1] = 1
    goal[:, 2] = 1
    # initial frame at the start node
    state0 = np.zeros([num_nodes, 3])
    state0[cur_node, 0] = 1
    state0[goal_node, 1] = 1
    visited[cur_node] = 1
    state0[:, 2] = visited
    feature0 = np.c_[state0, goal][:,:,None]
    features = np.concatenate((features,feature0), axis=2)
    while np.sum(visited)!=num_nodes:
        cur_node = route[i]
        state0 = np.zeros([num_nodes, 3])
        state0[cur_node, 0] = 1
        state0[goal_node, 1] = 1
        visited[cur_node] = 1
        state0[:, 2] = visited
        feature0 = np.c_[state0, goal][:,:,None]
        features = np.concatenate((features,feature0), axis=2)
        i += 1
    # shift labels: drop the first node, append the start to close the tour
    route = list(route)
    route.append(start_node)
    route = route[1:]
    return(features, route)
def np_dic_to_json2(dic):
    """Make a two-level dict JSON-serializable.

    Outer keys (tuples of ints) become comma-joined strings, inner keys
    become strings, and numpy arrays become nested lists; other values
    pass through unchanged.  Inverse of json_dic_to_np2.
    """
    new_dic = {}
    for key1, inner in dic.items():
        # join *all* tuple elements so 3-part keys (as produced by
        # gen_cycle_data_missing) round-trip through json_dic_to_np2;
        # the previous version kept only the first two and could collide
        new_key1 = ','.join(str(k) for k in key1)
        converted = {}
        for key2, arr in inner.items():
            # isinstance (not type ==) so ndarray subclasses serialize too
            converted[str(key2)] = arr.tolist() if isinstance(arr, np.ndarray) else arr
        new_dic[new_key1] = converted
    return(new_dic)
def np_dic_to_json3(dic):
    """Make a three-level dict JSON-serializable.

    Outer keys (tuples of ints) become comma-joined strings, the two
    inner key levels become strings, and numpy arrays become nested
    lists; other values pass through.  Inverse of json_dic_to_np3.
    """
    new_dic = {}
    for key1, level2 in dic.items():
        # join *all* tuple elements so 3-part keys (as produced by
        # gen_cycle_data_missing) round-trip through json_dic_to_np3;
        # the previous version kept only the first two and could collide
        new_key1 = ','.join(str(k) for k in key1)
        converted2 = {}
        for key2, level3 in level2.items():
            converted3 = {}
            for key3, arr in level3.items():
                # isinstance (not type ==) so ndarray subclasses serialize too
                converted3[str(key3)] = arr.tolist() if isinstance(arr, np.ndarray) else arr
            converted2[str(key2)] = converted3
        new_dic[new_key1] = converted2
    return(new_dic)
def json_dic_to_np2(dic):
    """Inverse of np_dic_to_json2.

    'a,b[,c]' string keys become int tuples, inner string keys become
    ints, and lists become numpy arrays; other values pass through.
    """
    new_dic = {}
    for key1, inner in dic.items():
        # plain int() per part instead of the numpy round-trip
        # (np.array(...).astype(int)); the resulting tuples hash and
        # compare identically for dict use
        new_key1 = tuple(int(part) for part in key1.split(','))
        restored = {}
        for key2, arr in inner.items():
            # isinstance (not type ==) also accepts list subclasses
            restored[int(key2)] = np.array(arr) if isinstance(arr, list) else arr
        new_dic[new_key1] = restored
    return(new_dic)
def json_dic_to_np3(dic):
    """Inverse of np_dic_to_json3.

    'a,b[,c]' string keys become int tuples, both inner key levels
    become ints, and lists become numpy arrays; other values pass
    through.
    """
    new_dic = {}
    for key1, level2 in dic.items():
        # plain int() per part instead of the numpy round-trip; resulting
        # tuples hash and compare identically for dict use
        new_key1 = tuple(int(part) for part in key1.split(','))
        restored2 = {}
        for key2, level3 in level2.items():
            restored3 = {}
            for key3, arr in level3.items():
                # isinstance (not type ==) also accepts list subclasses
                restored3[int(key3)] = np.array(arr) if isinstance(arr, list) else arr
            restored2[int(key2)] = restored3
        new_dic[new_key1] = restored2
    return(new_dic)
|
[
"noreply@github.com"
] |
maxgold.noreply@github.com
|
01672a0b179f6d2567c1d6f70a6625e4008e3138
|
2f7ade102f564759bb55d7dfde2bf25ca78fe574
|
/Set5/Challenge33/CP5_33.py
|
f9e0aa941f2eddfdfd77bba48217aee1629f159b
|
[] |
no_license
|
grandfoosier/Cryptopals
|
6d60948b148ab6edd433fb413e849a4e623ef9e2
|
a1f79e240ba9264b31569786ba7d05122e313958
|
refs/heads/master
| 2020-04-11T02:06:00.700555
| 2016-12-13T21:02:52
| 2016-12-13T21:02:52
| 68,222,057
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,404
|
py
|
from hashlib import sha256
from random import randint
import array
#######################################################################
# Bignum given from challenge
class Bignum(object):
    """Holds the large Diffie-Hellman modulus supplied by the challenge text."""
    def __init__(self):
        # 1536-bit prime p given in the Cryptopals challenge (hex literal)
        self.ig = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca237327ffffffffffffffff
#######################################################################
# Create a key given inputs p and g
def dh_key(p, g):
    # Finite-field Diffie-Hellman played out locally: generate both secret
    # exponents, derive the public values, and verify both sides agree on
    # the shared secret s.  (Python 2 source: print statement below.)
    a = randint(0, p-1); b = randint(0, p-1)
    A = pow(g, a, p); B = pow(g, b, p)
    s = pow(B, a, p); assert s == pow(A, b, p)
    # Hex-encode s, dropping Python 2's trailing long-suffix and padding
    # to an even number of nibbles so bytearray.fromhex accepts it.
    h = hex(s)[2:]
    if h[-1] == b'L': h = h[: -1]  # NOTE(review): compares a str char to b'L' — equal only on py2
    if len(h) % 2: h = "0" + h
    bh = bytearray.fromhex(h)
    # Split SHA-256(s) hex digest in half: encryption key, then MAC key.
    keyEh = sha256(bh).hexdigest()[: 32]
    keyMh = sha256(bh).hexdigest()[32: ]
    print keyEh, keyMh
    return keyEh, keyMh
#######################################################################
# Main routine
if __name__ == "__main__":
    B = Bignum()
    # Demonstrate key agreement with the toy parameters (p=37, g=5) ...
    print ""
    print "37, 5:"
    keyE, keyM = dh_key(37, 5)
    # ... and with the challenge's big prime and g=2.
    print ""
    print "big, 2:"
    keyE, keyM = dh_key(B.ig, 2)
    print "\n"
|
[
"noreply@github.com"
] |
grandfoosier.noreply@github.com
|
713733903696af3343825284b53b29db939fa02c
|
7b971c4465797ef3561c69d7dd819fcd5d2a3736
|
/support/convexhull.py
|
09d10549b53809c9137859ed37245488e2207775
|
[] |
no_license
|
rkdarst/pcd
|
978d898a3ab1a1fd8fbb48e644234d66fe7ab95f
|
54dda49ce8a248446b236a42b9d313ce410cf58b
|
refs/heads/master
| 2021-01-23T12:38:09.042817
| 2016-11-01T18:02:25
| 2016-11-01T18:02:25
| 22,289,583
| 5
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,647
|
py
|
# See convexhull.txt
def convex_hull(points):
    """Computes the convex hull of a set of 2D points.

    Input: an iterable sequence of (x, y) pairs representing the points.
    Output: a list of vertices of the convex hull in counter-clockwise order,
    starting from the vertex with the lexicographically smallest coordinates.
    Implements Andrew's monotone chain algorithm. O(n log n) complexity.
    """
    # Sort lexicographically and drop duplicates (also handles the case
    # where all points coincide).
    pts = sorted(set(points))

    # Trivial hulls: zero or one distinct point.
    if len(pts) <= 1:
        return pts

    def turn(o, a, b):
        # z-component of the 3D cross product OA x OB: positive for a
        # counter-clockwise turn, negative for clockwise, zero if collinear.
        return (a[0] - o[0]) * (b[1] - o[1]) - (a[1] - o[1]) * (b[0] - o[0])

    def half_hull(sequence):
        # Build one monotone chain, popping points that would create a
        # clockwise (or collinear) turn.
        chain = []
        for p in sequence:
            while len(chain) > 1 and turn(chain[-2], chain[-1], p) <= 0:
                chain.pop()
            chain.append(p)
        return chain

    lower = half_hull(pts)
    upper = half_hull(reversed(pts))

    # Each chain's last point equals the other chain's first point, so
    # drop it before concatenating.
    return lower[:-1] + upper[:-1]
|
[
"rkd@zgib.net"
] |
rkd@zgib.net
|
230ce511489abcd87db47480e34a007726c3f506
|
c971765ba3c96288f935455e3fdc587ddec3689c
|
/ir_cdk_stacks/in_clt_01_stack.py
|
1b12aea8a3c8ccb68582513593d5404b339379f6
|
[] |
no_license
|
martinpham97/IR-CDK-Stacks
|
358f5d7e93fb896179b2223dc2eee9faed4824b0
|
1043f4bf1639b6736b3f7858ed7b2b82975ae7bb
|
refs/heads/master
| 2022-07-31T01:13:47.802511
| 2020-05-07T07:26:14
| 2020-05-07T07:26:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,550
|
py
|
from aws_cdk import (
core,
aws_cloudwatch as cloudwatch,
aws_events as events,
aws_lambda as _lambda,
aws_iam as iam,
aws_events_targets as event_target,
aws_sns as sns,
aws_sns_subscriptions as subs
)
import os
import logging
logger = logging.getLogger(__name__)
class InClt01Stack(core.Stack):
    """CDK stack that watches CloudTrail access events and responds via Lambda.

    Wires an EventBridge rule on aws.cloudtrail events to a response
    Lambda, notifies via SNS email, and provisions an IAM deny policy and
    group for CloudTrail access control.
    """
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # All three values come from CDK context (cdk.json / --context flags)
        NOTIFY_EMAIL = self.node.try_get_context("notify_email")
        SLACK_WEBHOOK_URL = self.node.try_get_context("webhook_url")
        WHITE_LIST_GROUP = self.node.try_get_context("white_list_group")
        if (
            not NOTIFY_EMAIL
            or not SLACK_WEBHOOK_URL
            or not WHITE_LIST_GROUP
        ):
            # Missing context: log and skip resource creation entirely
            logger.error(f"Required context variables for {id} were not provided!")
        else:
            # 1. Create Response Lambda (handler code lives in ir_cdk_stacks/in_clt_01)
            lambda_dir_path = os.path.join(os.getcwd(), "ir_cdk_stacks", "in_clt_01")
            response_lambda = _lambda.Function(
                self,
                "InClt01ResponseFunction",
                runtime=_lambda.Runtime.PYTHON_3_8,
                handler="clUnauthAccessResponse.lambda_handler",
                code=_lambda.Code.from_asset(lambda_dir_path),
                function_name="InClt01ResponseFunction",
                environment={
                    "webhook_url": SLACK_WEBHOOK_URL,
                    "white_list_group": WHITE_LIST_GROUP,
                }
            )
            # Event pattern: match every event whose source is aws.cloudtrail
            ep = {
                "source": [
                    "aws.cloudtrail"
                ]
            }
            # 2. Make that rule Track Cloudtrail events
            rule = events.Rule(self,
                "cdkRule",
                description= 'Rule created by CDK for monitoring CloudTrail access',
                enabled= True,
                rule_name= "CltAccessRule",
                event_pattern= ep )
            # 3. Add Permissions and role to Lambda
            # NOTE(review): "iam:*" on all resources is very broad — confirm
            # this is intentional for the response function.
            action = [
                "iam:*",
                "organizations:DescribeAccount",
                "organizations:DescribeOrganization",
                "organizations:DescribeOrganizationalUnit",
                "organizations:DescribePolicy",
                "organizations:ListChildren",
                "organizations:ListParents",
                "organizations:ListPoliciesForTarget",
                "organizations:ListRoots",
                "organizations:ListPolicies",
                "organizations:ListTargetsForPolicy"
            ]
            response_lambda.add_to_role_policy(
                iam.PolicyStatement(
                    actions=action,
                    effect=iam.Effect.ALLOW, resources=["*"],
                )
            )
            # 4. Permission to send SNS notification
            response_lambda.add_to_role_policy(
                iam.PolicyStatement(
                    actions=[
                        "sns:*"
                    ],
                    effect=iam.Effect.ALLOW,
                    resources=["*"],
                )
            )
            # 5. Add Lambda as target of Rule
            rule.add_target(event_target.LambdaFunction(response_lambda))
            # 6. Create SNS topic and subscription
            topic = sns.Topic(self, "CLTAccessCDK", topic_name="CLTAccessCDK")
            # topic.grant_publish(iam.ServicePrincipal("*"))
            topic.add_subscription(subs.EmailSubscription(NOTIFY_EMAIL))
            # 7. Create IAM allow/deny policy
            # NOTE(review): the deny policy below is created but never attached
            # to the group in step 8 — presumably the Lambda attaches it at
            # runtime; confirm.
            cltDenyAccessPolicy = iam.ManagedPolicy(self,
                "InCLT01DenyPolicy",
                managed_policy_name = "CltDenyAccess",
                statements=[
                    iam.PolicyStatement(
                        effect=iam.Effect.DENY,
                        actions=["cloudtrail:*"],
                        resources=["*"]
                    )
                ])
            # 8. Create IAM group
            cltAccessGroup = iam.Group(
                self,
                "cltAccessGroup",
                group_name = "cltAccessGroup"
            )
|
[
"neel64tamakuwala@gmail.com"
] |
neel64tamakuwala@gmail.com
|
e6013f393ec1ffdd449fa0e80b59f1a6ecfe2670
|
482d7d5770dfc17db5b1a0e780b634d3a9f5572a
|
/Project3/code/metrics.py
|
5e5138446ddf6d1d6ee23c6b0936f15c514201be
|
[] |
no_license
|
fmsilver89/FYS_STK_4155
|
5b9a878330f06a29ec6416aff92a06ebf0ba8dd8
|
189b7ef0d18cd9395eeab82702376ae91ad24d17
|
refs/heads/master
| 2020-09-11T13:24:15.963157
| 2019-11-16T10:18:21
| 2019-11-16T10:18:21
| 222,078,923
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,036
|
py
|
# Required packages
import numpy as np
def mean_squared_error(y, yhat):
    """Mean squared error between true values y and predictions yhat."""
    residuals = y - yhat
    return np.divide(residuals.T @ residuals, len(yhat))
def r2_score(y, yhat):
    """Coefficient of determination R^2 of predictions yhat against true values y."""
    residuals = y - yhat
    # deviations of y from its mean (total sum of squares)
    deviations = y - np.mean(y) * np.ones((len(y),))
    return 1 - np.divide(residuals.T @ residuals, deviations.T @ deviations)
def bias2(y, yhat):
    """Squared bias: mean over samples of (y - mean(yhat))**2."""
    centered = y - np.mean(yhat)
    return np.sum(centered ** 2) / len(yhat)
def variance_error(yhat):
    """Variance of the predictions: E[yhat^2] - (E[yhat])^2."""
    second_moment = np.mean(yhat ** 2)
    first_moment = np.mean(yhat)
    return second_moment - first_moment ** 2
def accuracy(y, yhat):
    """Classification accuracy for binary data: fraction of matching entries.

    y: True values.
    yhat: Predictions.
    """
    matches = np.sum(y == yhat)
    return matches / len(y)
|
[
"noreply@github.com"
] |
fmsilver89.noreply@github.com
|
26384a8be39cfdd9cbc222c1b37af013b4f43337
|
1a330be03318d7402e4525d435ee169e0f796f04
|
/camera_info_publisher/camera_info_publisher.py
|
23f5ff979a3f1b82c640d1117abacef7769ce1aa
|
[] |
no_license
|
purdue-arc/autonomous_car_misc
|
dd5f5998a86a97776428a4c8796c98c1b9cae9bf
|
235e2f15550c73a1adb8fb22f4ce25929164ed37
|
refs/heads/master
| 2020-05-02T17:26:11.180990
| 2019-04-14T23:39:16
| 2019-04-14T23:39:16
| 178,098,194
| 0
| 1
| null | 2019-10-01T00:27:45
| 2019-03-28T00:53:07
|
C++
|
UTF-8
|
Python
| false
| false
| 2,542
|
py
|
"""
pointgrey_camera_driver (at least the version installed with apt-get) doesn't
properly handle camera info in indigo.
This node is a work-around that will read in a camera calibration .yaml
file (as created by the cameracalibrator.py in the camera_calibration pkg),
convert it to a valid sensor_msgs/CameraInfo message, and publish it on a
topic.
The yaml parsing is courtesy ROS-user Stephan:
http://answers.ros.org/question/33929/camera-calibration-parser-in-python/
This file just extends that parser into a rosnode.
"""
import rospy
import yaml
from sensor_msgs.msg import CameraInfo
def yaml_to_CameraInfo(yaml_fname):
    """
    Parse a yaml file containing camera calibration data (as produced by
    rosrun camera_calibration cameracalibrator.py) into a
    sensor_msgs/CameraInfo msg.
    Parameters
    ----------
    yaml_fname : str
        Path to yaml file containing camera calibration data
    Returns
    -------
    camera_info_msg : sensor_msgs.msg.CameraInfo
        A sensor_msgs.msg.CameraInfo message containing the camera calibration
        data
    """
    # Load data from file.  safe_load: the calibration file is plain data,
    # and yaml.load without an explicit Loader is deprecated (PyYAML >= 5.1)
    # and can construct arbitrary Python objects from an untrusted file.
    with open(yaml_fname, "r") as file_handle:
        calib_data = yaml.safe_load(file_handle)
    # Copy each calibration field into the CameraInfo message
    camera_info_msg = CameraInfo()
    camera_info_msg.width = calib_data["image_width"]
    camera_info_msg.height = calib_data["image_height"]
    camera_info_msg.K = calib_data["camera_matrix"]["data"]
    camera_info_msg.D = calib_data["distortion_coefficients"]["data"]
    camera_info_msg.R = calib_data["rectification_matrix"]["data"]
    camera_info_msg.P = calib_data["projection_matrix"]["data"]
    camera_info_msg.distortion_model = calib_data["distortion_model"]
    return camera_info_msg
if __name__ == "__main__":
    # Get fname from command line (cmd line input required)
    import argparse
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("filename", help="Path to yaml file containing " +\
                                             "camera calibration data")
    args = arg_parser.parse_args()
    filename = args.filename
    # Parse yaml file
    camera_info_msg = yaml_to_CameraInfo(filename)
    # Initialize publisher node
    rospy.init_node("camera_info_publisher", anonymous=True)
    publisher = rospy.Publisher("camera_info", CameraInfo, queue_size=10)
    rate = rospy.Rate(10)
    # Run publisher: re-publish at 10 Hz so late-joining subscribers
    # still receive the calibration
    while not rospy.is_shutdown():
        publisher.publish(camera_info_msg)
        rate.sleep()
|
[
"baxter26@purdue.edu"
] |
baxter26@purdue.edu
|
73487eec553a8ca6ee7d5c5a641a16f04d766f97
|
b0c071eabc7f51f335892d49b015e28fcc4e9193
|
/flaskRestFul/venv/bin/easy_install
|
dc197eb4ac7fa243d1b9ddba65756660efe84d2f
|
[] |
no_license
|
namelessfintech/FlaskMastery
|
64627e4e8d361ec2e92794a39935342d356eb6d4
|
6b98d42f591ee8a0ff023f0f5b55f87e550c3c36
|
refs/heads/master
| 2021-11-04T17:26:52.323376
| 2019-04-28T00:53:11
| 2019-04-28T00:53:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 316
|
#!/Users/MichaelBallard/Documents/2019/Code/Python/April/FlaskMaster/flaskRestFul/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Auto-generated setuptools console-script wrapper for the `easy_install`
# command inside this virtualenv; not intended to be edited by hand.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # strip a trailing "-script.py(w)"/".exe" so argv[0] matches the command name
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"MichaelBallard@Michaels-MacBook-Pro-2.local"
] |
MichaelBallard@Michaels-MacBook-Pro-2.local
|
|
d54f6d7bb54e5d14437247e6cb86fbff66c9e315
|
e980879b9b96e466fae0093d2aa10c90119f03a9
|
/splitPng/ToolFunctions.py
|
c653fc6777ec8c87a45e60b5b2827f831d7d974a
|
[] |
no_license
|
a77654135/tools
|
dc05d3e6fba2b59ae8a8fa36b3e6160f15e2908a
|
c0028c108bf8ea46f6cc09076441f28cae640d63
|
refs/heads/master
| 2018-12-25T20:42:37.754086
| 2018-11-21T05:47:48
| 2018-11-21T05:47:48
| 120,172,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,775
|
py
|
# encoding: utf-8
"""
-------------------------------------------------
File Name: ToolFunctions
@time: 2018/2/4 21:47
@author: talus
@desc: 图片处理工具
-------------------------------------------------
"""
import os
from PIL import Image
class Rectangle():
    """Axis-aligned rectangle with inclusive integer edges.

    Stored as top-left corner (left, top) plus size (width, height);
    right/bottom are derived, *inclusive* edge coordinates.
    """
    def __init__(self, l=0, t=0, w=1, h=1):
        self.left = l
        self.top = t
        self.width = w
        self.height = h
    @property
    def right(self):
        # inclusive right edge: left + width - 1
        return self.left + self.width - 1
    @property
    def bottom(self):
        # inclusive bottom edge: top + height - 1
        return self.top + self.height - 1
    def __str__(self):
        return "{} {} {} {}".format(self.left,self.top,self.width,self.height)
    def __unicode__(self):
        # Python 2 unicode form; same "left top width height" layout as __str__
        return "{} {} {} {}".format(self.left,self.top,self.width,self.height)
def getColors(img):
    """Build a width x height table of booleans marking non-transparent pixels.

    table[x][y] is True when img.getpixel((x, y)) != 0.
    """
    width, height = img.size
    table = []
    for x in range(width):
        table.append([img.getpixel((x, y)) != 0 for y in range(height)])
    return table
def Exist(colors, x, y):
    """Return the non-transparent mark at (x, y), or False when out of bounds."""
    inside = 0 <= x < len(colors) and 0 <= y < len(colors[0])
    if not inside:
        return False
    return colors[x][y]
def L_Exist(colors,rect):
    """
    Return True if any non-transparent pixel lies in the column immediately
    to the LEFT of rect (the original docstring said "right side" — a
    copy-paste slip; the code checks rect.left - 1).
    :param colors: 2D boolean table from getColors
    :param rect: Rectangle to test
    :return: bool
    """
    if rect.right >= len(colors) or rect.left < 0:
        return False
    for i in range(0,rect.height):
        if Exist(colors,rect.left - 1, rect.top + i):
            return True
    return False
def R_Exist(colors,rect):
    """
    Return True if any non-transparent pixel lies in the column immediately
    to the RIGHT of rect (checks rect.right + 1).
    :param colors: 2D boolean table from getColors
    :param rect: Rectangle to test
    :return: bool
    """
    if rect.right >= len(colors) or rect.left < 0:
        return False
    for i in range(0,rect.height):
        if Exist(colors,rect.right + 1,rect.top + i):
            return True
    return False
def D_Exist(colors,rect):
    """
    Return True if any non-transparent pixel lies in the row immediately
    BELOW rect (checks rect.bottom + 1).
    :param colors: 2D boolean table from getColors
    :param rect: Rectangle to test
    :return: bool
    """
    if rect.bottom >= len(colors[0]) or rect.top < 0:
        return False
    for i in range(0,rect.width):
        if Exist(colors, rect.left + i,rect.bottom + 1):
            return True
    return False
def U_Exist(colors,rect):
    """
    Return True if any non-transparent pixel lies in the row immediately
    ABOVE rect (checks rect.top - 1).
    :param colors: 2D boolean table from getColors
    :param rect: Rectangle to test
    :return: bool
    """
    if rect.bottom >= len(colors[0]) or rect.top < 0:
        return False
    for i in range(0,rect.width):
        if Exist(colors, rect.left + i,rect.top - 1):
            return True
    return False
def clearRect(colors, rect):
    """Clear the non-transparent marks inside rect (inclusive bounds)."""
    columns = range(rect.left, rect.right + 1)
    rows = range(rect.top, rect.bottom + 1)
    for x in columns:
        for y in rows:
            colors[x][y] = False
def getRect(colors,x,y):
    """
    Grow a rectangle from seed pixel (x, y) until it covers the whole
    connected block of non-transparent pixels, then clear that block
    from the table (mutates `colors`).
    Expansion repeats right/down/left/up sweeps until no direction has
    an adjacent non-transparent pixel.
    :param colors: 2D boolean table from getColors (modified in place)
    :param x: seed column
    :param y: seed row
    :return: Rectangle bounding the block
    """
    rect = Rectangle(x,y,1,1)
    flag = True
    while flag:
        flag = False
        while R_Exist(colors,rect):
            rect.width += 1
            flag = True
        while D_Exist(colors,rect):
            rect.height += 1
            flag = True
        while L_Exist(colors,rect):
            rect.width += 1
            rect.left -= 1
            flag = True
        while U_Exist(colors,rect):
            rect.height += 1
            rect.top -= 1
            flag = True
    # clear the block so subsequent scans don't find it again
    clearRect(colors,rect)
    return rect
def GetRects(img):
    """
    Split the image into rectangular sub-blocks.
    Principle: adjacent connected (non-transparent) regions form one
    block, with fully transparent pixels acting as separators.
    :param img: PIL image
    :return: list of Rectangle objects
    """
    rects = []
    colors = getColors(img)
    width,height = img.size
    # scan pixel by pixel; getRect clears each found block from `colors`,
    # so every block is reported exactly once
    for i in range(0,width):
        for j in range(0,height):
            if Exist(colors,i,j):
                rect = getRect(colors,i,j)
                rects.append(rect)
                # if rect.width > 10 and rect.height > 10:
                #     rects.append(rect)
    return rects
if __name__ == "__main__":
    # Demo (Python 2): split one sprite sheet and save each block as its own PNG
    path = os.path.abspath(r"C:\Users\talus\work\moni\sheep\0\103.png")
    dirname = os.path.dirname(path)
    img = Image.open(path)
    rects = GetRects(img)
    for idx, rect in enumerate(rects):
        # crop() takes an exclusive right/bottom, hence left+width / top+height
        im = img.crop((rect.left, rect.top, rect.left + rect.width, rect.top + rect.height))
        print rect
        im.save(os.path.join(dirname, "{}.png".format(idx)))
|
[
"330243319@qq.com"
] |
330243319@qq.com
|
e251803edc18e68adf1d9ff53ea3f1996e1b583b
|
5ee6faeb3ba5959e09cbadbb72dc0b5e0398ea1e
|
/run_meta_mnist_test.py
|
46770d1a0b8387911dae1649342a086e2b2e4430
|
[] |
no_license
|
jinxu06/hmaml
|
6ce2e5a6307da69eb848592ac0d351afcf07aa94
|
990c9faaa9731987c7514c61cfa40ce850312316
|
refs/heads/master
| 2020-03-20T17:56:08.689374
| 2018-06-30T11:24:38
| 2018-06-30T11:24:38
| 137,568,663
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,312
|
py
|
import numpy as np
import tensorflow as tf
import argparse
from models.classifiers import MNISTClassifier
from components.learners import Learner
import data.mnist as mnist
# Script: repeatedly fine-tune a 5-way MNIST classifier on freshly sampled
# mini-datasets and report mean validation accuracy per inner iteration.
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--load_params', help='', action='store_true', default=False)
parser.add_argument('--num_inner_iters', help='', default=10, type=int)
args = parser.parse_args()
# 5-class MNIST split into meta-train / meta-val / meta-test pools
meta_train_set, meta_val_set, meta_test_set = mnist.load(data_dir="~/scikit_learn_data", num_classes=5, batch_size=5, split=[5./7, 1./7, 1./7], return_meta=True)
model = MNISTClassifier(num_classes=5, inputs=None, targets=None)
# run UPDATE_OPS (e.g. batch-norm moving averages) together with the train step
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    optimizer = tf.train.AdamOptimizer(1e-4).minimize(model.loss)
global_init_op = tf.global_variables_initializer()
saver = tf.train.Saver()
save_dir = "/data/ziz/jxu/hmaml-saved-models"
config = tf.ConfigProto()
config.gpu_options.allow_growth = True  # grab GPU memory on demand, not all at once
with tf.Session(config=config) as sess:
    acc_arr = []
    for dk in range(20):
        # reset (or reload) parameters for every sampled mini-dataset
        sess.run(global_init_op)
        if args.load_params:
            ckpt_file = save_dir + '/params_' + "mnist" + '.ckpt'
            print('restoring parameters from', ckpt_file)
            saver.restore(sess, ckpt_file)
        print(dk, "resample dataset...")
        train_set, val_set = meta_train_set.sample_mini_dataset(num_classes=5, num_shots=15, test_shots=5, classes=[0,1,2,3,4])
        learner = Learner(session=sess, model=model)
        accs = []
        for epoch in range(args.num_inner_iters):
            # print(epoch, "......")
            learner.train(train_set, optimizer)
            evals = learner.evaluate(val_set)
            accs.append(evals["accuracy"])
        acc_arr.append(accs)
    # mean accuracy per inner iteration, averaged over the 20 resamples
    m = np.array(acc_arr)
    print(m.mean(0))
# train_set, test_set = meta_train_set.sample_mini_dataset(num_classes=5, num_shots=15, test_shots=5, classes=[5,6,7,8,9])
# learner = Learner(session=sess, model=model)
# for epoch in range(20):
#     print(epoch, "......")
#     learner.train(train_set, optimizer)
# evals = learner.evaluate(test_set)
# print(evals)
# saver.save(sess, save_dir + '/params_' + "mnist" + '.ckpt')
|
[
"aaron.jin.xu@gmail.com"
] |
aaron.jin.xu@gmail.com
|
d129125455e5894af610b0743efe026f7323fd5d
|
b79fc4af0ac609a843c72e62e048f3fd60874432
|
/dialogue_system/nlu/nlu_baseline.py
|
5eac696f4250123ad81457508eaabe187ba30ca8
|
[] |
no_license
|
brunonishimoto/tcc
|
fbb69e5301976861eca96bcb8f6c2600720bdd48
|
65a0912568bb749efc9482ee764cb32525d7d863
|
refs/heads/master
| 2021-07-15T19:19:51.704993
| 2019-12-17T03:14:35
| 2019-12-17T03:14:35
| 183,078,387
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,605
|
py
|
'''
Created on Jul 13, 2016
@author: xiul
'''
import pickle
import copy
import numpy as np
import dialogue_system.constants as const
from .models import lstm, biLSTM
class NLUBaseline:
    def __init__(self, config):
        """Load NLU settings and the trained tagging model from config["nlu"]."""
        # the "nlu" section holds model_path plus decoding hyper-parameters
        self.params = config["nlu"]
        self.load_model(self.params["model_path"])
    def generate_dia_act(self, annot):
        """ generate the Dia-Act with NLU model """
        # Empty input yields no dialogue act.
        if len(annot) > 0:
            # strip trailing punctuation before tagging
            tmp_annot = annot.strip('.').strip('?').strip(',').strip('!')
            rep = self.parse_str_to_vector(tmp_annot)
            Ys, cache = self.model.fwdPass(rep, self.params, predict_model=True) # default: True
            # softmax over tag scores, max-shifted for numerical stability
            maxes = np.amax(Ys, axis=1, keepdims=True)
            e = np.exp(Ys - maxes) # for numerical stability shift into good numerical range
            probs = e/np.sum(e, axis=1, keepdims=True)
            if np.all(np.isnan(probs)): probs = np.zeros(probs.shape)
            # special handling with intent label: the last time step predicts
            # the intent, so zero out all slot (BIO) tags there
            for tag_id in self.inverse_tag_dict.keys():
                if self.inverse_tag_dict[tag_id].startswith('B-') or self.inverse_tag_dict[tag_id].startswith('I-') or self.inverse_tag_dict[tag_id] == 'O':
                    probs[-1][tag_id] = 0
            pred_words_indices = np.nanargmax(probs, axis=1)
            pred_tags = [self.inverse_tag_dict[index] for index in pred_words_indices]
            print(pred_tags)
            # convert the BIO tag sequence + final intent into a dialogue act
            diaact = self.parse_nlu_to_diaact(pred_tags, tmp_annot)
            return diaact
        else:
            return None
def load_model(self, model_path):
""" load the trained NLU model """
model_params = pickle.load(open(model_path, 'rb'), encoding='latin1')
hidden_size = model_params['model']['Wd'].shape[0]
output_size = model_params['model']['Wd'].shape[1]
if model_params['params']['model'] == 'lstm': # lstm_
input_size = model_params['model']['WLSTM'].shape[0] - hidden_size - 1
rnnmodel = lstm(input_size, hidden_size, output_size)
elif model_params['params']['model'] == 'bi_lstm': # bi_lstm
input_size = model_params['model']['WLSTM'].shape[0] - hidden_size - 1
rnnmodel = biLSTM(input_size, hidden_size, output_size)
rnnmodel.model = copy.deepcopy(model_params['model'])
self.model = rnnmodel
self.word_dict = copy.deepcopy(model_params['word_dict'])
self.slot_dict = copy.deepcopy(model_params['slot_dict'])
self.act_dict = copy.deepcopy(model_params['act_dict'])
self.tag_set = copy.deepcopy(model_params['tag_set'])
self.params = copy.deepcopy(model_params['params'])
self.inverse_tag_dict = {self.tag_set[k]:k for k in self.tag_set.keys()}
def parse_str_to_vector(self, string):
""" Parse string into vector representations """
tmp = 'BOS ' + string + ' EOS'
words = tmp.lower().split(' ')
vecs = np.zeros((len(words), len(self.word_dict)))
for w_index, w in enumerate(words):
if w.endswith(',') or w.endswith('?'): w = w[0:-1]
if w in self.word_dict.keys():
vecs[w_index][self.word_dict[w]] = 1
else: vecs[w_index][self.word_dict['unk']] = 1
rep = {}
rep['word_vectors'] = vecs
rep['raw_seq'] = string
return rep
def parse_nlu_to_diaact(self, nlu_vector, string):
""" Parse BIO and Intent into Dia-Act """
tmp = 'BOS ' + string + ' EOS'
words = tmp.lower().split(' ')
print(tmp)
print(words)
diaact = {}
diaact[const.INTENT] = const.INFORM
diaact[const.REQUEST_SLOTS] = {}
diaact[const.INFORM_SLOTS] = {}
intent = nlu_vector[-1]
pre_tag = nlu_vector[0]
pre_tag_index = 0
index = 1
slot_val_dict = {}
while index<(len(nlu_vector)-1): # except last Intent tag
cur_tag = nlu_vector[index]
if cur_tag == 'O' and pre_tag.startswith('B-'):
slot = pre_tag.split('-')[1]
slot_val_str = ' '.join(words[pre_tag_index:index])
slot_val_dict[slot] = slot_val_str
elif cur_tag.startswith('B-') and pre_tag.startswith('B-'):
slot = pre_tag.split('-')[1]
slot_val_str = ' '.join(words[pre_tag_index:index])
slot_val_dict[slot] = slot_val_str
elif cur_tag.startswith('B-') and pre_tag.startswith('I-'):
if cur_tag.split('-')[1] != pre_tag.split('-')[1]:
slot = pre_tag.split('-')[1]
slot_val_str = ' '.join(words[pre_tag_index:index])
slot_val_dict[slot] = slot_val_str
elif cur_tag == 'O' and pre_tag.startswith('I-'):
slot = pre_tag.split('-')[1]
slot_val_str = ' '.join(words[pre_tag_index:index])
slot_val_dict[slot] = slot_val_str
if cur_tag.startswith('B-'): pre_tag_index = index
pre_tag = cur_tag
index += 1
if cur_tag.startswith('B-') or cur_tag.startswith('I-'):
slot = cur_tag.split('-')[1]
slot_val_str = ' '.join(words[pre_tag_index:-1])
slot_val_dict[slot] = slot_val_str
if intent != 'null':
arr = intent.split('+')
diaact[const.INTENT] = arr[0]
diaact[const.REQUEST_SLOTS] = {}
for ele in arr[1:]:
diaact[const.REQUEST_SLOTS][ele] = 'UNK'
diaact[const.INFORM_SLOTS] = slot_val_dict
# add rule here
for slot in diaact[const.INFORM_SLOTS].keys():
slot_val = diaact[const.INFORM_SLOTS][slot]
if slot_val.startswith('bos'):
slot_val = slot_val.replace('bos', '', 1)
diaact[const.INFORM_SLOTS][slot] = slot_val.strip(' ')
self.refine_diaact_by_rules(diaact)
return diaact
def refine_diaact_by_rules(self, diaact):
""" refine the dia_act by rules """
# rule for taskcomplete
if const.REQUEST_SLOTS in diaact.keys():
if const.TASK_COMPLETE_SLOT in diaact[const.REQUEST_SLOTS].keys():
del diaact[const.REQUEST_SLOTS][const.TASK_COMPLETE_SLOT]
diaact[const.INFORM_SLOTS][const.TASK_COMPLETE_SLOT] = const.PLACEHOLDER
# rule for request
if len(diaact[const.REQUEST_SLOTS])>0: diaact[const.INTENT] = const.REQUEST
def diaact_penny_string(self, dia_act):
""" Convert the Dia-Act into penny string """
penny_str = ""
penny_str = dia_act[const.INTENT] + "("
for slot in dia_act[const.REQUEST_SLOTS].keys():
penny_str += slot + ";"
for slot in dia_act[const.INFORM_SLOTS].keys():
slot_val_str = slot + "="
if len(dia_act[const.INFORM_SLOTS][slot]) == 1:
slot_val_str += dia_act[const.INFORM_SLOTS][slot][0]
else:
slot_val_str += "{"
for slot_val in dia_act[const.INFORM_SLOTS][slot]:
slot_val_str += slot_val + "#"
slot_val_str = slot_val_str[:-1]
slot_val_str += "}"
penny_str += slot_val_str + ";"
if penny_str[-1] == ";": penny_str = penny_str[:-1]
penny_str += ")"
return penny_str
|
[
"bruno_nishimoto@hotmail.com"
] |
bruno_nishimoto@hotmail.com
|
8cfda395886a48b53b8e306391b70fbde83ef18a
|
3f46af2da32d9f02d1ebbdef6784ece1d64aace3
|
/Production/python/PrivateSamples/EMJ_2018_mMed-1200_mDark-20_ctau-100_unflavored-down_cff.py
|
2d231881276a2803259a3c8b46582978055f33c9
|
[] |
no_license
|
cms-svj/TreeMaker
|
53bf4b1e35d2e2a4fa99c13c2c8b60a207676b6d
|
0ded877bcac801a2a394ad90ed987a20caa72a4c
|
refs/heads/Run2_2017
| 2023-07-19T07:14:39.175712
| 2020-10-06T21:10:26
| 2020-10-06T21:10:26
| 305,753,513
| 0
| 0
| null | 2021-01-26T18:58:54
| 2020-10-20T15:32:19
| null |
UTF-8
|
Python
| false
| false
| 1,892
|
py
|
# CMSSW PoolSource definition for the EMJ 2018 signal sample
# (mMed = 1200, mDark = 20, ctau = 100, unflavored-down).
import FWCore.ParameterSet.Config as cms
# Process every event in the listed files (-1 = no limit).
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
# MINIAOD sample parts served from the UMD hepcms gridftp endpoint.
readFiles.extend( [
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-1.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-2.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-3.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-4.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-5.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-6.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-7.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-8.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-9.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2018/step4_MINIAOD_mMed-1200_mDark-20_ctau-100_unflavored-down_n-500_part-10.root',
] )
|
[
"enochnotsocool@gmail.com"
] |
enochnotsocool@gmail.com
|
fd92b400aba7975634f692a9bc72e5a32d14007d
|
c41b7b8ef0d988705d27f596263dda348b813212
|
/utils/imessage.py
|
d203aa2ca31a9fcdd587a8a76a6ad2629936baf6
|
[
"MIT"
] |
permissive
|
baileyparker/whatwouldisay.py
|
9f1f4dd9388f09d2e0d96ea42f93c5fd98275303
|
ce4369459a5f60fb17251db05bac0be01efe116e
|
refs/heads/master
| 2016-09-05T16:49:17.854025
| 2015-07-02T05:05:56
| 2015-07-02T05:05:56
| 31,518,049
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
import sqlite3
from os.path import expanduser
from operator import itemgetter
# SQL for every message the local user sent (is_from_me flag set).
MESSAGES_FROM_ME_QUERY = 'SELECT `text` FROM message WHERE `is_from_me` = 1'


def get_messages_from_me():
    """Return an iterator over the text of every outgoing iMessage.

    Reads the current user's local Messages database.
    """
    chat_db = expanduser('~/Library/Messages/chat.db')
    with sqlite3.connect(chat_db) as conn:
        rows = conn.execute(MESSAGES_FROM_ME_QUERY)
        return map(itemgetter(0), rows)
|
[
"b@ileyparker.com"
] |
b@ileyparker.com
|
62037dc524fbfe889a05c6ccbeca2cba8372ee45
|
31648f7ba9eab4841eae211b36a5ea025570ba78
|
/exam_project_15_08_2021/project/drink/tea.py
|
17e302adc54276324b486af969746bdf9b962f1d
|
[
"MIT"
] |
permissive
|
ivan-yosifov88/python_oop_june_2021
|
d7c4d3ba93f3085f019a4409c33b8ae9739de372
|
7ae6126065abbcce7ce97c86d1150ae307360249
|
refs/heads/main
| 2023-07-04T21:12:07.592730
| 2021-08-18T15:12:50
| 2021-08-18T15:12:50
| 385,363,143
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
from project.drink.drink import Drink
class Tea(Drink):
    """A tea drink; every tea has the same fixed cost."""

    # Price applied to all Tea instances.
    _cost = 2.50

    def __init__(self, name, portion, brand):
        # Delegate to Drink, injecting the class-level cost.
        super().__init__(name, portion, self._cost, brand)
|
[
"ivan.yosifov88@gmail.com"
] |
ivan.yosifov88@gmail.com
|
a5d1257d55107efdb8b87da0580bf3aee73e665c
|
569bf419458b6304fb4e16bef80fb6ba4db287f5
|
/7 - Machine Learning/Decision Tree/Decision_Tree_example.py
|
888a9db7969d898895481459965f8e0f114aae73
|
[] |
no_license
|
ilopezgazpio/UD_Intelligent_Systems
|
80629464baaed39dd06bdeb76ce4b0b662840806
|
f5c518a08760b70c3b8464b5e8ac84997ff6019f
|
refs/heads/master
| 2023-09-02T13:47:29.069765
| 2021-11-08T15:09:28
| 2021-11-08T15:09:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,658
|
py
|
#!/usr/bin/python3
#-------------------------------------------
# Decision Trees Concepts
# Intelligent Systems - University of Deusto
# Inigo Lopez-Gazpio
#-------------------------------------------
#----------------------------------
# PART 1: Environment and libraries
#----------------------------------
# We'll use scikit-learn, numpy, pandas and matplotlib libraries for Machine Learning projects
# These libraries are amongst the strongest ones for data scientists
# All of them can be installed through conda environments, pip or pip3
from math import *
# Tiny positive probability used to approximate the log2(0) term below;
# it must be > 0 because log2 is defined only for positive arguments.
epsilon = 1e-100
#---------------------------------------------------------------------------
# PART 2: Formulas
#---------------------------------------------------------------------------
# Entropy of a dataset with 50-50 elements of different classes (worst case, maximum entropy)
entropy = -0.50 * log2(0.50) - 0.50 * log2(0.50)
print(entropy)
# Entropy of a dataset with 100-0 elements of different classes (best case, minimum entropy)
# Bug fix: the original used epsilon = -1e-100, and log2 of a negative
# number raises ValueError (crashing the script here). A small positive
# epsilon in the -p*log2(p) form yields the intended ~0 entropy.
entropy = -epsilon * log2(epsilon) - 1.00 * log2(1.00)
print(entropy)
# Entropy of intermediate distributions
entropy = -0.75*log2(0.75) - 0.25*log2(0.25)
print(entropy)
entropy = -0.01*log2(0.01) - 0.99*log2(0.99)
print(entropy)
# Defining a function
def entropy (a, b):
    """Shannon entropy (in bits) of a two-class split with counts a and b.

    Returns 0 for a pure split (one class empty), following the convention
    0 * log2(0) == 0.
    """
    n = a + b
    p_first, p_second = a / n, b / n
    if p_first == 0 or p_second == 0:
        return 0
    return -(p_first * log2(p_first) + p_second * log2(p_second))
# Imagine we have an initial dataset with 10 and 10 elements
entropy(10, 10)
# We can split into a 7-3 and 3-7 datasets... and start computing information gain
# IG = H(class) - H ( class | attributes)
gain1 = entropy(10, 10) - ( (10/20) * entropy(3,7) + (10/20) * entropy(7,3) )
print(gain1)
# We can split into a 1-9 and 9-1 datasets...
gain2 = entropy(10,10) - ( (10/20) * entropy(1,9) + (10/20) * entropy(9,1) )
print(gain2)
# We can split into a 9-9 and 1-1 datasets... (low gain: the children stay mixed)
gain3 = entropy(10,10) - ( (18/20) * entropy(9,9) + (2/20) * entropy(1,1) )
print(gain3)
# We can split into a 9-3 and 1-7 datasets...
gain4 = entropy(10,10) - ( (12/20) * entropy(9,3) + (8/20) * entropy(1,7) )
print(gain4)
# We can split into a 10-0 and 0-10 datasets... (perfect split: gain is 1 bit)
gain5 = entropy(10,10) - ( (10/20) * entropy(10,0) + (10/20) * entropy(0,10) )
print(gain5)
# Entropy is implemented in scikit-learn... https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.entropy.html
#---------------------------------------------------------------------------
# PART 3: (General porpuses) Formulas
#---------------------------------------------------------------------------
# Define the entropy as a function able to receive any set of partitions
import numpy as np
import pandas as pd
import requests
def entropy (s : np.ndarray):
    """Shannon entropy (in bits) of a 1-D array of per-class counts.

    Zero-count classes contribute 0, per the convention 0 * log2(0) == 0.
    """
    # s is a numpy array with counts per class
    probs = s / np.sum(s)
    # Suppress the divide-by-zero RuntimeWarning np.log2 emits for
    # zero-probability classes; their -inf logs are zeroed out below.
    with np.errstate(divide='ignore'):
        logprobs = np.log2(probs)
    logprobs[np.isneginf(logprobs)] = 0
    return sum(-1 * probs * logprobs)
# Defining information gain as a function able to receive any set of partitions
def gain (dataframe : pd.DataFrame, attr : str, target : str):
    """Information gain IG(target; attr) = H(target) - H(target | attr),
    computed from the attr x target contingency table of `dataframe`."""
    # Contingency counts: one row per attr value, one column per target class.
    values = dataframe.groupby([attr, target]).size().unstack().values
    values = np.nan_to_num(values)  # absent (attr, target) pairs count as 0
    # to compute class entropy H(class)
    class_variable_counts = np.sum(values, axis = 0)
    # class given value entropy H(class | attribute)
    attribute_variable_counts = np.sum(values, axis=1)
    attribute_variable_probs = attribute_variable_counts / np.sum(values)
    # Row-wise entropy: H(class | attr = value) for each attribute value.
    entropy_given_attribute = np.apply_along_axis(entropy, 1, values)
    return entropy(class_variable_counts) - np.sum(attribute_variable_probs * entropy_given_attribute)
# Mushroom (agaricus-lepiota) dataset: the first column is the class
# (edible/poisonous), the remaining columns are categorical attributes.
url = "https://raw.githubusercontent.com/lgazpio/UD_Intelligent_Systems/master/Datasets/agaricus-lepiota.csv"
#source = requests.get(url).content
data = pd.read_csv(url)
data.columns = [
    "class",
    "cap.shape",
    "cap.surface",
    "cap.color",
    "bruises",
    "odor",
    "gill.attachment",
    "gill.spacing",
    "gill.size",
    "gill.color",
    "stalk.shape",
    "stalk.root",
    "stalk.surface.above.ring",
    "stalk.surface.below.ring",
    "stalk.color.above.ring",
    "stalk.color.below.ring",
    "veil.type",
    "veil.color",
    "ring.number",
    "ring.type",
    "spore.print.color",
    "population",
    "habitat"
]
# Quick interactive inspection of the loaded frame.
data.size
data.head()
data.columns
# Which is the attribute with more "gain"?
gain(data, "cap.shape", "class")
gain(data, "ring.type", "class")
gain(data, "cap.color", "class")
gain(data, "odor", "class")
#---------------------------------------------------------------------------
# PART 4: Decision tree is (Fortunately) implemented in Python
#---------------------------------------------------------------------------
# https://scikit-learn.org/stable/modules/tree.html
from sklearn import tree
import matplotlib.pyplot as plt
# One-hot encode every categorical column so sklearn can consume the data.
data_onehot = pd.get_dummies( data )
# Drop the first two dummy columns — presumably the encoded class labels,
# since "class" is the first column; verify against get_dummies ordering.
trainingSet = data_onehot.values[:,2:]
trainingSet.shape
labels = data.values[:,0]
labels.shape
clf = tree.DecisionTreeClassifier()
clf = clf.fit(trainingSet, labels)
# NOTE(review): fit is called a second time here — the tree is simply
# refit on the same data before plotting.
tree.plot_tree(clf.fit(trainingSet, labels))
plt.show()
# export with graphviz
import graphviz
tree_data = tree.export_graphviz(clf, out_file=None)
graph = graphviz.Source(tree_data)
graph.render("Decision_tree_example")
|
[
"inigo.lopezgazpio@deusto.es"
] |
inigo.lopezgazpio@deusto.es
|
e76e287685752e28fa2e8c5f6863ee900fe5fcbb
|
cbda89443b351bb2047180dad4e300c13dc3df7f
|
/Crystals/Morpurgo_all_sp_Reorgs/Jobs/Rubrene/Rubrene_cation_neut_inner3_outer0/Rubrene_cation_neut_inner3_outer0.py
|
ab3611a677adb23520fda61425992f20962063c3
|
[] |
no_license
|
sheridanfew/pythonpolarisation
|
080f52979f98d26360a46412a10c8e3f51ee4549
|
178e2684e9a239a8e60af5f7b1eb414ac5f31e92
|
refs/heads/master
| 2021-07-10T01:07:40.978790
| 2021-03-11T16:56:37
| 2021-03-11T16:56:37
| 96,101,351
| 0
| 0
| null | 2017-07-03T13:37:06
| 2017-07-03T10:54:52
| null |
UTF-8
|
Python
| false
| false
| 7,186
|
py
|
import sys
sys.path.append('../../../../../')
from BasicElements import *
from BasicElements.Register import GetRegister
from BasicElements.MoleculeFactory import ReadMoleculeType
from BasicElements.MoleculeFactory import GetMolecule
from BasicElements.Crystal import *
from Polarizability.GetDipoles import get_dipoles,split_dipoles_onto_atoms
from Polarizability import *
from Polarizability.GetEnergyFromDips import *
from Polarizability.JMatrix import JMatrix
import numpy as np
from math import *
from time import gmtime, strftime
import os
print strftime("%a, %d %b %Y %X +0000", gmtime())
name='Rubrene_cation_neut_inner3_outer0'
#For crystals here, all cubic and centred at centre
insize=3
#number of TVs in each dir central mol is from edge of inner region
outsize=0
mols_cen=['sp_Rubrene_mola_cation.xyz','sp_Rubrene_molb_neut.xyz']
mols_sur=['sp_Rubrene_mola_neut.xyz','sp_Rubrene_molb_neut.xyz']
mols_outer=['sp_Rubrene_mola_neut.xyz','sp_Rubrene_molb_neut.xyz']
#centres=['Rubrene_mola_anion_aniso_cifstruct_chelpg_edited.xyz','Rubrene_molb_neut_aniso_cifstruct_chelpg_edited.xyz','Rubrene_mola_neut_aniso_cifstruct_chelpg_edited.xyz','Rubrene_molb_neut_aniso_cifstruct_chelpg_edited.xyz']
#surroundings=['Rubrene_mola_neut_aniso_cifstruct_chelpg_edited.xyz','Rubrene_molb_neut_aniso_cifstruct_chelpg_edited.xyz','Rubrene_mola_neut_aniso_cifstruct_chelpg_edited.xyz','Rubrene_molb_neut_aniso_cifstruct_chelpg_edited.xyz']
#From cif:
'''
Rubrene
_cell_length_a 7.184(1)
_cell_length_b 14.433(3)
_cell_length_c 26.897(7)
_cell_angle_alpha 90
_cell_angle_beta 90
_cell_angle_gamma 90
_cell_volume 2788.86
_cell_formula_units_Z 4
'''
#Get translation vectors:
# Cell lengths from the CIF (angstrom) converted to atomic units;
# 0.5291772109217 angstrom is the Bohr radius, so these are in bohr.
a=7.1841/0.5291772109217
b= 14.4333/0.5291772109217
c= 26.8977/0.5291772109217
# Cell angles, degrees -> radians; all 90 deg (orthorhombic cell per the CIF).
alpha=90*(pi/180)
beta=90*(pi/180)
gamma=90*(pi/180)
# CIF unit-cell volume re-expressed in the converted units.
cif_unit_cell_volume=2788.86/(a*b*c*(0.5291772109217**3))
# Dimensionless triclinic volume factor used in the frac->cart matrix below.
cell_volume=sqrt(1 - (cos(alpha)**2) - (cos(beta)**2) - (cos(gamma)**2) + (2*cos(alpha)*cos(beta)*cos(gamma)))
#Converts frac coords to carts
matrix_to_cartesian=np.matrix( [[a, b*cos(gamma), c*cos(beta)],
[0, b*sin(gamma), c*(cos(alpha) - cos(beta)*cos(gamma))/sin(gamma)],
[0, 0, c*cell_volume/sin(gamma)]])
#carts to frac
matrix_to_fractional=matrix_to_cartesian.I
#TVs, TV[0,1,2] are the three translation vectors.
TV=matrix_to_cartesian.T
cut=8.0
totsize=insize+outsize
#number of TVs in each dir nearest c inner mol is from edge of outer region
cenpos=[totsize,totsize,totsize]
length=[2*totsize+1,2*totsize+1,2*totsize+1]
maxTVs=insize
outer_maxTVs=insize+outsize
#for diamond outer, don't specify for cube and will fill to cube edges.
print 'name: ',name,'mols_cen: ', mols_cen,' mols_sur: ',mols_sur,' TVs: ', TV
# Place Molecules
prot_neut_cry=Crystal(name=name,mols_cen=mols_cen,mols_sur=mols_sur,cenpos=cenpos,length=length,TVs=TV,maxTVs=maxTVs,mols_outer=mols_outer,outer_maxTVs=outer_maxTVs)
#prot_neut_cry._mols contains all molecules.
#mols[0] contains a list of all molecules in position a, mols[1] all mols in pos'n b, etc.
#mols[0][x,y,z] contains molecule a in position x,y,z
#mols may as such be iterated over in a number of ways to consider different molecules.
prot_neut_cry().print_posns()
#Calculate Properties:
print strftime("%a, %d %b %Y %X +0000", gmtime())
E0 = np.matrix([0.,0.,0.])
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Calc jm'
jm = JMatrix(cutoff=cut)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Calc dips:'
d = get_dipoles(E0=E0,jm=jm._m,cutoff=cut)
print strftime("%a, %d %b %Y %X +0000", gmtime())
Efield = get_electric_field(E0)
potential = get_potential()
print strftime("%a, %d %b %Y %X +0000", gmtime())
#print 'dips', d
print 'splitting dips onto atoms'
split_d = split_dipoles_onto_atoms(d)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'summing dips:'
tot = np.matrix([0.,0.,0.])
for dd in split_d:
tot += dd
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'total dip moment', tot
Uqq = np.multiply(get_U_qq(potential=potential),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Uqq', Uqq
Uqd = np.multiply(get_U_qdip(dips=d,Efield=Efield),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Uqd', Uqd
Udd = np.multiply(get_U_dipdip(jm=jm._m,dips=d.T),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Udd', Udd
energyev = Udd+Uqd+Uqq
print 'energyev', energyev
energy=energyev/27.211
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Making .dat cross sections for gnuplot'
# print TVs
if not os.path.exists('Dips_Posns_TVs'): os.makedirs('Dips_Posns_TVs')
f = open('Dips_Posns_TVs/%s_TVs.dat' % name, 'w')
TVstr=str(str(TV[0,0]) + ' ' + str(TV[0,1]) + ' ' + str(TV[0,2]) + '\n' + str(TV[1,0]) + ' ' + str(TV[1,1]) + ' ' + str(TV[1,2]) + '\n' + str(TV[2,0]) + ' ' + str(TV[2,1]) + ' ' + str(TV[2,2])+ '\n')
f.write(TVstr)
f.flush()
f.close()
# print dipoles
if not os.path.exists('Dips_Posns_TVs'): os.makedirs('Dips_Posns_TVs')
f = open('Dips_Posns_TVs/%s_dipoles.dat' % name, 'w')
for dd in split_d:
dstr=str(dd)
f.write(dstr)
f.write('\n')
f.flush()
f.close()
# print properties for charge in centrepos
time=strftime("%a, %d %b %Y %X +0000", gmtime())
f = open('%s_properties.csv' % name, 'w')
f.write ('time\tname\tmols_cen\tmols_sur\tmols_outer\tinsize\toutsize\tenergyev\tUqq\tUqd\tUdd\tTotdip_x\tTotdip_y\tTotdip_z')
f.write ('\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (time,name,mols_cen,mols_sur,mols_outer,insize,outsize,energyev,Uqq,Uqd,Udd,tot[0,0],tot[0,1],tot[0,2]))
f.flush()
f.close()
# print header for reorgs
f = open('reorg_energies_%s_properties.csv' % name, 'w')
f.write ('time\tname\tmols_cen\tmols_sur\tmols_outer\tinsize\toutsize\ta\tb\tc\tmolincell\tReorg(eV)')
f.flush()
f.close()
# REORGANISATION ENERGIES
#Note that this assumes a cube, and values for which
for dist in range(0,(length[0]/2)+1,1):
print '\n\nDIST: ', dist, '\n'
for a in range(prot_neut_cry()._cenpos[0]-dist,prot_neut_cry()._cenpos[0]+dist+1,1):
for b in range(prot_neut_cry()._cenpos[1]-dist,prot_neut_cry()._cenpos[1]+dist+1,1):
for c in range(prot_neut_cry()._cenpos[2]-dist,prot_neut_cry()._cenpos[2]+dist+1,1):
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'a,b,c',a,b,c
for molincell in range(0,len(prot_neut_cry()._mols),1):
prot_neut_cry().calc_reorg(a1=prot_neut_cry()._cenpos[0],b1=prot_neut_cry()._cenpos[1],c1=prot_neut_cry()._cenpos[2],molincell1=0,a2=a,b2=b,c2=c,molincell2=molincell,dips=d,oldUqd=Uqd)
print 'Reorg: ', prot_neut_cry()._reorgs[molincell][a][b][c]
f = open('reorg_energies_%s_properties.csv' % name, 'a')
f.write ('\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (time,name,mols_cen,mols_sur,mols_outer,insize,outsize,a,b,c,molincell,prot_neut_cry()._reorgs[molincell][a][b][c]))
f.flush()
f.close()
# Redo this and overwrite after each set to ensure we have some even if not all reorgs complete
prot_neut_cry().print_reorgs()
print 'Job Completed Successfully.'
|
[
"sheridan.few@gmail.com"
] |
sheridan.few@gmail.com
|
1002495073d3e000e8aabd09b9992e5f51966b27
|
24c489f58213971f23e72a5aa8ba92758f14077d
|
/notario/tests/test_exceptions.py
|
a8b3a0092b261b8ddb89c724fff5dca4b6426e1b
|
[] |
no_license
|
shaunduncan/notario
|
2ff7287903950d899d1c0bc2fa708221f2142de7
|
d6ba713f017ff9914aadf28f0694465d8996c223
|
refs/heads/master
| 2021-01-22T07:39:19.557961
| 2014-02-26T12:53:55
| 2014-02-26T12:53:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,996
|
py
|
from notario import exceptions
def foo(): return True
class Object(object): pass
class TestInvalid(object):
def test_include_the_key(self):
error = exceptions.Invalid('key', ['foo', 'bar', 'key'])
assert 'key' in error._format_path()
def test_include_the_path_in_str(self):
error = exceptions.Invalid('key', ['path'])
assert 'path' in error.__str__()
def test_include_the_key_in_str(self):
error = exceptions.Invalid('key', ['path'])
assert 'key' in error.__str__()
def test_multiple_keys_in_format_path(self):
error = exceptions.Invalid('schema', ['key', 'subkey', 'bar'])
assert '-> key -> subkey -> bar' in error._format_path()
def test_full_message(self):
error = exceptions.Invalid('3', ['foo', 'bar', 'baz'])
result = error.__str__()
assert "-> foo -> bar -> baz key did not match '3'" == result
def test_full_message_for_callable(self):
error = exceptions.Invalid(foo, ['foo', 'bar', 'baz'])
result = error.__str__()
assert "-> foo -> bar -> baz key did not pass validation against callable: foo" == result
def test_full_message_for_value(self):
error = exceptions.Invalid('3', ['foo', 'bar', 'baz'], pair='value')
result = error.__str__()
assert "-> foo -> bar -> baz did not match '3'" == result
def test_full_message_for_callable_with_value(self):
error = exceptions.Invalid(foo, ['foo', 'bar', 'baz'], pair='value')
result = error.__str__()
assert "-> foo -> bar -> baz did not pass validation against callable: foo" == result
class TestSchemaError(object):
def test_reason_has_no_args(self):
class Foo(object):
def __repr__(self):
return "some reason"
reason = Foo()
reason.args = []
error = exceptions.SchemaError(foo, ['foo'], reason=reason, pair='value')
assert "some reason" == repr(error.reason)
|
[
"alfredodeza@gmail.com"
] |
alfredodeza@gmail.com
|
a325d753908629cb45ff4bb92d59c15ade88a68d
|
11d2c81d0b66f5742159e35442b1867bd44390a1
|
/production/Algorithm/python_code/line4.py
|
9016633fcfde697dba59d5e4d8e4d5286a8a36d5
|
[] |
no_license
|
hyeongseoblim/Algorithm
|
2c482e240a7278a0fd487769d3b95527fae95e7f
|
a6c903ab13e3ba418ce4f94d2f20b6fdf8433131
|
refs/heads/master
| 2023-02-07T11:04:48.897168
| 2021-01-02T15:15:24
| 2021-01-02T15:15:24
| 255,542,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,518
|
py
|
# Direction codes (indices into dx/dy).
west = 0
north = 1
east = 2
south = 3
# Used as the number of directions when rotating the scan order.
isgo = 4
# Per-direction row (dy) and column (dx) deltas, indexed by direction code.
# NOTE(review): dy[west] == -1 with dx[west] == 0 moves up a row — the
# compass names may not match the actual deltas; verify before reuse.
dy = [-1, 1, 0, 0]
dx = [0, 0, -1, 1]
def solution(maze):
    """Walk `maze` from (0,0) with a wall-following rule and return the
    number of steps taken until the exit is reached.

    Mutates `maze`: the bottom-right cell is overwritten with 'E' (exit).
    """
    answer = 0
    st_x = 0
    st_y = 0
    n = len(maze)
    # Mark the goal cell so the walker can detect it.
    maze[n - 1][n - 1] = 'E'
    direction = nextDirection(st_x, st_y, maze)
    while (True):
        # Candidate next cell: one step in the current direction.
        temp_x = st_x + dx[direction]
        temp_y = st_y + dy[direction]
        if temp_x == len(maze) - 1 and temp_y == len(maze) - 1 or maze[temp_y - 1][temp_x - 1] == 'E':
            break
        if temp_x < len(maze[0]) and temp_x >= 0:
            if (maze[temp_y][temp_x] == 1):
                # Wall ahead: pick a new direction without moving.
                direction = nextDirection(st_x, st_y, maze, direction)
                continue
        left = seeLeft(temp_x, temp_y, direction, maze)
        st_x = temp_x
        st_y = temp_y
        # NOTE(review): `left` is compared to the string '0', but open cells
        # hold the integer 0 — this branch may never fire for int mazes;
        # confirm the intended cell encoding.
        if left == '0' or left == "E":
            direction = (direction + isgo - 1) % 4
        answer += 1
    return answer
def nextDirection(st_x, st_y, maze, cur_dir=east):
    """Return an open direction from (st_x, st_y), scanning all four
    directions starting one step counter-rotated from cur_dir.

    Returns None when every neighbour is blocked or out of bounds. The loop
    deliberately does not break early, so the *last* open direction scanned
    wins (behaviour preserved from the original).
    """
    next_dir = None
    for j in range(len(dx)):
        d = (cur_dir + isgo - 1 + j) % 4
        ne_x = st_x + dx[d]
        ne_y = st_y + dy[d]
        # Bug fix: the row bound was hard-coded as 7, which is wrong for any
        # maze that is not 7 rows tall (this file calls solution() with a
        # 4x4 maze); use the actual maze height instead.
        if (ne_x >= len(maze[0]) or ne_x < 0) or (ne_y >= len(maze) or ne_y < 0):
            continue
        if (maze[ne_y][ne_x] == 0):
            next_dir = d
    return next_dir
def seeLeft(x, y, direction, maze):
    """Return the maze cell immediately to the left of the travel
    direction at position (x, y)."""
    behind = (direction + 2) % 4
    left_dir = (behind + 1) % 4
    return maze[y + dy[left_dir]][x + dx[left_dir]]
solution([[0, 1, 0, 1], [0, 1, 0, 0], [0, 0, 0, 0], [1, 0, 1, 0]])
|
[
"iii3@cnu.ac.kr"
] |
iii3@cnu.ac.kr
|
dde06cae367508f8aae19177ac78dd22367a7276
|
1f76baa1a461a9b3e72deeef1b527a8ae51624bf
|
/mysite final/mysite/frasim/admin.py
|
f25cbf15a4dfe1eb0d704bdbc8c8d6d7e77c8285
|
[] |
no_license
|
nnaser/pibidi
|
0e1fd222a16fc7baaaab8e6683df72e46b08cda3
|
b19ced18e08a4a93f898d3955084b44e05f1d086
|
refs/heads/master
| 2021-01-13T01:48:53.817906
| 2013-08-26T07:32:36
| 2013-08-26T07:32:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 271
|
py
|
# Register the frasim models with the Django admin site so each one gets
# the default CRUD admin pages.
from frasim.models import *
from django.contrib import admin
admin.site.register(Vendedor)
admin.site.register(Material)
admin.site.register(Proveedor)
admin.site.register(Bodega)
admin.site.register(Area)
admin.site.register(Cotizacion)
admin.site.register(Productos)
|
[
"nicolas.naser@usach.cl"
] |
nicolas.naser@usach.cl
|
daf659e14bd42e00eb25e3082ca4417436c386b3
|
c25c5f7637dd7e259e9d1e3b47ee013c8b2a2f18
|
/Filtragem no domínio espacial/smoothing.py
|
d34bced244f027139ab3e1093db2cedc78c5c4bf
|
[] |
no_license
|
JuliaOli/DIP
|
4083b09ffe2727661a66ade31cac5027699fda09
|
fb3bfccade0951ad42971c0d4cd5793f89d28e2c
|
refs/heads/master
| 2018-10-01T04:09:17.462068
| 2018-06-08T00:15:51
| 2018-06-08T00:15:51
| 120,705,039
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,753
|
py
|
import cv2
import numpy as np
def callback(img):
    # Trackbar callback required by cv2.createTrackbar; the slider state is
    # polled in the loop below instead, so nothing to do here.
    pass
def createWindow(img):
    """Show `img` in a window with a trackbar that selects one of four
    smoothing filters; the chosen filter is re-applied once per second."""
    wind = "Smoothing"
    slider_name = "1-Aver 2-Med 3-Gau 4-Bil"
    slider_Pos = 0
    image = img.copy()
    #create window
    cv2.namedWindow(wind)
    #show image
    cv2.imshow(wind, image)
    #add slider (slider_name, window_name, start_value, max_value, callback)
    cv2.createTrackbar(slider_name, wind, 1, 4, callback)
    # NOTE(review): waitKey returns -1 (truthy) on timeout, so this loop
    # only exits when a key with code 0 is reported — confirm the intended
    # exit condition.
    while(cv2.waitKey(1000)):
        cv2.imshow(wind, image)
        slider_Pos = cv2.getTrackbarPos(slider_name, wind)
        # Always filter the original `img`, so filters do not accumulate.
        if(slider_Pos == 1):
            image = averageBlur(img)
        elif(slider_Pos == 2):
            image = meidanBlur(img)
        elif(slider_Pos == 3):
            image = gaussianBlur(img)
        else:
            image = bilateralFilt(img)
    cv2.destroyAllWindows()
# Takes the average of all the pixels under kernel area and replace the central element.
def averageBlur(img):
    # 5x5 box filter.
    return cv2.blur(img,(5,5))
# Takes median of all the pixels under kernel area and central element is replaced with this median value.
def meidanBlur(img):
    # NOTE(review): name looks like a typo for "medianBlur"; kept for
    # compatibility with existing callers.
    return cv2.medianBlur(img,5)
# Gaussian blurring is highly effective in removing gaussian noise from the image.
#This gaussian filter is a function of space alone, that is, nearby pixels are considered while filtering.
def gaussianBlur(img):
    # 5x5 kernel; sigma 0 lets OpenCV derive it from the kernel size.
    return cv2.GaussianBlur(img,(5,5),0)
# Takes a gaussian filter in space, but one more gaussian filter which is a function of pixel difference.
def bilateralFilt(img):
    # d=9 neighbourhood, sigmaColor=75, sigmaSpace=75.
    return cv2.bilateralFilter(img,9,75,75)
def main():
    # Load the demo image and start the interactive smoothing window.
    kitty = cv2.imread('big_cat.png')
    createWindow(kitty)
if __name__=='__main__':
    main()
|
[
"maju.olivi@gmail.com"
] |
maju.olivi@gmail.com
|
326af3c8492ea7522eef273a09e92137b6e039cf
|
1554e209866ddcfc75519278303229bfdff4c9d3
|
/tbjcconstants.py
|
6908d70687a7f62b9cfba799665336607001670a
|
[] |
no_license
|
tbjc1magic/HELIOStrajectory
|
215b4da6515351811f08ece1289f48a17460fccf
|
c095c85f209f6bee5e51f96f028dd6fa67df76f7
|
refs/heads/master
| 2016-08-09T03:46:29.945441
| 2016-01-19T19:25:32
| 2016-01-19T19:25:32
| 49,976,166
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 100
|
py
|
# Physical constants in SI units. The double leading underscore keeps these
# names out of `from tbjcconstants import *`.
__constant_u = 1.66e-27    # atomic mass unit, kg
__constant_MeV = 1.6e-13   # 1 MeV expressed in joules
__constant_e = 1.6e-19     # elementary charge, C
__constant_c = 3.0e8       # speed of light, m/s
|
[
"tbjc1magic@gmail.com"
] |
tbjc1magic@gmail.com
|
27ed96ecec25d22429ab603aebcd2572ce077373
|
5ab4ecce716fba15f0ee298320fd9ce81593b296
|
/src/app.py
|
59b1773e2715b89865968eb1b0cc28ebf7865ff7
|
[] |
no_license
|
AdleyTales/img2text
|
c97f3726753cb31ce4db34f1443fc56079388c60
|
31c91c3aa64262b540e19fc0fb8d1ccde9a52cb7
|
refs/heads/main
| 2023-02-14T02:28:02.422838
| 2021-01-06T01:50:38
| 2021-01-06T01:50:38
| 326,582,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 797
|
py
|
# -*- coding: utf-8 -*-
"""
OCR demo using the Baidu AIP SDK.

Requires: pip install baidu-aip
"""
from aip import AipOcr
# Define constants (credentials for the Baidu OCR service).
APP_ID = '10379743'
API_KEY = 'xxx'
SECRET_KEY = 'xxx'
# Initialise the OCR client.
aipOcr=AipOcr(APP_ID, API_KEY, SECRET_KEY)
# Image file to recognise.
filePath = "b.png"
def get_file_content(filePath):
    # Read the image file as raw bytes.
    with open(filePath, 'rb') as fp:
        return fp.read()
# Request options: auto-detect orientation; Chinese + English text.
options = {
    'detect_direction': 'true',
    'language_type': 'CHN_ENG',
}
# Web-image OCR endpoint.
result = aipOcr.webImage(get_file_content(filePath),options)
# If the image is a URL, call it like this instead:
# result = apiOcr.webImage('http://www.xxxxxx.com/img.jpg')
# print(result['words_result'])
res = result['words_result']
# Concatenate the recognised text fragments into a single string.
word = ''
for item in res:
    word = word + item['words']
print(word)
|
[
"adleytales@126.com"
] |
adleytales@126.com
|
72e363883d70ec2cd83764eb7d1bc618ecdb6e10
|
51f7db7ffac715e797f40af1dc8e5137b6f2244d
|
/test.py
|
2eb68b936771586eaac57e577ecb4b16c8cbf510
|
[] |
no_license
|
fml1039/news
|
9e06d97f3a355a71dec60812b868db804e969357
|
f011ab3a6cb9bdeef8dbeb07f917719d6369267a
|
refs/heads/master
| 2021-05-01T03:39:49.588523
| 2016-12-27T06:05:28
| 2016-12-27T06:05:28
| 57,991,316
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 733
|
py
|
# -*- coding: UTF-8 -*-
# Python 2 script: drives the gmw.cn news search page with Selenium,
# fills in a keyword and a year filter, then submits the search.
from selenium import webdriver
from bs4 import BeautifulSoup
from selenium.webdriver.support.ui import Select
# Search keyword ("Japan earthquake").
mykey = u"日本 地震"
print mykey
browser = webdriver.Firefox()
browser.get('http://search.gmw.cn/search.do?advType=news')
'''
The following statement can be used to fill the keyword form, yet I don't want to use that
input = browser.find_element_by_css_selector('input[type="text"]')
input.send_keys(mykey)
print "done"
'''
# Set the keyword field directly via JavaScript instead of send_keys.
browser.execute_script("document.getElementById('keyword').value='"+mykey+"'")
# Restrict results to the year 2011.
select = Select(browser.find_element_by_id("time"))
select.select_by_visible_text("2011")
button = browser.find_element_by_css_selector('button')
button.click()
#browser.quit()
|
[
"jp2011212847@qmul.ac.uk"
] |
jp2011212847@qmul.ac.uk
|
1216545cef8974dac002e69e47472107dee90945
|
45efad4df4b57ba115badf2cc160e00cf509ef23
|
/Space Invader/space_invader_main.py
|
b501ac6e77ed4aee903504af5f4d340aed7197de
|
[
"MIT"
] |
permissive
|
omar-zaman10/Space-Invaders-pygame
|
d1fdc848702aea75c1d06e39ca95f1abb7d23b36
|
59c0e7277cf01b7c7407f50ae45404726af2673f
|
refs/heads/main
| 2023-06-27T00:52:59.017050
| 2021-07-28T14:51:20
| 2021-07-28T14:51:20
| 388,486,498
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,702
|
py
|
import pygame
import numpy as np
from pygame import mixer
import random
#initialise pygame
pygame.init()
# Initial screen display, title and background music
screen_size = (900 ,600)
screen = pygame.display.set_mode(screen_size)
running = True
clock = pygame.time.Clock()
fps = 300
pygame.display.set_caption('Space Invaders')
icon = pygame.image.load('Images/logo.png')
pygame.display.set_icon(icon)
background = pygame.image.load('Images/background.jpg')
mixer.music.load('Sounds/background_music.mp3')
mixer.music.play(-1)
title = pygame.image.load('Images/title.png')
font = pygame.font.Font('arcade_ya/ARCADE_N.TTF',32)
# Alpha value used for pulsing prompt text; oscillates between 0 and 255.
opacity = 0
opacity_change = 0.5
def fading_text(text,opacity,position):
    # Render *text* at *position* with the given alpha (0-255) so the
    # caller can animate a fade in/out effect.
    text = font.render(text, True, (255,255,255))
    surf = pygame.Surface(text.get_size()).convert_alpha()
    surf.fill((255, 255, 255, opacity))
    text.blit(surf, (0, 0), special_flags=pygame.BLEND_RGBA_MULT)
    screen.blit(text, position)
def Title_screen():
    # Draw the title image plus a pulsing "Ready Player One" prompt.
    global opacity
    global opacity_change
    text = 'Ready Player One'
    fading_text(text,opacity,(225,500))
    screen.blit(title,(175,100))
    # Reverse the fade direction at the alpha limits.
    if opacity > 254:
        opacity_change = -1.0
    elif opacity < 1:
        opacity_change = 0.5
    opacity += opacity_change
def Ending_screen():
    # Draw a pulsing "play again" prompt on the win/lose screens.
    global opacity_change
    global opacity
    text = 'Press Space to play again'
    fading_text(text, opacity,(75,500))
    if opacity > 254:
        opacity_change = -1.0
    elif opacity < 1:
        opacity_change = 0.5
    opacity += opacity_change
# Player
player_image = pygame.image.load('Images/space_ship.png')
playerX = 450
playerY = 700
player_life = True
player_explosion_index = 0
movement_x = 0
movement_y = 1
def player():
    # Move and draw the player ship, clamped to the screen width;
    # once the player is dead, suppress firing instead.
    global playerX
    global playerY
    global player_life
    global movement_x
    global fire
    if player_life:
        playerX += movement_x
        playerX = np.clip(playerX,0,850)
        screen.blit(player_image,(playerX,playerY))
    else:
        fire = False
        pass
# Projectile
projectile_image = pygame.image.load('Images/bullet.png')
projectileX = 400
projectileY = 500
fire = False
movement_y_projectile= 3
bullet_sound = mixer.Sound('Sounds/laser_shot.mp3')
bullet_sound.set_volume(0.5)
def projectile_fire(projectileX):
    # Move the player's bullet upward while it is in flight; reset it
    # (ready to fire again) once it leaves the top of the screen.
    global projectileY
    global fire
    if fire:
        screen.blit(projectile_image,(projectileX+12,projectileY-20))
        projectileY -= movement_y_projectile
    if projectileY <= 0 :
        fire = False
        projectileY = 500
# Alien
alien_image = pygame.image.load('Images/green_alien.png')
number_of_aliens = 24
collisions = [False for i in range(number_of_aliens)]
vel_alien_x = np.zeros(number_of_aliens)
vel_alien_y = np.ones(number_of_aliens) * 1.0
# Three rows of eight aliens, staggered above the top of the screen.
alien_x = np.linspace(90,810,number_of_aliens//3)
alien_x = np.concatenate([alien_x,alien_x,alien_x])
alien_y = np.ones(number_of_aliens//3) *-50
alien_y = np.concatenate([alien_y,3*alien_y,5*alien_y])
def alien(index):
    # Draw and advance alien *index*: march horizontally, stepping down
    # each time a screen edge is reached; freeze once destroyed.
    global collisions
    global alien_x
    global alien_y
    if not collisions[index]:
        screen.blit(alien_image,(alien_x[index],alien_y[index]))
        if alien_x[index] >= 850:
            vel_alien_x[index] = -1.5
            alien_y[index] += vel_alien_y[index]
        elif alien_x[index] <= 0:
            vel_alien_x[index] = 1.5
            alien_y[index] += vel_alien_y[index]
        alien_x[index] += vel_alien_x[index]
    else:
        vel_alien_x[index] = 0
        vel_alien_y[index] = 0
#Loading screen
round_one_sound = mixer.Sound('Sounds/round_one.mp3')
def load_aliens():
    # Intro sequence: lower the alien formation onto the screen, then
    # flip the state machine from the load state into the play state.
    global number_of_aliens
    global alien_x
    global alien_y
    global vel_alien_x
    global vel_alien_y
    global load_state_1
    global play_state_1
    global movement_y
    global movement_x
    global finish_him
    font = pygame.font.Font('arcade_ya/ARCADE_N.TTF',20)
    text_string = 'Aliens are coming to invade!'
    text = font.render(text_string, True, (255,255,255))
    screen.blit(text,(200,400))
    if finish_him:
        round_one_sound.play()
        finish_him = False
    #Update positions
    alien_y = alien_y + vel_alien_y
    for i in range(number_of_aliens):
        screen.blit(alien_image,(alien_x[i],alien_y[i]))
    # Once the last-drawn alien reaches y=100, hand over to gameplay.
    if alien_y[i] >= 100:
        finish_him = True
        load_state_1 = False
        play_state_1 = True
        vel_alien_x = np.ones(number_of_aliens) * 1.0
        vel_alien_y = np.ones(number_of_aliens) * 50.0
        movement_y = 0.0
        movement_x = 0.0
# Aliens explosion
def is_collision(index,x1,y1,x2,y2):
    # Register a hit when the bullet comes within 25px of alien *index*,
    # and reset the bullet so it can be fired again.
    global collisions
    global fire
    global projectileY
    distance = np.linalg.norm(np.array([x1,y1])-np.array([x2,y2]))
    if distance < 25:
        collisions[index] = True
        fire = False
        projectileY = 500
explosion_sheet = pygame.image.load('Images/explosion_sheet.png').convert_alpha()
explosion_sound = mixer.Sound('Sounds/explosion1.mp3')
explosion_sound.set_volume(0.5)
explosion_pass = [True for i in range(number_of_aliens)]
explosion_indexs = np.zeros(number_of_aliens)
explosion_x = np.zeros(number_of_aliens)
explosion_y = np.zeros(number_of_aliens)
def get_explosion_image(index,width,height):
    # Cut frame *index* out of the 5-column explosion sprite sheet
    # (48x48 px per cell).
    global explosion_sheet
    x = index % 5
    y = index // 5
    x *= 48
    y *= 48
    image = pygame.Surface((width,height)).convert_alpha()
    image.blit(explosion_sheet,(0,0),(x,y,width,height))
    return image
def new_explosion(i):
    # Check alien *i* against the bullet and, once hit, play its
    # explosion animation at the spot where it was destroyed.
    global alien_x
    global alien_y
    global collisions
    global explosion_indexs
    global explosion_x
    global explosion_y
    is_collision(i,projectileX,projectileY,alien_x[i],alien_y[i])
    frames = 8
    if not collisions[i]:
        # Remember the alien's last position so the explosion stays put.
        explosion_x[i] = alien_x[i]
        explosion_y[i] = alien_y[i]
    else:
        #off screen
        alien_x[i] = 300
        alien_y[i] = -500
        index = explosion_indexs[i] // frames
        if explosion_indexs[i] < 1.0:
            explosion_sound.play()
        image = get_explosion_image(index,48,48)
        screen.blit(image,(explosion_x[i],explosion_y[i]))
        explosion_indexs[i] += 1
        #Cap out index
#Boss
boss_image = pygame.image.load('Images/boss.png')
boss_health = 10
boss_x = 200
boss_y = -1000
boss_collision = False
boss_explosion_sound = mixer.Sound('Sounds/explosion2.mp3')
boss_final_explosion = mixer.Sound('Sounds/boss_explosion.mp3')
vel_boss_x = 2.0
vel_boss_y = 5.0
boss_index = 0
def boss():
    # Drive the boss: descend into view, strafe side to side while
    # firing; once health is gone, play the death animation and move
    # the state machine into the win state.
    global boss_health
    global boss_index
    global boss_x
    global boss_y
    global vel_boss_y
    global vel_boss_x
    global play_state_1
    global wins_state
    if boss_health > 0.0:
        if boss_y < -75:
            boss_y += vel_boss_y
        if boss_x > 675:
            vel_boss_x = -2.0
        elif boss_x < 0:
            vel_boss_x = 2.0
        boss_x += vel_boss_x
        screen.blit(boss_image,(boss_x,boss_y))
        side_fire()
        middle_fire()
    else:
        #Remove Boss from screen
        frames = 20
        if boss_index == 0:
            mixer.music.stop()
            boss_final_explosion.play()
            toasty_sound.play()
        enter_index = boss_index // frames
        if enter_index > 30:
            play_state_1 = False
            wins_state = True
        image = boss_explosion_image(enter_index,300,200)
        screen.blit(image,(boss_x,boss_y+50))
        boss_index +=1
# Boss explosion
boss_explosion_sheet = pygame.image.load('Images/boss_explosion.png')
def boss_explosion_image(index,width,height):
    # Cut frame *index* out of the 5-column boss explosion sheet
    # (300x200 px per cell, offset 75px down).
    global boss_explosion_sheet
    x = index % 5
    y = index // 5
    x *= 300
    y *= 200
    y += 75
    image = pygame.Surface((width,height)).convert_alpha()
    image.blit(boss_explosion_sheet,(0,0),(x,y,width,height))
    return image
def is_boss_collision():
    # Damage the boss when the player's bullet overlaps its sprite,
    # then reset the bullet.
    global projectileX
    global projectileY
    global boss_x
    global boss_y
    global boss_collision
    global boss_health
    global fire
    x_dist = boss_x - projectileX
    y_dist = abs(boss_y - projectileY)
    if x_dist <5 and x_dist > -200:
        if y_dist < 150:
            boss_health -= 1.0
            projectileY = 500
            fire = False
            boss_explosion_sound.play()
#Boss firing projectiles
double_fire = pygame.image.load('Images/double_fire.png')
triple_fire = pygame.image.load('Images/triple_fire.png')
quadruple_fire_image = pygame.image.load('Images/quadruple_fire.png')
special_fire = pygame.image.load('Images/special_fire.png')
boss_fire_velovity = 3.0
boss_side_fire = False
boss_middle_fire = False
special_fire_choice = False
middle_fire_x = 0
middle_fire_y = 0
side_fire_x = 0
side_fire_y = 0
# Countdown timers (in frames) until each weapon fires next.
middle_firing_frames = 800
side_firing_frames = 700
side_fire_sound = mixer.Sound('Sounds/side_fire.mp3')
side_fire_sound.set_volume(0.5)
middle_fire_sound = mixer.Sound('Sounds/middle_fire.mp3')
middle_fire_sound.set_volume(0.5)
def middle_fire():
    # Fire the boss's centre weapon on a random cooldown, choosing
    # randomly between the "special" and quadruple shot each volley.
    global boss_middle_fire
    global boss_x
    global boss_y
    global middle_fire_y
    global middle_fire_x
    global middle_firing_frames
    global special_fire_choice
    if middle_firing_frames < 1:
        middle_firing_frames = random.randint(250,500)
        boss_middle_fire = True
        special_fire_choice = random.choice([True,False])
        middle_fire_sound.play()
    else:
        middle_firing_frames -= 1
    if boss_middle_fire:
        middle_fire_y += boss_fire_velovity
        if special_fire_choice:
            screen.blit(special_fire,(middle_fire_x+65,middle_fire_y+140))
        else:
            screen.blit(quadruple_fire_image,(middle_fire_x+85,middle_fire_y+150))
        if middle_fire_y > 600:
            middle_fire_y = 0
            boss_middle_fire = False
    else:
        # While idle, keep the shot anchored to the boss's position.
        middle_fire_y = boss_y
        middle_fire_x = boss_x
def side_fire():
    # Fire the boss's two side cannons on their own random cooldown.
    global boss_side_fire
    global boss_x
    global boss_y
    global side_fire_y
    global side_fire_x
    global side_firing_frames
    if side_firing_frames < 1:
        side_firing_frames = random.randint(250,500)
        boss_side_fire = True
        #triple_fire_choice = random.choice([True,False])
        triple_fire_choice = True
        side_fire_sound.play()
    else:
        side_firing_frames -= 1
    if boss_side_fire:
        side_fire_y += boss_fire_velovity
        screen.blit(triple_fire,(side_fire_x+40,side_fire_y+160))
        screen.blit(triple_fire,(side_fire_x+150,side_fire_y+160))
        if side_fire_y > 600:
            side_fire_y = 0
            boss_side_fire = False
    else:
        side_fire_y = boss_y
        side_fire_x = boss_x
# Player Explosion
def laser_collision():
    # Kill the player when one of the boss's shots overlaps the ship;
    # the hit boxes differ between the special and quadruple shots.
    global middle_fire_x
    global middle_fire_y
    global side_fire_x
    global side_fire_y
    global playerX
    global playerY
    global player_life
    global special_fire_choice
    mid_dist_y = playerY - middle_fire_y
    mid_dist_x = playerX - middle_fire_x
    side_dist_x = playerX - side_fire_x
    side_dist_y = playerY - side_fire_y
    #distance = np.linalg.norm(np.array([playerX,playerY])-np.array([x2,y2]))
    if not special_fire_choice:
        if mid_dist_y < 200 and mid_dist_y > 165:
            if mid_dist_x > 50 and mid_dist_x < 155:
                player_life = False
                explosion_sound.play()
    else:
        if mid_dist_y < 200 and mid_dist_y > 165:
            if mid_dist_x > 25 and mid_dist_x < 175:
                player_life = False
                explosion_sound.play()
    if side_dist_y < 200 and side_dist_y > 165:
        if (side_dist_x > 0 and side_dist_x < 90) or (side_dist_x > 110 and side_dist_x < 200) :
            player_life = False
            explosion_sound.play()
def destruction():
    # Kill the player once the lowest alien descends close enough.
    global alien_x
    global alien_y
    global playerX
    global player_life
    global lose_state
    global play_state_1
    if max(alien_y) > 450:
        i = list(alien_y).index(max(alien_y))
        dist = abs(alien_x[i]-playerX)
        if dist < 400:
            if player_life:
                explosion_sound.play()
                player_life = False
def player_explosion():
    # Run the death checks and, once dead, play the explosion animation
    # before switching the state machine to the lose state.
    global player_life
    global playerX
    global playerY
    global player_explosion_index
    global lose_state
    global play_state_1
    frames = 15
    laser_collision()
    destruction()
    if not player_life:
        index = player_explosion_index // frames
        image = get_explosion_image(index,48,48)
        screen.blit(image,(playerX,playerY))
        player_explosion_index += 1
        if index == 16:
            lose_state = True
            play_state_1 = False
# State Machine
intro_state = True
load_state_1 = False
play_state_1 = False
wins_state = False
lose_state = False
def play_state():
    # One frame of gameplay: draw everything and run collision checks.
    screen.blit(background,(0,0))
    player()
    boss()
    is_boss_collision()
    player_explosion()
    for i in range(number_of_aliens):
        new_explosion(i)
        alien(i)
    projectile_fire(projectileX)
def original_state():
    # Reset every piece of mutable game state back to its initial value
    # so the game can be replayed after a win or loss.
    global playerX
    global playerY
    global player_life
    global player_explosion_index
    global movement_x
    global movement_y
    global collisions
    global vel_alien_x
    global vel_alien_y
    global alien_x
    global alien_y
    global alien_y
    global projectileX
    global projectileY
    global fire
    global movement_y_projectile
    global boss_health
    global boss_x
    global boss_y
    global boss_collision
    global vel_boss_x
    global vel_boss_y
    global boss_index
    global boss_fire_velovity
    global boss_side_fire
    global boss_middle_fire
    global special_fire_choice
    global middle_fire_x
    global middle_fire_y
    global side_fire_x
    global side_fire_y
    global middle_firing_frames
    global side_firing_frames
    global explosion_pass
    global explosion_indexs
    global explosion_x
    global explosion_y
    global finish_him
    global finish_sound
    playerX = 450
    playerY = 700
    player_life = True
    player_explosion_index = 0
    movement_x = 0
    movement_y = 1
    collisions = [False for i in range(number_of_aliens)]
    vel_alien_x = np.zeros(number_of_aliens)
    vel_alien_y = np.ones(number_of_aliens) * 1.0
    alien_x = np.linspace(90,810,number_of_aliens//3)
    alien_x = np.concatenate([alien_x,alien_x,alien_x])
    alien_y = np.ones(number_of_aliens//3) *-50
    alien_y = np.concatenate([alien_y,3*alien_y,5*alien_y])
    projectileX = 400
    projectileY = 500
    fire = False
    movement_y_projectile= 3
    boss_health = 10
    boss_x = 200
    boss_y = -1000
    boss_collision = False
    vel_boss_x = 2.0
    vel_boss_y = 5.0
    boss_index = 0
    # NOTE(review): reset uses 2.5 while the initial value above is 3.0 —
    # confirm the slower replay fire speed is intentional.
    boss_fire_velovity = 2.5
    boss_side_fire = False
    boss_middle_fire = False
    special_fire_choice = False
    middle_fire_x = 0
    middle_fire_y = 0
    side_fire_x = 0
    side_fire_y = 0
    middle_firing_frames = 800
    side_firing_frames = 700
    explosion_pass = [True for i in range(number_of_aliens)]
    explosion_indexs = np.zeros(number_of_aliens)
    explosion_x = np.zeros(number_of_aliens)
    explosion_y = np.zeros(number_of_aliens)
    finish_him = True
    finish_sound = True
    mixer.music.play(-1)
# Added sound effects
finish_him_sound = mixer.Sound('Sounds/finish_him.mp3')
flawless = mixer.Sound('Sounds/flawless.mp3')
fatality = mixer.Sound('Sounds/fatality.mp3')
fatality_background = mixer.Sound('Sounds/fatality_background.mp3')
toasty_sound = mixer.Sound('Sounds/toasty.mp3')
finish_him = True
finish_sound = True
# Main loop: dispatch one frame per iteration based on the current state.
while running:
    if intro_state:
        # Title screen: any key starts the loading sequence.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                intro_state = False
                load_state_1 = True
        screen.fill((0,0,0))
        Title_screen()
    elif load_state_1:
        # Loading: aliens descend while the player ship rises into place.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
        screen.blit(background,(0,0))
        load_aliens()
        if playerY > 500:
            playerY -= movement_y
        screen.blit(player_image,(playerX,playerY))
    elif play_state_1:
        # Gameplay: arrow keys move, space fires one bullet at a time.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    movement_x = -1.5
                if event.key == pygame.K_RIGHT:
                    movement_x = 1.5
                if event.key == pygame.K_UP:
                    movement_y = -1
                if event.key == pygame.K_DOWN:
                    movement_y = 1
                if event.key == pygame.K_SPACE:
                    if not fire:
                        projectileX = playerX
                        bullet_sound.play()
                    fire = True
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:
                    movement_x = 0.0
                if event.key == pygame.K_UP or event.key == pygame.K_DOWN:
                    movement_y = 0.0
        #running in the game
        play_state()
        # All aliens destroyed: play the "finish him" cue once.
        if all(collisions):
            if finish_him:
                finish_him_sound.play()
                finish_him = False
    elif lose_state:
        # Game-over (loss) screen; space restarts the game.
        mixer.music.stop()
        screen.fill((0,0,0))
        text_string = 'Game Over'
        text = font.render(text_string, True, (255,255,255))
        screen.blit(text,(325,250))
        text_string = 'You Lose'
        text = font.render(text_string, True, (255,255,255))
        screen.blit(text,(350,350))
        Ending_screen()
        if finish_sound:
            fatality_background.play()
            fatality.play()
            finish_sound = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    original_state()
                    intro_state = True
                    load_state_1 = False
                    play_state_1 = False
                    wins_state = False
                    lose_state = False
    elif wins_state:
        # Win screen; space restarts the game.
        mixer.music.stop()
        screen.fill((0,0,0))
        text_string = 'Hudaifa is a Whore!!!'
        text = font.render(text_string, True, (255,255,255))
        screen.blit(text,(150,100))
        text_string = 'Game Over'
        text = font.render(text_string, True, (255,255,255))
        screen.blit(text,(325,250))
        text_string = 'You Win'
        text = font.render(text_string, True, (255,255,255))
        screen.blit(text,(350,350))
        Ending_screen()
        if finish_sound:
            flawless.play()
            finish_sound = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    original_state()
                    intro_state = True
                    load_state_1 = False
                    play_state_1 = False
                    wins_state = False
                    lose_state = False
    clock.tick(fps)
    pygame.display.update()
pygame.quit()
|
[
"noreply@github.com"
] |
omar-zaman10.noreply@github.com
|
588b165dd9b200319f9781744df15bbf3cae530f
|
cf2cca49648c678c0912233584fe706f1ae7f377
|
/makereport.py
|
2904b449d4ef61665d4f70fe68e81ab2a7cb3324
|
[] |
no_license
|
fancker1992/UnitTestCase
|
ee6936f64e95b86cf2cc090dfa09325c9bb73a12
|
fe3e67f9811e4acd1d94e4a4d9a6ce48f6c56450
|
refs/heads/master
| 2020-03-25T06:54:13.672318
| 2018-08-04T13:32:31
| 2018-08-04T13:32:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 785
|
py
|
# Imports
# Discover unit tests under ./Case/ and run them, writing a timestamped
# HTML report into ./Report/.
import unittest
import time
from Tools.HTMLTestReportCN import HTMLTestRunner
if __name__ == '__main__':
    # 1. Assemble the test cases from ./Case/ (files named test_*.py)
    case_dir = './Case/'
    discover = unittest.defaultTestLoader.discover(case_dir, pattern='test_*.py')
    # 2. Directory the report is written into
    report_dir = './Report/'
    # 3. Current time, used to make the report name unique
    now_time = time.strftime('%Y-%m-%d %H_%M_%S')
    # 4. Build the full report path
    report_name = report_dir + now_time + 'Report.html'
    print(report_name)
    # Open the report file for (binary) writing
    with open(report_name, 'wb') as f:
        # Initialize the report generator and run the discovered suite
        runner = HTMLTestRunner(stream=f, verbosity=2, title='单元测试报告', description='运行环境:macOS,执行人:test04QA')
        runner.run(discover)
|
[
"786087292@qq.com"
] |
786087292@qq.com
|
00f889e4e3b1667cb40a686747476c42deb156f2
|
ad81f5a090cc68236c1333d526c13543da6b63b4
|
/authentication/helper/models.py
|
7be057a91b1c3dee8b9e87c7d52e1c71a345dafb
|
[] |
no_license
|
dachieng/django-custom-user-model
|
41d5af86dddb0bf372cbd1e3050704a09cb2bf62
|
94a8ce4f4eb57d9d619c3ac0c9e36495c559d10c
|
refs/heads/main
| 2023-07-28T11:51:50.360871
| 2021-09-09T07:35:35
| 2021-09-09T07:35:35
| 398,224,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 294
|
py
|
from django.db import models
from django.contrib.auth.models import User
class TrackingModel(models.Model):
    """Abstract base model adding creation/update timestamps.

    Concrete subclasses are ordered newest-first by default.
    """
    # Set once, when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save. NOTE(review): named end_date but behaves as
    # an "updated_at" timestamp (auto_now=True) — confirm the name is intended.
    end_date = models.DateTimeField(auto_now=True)
    class Meta:
        abstract = True
        ordering = ['-created_at']
|
[
"oloodorcas99@gmail.com"
] |
oloodorcas99@gmail.com
|
3ef14a461d4a9a44aa2256ac72dfa740efb73669
|
2f304de8e0e76df4c615a65b60bde7a514ecb9a3
|
/Exercise3/code/Optimization/Optimizers.py
|
43adb4667dbdabb0f1c5e91ce1e7305dfeb99af3
|
[] |
no_license
|
Chengjun-Xie/Deep-Learning
|
56d7b825ba192e31bda38cc1396492ac9707a038
|
ac8371b328dd949a3655cf69b72607ca156c9fa5
|
refs/heads/master
| 2022-04-05T19:00:19.864485
| 2020-02-03T16:31:06
| 2020-02-03T16:31:06
| 217,325,961
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,486
|
py
|
import numpy as np
import matplotlib.pyplot as plt
class Sgd:
    """Plain stochastic gradient descent: w <- w - lr * grad."""

    def __init__(self, learning_rate):
        self.learning_rate = learning_rate

    def calculate_update(self, weight_tensor, gradient_tensor):
        """Return the weights after one vanilla SGD step."""
        step = self.learning_rate * gradient_tensor
        return weight_tensor - step
class SgdWithMomentum:
    """SGD with classical momentum.

    Maintains a decayed running step (`prevMomentum`) that is added to
    the weights on every update.
    """

    def __init__(self, learning_rate, momentum_rate):
        self.learning_rate = learning_rate
        self.momentum_rate = momentum_rate
        self.prevMomentum = 0

    def calculate_update(self, weight_tensor, gradient_tensor):
        """Return the weights after one momentum-SGD step."""
        momentum = (self.momentum_rate * self.prevMomentum
                    - self.learning_rate * gradient_tensor)
        self.prevMomentum = momentum
        return weight_tensor + momentum
class Adam:
    """Adam optimizer with bias-corrected first and second moment estimates."""

    def __init__(self, learning_rate, mu, rho):
        self.learning_rate = learning_rate
        self.mu = mu      # decay rate of the first moment (beta1)
        self.rho = rho    # decay rate of the second moment (beta2)
        self.preV = 0     # running first moment
        self.preR = 0     # running second moment
        self.k = 1        # step counter for bias correction

    def calculate_update(self, weight_tensor, gradient_tensor):
        """Return the weights after one Adam step.

        Note: for ndarray inputs the update is applied in place via ``-=``,
        matching the original implementation.
        """
        grad = gradient_tensor
        # Exponentially decayed moment estimates.
        first = self.mu * self.preV + (1 - self.mu) * grad
        second = self.rho * self.preR + (1 - self.rho) * grad * grad
        # Correct the bias introduced by the zero initialisation.
        first_hat = first / (1 - self.mu ** self.k)
        second_hat = second / (1 - self.rho ** self.k)
        eps = np.finfo(float).eps
        weight_tensor -= self.learning_rate * ((first_hat + eps) / (np.sqrt(second_hat) + eps))
        self.preV = first
        self.preR = second
        self.k += 1
        return weight_tensor
|
[
"15863001671@gmail.com"
] |
15863001671@gmail.com
|
ce3cdf2600b9127334c5c2d8af577397f215eb63
|
9bdbb1a1e8e1a047c1ccbdb2eb1506983de87d5d
|
/dev.py
|
b9a201de2d95de3d60f545a852023fd543c23165
|
[] |
no_license
|
Tepuradesu/AutoReportTemperature
|
f0cb0e26841f421e22edff33264c2ae57ccbe616
|
8a991dd729ddfe2890eaed504186e972aa7109f1
|
refs/heads/main
| 2023-04-11T18:18:10.216335
| 2021-05-13T15:20:10
| 2021-05-13T15:20:10
| 365,046,025
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
# Send a single LINE Notify push message using a personal access token.
from line_notify import LineNotify
# Variable definitions: the access token must be filled in before running.
token=''
bot = LineNotify(access_token=token)
bot.send(
    message='Your Message',
)
|
[
"tepuracococo@gmail.com"
] |
tepuracococo@gmail.com
|
e1fd272d8e52e2e89fa2cf54684cb2b059f80913
|
8f64d50494507fd51c0a51010b84d34c667bd438
|
/BeautyForMe/myvenv/Lib/site-packages/ebcli/operations/gitops.py
|
6c09162d6a835dada9ca3a41d4ff2f348657e9be
|
[
"MIT"
] |
permissive
|
YooInKeun/CAU_CSE_Capstone_3
|
5a4a61a916dc13c8635d25a04d59c21279678477
|
51405c4bed2b55661aa0708c8acea17fe72aa701
|
refs/heads/master
| 2022-12-11T15:39:09.721019
| 2021-07-27T08:26:04
| 2021-07-27T08:26:04
| 207,294,862
| 6
| 1
|
MIT
| 2022-11-22T04:52:11
| 2019-09-09T11:37:13
|
Python
|
UTF-8
|
Python
| false
| false
| 4,769
|
py
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from cement.utils.misc import minimal_logger
from ebcli.core import fileoperations, io
from ebcli.lib import codecommit
from ebcli.objects.exceptions import CommandError, ValidationError
from ebcli.objects.sourcecontrol import SourceControl
from ebcli.operations import commonops
LOG = minimal_logger(__name__)
def git_management_enabled():
    """Return a truthy value when both a default branch and repository are set."""
    return get_default_branch() and get_default_repository()
def get_config_setting_from_current_environment_or_default(key_name):
    """Look *key_name* up in the current environment's defaults, falling
    back to the 'global' section of the config file."""
    setting = get_setting_from_current_environment(key_name)
    return setting or fileoperations.get_config_setting('global', key_name)
def write_setting_to_current_environment_or_default(keyname, value):
    """Write *keyname* under the current branch's environment defaults,
    or under 'global' when the branch has no associated environment."""
    env_name = commonops.get_current_branch_environment()
    if env_name is None:
        fileoperations.write_config_setting('global', keyname, value)
    else:
        fileoperations.write_config_setting('environment-defaults', env_name, {keyname: value})
def get_setting_from_current_environment(keyname):
    """Return *keyname* from the current environment's defaults, or None."""
    env_name = commonops.get_current_branch_environment()
    env_dict = fileoperations.get_config_setting('environment-defaults', env_name)
    if env_dict:
        return env_dict.get(keyname)
def set_branch_default_for_global(branch_name):
    """Store *branch_name* as the global default CodeCommit branch."""
    fileoperations.write_config_setting('global', 'branch', branch_name)
def set_repo_default_for_global(repo_name):
    """Store *repo_name* as the global default CodeCommit repository."""
    fileoperations.write_config_setting('global', 'repository', repo_name)
def set_branch_default_for_current_environment(branch_name):
    """Store *branch_name* as the current environment's default branch."""
    write_setting_to_current_environment_or_default('branch', branch_name)
def set_repo_default_for_current_environment(repo_name):
    """Store *repo_name* as the current environment's default repository."""
    write_setting_to_current_environment_or_default('repository', repo_name)
def get_branch_default_for_current_environment():
    """Return the default branch for the current environment (or global)."""
    return get_config_setting_from_current_environment_or_default('branch')
def get_repo_default_for_current_environment():
    """Return the default repository for the current environment (or global)."""
    return get_config_setting_from_current_environment_or_default('repository')
def get_default_branch():
    """Return the configured default branch, logging when none is found."""
    result = get_branch_default_for_current_environment()
    if result:
        return result
    LOG.debug('Branch not found')
def get_default_repository():
    """Return the configured default repository, logging when none is found."""
    result = get_repo_default_for_current_environment()
    if result:
        return result
    LOG.debug('Repository not found')
def initialize_codecommit():
    """Interactively wire the EB CLI up to CodeCommit.

    Verifies source control is set up and the region supports CodeCommit,
    shows any existing CodeCommit configuration for confirmation, then
    prompts for a repository and branch and stores them as defaults.
    """
    source_control = SourceControl.get_source_control()
    try:
        source_control_setup = source_control.is_setup()
    except CommandError:
        source_control_setup = False
    if not source_control_setup:
        io.log_error("Cannot setup CodeCommit because there is no Source Control setup")
        return
    if codecommit.region_supported(commonops.get_default_region()):
        codecommit_setup = print_current_codecommit_settings()
        if codecommit_setup:
            try:
                io.validate_action("Do you wish to continue (y/n)", "y")
            except ValidationError:
                return
        source_control.setup_codecommit_cred_config()
        # Imported here, not at module level, to avoid a circular import
        # with the controllers package.
        from ebcli.controllers import initialize
        repository = initialize.get_repository_interactive()
        branch = initialize.get_branch_interactive(repository)
        set_repo_default_for_current_environment(repository)
        set_branch_default_for_current_environment(branch)
    else:
        io.log_error("The region {0} is not supported by CodeCommit".format(commonops.get_default_region()))
def disable_codecommit():
    """Clear every stored CodeCommit default (environment and global)."""
    LOG.debug("Denied option to use CodeCommit removing default values")
    set_repo_default_for_current_environment(None)
    set_branch_default_for_current_environment(None)
    fileoperations.write_config_setting('global', 'repository', None)
    fileoperations.write_config_setting('global', 'branch', None)
    LOG.debug("Disabled CodeCommit for use with EB CLI")
def print_current_codecommit_settings():
    """Echo the stored repository/branch; return a truthy value if either is set."""
    default_branch = get_default_branch()
    default_repo = get_default_repository()
    codecommit_setup = default_repo or default_branch
    if codecommit_setup:
        io.echo("Current CodeCommit setup:")
        io.echo("  Repository: " + str(default_repo))
        io.echo("  Branch: " + str(default_branch))
    return codecommit_setup
|
[
"keun0390@naver.com"
] |
keun0390@naver.com
|
ca2c875cae90738eeb0f49011918a9be4eafb3af
|
805edbc10dd808f4caab04a8bbb13408f7d4e82b
|
/website/migrations/0007_auto_20210205_2001.py
|
a8bcd0cba4ff6d25af52193d1483845700de8744
|
[] |
no_license
|
counter-king/ongorWebsite
|
727bf0d1a1c657aaf71f490abd94c62775b8ec0b
|
894f7adc8148c779fcfa4884a56a4ead124d41fe
|
refs/heads/main
| 2023-03-01T13:55:59.456025
| 2021-02-12T10:02:50
| 2021-02-12T10:02:50
| 338,286,580
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
# Generated by Django 3.1.6 on 2021-02-05 15:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Move the ``quantity`` field from MenuItem onto OrderModel."""
    dependencies = [
        ('website', '0006_auto_20210205_1904'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='menuitem',
            name='quantity',
        ),
        migrations.AddField(
            model_name='ordermodel',
            name='quantity',
            field=models.IntegerField(default=0, null=True),
        ),
    ]
|
[
"counterkingbarcelona@gmail,com"
] |
counterkingbarcelona@gmail,com
|
3871535003cf0921b8de447ff5292f04ce82d085
|
4d6323ab73baf9433efd4b827df18df7bb3cd152
|
/DesignReports/SystemRefinement/UGV/drop/DropSimulation/drop.py
|
a7420c692964ca94b441a21b79538201533015ed
|
[] |
no_license
|
linzgood/auvsi_documentation_2019
|
c4be2c1134f0b0fc777de0dc23a9ac6c3a30ba68
|
9650395cbd23e53c01572b7e3a9b7821991832fd
|
refs/heads/master
| 2022-02-18T03:18:49.865913
| 2019-08-12T19:44:33
| 2019-08-12T19:44:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,188
|
py
|
#! /usr/bin/env python3
"""
drop.py
Simulation of parachute unraveling and opening during a UGV drop.
Author: Jacob Willis
Date: 29-Jan-2019
"""
import numpy as np
import matplotlib.pyplot as plt
# when the parachute is opening
# times estimated from drop testing with actual parachute
paraTrans = (1.3,1.67) # seconds
# calculate coefficient of drag for opening the parachute after a certain
# time. Piecewise step with a slope between the min and max
def Cd(t, trans=None, min_cd=.5, max_cd=1.5):
    """Parachute drag coefficient at time *t* (seconds).

    Piecewise: *min_cd* before the chute starts opening at ``trans[0]``,
    a linear ramp while it opens, and *max_cd* after ``trans[1]``.
    *trans* defaults to the module-level ``paraTrans`` timing, so existing
    ``Cd(t)`` callers behave exactly as before.
    """
    if trans is None:
        trans = paraTrans
    if t < trans[0]:  # parachute not deployed
        return min_cd
    elif t < trans[1]:  # parachute deploying: linear interpolation
        return min_cd + (t - trans[0]) * (max_cd - min_cd) / (trans[1] - trans[0])
    else:
        return max_cd  # parachute fully deployed
# calculate the parachute area for opening the parachute after a certain
# time. Piecewise step with a slope between the min and max
def Area(t, trans=None, min_area=np.pi*.05**2, max_area=np.pi*.5**2):
    """Parachute cross-sectional area (m^2) at time *t* (seconds).

    Piecewise: *min_area* (closed chute) before ``trans[0]``, a linear
    ramp while opening, and *max_area* (fully open) after ``trans[1]``.
    *trans* defaults to the module-level ``paraTrans`` timing, so existing
    ``Area(t)`` callers behave exactly as before.
    """
    if trans is None:
        trans = paraTrans
    if t < trans[0]:  # parachute not deployed
        return min_area
    elif t < trans[1]:  # parachute deploying: linear interpolation
        return min_area + (t - trans[0]) * (max_area - min_area) / (trans[1] - trans[0])
    else:
        return max_area  # parachute fully deployed
# Physical constants and integration setup.
g = 9.8 # acceleration due to gravity (m/s^2)
# NOTE(review): mass is unused in the update equation below — confirm the
# drag model intentionally omits the 1/m and 1/2 factors.
mass = 450 # grams
rho = 1.2 # density of air (kg/m^3)
k = np.array([[0], [0], [1]])  # downward unit vector (k direction)
t_start = 0
t_end = 10
Ts = .01  # integration time step (s)
# np.linspace requires an integer sample count; the original float
# expression (t_end-t_start)/Ts raises a TypeError on modern NumPy.
tvec = np.linspace(t_start, t_end, int(round((t_end - t_start) / Ts)))
v0 = np.array([10, 0, 0]) # initial velocity conditions (i, j, k)
v = np.zeros([3, len(tvec)])
v[:,0] = v0
z = np.zeros([3, len(tvec)])
# run the simulation: forward-Euler integration of velocity and position
step = 0
while step < len(tvec)-1:
    t = tvec[step]
    vs = v[:, [step]]
    v[:, [step+1]] = vs + Ts*(g*k - (rho*Area(t)*Cd(t)*(np.linalg.norm(vs)*vs)))
    z[:, [step+1]] = z[:, [step]] + Ts*vs
    step += 1
# plot the velocity results
plt.subplot(2, 1, 1)
plt.title("Object Drop with Deploying Parachute")
plt.plot(tvec, v[0,:], 'r', label='$v_i$')
plt.plot(tvec, v[1, :], 'g', label='$v_j$')
plt.plot(tvec, v[2, :], 'b', label='$v_k$')
plt.legend(loc=1)
plt.xlabel("Time (s)")
plt.ylabel("Velocity (m/s)")
# draw arrow for parachute deployment
arrow_x = paraTrans[0]
# convert the deployment time into the nearest sample index of v
arrow_y = v[2, int(paraTrans[0]*len(tvec)/(t_end - t_start))]
arrow_dx = 1.5
arrow_dy = -.5
plt.arrow(arrow_x, arrow_y, arrow_dx, arrow_dy)
text_x = arrow_x + arrow_dx + .1
text_y = arrow_y + arrow_dy - .1
plt.text(text_x, text_y, "Parachute begins to open")
# draw arrow for parachute fully open
arrow_x = paraTrans[1]
arrow_y = v[2, int(paraTrans[1]*len(tvec)/(t_end - t_start))]
arrow_dx = 1.5
arrow_dy = .5
plt.arrow(arrow_x, arrow_y, arrow_dx, arrow_dy)
text_x = arrow_x + arrow_dx + .1
text_y = arrow_y + arrow_dy + .1
plt.text(text_x, text_y, "Parachute fully open")
# plot position results
plt.subplot(2, 1, 2)
plt.plot(tvec, z[0,:], 'r', label='$z_i$')
plt.plot(tvec, z[1, :], 'g',label='$z_j$')
plt.plot(tvec, z[2, :], 'b', label='$z_k$')
plt.xlabel("Time (s)")
plt.ylabel("Position (m)")
plt.legend(loc=1)
# keep the figure open until the user acknowledges it
plt.show(block=False)
input("Press any key to continue...")
|
[
"trcritchfield@gmail.com"
] |
trcritchfield@gmail.com
|
a5656b0c338beaf8803e251404e0427ad58596f9
|
a893d00bae0c0fa7db1d42cd14c368033e1c3d3f
|
/10-4存储数据/10-4-1使用json.dump()和json.load()/number_writer.py
|
ac470f9195d87a8cbb13ca9ca577f61092803636
|
[] |
no_license
|
taozhenting/python_introductory
|
71ac4b5fe4aa45a9008c9510c77e34e31226f849
|
f88afa0b4232e7ba79b42c370f2266fde85e7462
|
refs/heads/master
| 2020-04-27T06:02:20.169314
| 2019-05-22T09:17:40
| 2019-05-22T09:17:40
| 174,096,850
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 194
|
py
|
import json

# Persist a small list of primes to disk as JSON.
numbers = [2,3,5,7,11,13]
filename = 'numbers.json'

# json.dump() serializes the list straight into numbers.json.
with open(filename,'w') as out_file:
    json.dump(numbers, out_file)
|
[
"taozt@ichile.com.cn"
] |
taozt@ichile.com.cn
|
dbadda63c549dee0a4586fb947e7cbf6593c3c13
|
48f64950118ef0446d229de6f9b9cf977543a1e6
|
/resources/PTZgrid/cornerFinderPtzGrid.py
|
5d54525622f283c11cec537233bc9305569b03dd
|
[
"BSD-3-Clause"
] |
permissive
|
LionelLeee/sebaPhD
|
36b7522160e434fc32e95c3a039f761bdcfcf105
|
0260094bd5143843ef372ce52aceb568834f90f4
|
refs/heads/master
| 2020-07-29T05:41:30.389518
| 2018-06-23T00:24:32
| 2018-06-23T00:24:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,649
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 27 13:34:13 2016
@author: sebalander

Refine hand-picked chessboard corner positions in a PTZ camera image and
save the refined 2D corners together with the corresponding 3D fiducial
grid (A4-sheet chessboard) for later camera calibration.
"""
# %%
import cv2
import numpy as np
import matplotlib.pyplot as plt
# %%
# input
# 6x9 chessboard
#imageFile = "./resources/fishChessboard/Screenshot from fishSeba.mp4 - 12.png"
# 8x11 A4 shetts chessboard
imageFile = "ptz_(0.850278, -0.014444, 0.0).jpg"
cornersIniFile = "PTZgridImageInitialConditions.txt"
# output
cornersFile = "ptzCorners.npy"
patternFile = "ptzGridPattern.npy"
imgShapeFile = "ptzImgShape.npy"
# load
# corners set by hand, read as (n,1,2) size
# must format as float32 (required by cv2.cornerSubPix)
cornersIni = np.array([[crnr] for crnr in np.loadtxt(cornersIniFile)],
                      dtype='float32')
img = cv2.imread(imageFile, cv2.IMREAD_GRAYSCALE)
imgCol = cv2.imread(imageFile)
# %% BINARIZE IMAGE
# see http://docs.opencv.org/3.0.0/d7/d4d/tutorial_py_thresholding.html
th = cv2.adaptiveThreshold(img,
                           255,
                           cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
                           cv2.THRESH_BINARY,
                           501,
                           0)
# apply a morphological close to remove small specks/stains
kernel = np.ones((5,5),np.uint8)
closed = cv2.morphologyEx(th, cv2.MORPH_CLOSE, kernel)
plt.imshow(th)
plt.imshow(closed)
plt.imshow(imgCol)
plt.plot(cornersIni[:,0,0],cornersIni[:,0,1],'ow')
# %% refine corners
# termination criteria for cornerSubPix
subpixCriteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, # termination criteria type
                  300, # max number of iterations
                  0.01) # min accuracy
corners = np.copy(cornersIni)
# refine each hand-picked corner inside a 15x15 search window,
# excluding a 5x5 dead zone at its center
cv2.cornerSubPix(closed,
                 corners,
                 (15, 15),
                 (5, 5),
                 subpixCriteria);
# show BGR image as RGB for matplotlib, with initial vs refined corners
plt.imshow(imgCol[:,:,[2,1,0]])
plt.plot(cornersIni[:,0,0],cornersIni[:,0,1],'+r', label="Initial")
plt.plot(corners[:,0,0],corners[:,0,1],'xb', label="Optimized")
plt.legend()
# %% DEFINE FIDUCIAL POINTS IN 3D SCENE, by hand
# shape must be (1,n,3), float32
nx = 8
ny = 12
xx = range(nx)
y0 = 12
yy = range(y0,y0-ny,-1)
grid = np.array([[[[x, y, 0] for x in xx] for y in yy]], dtype='float32')
grid = grid.reshape((1,nx*ny,3))
# drop the grid points with x<2 and y<2 (corner of the pattern not present)
toDelete = np.logical_and(grid[0,:,0] < 2, grid[0,:,1] < 2)
grid = grid[:,np.logical_not(toDelete),:]
# scale to the size of A4 sheet (0.21 m x 0.297 m)
grid[0,:,0] *= 0.21
grid[0,:,1] *= 0.297
# %% PLOT FIDUCIAL POINTS
fig = plt.figure()
from mpl_toolkits.mplot3d import Axes3D
ax = fig.gca(projection='3d')
ax.scatter(grid[0,:,0], grid[0,:,1], grid[0,:,2])
plt.show()
# %% SAVE DATA POINTS
np.save(cornersFile, corners)
np.save(patternFile, grid)
np.save(imgShapeFile, img.shape)
|
[
"seba.arroyo7@gmail.com"
] |
seba.arroyo7@gmail.com
|
2e176a997fb605afd7785a5526cd2ae18f743e19
|
c23c5f3edcab8604579ec3dacc2041b2452f72ab
|
/interpreter.py
|
1b95a85df80a9426de52d04618c965447c8c1c4c
|
[] |
no_license
|
torchhound/bf
|
f0a8ef0dc86d76ec69c9cda086b88ab26c79de17
|
c28a0b7bac35f9d35f4475ac59e20413c1511dd0
|
refs/heads/master
| 2021-01-09T06:01:51.457190
| 2017-03-15T00:09:17
| 2017-03-15T00:09:17
| 80,895,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,844
|
py
|
import sys
# Global interpreter state shared by every command handler.
array = {}          # the tape: sparse dict of cells, keyed by data pointer
previousWhile = 0   # bookkeeping for loop jumps (see beginLoop/closeLoop)
dataPtr = 0         # data (tape) pointer
prgPtr = 0          # program (instruction) pointer
def parse(program):
    '''Parses a string of brainfuck commands and executes them one at a time.

    Walks `program` left to right, dispatching each character to eval().
    Uninitialised tape cells (KeyError) are created as 0 and the
    instruction is retried via the recursive goto() helper.
    '''
    global array
    global dataPtr
    global prgPtr
    while(prgPtr < len(program)):
        def goto():
            # Execute the instruction at prgPtr, lazily creating tape cells.
            global array
            global dataPtr
            global prgPtr
            try:
                if dataPtr < 0:
                    # Clamp a negative data pointer back to the first cell.
                    dataPtr = 0
                    print("Pointer has been reset to zero because it was negative.")
                if prgPtr < 0:
                    print("Fatal Error: Instruction Pointer less than zero")
                    quit()
                eval(program[prgPtr], program)
            except KeyError as e:
                # Missing tape cell: initialise it to 0 and retry.
                # NOTE(review): this initialises array[prgPtr], but the cell
                # being read is array[dataPtr] — confirm which was intended.
                array[prgPtr] = 0
                goto()
            except ValueError as e:
                print(e)
                quit()
        goto()
        prgPtr += 1
def increment(x):
    '''Return the value one greater than *x*.'''
    return x + 1
def decrement(x):
    '''Return the value one less than *x*.'''
    return x - 1
def beginLoop(program):
    '''Begins a while loop: on '[', if the current cell is zero, skip
    forward past a ']'.

    NOTE(review): the scan starts from the beginning of `program` and keeps
    updating on every ']' it meets (no break), so with several or nested
    loops this jumps past the LAST ']' in the whole program rather than the
    matching one — confirm intended behaviour.
    '''
    global array
    global dataPtr
    global prgPtr
    global previousWhile
    if array[dataPtr] == 0:
        for x, item in enumerate(program):
            if item == "]":
                # Remember where we jumped from, then continue after the ']'.
                previousWhile = prgPtr
                prgPtr = x + 1
def closeLoop(program):
    '''Closes a while loop: on ']', if the current cell is non-zero, jump
    the instruction pointer back to the position saved in previousWhile.'''
    global array
    global dataPtr
    global prgPtr
    global previousWhile
    if array[dataPtr] != 0:
        prgPtr = previousWhile;
def eval(x, program):
    '''Evaluates a single brainfuck command.

    Looks up the handler for command character *x* in a dispatch table,
    runs it exactly once, prints its result and returns it.  Unknown
    characters fall back to the ``ValueError`` class, so ``command(0)``
    builds (but does not raise) a ``ValueError`` instance — preserving the
    original fallback behaviour seen by ``parse``.

    NOTE(review): the handlers return new values but never store them back
    into ``dataPtr``/``array``, so '>', '<', '+', '-' do not actually mutate
    interpreter state — that looks like a wider design bug to confirm.
    '''
    global array
    global dataPtr
    global prgPtr
    global previousWhile
    command = {
        ">" : lambda _: increment(dataPtr),
        "<" : lambda _: decrement(dataPtr),
        "+" : lambda _: increment(array[dataPtr]),
        "-" : lambda _: decrement(array[dataPtr]),
        "." : lambda _: print("print ", array[dataPtr]),
        "," : lambda _: sys.stdin.read(1),
        "[" : lambda _: beginLoop(program),
        "]" : lambda _: closeLoop(program),
    }.get(x, ValueError)
    # Bug fix: the original called command(0) twice (once to print, once to
    # return), executing side effects such as ',' (stdin read) and '.'
    # (output) twice per instruction.  Invoke the handler exactly once.
    result = command(0)
    print(result)
    return result
def main():
    """Placeholder entry point used for interpreter debugging."""
    pass


if __name__ == "__main__":
    main()
|
[
"unknownmasayoshi@gmail.com"
] |
unknownmasayoshi@gmail.com
|
16286276366ff0182e4003611f83c76206c62661
|
f05733abc47da5fd54826556af82dfb3d8901222
|
/pip_conda_demo/tests/mock_server.py
|
aa9e31aaacc7db8fdcad6bbed2b2d89a7d976e39
|
[
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
benthayer/pip_conda_demo
|
3523ad2b8eb1655f7ab27574600ea6977cb1f26b
|
fe230cd8abc720dfe5b67e3048d1c315b55f905b
|
refs/heads/master
| 2022-12-19T11:10:31.802150
| 2019-01-14T19:09:18
| 2019-01-14T19:09:18
| 292,656,787
| 0
| 0
|
MIT
| 2020-09-03T19:04:54
| 2020-09-03T19:04:53
| null |
UTF-8
|
Python
| false
| false
| 966
|
py
|
import os
from http.server import HTTPServer as BaseHTTPServer, SimpleHTTPRequestHandler
class HTTPHandler(SimpleHTTPRequestHandler):
    """Request handler that serves files relative to ``server.base_path``
    instead of the process working directory."""
    def translate_path(self, path):
        # Let the stdlib handler map the URL under os.getcwd() first,
        # then re-root that relative location under the server's base_path.
        mapped = SimpleHTTPRequestHandler.translate_path(self, path)
        relative = os.path.relpath(mapped, os.getcwd())
        return os.path.join(self.server.base_path, relative)
class HTTPServer(BaseHTTPServer):
    """HTTP server that remembers which directory (``base_path``) its
    request handlers should serve files from."""
    def __init__(self, base_path, server_address, RequestHandlerClass=HTTPHandler):
        # Record the served directory before the base class binds the socket.
        self.base_path = base_path
        super().__init__(server_address, RequestHandlerClass)
# Serve the package's ./data directory on all interfaces, port 8000.
web_dir = os.path.join(os.path.dirname(__file__), 'data')
httpd = HTTPServer(web_dir, ("", 8000))
httpd.serve_forever()  # blocks forever handling requests
# serves files with pathway localhost:8000/file_name
|
[
"wino6687@colorado.edu"
] |
wino6687@colorado.edu
|
e7a54c6024928a0126b2c937618b368b4d1a6fd0
|
d2f10799a204c0ac0164f703ed830e1373b12f19
|
/算法/情景题/找重的球.py
|
e7295a98ca0bf6310feada91cdcf4b36966061f4
|
[] |
no_license
|
RichieSong/algorithm
|
9d44685f38c240a4864ec100c2f83e4db1956652
|
069bb0b751ef7f469036b9897436eb5d138ffa24
|
refs/heads/master
| 2020-12-27T04:22:35.235548
| 2020-10-16T02:02:44
| 2020-10-16T02:02:44
| 237,763,531
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 468
|
py
|
# -*- coding: utf-8 -*-
"""
8 balls, one of which is slightly heavier: what is the minimum number of
weighings needed to find it?

It can be done in 2 weighings.
Split the balls into 3 groups of 3, 3 and 2.
1. First weigh 3 vs 3.  If they balance, the heavy ball is in the group of
   2, and one more weighing identifies it.  If they don't balance, take 2
   balls from the heavier side and put them on the scale: if those balance,
   the remaining unweighed ball from that side is the heavy one; otherwise
   the pan that sinks holds the heavy ball.
Extension: what is the minimum number of weighings to find one lighter
ball among 100 balls?
"""
|
[
"songming@luojilab.com"
] |
songming@luojilab.com
|
9ebe8598f6c29dea90a52ac71f40f1bea73cd362
|
aa92aa27a85080b1a7aede69ba739fd89ce95756
|
/Simulation2.py
|
d1ca905f779da1a187bc17af398072697109f7b8
|
[] |
no_license
|
sauchakr/def
|
fffa3b5a549eee2449777b70e33ac645373e3da6
|
256d348a29524778e5223adad85f749b1ba6a15f
|
refs/heads/master
| 2021-01-10T04:05:34.736213
| 2015-12-29T20:47:20
| 2015-12-29T20:47:20
| 48,766,077
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,002
|
py
|
import math
import random as rnd
#!/usr/bin/env python
try:
import matplotlib.pyplot as plt
except:
raise
import networkx as nx
def create_barabasi_albert_graph(en,em):
    # Build a Barabasi-Albert preferential-attachment graph with `en` nodes,
    # each new node attaching with `em` edges, then spread fixed totals of
    # "capital" (130) over nodes and "exposure" (96) over edges uniformly.
    n = en
    m = em
    # NOTE(review): `nx.nx` relies on an old networkx self-alias — verify
    # against the networkx version this was written for.
    G = nx.nx.barabasi_albert_graph(n,m)
    node_total = 130
    # Python 2 integer division: weights are truncated to ints — presumably
    # intentional for this simulation, confirm.
    node_weight = node_total/ len(G.nodes(data=True))
    edge_total = 96
    edge_weight = edge_total/ len(G.edges(data=True))
    #G=nx.Graph()
    for n in G.nodes():
        G.add_node(n,weight=node_weight)
    for ee in G.edges(data=True):
        G.add_edge(ee[0],ee[1],weight=edge_weight)
    print G.edges(data=True)
    print G.nodes(data=True)
    return G
def plot_the_graph(G):
    # Draw the weighted graph: edges heavier than 0.5 solid, lighter dashed;
    # saves the figure to weighted_graph2.png and shows it.
    elarge=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] >0.5]
    esmall=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] <=0.5]
    pos=nx.spring_layout(G) # positions for all nodes
    # nodes
    nx.draw_networkx_nodes(G,pos,node_size=700)
    # edges
    nx.draw_networkx_edges(G,pos,edgelist=elarge,
                           width=6)
    nx.draw_networkx_edges(G,pos,edgelist=esmall,
                           width=6,alpha=0.5,edge_color='b',style='dashed')
    # labels
    nx.draw_networkx_labels(G,pos,font_size=20,font_family='sans-serif')
    plt.axis('off')
    plt.savefig("weighted_graph2.png") # save as png
    plt.show() # display
def print_graph(G, pr = 'e'):
    # Dump nodes (pr == 'n') or edges (any other value) with their data dicts.
    if pr == 'n':
        for iterator in G.nodes_iter(data=True):
            print iterator
    else:
        for iterator in G.edges_iter(data=True):
            print iterator
def add_node_info(G):
    # Build a per-node bookkeeping dict with:
    #   cv = capital value (reserve), ev = exposure value, sv = shock value.
    # Each directed edge (u, v) moves its weight from u's exposure to v's.
    d_nodes = {i:{} for i in range(len(G.nodes())) }
    # initialise cv and ev to the node weights (reserve)
    for iterator in G.nodes_iter(data=True):
        d_nodes[iterator[0]]['cv'] = iterator[1]['weight']
        d_nodes[iterator[0]]['ev'] = iterator[1]['weight']
        d_nodes[iterator[0]]['sv'] = 0
    for iterator in G.edges_iter(data=True):
        d_nodes[iterator[0]]['ev']= d_nodes[iterator[0]]['ev'] - iterator[2]['weight']
        d_nodes[iterator[1]]['ev']= d_nodes[iterator[1]]['ev'] + iterator[2]['weight']
    return d_nodes
def shock_nodes(G, d_nodes, kappa, phi):
    # Randomly shock a fraction `kappa` of the nodes: each shocked node
    # loses fraction `phi` of its exposure value from its capital.
    n_shocked_nodes = int(kappa*(len(G.nodes())) )
    print "number of shocked nodes for kappa %f = %d " %(kappa, n_shocked_nodes)
    shocked_nodes = []
    # Sample distinct node ids until enough are chosen.
    while len(shocked_nodes) < n_shocked_nodes:
        r = rnd.randint(0, len(G.nodes())-1)
        if r not in shocked_nodes:
            shocked_nodes.append(r)
    print "Nodes to be shocked : ", shocked_nodes
    for n in shocked_nodes:
        d_nodes[n]['sv'] = d_nodes[n]['ev']*(phi)
        d_nodes[n]['cv'] = d_nodes[n]['cv'] - d_nodes[n]['sv']
    print "Nodes shocked"
def check_shock_result(G, d_nodes):
    # One contagion round: for each edge (u, v) whose target v is bankrupt
    # (cv <= 0), deduct the edge weight from u's capital, zero the edge, and
    # mark v insolvent.  Insolvent nodes are removed from the graph.
    insolvent_nodes = set([])
    for iterator in G.edges_iter(data=True):
        if d_nodes[iterator[1]]['cv'] <=0 :
            # NOTE(review): because already-insolvent targets are skipped,
            # only the FIRST creditor of each bankrupt node absorbs a loss —
            # confirm that is the intended contagion rule.
            if iterator[1] in insolvent_nodes:
                continue
            print "Node %d bankrupt !!!" %iterator[1]
            d_nodes[iterator[0]]['cv'] = d_nodes[iterator[0]]['cv'] - iterator[2]['weight']
            iterator[2]['weight'] = 0
            insolvent_nodes.add(iterator[1])
    print "Insolvent Nodes : ", insolvent_nodes
    for n in insolvent_nodes:
        G.remove_node(n)
    return insolvent_nodes
def iterate_shocks(G, d_nodes, k):
    # Apply an initial shock (fraction k of nodes, phi = 0.8) and repeat
    # contagion rounds until a round produces no new insolvencies.
    insolvent_nodes = []
    i = 1
    shock_nodes(G, d_nodes, k, 0.8)
    while(1):
        print 'iteration %d' %i
        tmp = check_shock_result(G,d_nodes)
        if len(tmp) == 0:
            break
        insolvent_nodes.extend(tmp)
        i = i+1
def mainCall(n,m,k):
    # End-to-end run: build the BA network (n nodes, m edges per new node),
    # initialise node accounts, shock fraction k of the nodes, and return
    # the list of nodes that survive the contagion cascade.
    G = create_barabasi_albert_graph(n,m)
    d_nodes = add_node_info(G)
    print_graph(G, 'e')
    print "---------------------"
    print_graph(G, 'n')
    iterate_shocks(G,d_nodes,k)
    solvent_nodes= G.nodes()
    print "nodes surviving at the end:- ", solvent_nodes
    return solvent_nodes
'''
Created on Feb 28, 2015
@author: Saurav
'''
|
[
"saurav.c53@gmail.com"
] |
saurav.c53@gmail.com
|
73396ae2e707456a405ee6204ffc4b90f82fd45b
|
86955443c25d116dd6d164a9e812017d86c07bc6
|
/engine/trainers/base_trainer.py
|
7ce4b1e07bd67e46b60d3e6041296c84c8269441
|
[
"MIT"
] |
permissive
|
melgor/metric_learning.pytorch
|
88df1bb098fea95de0daf1036d824821d3c9ea90
|
955f13b83382d6d79ef067f2275c6875498151ad
|
refs/heads/master
| 2020-09-29T01:19:37.241744
| 2019-12-10T15:12:30
| 2019-12-10T15:13:06
| 226,912,342
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,172
|
py
|
import logging
import torch
from tqdm.autonotebook import tqdm
from utils.data_logger import find_device, MetricLogger
class BaseTrainer:
    """Base single-epoch training loop for metric learning.

    Composes model(s), optimizer(s), loss function and an optional miner
    around one ``DataLoader`` built from ``train_dataset``; ``train()``
    runs one epoch and returns windowed metric averages.
    """
    def __init__(self,
                 models,
                 optimizers,
                 lr_schedulers,
                 loss_funcs,
                 train_dataset,
                 batch_size,
                 dataloader_num_workers,
                 mining_funcs=lambda x, y: x,
                 sampler=None
                 ):
        # mining_funcs defaults to the identity on embeddings (no mining).
        self.models = models
        self.optimizers = optimizers
        self.lr_schedulers = lr_schedulers
        self.loss_funcs = loss_funcs
        self.mining_funcs = mining_funcs
        self.train_dataset = train_dataset
        self.batch_size = batch_size
        self.dataloader_num_workers = dataloader_num_workers
        self.sampler = sampler
        self.dataloader = None
        self.metrics = MetricLogger(name="train")
        self.device = find_device()
        self.logger = logging.getLogger('metric.learning')
        self.setup_dataloader()
        # self.models = self.models.to(self.device)
    def setup_dataloader(self):
        # Build the training DataLoader; shuffle only when no sampler is
        # supplied (PyTorch forbids combining shuffle=True with a sampler).
        self.dataloader = torch.utils.data.DataLoader(
            self.train_dataset,
            batch_size=int(self.batch_size),
            sampler=self.sampler,
            drop_last=True,
            num_workers=self.dataloader_num_workers,
            shuffle=self.sampler is None,
            pin_memory=True
        )
    def train(self):
        """Run one epoch over the dataloader; return windowed metric averages."""
        self.set_to_train()
        with tqdm(total=len(self.dataloader)) as pbar:
            for idx, (data, labels) in enumerate(self.dataloader):
                self.forward_and_backward(data, labels)
                pbar.set_postfix(loss=self.metrics['loss'].latest(), refresh=False)
                pbar.update()
        self.logger.info(f"End Epoch: Loss Mean Value: {self.metrics['loss'].avg(window_size=len(self.dataloader))}")
        return self.metrics.avg(window_size=len(self.dataloader))
    def set_to_train(self):
        # Put the model(s) into training mode (enables dropout/BN updates).
        self.models.train()
    def forward_and_backward(self, data, labels):
        """
        Step of optimization
        1. Move data to device
        2. Get emmbeddings from data
        3. Run Miners for triplet/pair mining. It can be also empty function
        4. Run Loss function. Return Loss and logs
        5. Update model parameters
        :param data: Data as Tensor
        :param labels: Labels as Tensor
        """
        data = data.to(self.device)
        labels = labels.to(self.device)
        embeddings = self.models(data)
        embeddings = self.mining_funcs(embeddings, labels)
        # triplet sampler and Loss
        loss, logs = self.loss_funcs(embeddings, labels)
        self.metrics.update(**logs)
        self.optimizers.zero_grad()
        loss.backward()
        self.optimizers.step()
|
[
"bartosz.ludwiczuk@intive.com"
] |
bartosz.ludwiczuk@intive.com
|
81845f1d6ef44c5f7e0e38f392a3088e19d63850
|
e67d38be1fda0d9c09fb169d73d9f3153f3c4610
|
/events/urls.py
|
2318893123ab4d4f8713b33ead420f33d8f9bb64
|
[] |
no_license
|
s-kobets/book_me
|
965b95e92886baaeb389e4212a7c88e08626a200
|
d5999ac1cec08adc126b46615ecdd6ba753d2c35
|
refs/heads/master
| 2023-06-10T03:12:08.264666
| 2021-07-05T18:57:22
| 2021-07-05T18:57:22
| 382,767,972
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 478
|
py
|
from django.contrib import admin
from django.urls import path, include
from django.views.generic import ListView, DetailView
from .models import Event
from . import views, api
from rest_framework import routers
# DRF router exposing CRUD endpoints for events under api/events/.
router = routers.DefaultRouter()
router.register(r'events', api.EventViewSet, 'event')
urlpatterns = [
    path('', views.EventsView.as_view(), name='list'),  # event list page
    path('<int:pk>/', views.EventView.as_view(), name='detail'),  # single event
    path('api/', include(router.urls)),  # REST API
]
|
[
"s.kobets@semrush.com"
] |
s.kobets@semrush.com
|
6afb9cbab6b859d87b8bf06d5ae9afbffa896968
|
6059b2c07cff534ca54c766a9a628992daf31a15
|
/Platform/utils/context_processors.py
|
958991575e23a3d0dbf5212e2a17b64916be47c4
|
[] |
no_license
|
rac2895/Wishkaro
|
0ce5e01a0ebd7f54f88332bdd6f4469d85aa8f03
|
3430302f8f943ce5fe52890ca7dbf5a4cec90a63
|
refs/heads/master
| 2021-01-23T14:14:43.144391
| 2017-06-17T13:31:34
| 2017-06-17T13:31:34
| 93,248,791
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 317
|
py
|
from products.models import Category
from django.conf import settings
def pycart_store(request):
    """Django context processor exposing store-wide template variables."""
    context = {}
    context['active_categories'] = Category.objects.filter(is_active=True)
    context['site_name'] = settings.SITE_NAME
    context['meta_keywords'] = settings.META_KEYWORDS
    context['meta_description'] = settings.META_DESCRIPTION
    context['request'] = request
    return context
|
[
"jainrachit28@gmail.com"
] |
jainrachit28@gmail.com
|
63a64b3a4dc82edd47fdc95d4a457373004013fb
|
76173fc1c994a854d4e40591c79fcd261ff6f88c
|
/object_detection/protos/model_pb2.py
|
2e3b1bf639563eb8d6249d0a2dd1b08ba69164ad
|
[] |
no_license
|
DongChen06/Icon_detector
|
4c5c87abf6836bb76dbff4384adff259d7f5dbfb
|
eac27e81624ce4b8d4c8da53fc9a3b21ca896858
|
refs/heads/master
| 2022-12-08T07:54:16.236694
| 2020-08-18T19:22:20
| 2020-08-18T19:22:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| true
| 6,958
|
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/model.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from object_detection.protos import center_net_pb2 as object__detection_dot_protos_dot_center__net__pb2
from object_detection.protos import faster_rcnn_pb2 as object__detection_dot_protos_dot_faster__rcnn__pb2
from object_detection.protos import ssd_pb2 as object__detection_dot_protos_dot_ssd__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_detection/protos/model.proto',
package='object_detection.protos',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n#object_detection/protos/model.proto\x12\x17object_detection.protos\x1a(object_detection/protos/center_net.proto\x1a)object_detection/protos/faster_rcnn.proto\x1a!object_detection/protos/ssd.proto\"\x86\x02\n\x0e\x44\x65tectionModel\x12:\n\x0b\x66\x61ster_rcnn\x18\x01 \x01(\x0b\x32#.object_detection.protos.FasterRcnnH\x00\x12+\n\x03ssd\x18\x02 \x01(\x0b\x32\x1c.object_detection.protos.SsdH\x00\x12H\n\x12\x65xperimental_model\x18\x03 \x01(\x0b\x32*.object_detection.protos.ExperimentalModelH\x00\x12\x38\n\ncenter_net\x18\x04 \x01(\x0b\x32\".object_detection.protos.CenterNetH\x00\x42\x07\n\x05model\"!\n\x11\x45xperimentalModel\x12\x0c\n\x04name\x18\x01 \x01(\t')
,
dependencies=[object__detection_dot_protos_dot_center__net__pb2.DESCRIPTOR,object__detection_dot_protos_dot_faster__rcnn__pb2.DESCRIPTOR,object__detection_dot_protos_dot_ssd__pb2.DESCRIPTOR,])
_DETECTIONMODEL = _descriptor.Descriptor(
name='DetectionModel',
full_name='object_detection.protos.DetectionModel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='faster_rcnn', full_name='object_detection.protos.DetectionModel.faster_rcnn', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ssd', full_name='object_detection.protos.DetectionModel.ssd', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='experimental_model', full_name='object_detection.protos.DetectionModel.experimental_model', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='center_net', full_name='object_detection.protos.DetectionModel.center_net', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='model', full_name='object_detection.protos.DetectionModel.model',
index=0, containing_type=None, fields=[]),
],
serialized_start=185,
serialized_end=447,
)
_EXPERIMENTALMODEL = _descriptor.Descriptor(
name='ExperimentalModel',
full_name='object_detection.protos.ExperimentalModel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='object_detection.protos.ExperimentalModel.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=449,
serialized_end=482,
)
_DETECTIONMODEL.fields_by_name['faster_rcnn'].message_type = object__detection_dot_protos_dot_faster__rcnn__pb2._FASTERRCNN
_DETECTIONMODEL.fields_by_name['ssd'].message_type = object__detection_dot_protos_dot_ssd__pb2._SSD
_DETECTIONMODEL.fields_by_name['experimental_model'].message_type = _EXPERIMENTALMODEL
_DETECTIONMODEL.fields_by_name['center_net'].message_type = object__detection_dot_protos_dot_center__net__pb2._CENTERNET
_DETECTIONMODEL.oneofs_by_name['model'].fields.append(
_DETECTIONMODEL.fields_by_name['faster_rcnn'])
_DETECTIONMODEL.fields_by_name['faster_rcnn'].containing_oneof = _DETECTIONMODEL.oneofs_by_name['model']
_DETECTIONMODEL.oneofs_by_name['model'].fields.append(
_DETECTIONMODEL.fields_by_name['ssd'])
_DETECTIONMODEL.fields_by_name['ssd'].containing_oneof = _DETECTIONMODEL.oneofs_by_name['model']
_DETECTIONMODEL.oneofs_by_name['model'].fields.append(
_DETECTIONMODEL.fields_by_name['experimental_model'])
_DETECTIONMODEL.fields_by_name['experimental_model'].containing_oneof = _DETECTIONMODEL.oneofs_by_name['model']
_DETECTIONMODEL.oneofs_by_name['model'].fields.append(
_DETECTIONMODEL.fields_by_name['center_net'])
_DETECTIONMODEL.fields_by_name['center_net'].containing_oneof = _DETECTIONMODEL.oneofs_by_name['model']
DESCRIPTOR.message_types_by_name['DetectionModel'] = _DETECTIONMODEL
DESCRIPTOR.message_types_by_name['ExperimentalModel'] = _EXPERIMENTALMODEL
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DetectionModel = _reflection.GeneratedProtocolMessageType('DetectionModel', (_message.Message,), {
'DESCRIPTOR' : _DETECTIONMODEL,
'__module__' : 'object_detection.protos.model_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.DetectionModel)
})
_sym_db.RegisterMessage(DetectionModel)
ExperimentalModel = _reflection.GeneratedProtocolMessageType('ExperimentalModel', (_message.Message,), {
'DESCRIPTOR' : _EXPERIMENTALMODEL,
'__module__' : 'object_detection.protos.model_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.ExperimentalModel)
})
_sym_db.RegisterMessage(ExperimentalModel)
# @@protoc_insertion_point(module_scope)
|
[
"2452291964@gmail.com"
] |
2452291964@gmail.com
|
b94a889eb6767ea49216906720df55b94298b3bb
|
76e3a7f9853d731f4ef02d829bf2bae8dc056b4a
|
/vview/server/launch_server.py
|
c9044a6b760d05ad0993852e66a4053bad93a5f5
|
[
"LicenseRef-scancode-public-domain",
"MIT",
"BSD-3-Clause"
] |
permissive
|
ppixiv/ppixiv
|
1521178e8b1668c67f9d2b9d76c215a6a8033d3d
|
d09fd1d6bb42386895270ddde48e57d974fb4f37
|
refs/heads/master
| 2023-07-25T09:27:26.087692
| 2023-07-23T01:44:25
| 2023-07-23T01:44:25
| 141,506,792
| 129
| 10
| null | 2023-09-11T04:52:19
| 2018-07-19T01:07:07
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 863
|
py
|
# This module allows launching the server in another process if it's not already
# running.
#
# This is separate from server.py, since that file imports a lot and can take some
# time just to import (200-300ms), which is a waste of time if we're in a front-end
# process that won't be running the server itself.
import subprocess, sys
from ..util import win32
def fork_server():
    """
    Spawn the server in a separate process unless one is already running.

    Callers use this to make sure the server exists before doing something
    that requires it, like opening a file association.  It does not wait
    for the server to become ready to receive requests.
    """
    if win32.is_server_running():
        # A server instance already exists; nothing to do.
        return
    # Launch the server module as a fresh Python process.
    subprocess.Popen([sys.executable, "-m", "vview.server.start_server"])
|
[
"ppixiv@users.noreply.github.com"
] |
ppixiv@users.noreply.github.com
|
093052f73dc45d04b79f6cea6212ffd7f1906cb4
|
91c5a1865717e6757bbfec825d411f96dcf2e2e5
|
/python/10_file/10.4/try/10-13.py
|
93ea04eae4afc0a8faad8d216f36e21678127699
|
[] |
no_license
|
ydPro-G/Python_file
|
6f676d1b66e21548f5fad8a715a1767344c2b0c4
|
c035e7c344c3c224d55e33efc16ee941bc3d6ff2
|
refs/heads/master
| 2023-06-19T06:12:44.550778
| 2021-07-14T09:15:15
| 2021-07-14T09:15:15
| 261,126,468
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,369
|
py
|
import json
def get_stored_username():
    """Return the username stored in username.json, or None if none is stored."""
    try:
        with open('username.json') as stored:
            # json.load() reads back the value json.dump() persisted.
            return json.load(stored)
    except FileNotFoundError:
        # No saved username yet.
        return None
# Prompt the user for a name, persist it to the JSON file, and return it.
def get_new_username():
    """Ask for a username, store it in username.json and return it."""
    username = input("What is your name? ")
    with open('username.json', 'w') as sink:
        json.dump(username, sink)
    return username
# Greet the user: confirm a stored name, or collect and store a new one.
def greet_user():
    """Greet the user by name, verifying that the stored name is really theirs."""
    username = get_stored_username()
    if username:
        print(username)
        ask = input('Your user name is ' + username + 'right(y/n)?')
        if ask == 'y':
            print('welcome,' + username)
        else:
            # Bug fix: the original did nothing when the stored name was not
            # the user's — now prompt for and store the correct name.
            username = get_new_username()
            print(('We`ll remember you when you come back, ' + username + '.'))
    else:
        username = get_new_username()
        print(('We`ll remember you when you come back, ' + username + '.'))
greet_user()
|
[
"46178109+ydPro-G@users.noreply.github.com"
] |
46178109+ydPro-G@users.noreply.github.com
|
caab9c4dcd28b1fed838b27677ee92281aae7c0f
|
d488423a19469e753a7b5c4074670dbae21c3679
|
/02.multiple_linear/multiple_linear.py
|
31c8aa8b7c430a539fa6348e634852773481f34f
|
[] |
no_license
|
hiro9108/linear_regression
|
19105714db14a36cb4ef50fd24843eb6f0149292
|
0cb69fc08cff11fc2d9656abb0115c065eaffb90
|
refs/heads/master
| 2022-12-21T05:49:46.398774
| 2020-10-11T16:13:26
| 2020-10-11T16:13:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,590
|
py
|
# import numpy as np
import pandas as pd
# import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
import joblib
# import seaborn as sns
"""
Multiple Linear Regression
"""
# Read csv file
df = pd.read_csv('sample_multiple_liner_data.csv')
# Show plot with seaborn
# sns.distplot(df['y'], bins=50)
# plt.show()
# check correlation
# print(df.corr())
# check correlation with graph
# sns.pairplot(df, height=0.75, aspect=1.8)
# plt.show()
# Separate Input(x) and Output(y) values
# X: every column except the last; y: the last column.
X = df.iloc[:, :-1]
y = df.iloc[:, -1]
"""Using sklearn"""
# Declare the model
model = LinearRegression()
"""(Not separate data -> Using all data for creating model)
# Learning the model
model.fit(X, y)
# test
print("All data (100%):", model.score(X, y))
"""
"""Separate train and test data"""
# test data is 40%
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=1)
# Learning the model with train data
model.fit(X_train, y_train)
# test with test data
# NOTE(review): the label says "Train data (60%)" but this scores the
# held-out TEST split — confirm the intended wording.
print("Train data (60%)", model.score(X_test, y_test))
# test with train data (sample)
# print("Train data (60%)", model.score(X_train, y_train))
"""Predict value"""
# Predict for the first row of the feature matrix.
x = X.iloc[0, :]
y_predict = model.predict([x])
# print(X)
# print(x)
print(y_predict)
"""Save the model"""
joblib.dump(model, 'model.pkl')
"""load the model"""
model_load = joblib.load('model.pkl')
print(model_load.predict([x]))
# Check parameter
# print(model.coef_)
# Easy to read
# np.set_printoptions(precision=3, suppress=True)
# print(model.coef_)
|
[
"hiroshi.8.egawa@gmail.com"
] |
hiroshi.8.egawa@gmail.com
|
20a220e42cb348b2ec6ce84271f291e56870174b
|
68f1a939e05009f8ac20fc7347bfbdeff5500535
|
/Scripts/metaDataService.py
|
d41ef1bc815df9b2476f499a62b4a30ef55517c8
|
[] |
no_license
|
ChiniSinha/ASD-DynamoDB
|
a1c396e3b07654fca6391b76e70e9323632e364c
|
2f095385107869609b2b86a38b2c8f142c6f5736
|
refs/heads/master
| 2021-01-13T06:48:53.378979
| 2017-02-21T21:17:00
| 2017-02-21T21:17:00
| 81,150,816
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 202
|
py
|
#!/usr/bin/env python
# Fetch one user's metadata record from the DynamoDB "MetaData" table.
# Uses Python 2 print-statement syntax — this script targets Python 2.
import boto3
import json
username = 'Chini'
client = boto3.client('dynamodb')
# Key schema: partition key "UserName" of DynamoDB string type ('S').
response = client.get_item( TableName='MetaData', Key={'UserName': { 'S': username }})
print response
|
[
"chinisinha@gmail.com"
] |
chinisinha@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.