blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
65652e1063e82634e4fdc2f3a31ef4a64f108db3
|
64734dce0e290095599e31d0c80920bc58d2779b
|
/intro to test/test.py
|
b98f4767518c68a9384f688ef5ff0bcbe25311bc
|
[] |
no_license
|
satyamsingh2/developers-suitcase
|
33f6e9fe6b8660621aa27439c320b252db322f32
|
bb0651fc6b85c2249459d383e67a68147df19e44
|
refs/heads/main
| 2023-07-10T14:21:12.544243
| 2021-08-06T05:08:22
| 2021-08-06T05:08:22
| 360,185,977
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
from demo import *
import unittest
# a unit test is required to be defined inside a class
class TestArea(unittest.TestCase):
    """Unit tests for the area helpers star-imported from ``demo``."""

    def setUp(self):
        # Per-test preparation hook; nothing to prepare for these tests.
        pass

    def test_area_of_rect(self):
        """A 5 x 6 rectangle has area 30."""
        self.assertEqual(area_of_rect(5, 6), 30)

    def test_area_of_square(self):
        """A square of side 8 has area 64."""
        self.assertEqual(area_of_square(8), 64)

    def test_input_value(self):
        """A boolean argument must be rejected with TypeError."""
        with self.assertRaises(TypeError):
            area_of_square(True)

    def tearDown(self):
        # Per-test cleanup hook; nothing to release for these tests.
        pass
# tearDown runs after each test; it is used for cleanup such as deleting generated content or closing files.
# python -m unittest discover  -> auto-discovers tests in the current directory:
# it searches for test files and attempts to run every test file it finds.
# python -m unittest discover -s <directory-name>  -> discover tests in a specific directory
# python -m unittest -v <filename>  -> run all the tests in a file in verbose mode
# (running with verbose output is optional)
|
[
"noreply@github.com"
] |
noreply@github.com
|
52339edf02f3ab2499baae92bfcd98d8aca6a7e2
|
e86de5af089798890fae230fad381ca5a84fa562
|
/rssant_feedlib/reader.py
|
4b3ef65d52ba94cb18dbc3e72c7a306030e97ffe
|
[
"BSD-3-Clause"
] |
permissive
|
RustamYasaviev/rssant
|
853508adfbb269d3ce91d4b4a122b8c65537ee51
|
25a66e136a6154b4ce3ef4004e562c7d0be67ec0
|
refs/heads/master
| 2022-12-30T23:57:42.546833
| 2020-10-23T11:08:26
| 2020-10-23T11:08:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,566
|
py
|
import re
import socket
import ssl
import ipaddress
import logging
from urllib.parse import urlparse
from http import HTTPStatus
import requests
from rssant_common.dns_service import DNSService, DNS_SERVICE
from .response import FeedResponse, FeedResponseStatus
from .response_builder import FeedResponseBuilder
from .useragent import DEFAULT_USER_AGENT
from . import cacert
LOG = logging.getLogger(__name__)
class FeedReaderError(Exception):
    """Base class for errors raised while fetching a feed."""
    # Subclasses override this with a FeedResponseStatus value so callers
    # can map the exception straight to a response status code.
    status = None


class PrivateAddressError(FeedReaderError):
    """The target URL resolved to a private IP address."""
    status = FeedResponseStatus.PRIVATE_ADDRESS_ERROR.value


class ContentTooLargeError(FeedReaderError):
    """The response body exceeded the configured size limit."""
    status = FeedResponseStatus.CONTENT_TOO_LARGE_ERROR.value


class ContentTypeNotSupportError(FeedReaderError):
    """The response content-type is not a supported webpage/feed type."""
    status = FeedResponseStatus.CONTENT_TYPE_NOT_SUPPORT_ERROR.value


class RSSProxyError(FeedReaderError):
    """The RSS proxy reported an error or returned a non-2xx status."""
    status = FeedResponseStatus.RSS_PROXY_ERROR.value
# Content-Type values (parameters stripped) treated as webpage/feed documents.
RE_WEBPAGE_CONTENT_TYPE = re.compile(
    r'(text/html|application/xml|text/xml|text/plain|application/json|'
    r'application/.*xml|application/.*json|text/.*xml)', re.I)
# URL path extensions that look like a webpage or feed document.
RE_WEBPAGE_EXT = re.compile(
    r'(html|xml|json|txt|opml|rss|feed|atom)', re.I)
# Separators used to isolate the trailing extension-like path segment.
RE_URL_EXT_SEP = re.compile(r'[./]')
def _get_url_ext(url: str):
    """Return the extension-like tail of the URL path (text after the
    last ``.`` or ``/``), or ``''`` when there is none or the URL is bad.

    >>> _get_url_ext('http://example.com/blog/feed')
    'feed'
    >>> _get_url_ext('http://example.com/blog/feed.xml')
    'xml'
    >>> no_error = _get_url_ext('http://example.com')
    """
    try:
        path = urlparse(url).path.strip('/')
    except ValueError:
        # Malformed URLs make urlparse raise; report no extension.
        return ''
    # Reversing the path lets a single maxsplit=1 split isolate the text
    # after the LAST separator; reverse the head back into reading order.
    reversed_head = RE_URL_EXT_SEP.split(path[::-1], 1)[0]
    return reversed_head[::-1]
def is_webpage(content_type, url=None):
    """Return True when the content-type (or, failing that, the URL
    extension) looks like a webpage or feed document.

    >>> is_webpage(' text/HTML ')
    True
    >>> is_webpage('application/rss+xml; charset=utf-8')
    True
    >>> is_webpage('application/atom+json')
    True
    >>> is_webpage('image/jpeg')
    False
    >>> is_webpage('')
    True
    >>> is_webpage('application/octet-stream', 'https://www.example.com/feed.XML?q=1')
    True
    >>> is_webpage('application/octet-stream', 'https://www.example.com/feed')
    True
    """
    if content_type:
        # Drop parameters such as '; charset=utf-8' before matching.
        content_type = content_type.split(';', maxsplit=1)[0].strip()
        if RE_WEBPAGE_CONTENT_TYPE.fullmatch(content_type):
            return True
    if not content_type:
        # No usable content-type at all: accept for broad compatibility.
        return True
    # Some feeds are served as 'application/octet-stream'; fall back to the
    # URL extension for that case, eg: https://blog.racket-lang.org/
    if url:
        ext = _get_url_ext(url)
        if ext and RE_WEBPAGE_EXT.fullmatch(ext.lstrip('.')):
            return True
    return False
def is_ok_status(status):
    """True for any 2xx HTTP status.

    Falsy inputs (None, 0) are returned unchanged — callers only use the
    result in boolean context, so this matches the original short-circuit.
    """
    if not status:
        return status
    return 200 <= status <= 299
class FeedReader:
    """HTTP client for fetching feeds with guards for private addresses,
    oversized bodies and non-webpage content-types, plus optional routing
    through an RSS proxy."""

    def __init__(
        self,
        session=None,
        user_agent=DEFAULT_USER_AGENT,
        request_timeout=30,
        max_content_length=10 * 1024 * 1024,
        allow_private_address=False,
        allow_non_webpage=False,
        rss_proxy_url=None,
        rss_proxy_token=None,
        dns_service: DNSService = DNS_SERVICE,
    ):
        # Only close the session in close() when this reader created it.
        if session is None:
            session = requests.session()
            self._close_session = True
        else:
            self._close_session = False
        self.session = session
        self.user_agent = user_agent  # str, or a callable(url) -> str
        self.request_timeout = request_timeout  # read timeout, seconds
        self.max_content_length = max_content_length  # body size cap, bytes
        self.allow_private_address = allow_private_address
        self.allow_non_webpage = allow_non_webpage
        self.rss_proxy_url = rss_proxy_url
        self.rss_proxy_token = rss_proxy_token
        self.dns_service = dns_service
        self._cacert = cacert.where()  # CA bundle path for TLS verification

    @property
    def has_rss_proxy(self):
        # True when a proxy endpoint is configured.
        return bool(self.rss_proxy_url)

    def _resolve_hostname(self, hostname):
        """Yield IP addresses for hostname, preferring the custom DNS
        service and falling back to system resolution."""
        if self.dns_service:
            hosts = self.dns_service.resolve(hostname)
            if hosts:
                yield from hosts
                return
        addrinfo = socket.getaddrinfo(hostname, None)
        for family, __, __, __, sockaddr in addrinfo:
            if family == socket.AF_INET:
                ip, __ = sockaddr
                yield ip
            elif family == socket.AF_INET6:
                ip, __, __, __ = sockaddr
                yield ip

    def check_private_address(self, url):
        """Prevent request private address, which will attack local network"""
        if self.allow_private_address:
            return
        hostname = urlparse(url).hostname
        # Any single private resolution is enough to reject the URL.
        for ip in self._resolve_hostname(hostname):
            ip = ipaddress.ip_address(ip)
            if ip.is_private:
                raise PrivateAddressError(ip)

    def check_content_type(self, response):
        """Raise ContentTypeNotSupportError for a successful response whose
        content-type does not look like a webpage/feed."""
        if self.allow_non_webpage:
            return
        # Non-2xx responses are reported via their status; don't double-fail.
        if not is_ok_status(response.status_code):
            return
        content_type = response.headers.get('content-type')
        if not is_webpage(content_type, str(response.url)):
            raise ContentTypeNotSupportError(
                f'content-type {content_type!r} not support')

    def _read_content(self, response: requests.Response):
        """Stream the response body into a bytearray, enforcing
        max_content_length; raises ContentTooLargeError when exceeded."""
        # Fast-path rejection when the server declares an oversized body.
        content_length = response.headers.get('Content-Length')
        if content_length:
            content_length = int(content_length)
            if content_length > self.max_content_length:
                msg = 'content length {} larger than limit {}'.format(
                    content_length, self.max_content_length)
                raise ContentTooLargeError(msg)
        # Re-check while streaming: Content-Length may be absent or wrong.
        content_length = 0
        content = bytearray()
        for data in response.iter_content(chunk_size=64 * 1024):
            content_length += len(data)
            if content_length > self.max_content_length:
                msg = 'content length larger than limit {}'.format(
                    self.max_content_length)
                raise ContentTooLargeError(msg)
            content.extend(data)
        return content

    def _decode_content(self, content: bytes):
        # Best-effort UTF-8 decode; only used to build error messages.
        if not content:
            return ''
        return content.decode('utf-8', errors='ignore')

    def _prepare_headers(self, url, etag=None, last_modified=None):
        """Build request headers: User-Agent plus conditional-GET validators."""
        headers = {}
        if callable(self.user_agent):
            headers['User-Agent'] = self.user_agent(url)
        else:
            headers['User-Agent'] = self.user_agent
        if etag:
            headers["ETag"] = etag
        if last_modified:
            headers["If-Modified-Since"] = last_modified
        return headers

    def _send_request(self, request, ignore_content):
        """Send a prepared request; return (response, content or None)."""
        # http://docs.python-requests.org/en/master/user/advanced/#timeouts
        response = self.session.send(
            request, verify=self._cacert, timeout=(6.5, self.request_timeout), stream=True)
        try:
            if not is_ok_status(response.status_code):
                # Read the error body too — callers use it for diagnostics.
                content = self._read_content(response)
                return response, content
            self.check_content_type(response)
            content = None
            if not ignore_content:
                content = self._read_content(response)
        finally:
            # Fix: Requests memory leak
            # https://github.com/psf/requests/issues/4601
            response.close()
        return response, content

    def _read(self, url, etag=None, last_modified=None, ignore_content=False):
        """Fetch url directly; return (headers, content, final_url, status)."""
        headers = self._prepare_headers(url, etag=etag, last_modified=last_modified)
        req = requests.Request('GET', url, headers=headers)
        prepared = self.session.prepare_request(req)
        # Check the prepared (normalized) URL, not the raw caller input.
        if not self.allow_private_address:
            self.check_private_address(prepared.url)
        response, content = self._send_request(prepared, ignore_content=ignore_content)
        return response.headers, content, response.url, response.status_code

    def _read_by_proxy(self, url, etag=None, last_modified=None, ignore_content=False):
        """Fetch url via the RSS proxy; return (headers, content, url, status).

        The proxy reports the upstream HTTP status in the
        'x-rss-proxy-status' response header, or 'ERROR' on failure.
        """
        if not self.has_rss_proxy:
            raise ValueError("rss_proxy_url not provided")
        headers = self._prepare_headers(url, etag=etag, last_modified=last_modified)
        data = dict(
            url=url,
            token=self.rss_proxy_token,
            headers=headers,
        )
        req = requests.Request('POST', self.rss_proxy_url, json=data)
        prepared = self.session.prepare_request(req)
        response, content = self._send_request(prepared, ignore_content=ignore_content)
        if not is_ok_status(response.status_code):
            message = 'status={} body={!r}'.format(
                response.status_code, self._decode_content(content))
            raise RSSProxyError(message)
        proxy_status = response.headers.get('x-rss-proxy-status', None)
        if proxy_status and proxy_status.upper() == 'ERROR':
            message = 'status={} body={!r}'.format(
                response.status_code, self._decode_content(content))
            raise RSSProxyError(message)
        # Missing header: assume the upstream request succeeded with 200.
        proxy_status = int(proxy_status) if proxy_status else HTTPStatus.OK.value
        return response.headers, content, url, proxy_status

    def read(self, url, *args, use_proxy=False, **kwargs) -> FeedResponse:
        """Fetch url (optionally via proxy) and map every network/protocol
        failure to a FeedResponseStatus code instead of raising."""
        headers = content = None
        try:
            if use_proxy:
                headers, content, url, status = self._read_by_proxy(url, *args, **kwargs)
            else:
                headers, content, url, status = self._read(url, *args, **kwargs)
        except socket.gaierror:
            status = FeedResponseStatus.DNS_ERROR.value
        except requests.exceptions.ReadTimeout:
            status = FeedResponseStatus.READ_TIMEOUT.value
        except (socket.timeout, TimeoutError, requests.exceptions.ConnectTimeout):
            status = FeedResponseStatus.CONNECTION_TIMEOUT.value
        except (ssl.SSLError, ssl.CertificateError, requests.exceptions.SSLError):
            status = FeedResponseStatus.SSL_ERROR.value
        except requests.exceptions.ProxyError:
            status = FeedResponseStatus.PROXY_ERROR.value
        except (ConnectionError, requests.exceptions.ConnectionError):
            status = FeedResponseStatus.CONNECTION_RESET.value
        except requests.exceptions.TooManyRedirects:
            status = FeedResponseStatus.TOO_MANY_REDIRECT_ERROR.value
        except requests.exceptions.ChunkedEncodingError:
            status = FeedResponseStatus.CHUNKED_ENCODING_ERROR.value
        except requests.exceptions.ContentDecodingError:
            status = FeedResponseStatus.CONTENT_DECODING_ERROR.value
        except UnicodeDecodeError:
            status = FeedResponseStatus.CONTENT_DECODING_ERROR.value
        except FeedReaderError as ex:
            status = ex.status
            LOG.warning(type(ex).__name__ + " url=%s %s", url, ex)
        except (requests.HTTPError, requests.RequestException) as ex:
            # Prefer the server's status code when a response was received.
            if ex.response is not None:
                status = ex.response.status_code
            else:
                status = FeedResponseStatus.UNKNOWN_ERROR.value
        builder = FeedResponseBuilder(use_proxy=use_proxy)
        builder.url(url)
        builder.status(status)
        builder.content(content)
        builder.headers(headers)
        return builder.build()

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    def close(self):
        # Only close sessions this reader created itself (see __init__).
        if self._close_session:
            self.session.close()
|
[
"guyskk@qq.com"
] |
guyskk@qq.com
|
7de9beaf1b472d91a1800e9205dae33702e2e645
|
6810201a040c7a77f1e5e9a93882f34717aaa4a4
|
/find disappear number.py
|
d431c337db4b5747d8305a1f36522d7172795e5a
|
[] |
no_license
|
phyllis-jia/array-practice
|
b848bfa52a82d1508b11ad946ba168a58e668030
|
3c1974866489384992d3e76307d90e5127edaf6f
|
refs/heads/master
| 2020-11-29T06:15:29.915254
| 2020-01-11T07:04:57
| 2020-01-11T07:04:57
| 230,043,151
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 558
|
py
|
###Find All Numbers Disappeared in an Array
##Given an array of integers where 1 ≤ a[i] ≤ n (n = size of array), some elements appear twice and others appear once.Find all the elements of [1, n] inclusive that do not appear in this array.Could you do it without extra space and in O(n) runtime? You may assume the returned list does not count as extra space.
def find_disappear_num(nums):
    """Return every value in [1, len(nums)] that is missing from nums.

    Presence is recorded in-place by negating the element at index
    ``value - 1`` for each value seen, so no extra space is used and
    ``nums`` is left mutated. Runs in O(n).
    """
    for value in nums:
        slot = abs(value) - 1
        nums[slot] = -abs(nums[slot])
    # Positions still holding a positive number were never visited.
    return [pos + 1 for pos, marker in enumerate(nums) if marker > 0]
|
[
"54754413+490232987@users.noreply.github.com"
] |
54754413+490232987@users.noreply.github.com
|
c46422aa62a585b2fef203ad5395901b118ea3da
|
d1cd97730d5ed4f7bec147d237cfe9ac9b2f6134
|
/app.py
|
9ef7c1d9e2022174e85c07cdc30742e823d87014
|
[] |
no_license
|
tsungic/MVP-backend
|
b5354c6fb13bfdfbc33ad7d85b98b195c90a1be1
|
2c371f4a10e36799c8c26cac933b55caff86ff72
|
refs/heads/master
| 2023-04-01T20:35:46.341005
| 2021-04-14T22:17:31
| 2021-04-14T22:17:31
| 349,769,684
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,489
|
py
|
from flask import Flask, request, Response
from flask_cors import CORS
import dbcreds
import mariadb
import json
import secrets
app = Flask(__name__)
CORS(app)
@app.route("/api/users", methods=["GET","POST","PATCH","DELETE"])
def users():
    """User CRUD endpoints.

    GET    -> list all users, or one user via the ?userId= query arg
    POST   -> create a user (email/password/name) and open a session
    PATCH  -> update email/name/password for the loginToken's user
    DELETE -> delete the loginToken's user (password required)
    """
    if request.method =="GET":
        user_id = request.args.get("userId")
        conn = None
        cursor = None
        users_data = None
        try:
            conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
            cursor = conn.cursor()
            if user_id:
                cursor.execute("SELECT * FROM users where id =?", [user_id])
                users_data = cursor.fetchall()
            else:
                cursor.execute("SELECT * FROM users")
                users_data = cursor.fetchall()
        except Exception as e:
            print(e)
        finally:
            if(cursor !=None):
                cursor.close()
            # NOTE(review): rollback() runs even after a successful read;
            # harmless for SELECTs but confirm it is intentional.
            if(conn != None):
                conn.rollback()
                conn.close()
        if users_data or users_data ==[]:
            users_info =[]
            for user in users_data:
                # Column layout assumed: 0=id, 1=email, 3=name — TODO confirm
                # against the users table schema.
                user_dic={
                    "userId": user[0],
                    "email": user [1],
                    "name": user [3]
                }
                users_info.append(user_dic)
            return Response(json.dumps(users_info, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method =="POST":
        conn = None
        cursor = None
        user_info = request.json
        name = user_info.get("name")
        password = user_info.get("password")
        email = user_info.get("email")
        user_session_id = None
        # All three fields must be present and non-empty.
        if email!=None and email !="" and name!=None and name !="" and password!=None and password !="" :
            try:
                conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
                cursor = conn.cursor()
                # NOTE(review): the password is stored as plain text — verify
                # whether hashing is expected here.
                cursor.execute("INSERT INTO users (email, password, name) VALUES (?,?,?)", [email, password, name])
                conn.commit()
                user_id = cursor.lastrowid
                login_token= secrets.token_urlsafe(20)
                cursor.execute("INSERT INTO user_session (user_id, loginToken) VALUES (?,?)", [user_id, login_token])
                conn.commit()
                # Non-None session row id marks the whole signup as successful.
                user_session_id = cursor.lastrowid
            except Exception as e:
                print(e)
            finally:
                if(cursor !=None):
                    cursor.close()
                if(conn != None):
                    conn.rollback()
                    conn.close()
        if user_session_id != None:
            user_dic={
                "userId": user_id,
                "email": email,
                "name": name,
                "loginToken": login_token
            }
            return Response(json.dumps(user_dic, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "PATCH":
        user_info = request.json
        conn = None
        cursor = None
        name = user_info.get("name")
        password = user_info.get("password")
        email = user_info.get("email")
        login_token = user_info.get("loginToken")
        user= None
        try:
            conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
            cursor = conn.cursor()
            # Each provided field is updated independently; the session token
            # is re-resolved to a user id before every update.
            if email != None and email !="" and login_token != None and login_token !="":
                #get userid based on login token
                cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
                user_id = cursor.fetchone()[0]
                #can update user table based on user id
                cursor.execute("UPDATE users SET email = ? where id = ?", [email, user_id])
            if name != None and name !="" and login_token != None and login_token !="":
                cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
                user_id = cursor.fetchone()[0]
                cursor.execute("UPDATE users SET name = ? where id = ?", [name, user_id])
            if password != None and password !="" and login_token != None and login_token !="":
                cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
                user_id = cursor.fetchone()[0]
                cursor.execute("UPDATE users SET password = ? where id = ?", [password, user_id])
            conn.commit()
            row=cursor.rowcount
            # NOTE(review): if no branch above ran, user_id is unbound here and
            # the NameError is swallowed by the except — confirm intent.
            cursor.execute("SELECT * FROM users where id = ?", [user_id])
            user = cursor.fetchone()
        except Exception as e:
            print (e)
        finally:
            if(cursor !=None):
                cursor.close()
            if(conn != None):
                conn.rollback()
                conn.close()
        if user != None:
            user_dic={
                "userId": user[0],
                "email": user [1],
                "name": user[3]
            }
            return Response(json.dumps(user_dic, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "DELETE":
        user_info = request.json
        conn = None
        cursor = None
        password = user_info.get("password")
        login_token = user_info.get("loginToken")
        user= None
        try:
            conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
            cursor = conn.cursor()
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?",[login_token])
            user_id = cursor.fetchone()[0]
            # NOTE(review): the password is required non-empty but never
            # verified against the stored one — confirm intent.
            if password != None and password !="" and login_token != None and login_token !="":
                cursor.execute("DELETE FROM users WHERE id = ?",[user_id])
                conn.commit()
                row=cursor.rowcount
        except Exception as e:
            print (e)
        finally:
            if(cursor !=None):
                cursor.close()
            if(conn != None):
                conn.rollback()
                conn.close()
        # NOTE(review): 'user' is never reassigned in this branch, so this
        # always reports success even when the delete failed — the rowcount
        # in 'row' looks like the intended check.
        if user == None:
            return Response("Delete successful", mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
@app.route("/api/login", methods=["POST", "DELETE"])
def login():
    """Session endpoints.

    POST   -> authenticate email/password and create a session token
    DELETE -> destroy the session identified by loginToken (logout)
    """
    if request.method == "POST":
        conn = None
        cursor = None
        users_data = None
        user_info = request.json
        password = user_info.get("password")
        email = user_info.get("email")
        login_rows = None
        user_data = None
        if email !="" and email !=None and password !="" and password !=None:
            try:
                conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
                cursor = conn.cursor()
                # NOTE(review): plain-text password comparison in SQL —
                # confirm whether hashing is expected here.
                cursor.execute("SELECT * FROM users where email =? AND password =?", [email, password])
                user_data = cursor.fetchone()
                rows = cursor.rowcount
                #to login need user id, can get from fetch one(which hold all user data)
                if (user_data != None):
                    #user id is first row in db-0
                    user_id = user_data[0]
                    login_token = secrets.token_urlsafe(20)
                    cursor.execute("INSERT INTO user_session (user_id, loginToken) VALUES (?,?)",[user_id, login_token])
                    conn.commit()
                    #login_rows check if insertion is done correct
                    login_rows = cursor.rowcount
            except Exception as e:
                print(e)
            finally:
                if(cursor !=None):
                    cursor.close()
                # NOTE(review): rollback() after commit is a no-op on success;
                # confirm it is only meant to undo failed partial work.
                if(conn != None):
                    conn.rollback()
                    conn.close()
        #determine if login is working or not
        if(login_rows != None):
            #return user date
            user_dic = {
                "userId": user_data[0],
                "email": user_data [1],
                "name": user_data[3],
                "loginToken": login_token
            }
            return Response(json.dumps(user_dic, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method =="DELETE":
        login_token = request.json.get("loginToken")
        rows = None
        if login_token != None and login_token !="":
            try:
                conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
                cursor = conn.cursor()
                cursor.execute("DELETE FROM user_session where loginToken = ?", [login_token])
                conn.commit()
                rows = cursor.rowcount
            except Exception as e:
                print(e)
            finally:
                if(cursor !=None):
                    cursor.close()
                if(conn != None):
                    conn.rollback()
                    conn.close()
        # Exactly one session row deleted means the token was valid.
        if (rows == 1):
            return Response("logout success", mimetype="text/html", status =204)
        else:
            return Response ("logout failed", mimetype="text/html", status =404)
@app.route("/api/place", methods=["GET","POST","PATCH","DELETE"])
def place():
    """Place CRUD endpoints.

    GET    -> list places joined with their owners, optionally filtered
              by the ?userId= query arg
    POST   -> create a place (name) for the loginToken's user
    PATCH  -> rename a place owned by the loginToken's user
    DELETE -> delete a place owned by the loginToken's user
    """
    if request.method == "GET":
        user_id = request.args.get("userId")
        conn = None
        cursor = None
        place_data = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            if user_id:
                cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id WHERE u.id = ?", [user_id])
                # BUG FIX: this result was previously assigned to a throwaway
                # variable (t_data), leaving place_data None so every
                # filtered request returned 400.
                place_data = cursor.fetchall()
            else:
                cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id")
                place_data = cursor.fetchall()
        except Exception as e:
            print(e)
        finally:
            if cursor != None:
                cursor.close()
            # rollback() is a no-op for these reads; kept for parity with the
            # other endpoints before closing the connection.
            if conn != None:
                conn.rollback()
                conn.close()
        if place_data or place_data == []:
            place_info = []
            # Joined row layout assumed: users columns first (0=user id),
            # then place columns from index 4 — TODO confirm against schema.
            for place in place_data:
                place_dic = {
                    "placeId": place[4],
                    "userId": place[0],
                    "name": place[5],
                    "accomodates": place[6],
                    "bathrooms": place[7],
                    "bedrooms": place[8],
                    "beds": place[9],
                    "images": place[10],
                    "price": place[13],
                    "propertyType": place[14],
                    "roomType": place[15]
                }
                place_info.append(place_dic)
            return Response(json.dumps(place_info, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "POST":
        login_token = request.json.get("loginToken")
        name = request.json.get("name")
        conn = None
        cursor = None
        place = None
        user_id = None
        place_id = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            # Resolve the session token to its owning user.
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            cursor.execute("INSERT INTO place(user_id, name) VALUES (?,?)", [user_id, name])
            conn.commit()
            place_id = cursor.lastrowid
            # Re-read the joined row so the response matches the GET shape.
            cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id where p.id = ?", [place_id])
            place = cursor.fetchone()
        except Exception as e:
            print(e)
        finally:
            if cursor != None:
                cursor.close()
            if conn != None:
                conn.rollback()
                conn.close()
        if place or place == []:
            place_dic = {
                "placeId": place[4],
                "userId": place[0],
                "name": place[5],
                "accomodates": place[6],
                "bathrooms": place[7],
                "bedrooms": place[8],
                "beds": place[9],
                "images": place[10],
                "price": place[13],
                "propertyType": place[14],
                "roomType": place[15]
            }
            return Response(json.dumps(place_dic, default = str), mimetype="application/json", status=201)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "PATCH":
        login_token = request.json.get("loginToken")
        place_id = request.json.get("placeId")
        name = request.json.get("name")
        conn = None
        cursor = None
        user_id = None
        rows = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            # Ownership enforced in the WHERE clause: only the user's own
            # place row can be renamed.
            cursor.execute("UPDATE place SET name = ? WHERE id=? AND user_id =?", [name, place_id, user_id])
            conn.commit()
            rows = cursor.rowcount
        except Exception as e:
            print(e)
        finally:
            if cursor != None:
                cursor.close()
            if conn != None:
                conn.rollback()
                conn.close()
        if rows != None:
            response_dic = {
                "placeId": place_id,
                "name": name,
            }
            return Response(json.dumps(response_dic, default = str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "DELETE":
        login_token = request.json.get("loginToken")
        place_id = request.json.get("placeId")
        conn = None
        cursor = None
        user_id = None
        rows = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            # Ownership enforced in the WHERE clause, as in PATCH.
            cursor.execute("DELETE FROM place WHERE id=? AND user_id =?", [place_id, user_id])
            conn.commit()
            rows = cursor.rowcount
        except Exception as e:
            print(e)
        finally:
            if cursor != None:
                cursor.close()
            if conn != None:
                conn.rollback()
                conn.close()
        if rows != None:
            return Response("Delete success", mimetype="html/text", status=204)
        else:
            return Response("failure", mimetype="html/text", status=400)
|
[
"you@example.com"
] |
you@example.com
|
9dae38ac98dd54958fae339efd2fc043865b9d73
|
279ed7207ac2c407487416b595e12f573049dd72
|
/pybvk/libbvk/input-generators/syspy/Tb
|
25afd282481bd5e76e39d4bbee45aa72e6cada06
|
[] |
no_license
|
danse-inelastic/pybvk
|
30388455e211fec69130930f2925fe16abe455bd
|
922c8c0a8c50a9fabd619fa06e005cacc2d13a15
|
refs/heads/master
| 2016-09-15T22:21:13.131688
| 2014-06-25T17:12:34
| 2014-06-25T17:12:34
| 34,995,254
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,595
|
#!/usr/bin/python
# Terbium
# Born-von-Karman (BVK) input generator for hcp terbium: defines the cell,
# basis atoms and bond force-constant data, then hands them to System.write.
import System
from math import sqrt
# a and c are lattice parameters for hex
a=sqrt(3)
c=(5.696/3.599)*a # Article, page 3943
# c=(5.70/3.60)*a # LB
m=2.639017e-25 # atomic (average IUPAC) mass in kg
# Lattice vectors of the hexagonal cell, one row of three components each.
cell=[
    a*sqrt(3)/2,-a/2,0,
    0,a,0,
    0,0,c,
]
# Two-atom basis; both sites carry the same terbium mass.
atoms=[
    [ "A", m ],
    [ "B", m ],
]
# Site positions: [x, y, z, atom-index].
sites=[
    [ 0,0,0, 0 ],
    [ a/sqrt(3),0,c/2, 1 ],
]
# Bonds: [site_i, site_j, dx, dy, dz, then nine force-constant entries],
# or [site_i, site_j, System.axial(offset, ...)] for axially-symmetric bonds.
# NOTE(review): the nine numbers presumably form a 3x3 force-constant
# matrix in row-major order — confirm against System's expected layout.
bonds=[
    [ 0,1, a/sqrt(3),0,c/2, 5.467, 0.000, 7.286,
        0.000, 1.562, 0.000,
        7.286, 0.000, 9.901 ],
    [ 1,0,-a/sqrt(3),0,-c/2, 5.467, 0.000, 7.286,
        0.000, 1.562, 0.000,
        7.286, 0.000, 9.901 ],
    # NOTE: you must take the transpose for Bonds going in the other direction!
    [ 0,0, 0,a,0, 0.954, 2.426, 0.000,
        -2.426,11.416, 0.000,
        0.000, 0.000,-0.952 ],
    [ 1,1, 0,a,0, 0.954,-2.426, 0.000,
        2.426,11.416, 0.000,
        0.000, 0.000,-0.952 ],
    [ 0,1, -2*a/sqrt(3),0,c/2, -1.889, 0.000, 0.046,
        0.000,-0.975, 0.000,
        0.046, 0.000,-0.894 ],
    [ 1,0, 2*a/sqrt(3),0,-c/2, -1.889, 0.000, 0.046,
        0.000,-0.975, 0.000,
        0.046, 0.000,-0.894 ],
    [ 0,0, 0,0,c, -0.032, 0.000, 0.000,
        0.000,-0.032, 0.000,
        0.000, 0.000,-2.228 ],
    [ 1,1, 0,0,c, -0.032, 0.000, 0.000,
        0.000,-0.032, 0.000,
        0.000, 0.000,-2.228 ],
    [ 0,1, System.axial([5*a/(2*sqrt(3)),a/2,c/2], 1.225,-0.180) ],
    [ 1,0, System.axial([-5*a/(2*sqrt(3)),-a/2,-c/2], 1.225,-0.180) ],
    [ 0,0, System.axial([ a*sqrt(3),0,0], 1.250, 0.241) ],
    [ 1,1, System.axial([ a*sqrt(3),0,0], 1.250, 0.241) ],
    [ 0,0, System.axial([-a*sqrt(3),0,0], 1.250, 0.241) ],
    [ 1,1, System.axial([-a*sqrt(3),0,0], 1.250, 0.241) ],
    [ 0,0, System.axial([0,a,c], 0.762,-0.098) ],
    [ 1,1, System.axial([0,a,c], 0.762,-0.098) ],
    [ 0,0, System.axial([0,2*a,0], -0.410, 0.066) ],
    [ 1,1, System.axial([0,2*a,0], -0.410, 0.066) ],
]
# Emit the BVK system description under the "hcp" label.
System.write(cell,atoms,sites,bonds,"hcp")
|
[
"mkresch.caltech@gmail.com"
] |
mkresch.caltech@gmail.com
|
|
f7c552759f66e732e862f5d4c641e014c42fc6ef
|
8ceb07890ad7c6b64a2224e863aedccd17d51cfa
|
/TechnicalCodingExamples/PythonTechnicalExamples/PythonPrograms/feb/feb1.py
|
3459145f99b62ef5edb301021e59ea363491eea9
|
[] |
no_license
|
sagarreddyg/PythonTechnicalExamplesProject
|
af1fa8d0e70885f1df82cc3bdf72beb771927396
|
dd5072370b8e43490f1fe9463d189ba6c97fe51d
|
refs/heads/master
| 2020-03-21T13:06:25.675727
| 2018-07-05T11:48:52
| 2018-07-05T11:48:52
| 138,588,502
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 97
|
py
|
def fib_upto(limit=100):
    """Return the Fibonacci numbers not exceeding *limit*, in order.

    Extracted from the original inline loop, which iterated a fixed 100
    times with a dead ``if a <= 100`` guard; this terminates as soon as
    the sequence passes the limit.
    """
    values = []
    a, b = 0, 1
    while a <= limit:
        values.append(a)
        a, b = b, a + b
    return values


# Preserve the original script's output: the sequence up to 100,
# space-separated with a trailing space and no newline.
print(*fib_upto(100), end=" ")
|
[
"gantasagarreddy467@gmail.com"
] |
gantasagarreddy467@gmail.com
|
0f7f0dd51238abea947caf09e24c2ee1e7230cc6
|
40df3950daf64e8c580fa6f7c3bab9ab57dc8dcf
|
/keido/urls.py
|
65c99a55e74e5b839ca71ab3a846ffdf07cab227
|
[] |
no_license
|
azymv/clustering
|
5f018a1efabe684081f6c1b1cddae607af112c30
|
0ec54e472c5ef525aeca5a9e91734b51cbb561f3
|
refs/heads/master
| 2020-07-09T11:57:58.053512
| 2019-08-23T09:27:18
| 2019-08-23T09:27:18
| 203,963,642
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 809
|
py
|
"""keido URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path

# Top-level routes: the Django admin plus the clustermap app's URLconf.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('clustermap/', include('clustermap.urls')),
]
|
[
"timurazymov@Timurs-MacBook-Pro.local"
] |
timurazymov@Timurs-MacBook-Pro.local
|
533d9a3aba0fddd5fedf95a5119260c435870c14
|
941688a0f6e382a28130b7f254fa302b0285182e
|
/Mind controlled drone/getAlpha_noPlotly.py
|
38107a4c391789e3431a37846fb70809dcd08066
|
[] |
no_license
|
Hugo-W/Imperial-festival
|
d8575bad194700bf817e8b705ff2b4782a8ca652
|
930f05104d2487c76c9a242c06e8db7db79f3992
|
refs/heads/master
| 2021-01-19T15:34:03.739562
| 2015-05-12T10:30:48
| 2015-05-12T10:30:48
| 35,057,812
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,623
|
py
|
# This is an example of popping a packet from the Emotiv class's packet queue
# and printing the gyro x and y values to the console.
from emokit.emotiv import Emotiv
import platform
if platform.system() == "Windows":
import socket # Needed to prevent gevent crashing on Windows. (surfly / gevent issue #459) / UDP Client requires
import gevent
import sys
import time
import numpy as np
import scipy
class RingBuffer():
    """A fixed-length 1D ring buffer backed by a float32 numpy array."""

    def __init__(self, length):
        # 'f' -> float32; the buffer starts zero-filled.
        self.data = np.zeros(length, dtype='f')
        self.index = 0

    def extend(self, x):
        """Append array-like (or scalar) x, overwriting the oldest samples."""
        if not isinstance(x, np.ndarray):
            x = np.array(x)
        # Target slots wrap around the end of the backing array.
        slots = (self.index + np.arange(x.size)) % self.data.size
        self.data[slots] = x
        self.index = slots[-1] + 1

    def get(self):
        """Return the buffer contents ordered oldest-to-newest."""
        order = (self.index + np.arange(self.data.size)) % self.data.size
        return self.data[order]

    def __len__(self):
        return np.size(self.data)
def getAlphaPower(buff, ffttype, Ts=1. / 128):
    """Estimate alpha-band (8-12 Hz) activity from a raw sample buffer.

    buff    -- 1D sequence of samples
    ffttype -- 'square' for squared-magnitude power, anything else for
               plain magnitude; 'log' additionally converts to dB
    Ts      -- sampling period in seconds (default 1/128 s)

    Returns the peak-to-peak spread of the spectral values whose
    frequencies fall within [8, 12] Hz.
    """
    N = len(buff)
    # Frequency of every FFT bin (Nyquist in the middle of the array).
    freqs = np.fft.fftfreq(N, Ts)
    if ffttype == 'square':
        spectrum_vals = 2. * np.abs(np.fft.fft(buff)) ** 2 / N ** 2  # squared
    else:
        spectrum_vals = 2. * np.abs(np.fft.fft(buff)) / N  # not squared
    # Crude band-pass: keep only the bins inside the alpha band.
    in_band = [value for f, value in zip(freqs, spectrum_vals) if 8 <= f <= 12]
    # Peak-to-peak spread was found to behave better than mean/max/var here.
    alpha = np.ptp(in_band)
    if ffttype == 'log':
        alpha = 10. * np.log10(alpha)  # convert to dB
    return alpha
#def calibration(N_calib=2048, Nsample=256, Fs = 128.):
# Main acquisition loop: fills a ring buffer with EEG samples from the Emotiv
# headset, computes alpha-band power over a sliding window and streams a
# normalized [0, 1] value over UDP (Python 2 code: print statements,
# `except E, msg`, xrange, time.clock).
if __name__ == "__main__":
    N = 256
    ffttype = 'square'
    # Per-scaling calibration range: [min value (eyes open), max (eyes closed)]
    param = dict({'square':[10.,100.] , 'log':[6.,30.], 'abs':[5.,20.]})
    buffin = RingBuffer(N)
    headset = Emotiv()
    routine = gevent.spawn(headset.setup)
    gevent.sleep(0)
    # Hamming window sharpens the FFT; `normalizing` compensates its gain.
    win = scipy.hamming(N)
    normalizing = np.sum(np.multiply(win,np.ones(N)*1./N))
    t0 = time.clock()
    told = t0
    # Jin UDP Client
    host = '127.0.0.1'
    port = 10000
    # Create a UDP socket
    try:
        print 'Init UDP Client: IP: %s Port: %s...' % (host,port)
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        print 'Client init complete'
    except socket.error, msg:
        print 'Client init failed. Error Code: ' + str(msg[0]) + ' Message: ' + msg[1]
    try:
        # Pre-fill the ring buffer with N samples from the F3 sensor.
        temp_buffer = []
        for i in xrange(N):
            packet = headset.dequeue()
            temp_buffer.append(packet.sensors['F3']['value'])
            gevent.sleep(0)
        buffin.extend(temp_buffer)
        while True:
            # NOTE(review): the steady-state loop reads O1, not F3 as in the
            # warm-up above — presumably intentional (occipital alpha); verify.
            packet = headset.dequeue()
            buffin.extend(packet.sensors['O1']['value'])
            gevent.sleep(0)
            alpha = getAlphaPower(np.multiply(win,buffin.get())/normalizing,ffttype) # hamming window to sharpen fft
            # Jin UDP Client
            try:
                # Map raw alpha power onto [0, 1] using the calibration range.
                alphaScale = (alpha - param[ffttype][0])/(param[ffttype][1]-param[ffttype][0])
                alphaScale = max(0, min(alphaScale,1)) # clamp alpha
                # Rate-limit UDP sends to roughly one every 15 ms.
                if time.clock()-told>0.015:
                    told = time.clock()
                    sock.sendto(str(alphaScale), (host, port))
                    sock.sendto(str(alphaScale), (host, port + 1))
            except socket.error, msg:
                print 'Error Code: ' + str(msg[0]) + ' Message: ' + msg[1]
    except KeyboardInterrupt:
        headset.running = False
        headset.close()
        gevent.kill(routine, KeyboardInterrupt)
    finally:
        headset.close()
        gevent.kill(routine)
        # Jin UDP Client
        print >>sys.stderr, 'Closing socket'
        sock.close()
        sys.exit()
|
[
"hugo.weissbart@gmail.com"
] |
hugo.weissbart@gmail.com
|
3b5873aab4eec5bf799b98a457ec157fe794c642
|
a4cbe5263e4bf62f37fe4aa1117a84e74af357b5
|
/classification_synthetic/synthetic_classification_rs.py
|
6e8c333ff9e92dd9775aa56133858bcb52ad3f7d
|
[] |
no_license
|
andressp05/bayopt_spn
|
639b660671938dcca95a80a22ed5599fd7287caf
|
36a57d04405befe9f05a32564bd781c0da647483
|
refs/heads/master
| 2023-06-19T00:48:14.672018
| 2021-07-14T18:35:33
| 2021-07-14T18:35:33
| 376,756,584
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,585
|
py
|
# Debug Option
import pdb
# General Imports
import sys
import math
from sklearn.metrics import accuracy_score
from sklearn.model_selection import KFold
import numpy as np
import pandas as pd
# Imports SPN
from spn.algorithms.LearningWrappers import learn_parametric, learn_classifier
from spn.structure.leaves.parametric.Parametric import Categorical, Gaussian
from spn.structure.Base import Context
from spn.algorithms.MPE import mpe
# Imports Bayessian Optimization
import GPyOpt
def rows_value_to_rows(rows_value):
    """Map an integer code to the SPFlow row-clustering method name.

    0 -> 'rdc', 1 -> 'kmeans', any other value -> 'gmm'.
    """
    method_by_code = {0: 'rdc', 1: 'kmeans'}
    return method_by_code.get(rows_value, 'gmm')
def optimize_classification_rs_function(threshold, min_instances_slice, min_features_slice, rows_value):
    """Evaluate one random-search hyperparameter configuration.

    Learns an SPN classifier on the synthetic classification dataset with
    2-fold cross validation and returns the mean classification error
    together with the hyperparameters that were evaluated.

    Returns:
        (error, threshold, min_instances_slice, min_features_slice, rows)
    """
    # Load the synthetic classification dataset (features + labels).
    df_data = pd.read_pickle("classification_data.pkl")
    df_target = pd.read_pickle("classification_target.pkl")
    classification_data = df_data.to_numpy()
    classification_target = df_target.to_numpy().reshape(-1)
    # Translate the integer row-clustering code into its method name.
    rows = rows_value_to_rows(rows_value)
    # K-fold cross-validation parameters; column index 5 holds the label.
    k = 2
    error = 0.0
    label = 5
    kf = KFold(n_splits=k)
    for train_index, test_index in kf.split(classification_data):
        # Split the current fold.
        x_train, x_test = classification_data[train_index], classification_data[test_index]
        y_train, y_test = classification_target[train_index], classification_target[test_index]
        # The SPN learner expects the labels appended as the last column.
        y_train_reshape = y_train.reshape(-1, 1)
        train_data = np.hstack((x_train, y_train_reshape))
        hyperparams = {"threshold": threshold, "min_instances_slice": min_instances_slice, "min_features_slice": min_features_slice, "rows": rows}
        try:
            spn_classification = learn_classifier(train_data,
                        Context(parametric_types=[Gaussian, Gaussian, Gaussian, Gaussian, Gaussian, Categorical]).add_domains(train_data),
                        learn_parametric, label, **hyperparams)
            # Predict by filling the label column with NaN and running MPE.
            y_predict = np.empty(len(x_test))
            y_predict.fill(np.nan)
            y_predict_reshape = y_predict.reshape(-1, 1)
            predict_data = np.hstack((x_test, y_predict_reshape))
            predict_data = mpe(spn_classification, predict_data)
            y_predict = predict_data[:, 5]
            error += (1.0 - accuracy_score(y_test, y_predict))
        except Exception:
            # A failed SPN fit counts as total error for this fold.  This was
            # a bare `except:`, which would also swallow KeyboardInterrupt
            # and SystemExit.
            error += 1.0
    error = error / float(k)
    return error, threshold, min_instances_slice, min_features_slice, rows
def main():
    """Random search over SPN hyperparameters, logging each trial to a file.

    sys.argv[1] seeds the RNG.  Runs 30 random configurations, keeps the
    one with the lowest cross-validation error and prints it.
    """
    # NOTE(review): np.random.seed() returns None, so `seed` is always None;
    # the call still seeds the global RNG as intended.
    seed = np.random.seed(int(sys.argv[1]))
    f = open("classification_rs_hyperparams.txt", "w")
    f.write('Seed ' + sys.argv[1] + '\n')
    num_iterations = 30
    error_min = 100
    for i in range(num_iterations):
        # Sample one random hyperparameter configuration.
        threshold = np.random.rand()/2.0
        min_instances_slice = np.random.randint(0,high=101)
        min_features_slice = np.random.randint(1, high=4)
        rows_value = np.random.randint(0,high=3)
        f.write(str(i) + ": threshold=" + str(threshold) + " - min_instances_slice=" + str(min_instances_slice) + " - min_features_slice=" + str(min_features_slice) + " - rows=" + str(rows_value))
        error,threshold,min_instances_slice,min_features_slice,rows = optimize_classification_rs_function(threshold, min_instances_slice, min_features_slice, rows_value)
        f.write(' --> ERROR:' + str(error))
        # error == 1 means every fold failed to train ("Fallo Prog").
        if error == 1:
            f.write(' --> Fallo Prog: SI' + '\n')
        else:
            f.write(' --> Fallo Prog: NO' + '\n')
        # Track the best (lowest-error) configuration seen so far.
        if error < error_min:
            error_min = error
            threshold_min = threshold
            min_instances_slice_min = min_instances_slice
            min_features_slice_min = min_features_slice
            rows_min = rows
    f.close()
    result = "RS error --> " + str(error_min) + " with hyperparams: Threshold = " + str(threshold_min) + ", Min_instances_slice = " + str(min_instances_slice_min) + ", Min_features_slice = " + str(min_features_slice_min) + ", Rows = " + str(rows_min) + "."
    print(result)
    return result
if __name__ == "__main__":
    # Script entry point.
    main()
|
[
"asalaspena@gmail.com"
] |
asalaspena@gmail.com
|
b296990cc4eb26731b584ca00d04d1379ab8adda
|
d0787de1b8957b42e7bea12befd4e6d28a42bf9d
|
/question_b.py
|
b5d502c33cd245f956ac23a1d9a0e92550fb93f5
|
[] |
no_license
|
jucarrier/Justin-Carrier-Test
|
c88f3b012fefad327e3a131393b999a6d3285e10
|
b5a1956be45c7355dbcbb2f8c030c22110c4b32a
|
refs/heads/master
| 2020-05-24T03:26:50.838097
| 2019-05-17T14:29:10
| 2019-05-17T14:29:21
| 187,072,742
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,546
|
py
|
import re
def sanitize_input(str):
    """Parse two numbers from *str* and return a comparison sentence.

    Accepts numbers separated by a comma, whitespace, or both — e.g.
    "5,1", "5, 1", "5 1", "5,,,,1".  Returns an error-message string
    when the input does not contain exactly two valid numbers.

    (The parameter shadows the builtin `str`; kept as-is so the public
    signature is unchanged.)
    """
    # Collapse runs of whitespace / commas into single separators.
    # Raw strings fix the invalid escape sequence in the old "\s+" pattern.
    str = " ".join(re.split(r"\s+", str, flags=re.UNICODE))
    str = " ".join(re.split(r",{2,}", str, flags=re.UNICODE))
    if str.count(',') > 1 or str.count(' ') > 1:
        return "You must only enter two numbers"
    try:
        if ',' in str:
            num_1 = str.replace(' ', '').split(',')[0]
            num_2 = str.replace(' ', '').split(',')[1]
        else:
            num_1 = float(str.split(' ')[0])
            num_2 = float(str.split(' ')[1])
        # An empty field means only one number was supplied, e.g. "5,".
        if num_1 == "" or num_2 == "":
            raise IndexError
        num_1 = float(num_1)
        num_2 = float(num_2)
        if num_1 == num_2:
            comparator = "equal to"
        elif num_1 > num_2:
            comparator = "larger than"
        else:
            comparator = "smaller than"
        return "%g is %s %g" % (num_1, comparator, num_2)
    except ValueError:
        # Values entered were not floats or ints.
        return "You must enter numbers"
    except IndexError:
        # Fewer than two numbers were supplied.
        return "You must enter two numbers"
def request_input():
    """Prompt until the comparison succeeds; return the result sentence.

    Re-prompts whenever sanitize_input() produced an error message
    (detected by the absence of any digit in the response).
    """
    s = input("Numbers to compare: ")
    res = sanitize_input(s)
    if any(char.isdigit() for char in res):
        print(res)
        return res
    # Propagate the eventual successful result: the original recursive call
    # discarded its return value, so any retried session returned None.
    return request_input()
if __name__ == "__main__":
    # Regression checks for sanitize_input: separators, repeated commas,
    # floats, negatives, equality, and each error-message path.
    assert (sanitize_input("5,1") == "5 is larger than 1")
    assert (sanitize_input("5, 1") == "5 is larger than 1")
    assert (sanitize_input("5 1") == "5 is larger than 1")
    assert (sanitize_input("5.9 1") == "5.9 is larger than 1")
    assert (sanitize_input("5,,,,1") == "5 is larger than 1")
    assert (sanitize_input("5.1,1.3") == "5.1 is larger than 1.3")
    assert (sanitize_input("5,1") == "5 is larger than 1")
    assert (sanitize_input("5,1") == "5 is larger than 1")
    assert (sanitize_input("1.8, 9") == "1.8 is smaller than 9")
    assert (sanitize_input("-2, -1") == "-2 is smaller than -1")
    assert (sanitize_input("0 0") == "0 is equal to 0")
    assert (sanitize_input("0,") == "You must enter two numbers")
    assert (sanitize_input("seven,") == "You must enter two numbers")
    assert (sanitize_input(",9") == "You must enter two numbers")
    assert (sanitize_input("abc,") == "You must enter two numbers")
    assert (sanitize_input("help") == "You must enter numbers")
    assert (sanitize_input("1,2,3") == "You must only enter two numbers")
    # Interactive entry point after the self-checks pass.
    request_input()
|
[
"justind.carrier@gmail.com"
] |
justind.carrier@gmail.com
|
418adfe62158fd425838f6516db8796311d443e5
|
101e4ffbb876fbb255014b03b685d52cdbb45103
|
/flask/lib/python2.7/site-packages/pbr/tests/test_wsgi.py
|
0bdf178f0e9a485bee0545bade56581f341846a0
|
[] |
no_license
|
cherryshen/cherryblog
|
23d008c634542c47a965510efd77b4ff166f6ad7
|
f401570f0ddc663185932265fc2c8fee11d6f771
|
refs/heads/master
| 2020-12-01T19:05:06.692396
| 2016-09-06T04:40:03
| 2016-09-06T04:40:03
| 66,790,200
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,470
|
py
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import subprocess
import sys
try:
# python 2
from urllib2 import urlopen
except ImportError:
# python 3
from urllib.request import urlopen
from pbr.tests import base
class TestWsgiScripts(base.BaseTestCase):
    """Integration tests for pbr-generated wsgi console scripts.

    Installs the test package into a temp prefix, then launches the
    installed scripts as subprocesses and talks HTTP to them.
    """

    # The two console scripts installed by the test package.
    cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class')

    def _get_path(self):
        # Prefer lib64 when the install created it (64-bit platforms).
        if os.path.isdir("%s/lib64" % self.temp_dir):
            path = "%s/lib64" % self.temp_dir
        else:
            path = "%s/lib" % self.temp_dir
        # PYTHONPATH value pointing at the temp install's site-packages.
        return ".:%s/python%s.%s/site-packages" % (
            path,
            sys.version_info[0],
            sys.version_info[1])

    def test_wsgi_script_install(self):
        """Test that we install a non-pkg-resources wsgi script."""
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        stdout, _, return_code = self.run_setup(
            'install', '--prefix=%s' % self.temp_dir)
        self._check_wsgi_install_content(stdout)

    def test_wsgi_script_run(self):
        """Test that we install a runnable wsgi script.

        This test actually attempts to start and interact with the
        wsgi script in question to demonstrate that it's a working
        wsgi script using simple server.
        """
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        stdout, _, return_code = self.run_setup(
            'install', '--prefix=%s' % self.temp_dir)
        self._check_wsgi_install_content(stdout)

        # Live test run the scripts and see that they respond to wsgi
        # requests.
        for cmd_name in self.cmd_names:
            self._test_wsgi(cmd_name, b'Hello World')

    def _test_wsgi(self, cmd_name, output, extra_args=None):
        """Start the installed script on a random port and check one request."""
        cmd = os.path.join(self.temp_dir, 'bin', cmd_name)
        print("Running %s -p 0" % cmd)
        # -p 0 lets the OS pick a free port; the URL is scraped from stdout.
        popen_cmd = [cmd, '-p', '0']
        if extra_args:
            popen_cmd.extend(extra_args)

        env = {'PYTHONPATH': self._get_path()}

        p = subprocess.Popen(popen_cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, cwd=self.temp_dir,
                             env=env)
        self.addCleanup(p.kill)

        # The script's banner is consumed line by line in a fixed order.
        stdoutdata = p.stdout.readline()  # ****...

        stdoutdata = p.stdout.readline()  # STARTING test server...
        self.assertIn(
            b"STARTING test server pbr_testpackage.wsgi",
            stdoutdata)

        stdoutdata = p.stdout.readline()  # Available at ...
        print(stdoutdata)
        # NOTE(review): pattern should be rb'(http://[^:]+:\d+)/' — the
        # unescaped \d in a non-raw bytes literal raises an invalid escape
        # sequence warning on modern Pythons.
        m = re.search(b'(http://[^:]+:\d+)/', stdoutdata)
        self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata)

        stdoutdata = p.stdout.readline()  # DANGER! ...
        self.assertIn(
            b"DANGER! For testing only, do not use in production",
            stdoutdata)

        stdoutdata = p.stdout.readline()  # ***...

        f = urlopen(m.group(1).decode('utf-8'))
        self.assertEqual(output, f.read())

        # Request again so that the application can force stderr.flush(),
        # otherwise the log is buffered and the next readline() will hang.
        urlopen(m.group(1).decode('utf-8'))

        stdoutdata = p.stderr.readline()

        # we should have logged an HTTP request, return code 200, that
        # returned the right amount of bytes
        status = '"GET / HTTP/1.1" 200 %d' % len(output)
        self.assertIn(status.encode('utf-8'), stdoutdata)

    def _check_wsgi_install_content(self, install_stdout):
        """Verify install output and the generated script bodies."""
        for cmd_name in self.cmd_names:
            install_txt = 'Installing %s script to %s' % (cmd_name,
                                                          self.temp_dir)
            self.assertIn(install_txt, install_stdout)

            cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name)

            script_txt = open(cmd_filename, 'r').read()
            # pbr scripts must not import pkg_resources (slow at startup).
            self.assertNotIn('pkg_resources', script_txt)

            main_block = """if __name__ == "__main__":
    import argparse
    import socket
    import sys
    import wsgiref.simple_server as wss"""

            if cmd_name == 'pbr_test_wsgi':
                app_name = "main"
            else:
                app_name = "WSGI.blog"

            starting_block = ("STARTING test server pbr_testpackage.wsgi."
                              "%s" % app_name)

            else_block = """else:
    application = None"""

            self.assertIn(main_block, script_txt)
            self.assertIn(starting_block, script_txt)
            self.assertIn(else_block, script_txt)

    def test_with_argument(self):
        # Arguments after "--" are forwarded to the wsgi application itself.
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        stdout, _, return_code = self.run_setup(
            'install', '--prefix=%s' % self.temp_dir)

        self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"])
|
[
"cshen@wellesley.edu"
] |
cshen@wellesley.edu
|
1d5c74667e175aaa2beed57eccdd0fd63940fc90
|
8512f25ed1829b321c276b98c5cd388f048778b1
|
/Jeu/Tetris.py
|
c248d23d98e46501d7b25353cc2abba66454aea6
|
[] |
no_license
|
VL74/ProjetTetris
|
9ad953708013bb0c6f890c2d37ea45aeb8796c27
|
a8ecf7e350bfaeec0cb48aa2606214d3cee265c2
|
refs/heads/main
| 2023-04-04T16:21:59.406050
| 2021-03-31T09:59:13
| 2021-03-31T09:59:13
| 353,256,047
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,802
|
py
|
from Jeu.Figure import Figure
from Jeu.Joueur import Joueur
class Tetris:
    """Game state for a Tetris board of `hauteur` rows x `longueur` columns.

    `field` holds 0 for empty cells and a color index (> 0) for frozen
    blocks; the active piece is a `Figure` with its own x/y offset.
    """
    # NOTE(review): these are *class* attributes.  `field` is reassigned per
    # instance in __init__, but the mutable `Joueur1` is shared by every
    # Tetris instance — confirm this single-player sharing is intended.
    level = 2
    grab = 1
    state = "start"
    field = []
    hauteur = 0
    longueur = 0
    score = 0
    x = 100
    y = 60
    zoom = 20
    figure = None
    Joueur1 = Joueur()

    def __init__(self, hauteur, longueur):
        # Build an empty hauteur x longueur grid and reset the player score.
        self.hauteur = hauteur
        self.longueur = longueur
        self.field = []
        self.state = "start"
        self.Joueur1.score = 0
        for i in range(hauteur):
            new_line = []
            for j in range(longueur):
                new_line.append(0)
            self.field.append(new_line)

    def new_figure(self):
        # Spawn the next piece near the top of the board (column 3, row 0).
        self.figure = Figure(3, 0)

    def intersects(self):
        """Return True if the active figure overlaps a wall, the floor or a
        frozen block.  The figure occupies cells of a 4x4 template whose
        filled indices come from figure.image()."""
        intersection = False
        for i in range(4):
            for j in range(4):
                if i * 4 + j in self.figure.image():
                    if i + self.figure.y > self.hauteur - 1 or \
                            j + self.figure.x > self.longueur - 1 or \
                            j + self.figure.x < 0 or \
                            self.field[i + self.figure.y][j + self.figure.x] > 0:
                        intersection = True
        return intersection

    def break_lines(self):
        """Clear complete rows, shift rows above them down, update scores."""
        lines = 0
        for i in range(1, self.hauteur):
            zeros = 0
            for j in range(self.longueur):
                if self.field[i][j] == 0:
                    zeros += 1
            if zeros == 0:
                lines += 1
                # Copy each row above the cleared one down by a single step.
                for i1 in range(i, 1, -1):
                    for j in range(self.longueur):
                        self.field[i1][j] = self.field[i1 - 1][j]
        # Quadratic reward: clearing n lines at once scores n**2.
        self.score += lines ** 2
        # "grab" acts as a level multiplier that grows with the score.
        if self.score / self.grab > 10 * self.grab:
            self.grab += 1
        self.Joueur1.score = self.score * self.grab
        self.Joueur1.score_max = self.Joueur1.score

    def go_space(self):
        """Hard drop: move the figure down until it collides, then freeze."""
        while not self.intersects():
            self.figure.y += 1
        self.figure.y -= 1
        self.freeze()

    def go_down(self):
        """Soft drop by one row; freeze the figure when it lands."""
        self.figure.y += 1
        if self.intersects():
            self.figure.y -= 1
            self.freeze()

    def freeze(self):
        """Stamp the figure into the field, clear lines, spawn a new piece."""
        for i in range(4):
            for j in range(4):
                if i * 4 + j in self.figure.image():
                    self.field[i + self.figure.y][j + self.figure.x] = self.figure.color
        self.break_lines()
        self.new_figure()
        # If the freshly spawned piece already collides, the board is full.
        if self.intersects():
            self.state = "gameover"

    def go_side(self, dx):
        """Move the figure horizontally by dx, reverting on collision."""
        old_x = self.figure.x
        self.figure.x += dx
        if self.intersects():
            self.figure.x = old_x

    def rotate(self):
        """Rotate the figure, reverting on collision."""
        old_rotation = self.figure.rotation
        self.figure.rotate()
        if self.intersects():
            self.figure.rotation = old_rotation
|
[
"vincentlapierre74@gmail.com"
] |
vincentlapierre74@gmail.com
|
07a0c742bac229ea4e7fc786a234c4da7d784306
|
df0544a0da4e5c16c7e18eeb898e24bac6db76ca
|
/object/animal_reflect.py
|
ddd7cfe68dabb7be8b41f1b7b478e18f576e3df4
|
[] |
no_license
|
Xu-Guo/python
|
c37227bde318647daf6eafdd36a7356232fed46e
|
54ea35470916e63c00b0a5ce2c265a9b3868fd3f
|
refs/heads/master
| 2020-04-05T23:21:51.994207
| 2015-07-28T22:29:30
| 2015-07-28T22:29:30
| 39,860,612
| 0
| 0
| null | 2015-07-28T22:22:37
| 2015-07-28T22:22:36
| null |
UTF-8
|
Python
| false
| false
| 1,358
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
class Animal(object):
    """Base class for the reflection demo below (Python 2 code)."""
    def __init__(self, age=9):
        self.age = age
        print "Animal constructor is called"
    def run(self):
        print 'Animal is running...'
    def __old__(self):
        # Not a real Python protocol method — just a dunder-styled regular
        # method, invoked explicitly below as d.__old__().
        return self.age
    def __len__(self):
        # Makes len(animal) return the fixed value 100.
        return 100
class Dog(Animal):
    """Subclass demonstrating Python 2 style super() chaining."""
    def __init__(self):
        super(Dog, self).__init__()
        print "Dog constructor is called"
    def run(self):
        # Run the parent behaviour first, then the Dog-specific part.
        super(Dog, self).run()
        print 'Dog is running...'
# Demonstration script: construction/inheritance, type checks via the
# `types` module, isinstance(), dir(), and reflection with hasattr/getattr
# (Python 2 print statements throughout).
a = Animal()
a.run()
d = Dog()
d.run()
print len(d)
print d.__old__()
# Built-in type objects and the __name__ attribute.
print type(123)
print type('str')
print type(123).__name__
print type(d)
print type(d).__name__
print types.StringType
print types.StringType == type("str")
print type(u'abc')==types.UnicodeType
print type([])==types.ListType
print type(str)==types.TypeType
print type(int)==type(str)==types.TypeType
# isinstance follows the inheritance chain; type() does not.
print isinstance(a, Animal)
print isinstance(a, Dog)
print isinstance(d, Animal)
print isinstance(d, Dog)
print dir(d)
# Attribute reflection: hasattr / getattr with and without a default.
print hasattr(d, 'age')
print hasattr(d, 'age1')
print getattr(d, 'age')
# AttributeError: 'Dog' object has no attribute 'age1'
# print getattr(d, 'age1')
print getattr(d, 'age1', 10)
print hasattr(d, 'run')
# getattr on a method returns a bound method object that can be called.
run = getattr(d, 'run')
print run
print dir(run)
print run.__class__.__name__
run()
|
[
"qguan@itu.edu"
] |
qguan@itu.edu
|
81270c4b09a1d3528f7e7da50545fdb98d9f0426
|
2ade7afe274e3f6252bcfc38c17639fc31379002
|
/checkio.org/Elementary/popular_words.py
|
fc9c1bcbd795b9b52e02106d5b35a8974fedf37e
|
[] |
no_license
|
dunaldo/checkio-solutions
|
78c6922efb6b7f321476be1e15d56beafe5efc8b
|
a8ed7ac072d43d185f95149473f0e529d41e1d6c
|
refs/heads/master
| 2020-03-29T09:23:43.521483
| 2019-02-07T20:28:40
| 2019-02-07T20:28:40
| 149,756,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 929
|
py
|
from collections import Counter
def popular_words(text, words):
    """Count how often each word in *words* occurs in *text*.

    Matching is case-insensitive and ignores commas and periods.
    Words that never occur map to 0 in the returned dict.
    """
    cleaned = text.lower().replace(',', '').replace('.', '')
    occurrences = Counter(cleaned.split())
    # Counter returns 0 for missing keys, covering the "never seen" case.
    return {word: occurrences[word] for word in words}
if __name__ == '__main__':
    # Demo run using the CheckiO example input; expected result is
    # {'i': 4, 'was': 3, 'three': 0}.
    print("Example:")
    print(popular_words('''
When I was One,
I had just begun.
When I was Two,
I was nearly new.
''', ['i', 'was', 'three']))

    # # These "asserts" are used for self-checking and not for an auto-testing
    # assert popular_words('''
    # When I was One,
    # I had just begun.
    # When I was Two,
    # I was nearly new.
    # ''', ['i', 'was', 'three']) == {
    # 'i': 4,
    # 'was': 3,
    # 'three': 0
    # }
    # print("Coding complete? Click 'Check' to earn cool rewards!")
|
[
"noreply@github.com"
] |
noreply@github.com
|
d9ee8a40fc8b7250e6a9fa0183f5948f79759ff5
|
93ab4c0dc6da2eba2d10ac290090ddd528f0f001
|
/gpt2_robustness/argparser.py
|
5a40514cc5a2d99bf0a940d2490fd42f142aea58
|
[] |
no_license
|
hansonhl/GenerativeCommonsenseQA
|
849ef3a2c0b702d7d4b993462f6dc2ec97848662
|
875a0ff76f752adcd7ef9d926216aa5123cc16e4
|
refs/heads/main
| 2023-05-11T23:20:09.454626
| 2021-06-02T22:19:18
| 2021-06-02T22:19:18
| 373,312,262
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,423
|
py
|
import configargparse
def get_ansgen_args(parser):
    """Attach answer-generation CLI options to *parser* and parse argv.

    *parser* is a configargparse parser, so every option can also be
    supplied through a config file named by -c/--configs.  Returns the
    parsed argument namespace.
    """
    parser.add_argument("-c", "--configs", is_config_file=True)
    # Input/output locations.
    parser.add_argument('--preprocessed_dir', type=str)
    parser.add_argument('--graph_info_dir', type=str)
    parser.add_argument('--save_dir', type=str)
    parser.add_argument("--protoqa_data_path", type=str)
    parser.add_argument("--seed", type=int, default=39)
    # model
    parser.add_argument('--model_type', type=str, default="gpt2", choices=["gpt2", "bart"])
    parser.add_argument('--eval_only', action="store_true")
    parser.add_argument('--verbose_generation_output', action="store_true")
    parser.add_argument('--ans_concept_input_type', type=str, default="everything")
    parser.add_argument('--formatting', type=str, default="SEP")
    parser.add_argument('--no_permutation_invariant', action="store_true")
    parser.add_argument('--max_additional_concepts', type=int, default=4)
    parser.add_argument('--num_noisy_examples', type=int, default=1)
    parser.add_argument('--multitask_subepochs', type=int, default=20)
    parser.add_argument('--multitask_type', type=str)
    parser.add_argument('--eval_epochs', type=int, default=20)
    # training
    parser.add_argument('--learning_rate', type=float, default=1e-3)
    parser.add_argument('--weight_decay', '-w', type=float, default=0.0)
    parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.")
    parser.add_argument("--max_grad_norm", default=1.0, type=float, help="Max gradient norm.")
    parser.add_argument("--warmup_steps", default=0, type=int, help="Linear warmup over warmup_steps.")
    parser.add_argument('--num_epoch', type=int, default=5)
    parser.add_argument('--batch_size', type=int, default=128)
    parser.add_argument('--early_stopping_patient_epochs', type=int)
    parser.add_argument("--generation_batch_size", type=int, default=16)
    # gpu option
    parser.add_argument('--gpu_device', type=str, default='0')
    args = parser.parse_args()
    return args
|
[
"hansonlu.hl@gmail.com"
] |
hansonlu.hl@gmail.com
|
d6f5168e7ed6ddd0d588ee89ae179faafdae37c6
|
d78dfc5089717fc242bbd7097f507d811abb4260
|
/USA/script.icechannel.Thevideome.settings/default.py
|
b7d904cce475928faec807af89e23a0002229f7a
|
[] |
no_license
|
tustxk/AddOnRepo
|
995b980a9ec737e2c25bed423fc83f710c697e40
|
6b86a06cb37e6e10b4119584dd7311ebc2318e54
|
refs/heads/master
| 2022-10-08T21:34:34.632346
| 2016-10-28T09:48:01
| 2016-10-28T09:48:01
| 70,684,775
| 1
| 1
| null | 2022-10-01T16:27:13
| 2016-10-12T09:31:16
|
Python
|
UTF-8
|
Python
| false
| false
| 169
|
py
|
# Kodi/XBMC helper: opens the settings dialog for the iStream
# "Thevideome" channel add-on.
addon_id="script.icechannel.Thevideome.settings"
addon_name="iStream - Thevideome - Settings"
import xbmcaddon
addon = xbmcaddon.Addon(id=addon_id)
addon.openSettings()
|
[
"ke.xiao@netxeon.com"
] |
ke.xiao@netxeon.com
|
026929ec24e1f7081f6395d8e0f8da89463f35df
|
9733d4f36767ace1c9b6461961244377b0fc9e21
|
/Set Country on TimerA.py
|
36c94a4e7ee4fd9e3280883daf070dc704350e55
|
[] |
no_license
|
kshish/phantom
|
8ae08c40f8c180f28bfb7a4841332deeac699fd5
|
e99fbd0173210529174bb667b1dd01112c9528d1
|
refs/heads/main
| 2023-08-17T23:19:41.876473
| 2023-08-15T19:42:17
| 2023-08-15T19:42:17
| 158,777,184
| 6
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,017
|
py
|
"""
Sets the cn CEF field to country. Normally this might be set by a ip lookup action.
"""
import phantom.rules as phantom
import json
from datetime import datetime, timedelta
def on_start(container):
    """Playbook entry point: immediately run the add_country_code block."""
    phantom.debug('on_start() called')
    # call 'add_country_code' block
    add_country_code(container=container)
    return
def add_country_code(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Add a hard-coded country artifact to the container.

    Normally this value would come from an IP-lookup action; here it is
    fixed to "NK" in CEF field deviceCustomString1 under the
    "timera_country" label.
    """
    phantom.debug('add_country_code() called')

    # collect data for 'add_country_code' call
    parameters = []

    # build parameters list for 'add_country_code' call
    parameters.append({
        'container_id': "",
        'name': "Country",
        'contains': "",
        'source_data_identifier': "chris",
        'label': "timera_country",
        'cef_value': "NK",
        'cef_name': "deviceCustomString1",
        'cef_dictionary': "",
    })

    # Chains into retrieve_countries_list once the artifact is added.
    phantom.act("add artifact", parameters=parameters, assets=['phantom container assorted cruds'], callback=retrieve_countries_list, name="add_country_code")

    return
def retrieve_countries_list(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Continue to format_1 only if the 'Bad Nation States' custom list is non-empty."""
    phantom.debug('retrieve_countries_list() called')

    # collect filtered artifact ids for 'if' condition 1
    matched_artifacts_1, matched_results_1 = phantom.condition(
        container=container,
        action_results=results,
        conditions=[
            ["custom_list:Bad Nation States", "!=", ""],
        ],
        name="retrieve_countries_list:condition_1")

    # call connected blocks if filtered artifacts or results
    if matched_artifacts_1 or matched_results_1:
        format_1(action=action, success=success, container=container, results=results, handle=handle, filtered_artifacts=matched_artifacts_1, filtered_results=matched_results_1)

    return
def format_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Format the added artifact's country value into the named string "format_1"."""
    phantom.debug('format_1() called')

    template = """{0}"""

    # parameter list for template variable replacement
    parameters = [
        "add_country_code:artifact:*.cef.deviceCustomString1",
    ]

    phantom.format(container=container, template=template, parameters=parameters, name="format_1")

    return
def on_finish(container, summary):
    """Called after all actions complete; summary collection is stubbed out."""
    phantom.debug('on_finish() called')
    # This function is called after all actions are completed.
    # A summary of all the actions and/or all details of actions
    # can be collected here.

    # summary_json = phantom.get_summary()
    # if 'result' in summary_json:
    #     for action_result in summary_json['result']:
    #         if 'action_run_id' in action_result:
    #             action_results = phantom.get_action_results(action_run_id=action_result['action_run_id'], result_data=False, flatten=False)
    #             phantom.debug(action_results)
    return
|
[
"churyn@splunk.com"
] |
churyn@splunk.com
|
f5f2341e86aed45355014d8c3d539a2db0d20206
|
c92e99036c323d918da008b3a44638d71096e699
|
/root_project/urls.py
|
b000a7db11150ebe0e8a67c0e3b1a25e3fc7c518
|
[] |
no_license
|
yassinerahhaui/OnlineStoreBackend
|
c274853e3246333524423f26a7edf4652895c930
|
3d3ccb491cb3218ee0b56f584c2bbc84f127363c
|
refs/heads/main
| 2023-08-10T18:42:25.284987
| 2021-09-26T20:52:14
| 2021-09-26T20:52:14
| 410,339,004
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,059
|
py
|
"""root_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
# Route table: the Django admin plus the ProductApp routes at the site root.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('',include('ProductApp.urls',namespace='ProductApp')),
]
# Serve static and media files through Django (development setups; in
# production these are normally handled by the web server).
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"yassinerahhaoui12@gmail.com"
] |
yassinerahhaoui12@gmail.com
|
b0c48fdf943b96e8c06a8f538c857d9906b9c978
|
62e0b8df8b42a80947acbc629f2d263e3e3d19da
|
/app.py
|
f9c2b98719581e5dbcacab1a9212615af6ae0d3a
|
[] |
no_license
|
JCRamzz/ketogram
|
1583fd3bedacaafb914a52020b2be01c1616a5fd
|
009ca935950087640c4c887d762d95d4209af722
|
refs/heads/master
| 2020-05-09T16:12:21.074036
| 2019-04-14T04:51:02
| 2019-04-14T04:51:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 754
|
py
|
from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse
from tags import get_relevant_tags
# Flask application instance serving the Twilio SMS webhook.
app = Flask(__name__)
@app.route('/sms', methods=['POST'])
def sms_reply():
    """Answer an incoming Twilio SMS/MMS with tags for the attached image."""
    # TwiML response builder.
    resp = MessagingResponse()
    if request.form['NumMedia'] != '0':
        # At least one attachment: tag the first image in the message.
        resp.message(get_relevant_tags(request.form['MediaUrl0']))
    else:
        resp.message('Please send an image.')
    return str(resp)
if __name__ == '__main__':
    # Run the Flask development server.
    app.run()
|
[
"austrebertog@gmail.com"
] |
austrebertog@gmail.com
|
472ff802c1037734a5f3e314bbf726dac432fdc5
|
299de1f7c4fcf94c83664abdb1886dcde12e228f
|
/101/test-index.py
|
87916251a3ecdeb0ef9cdc1e66e726877c358247
|
[] |
no_license
|
abijr/master-python101
|
a57c5d255ab592ff17aa08e60fad688f3c30417b
|
4677d2a4796bcc7ffeb2e19b701286d1900d624b
|
refs/heads/master
| 2023-01-20T20:35:30.623563
| 2020-11-17T19:02:25
| 2020-11-17T19:02:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 964
|
py
|
# Loop asking the user whether to calculate again, accepting a Yes/No answer.
try:
    i = 1
    while i == 1:
        phrase = str(input("Do you want to Calculate again? "))
        # NOTE(review): `phrase.lower() in "yes"` is a substring test, so any
        # substring of "yes" (e.g. "e", "es", or the empty string) matches —
        # likely meant `phrase.lower() == "yes"`.  Same for the "no" branch.
        if phrase.lower() in "yes":
            print("YES was detected")
            i -= 1
        elif phrase.lower() in "no":
            print("NO was detected, bye bye.")
            i -= 1
        else:
            print("Invalid operation. Type Yes or No answer. ")
            i += 0
except:
    # NOTE(review): bare except hides every error, including
    # KeyboardInterrupt; narrow it to the exceptions actually expected.
    print("Valio Versh")
"""
try:
    phrase = "Thank you for using, "
    print(phrase)
    while phrase.lower() != "yes" or "no":
        phrase = str(input("Do you want to Calculate again? "))
        if phrase.lower() == "yes":
            print("YES was detected")
            phrase == "no"
        elif phrase.lower() == "no":
            print("NO was detected, bye bye.")
            phrase == "no"
        else:
            print("Invalid operation. Type Yes or No answer. ")
            phrase != "yes" and "no"
except:
    print("Valio Versh")
"""
|
[
"alex.israel.mtz@gmail.com"
] |
alex.israel.mtz@gmail.com
|
b23fb0ea79b091e827b32b87afc9de90eca3a541
|
a41bef47b72e7bc3a34d6de106d97805d54de51f
|
/main.py
|
ca9d8946b6794001fbc389d50c7c40b8a6b0ef46
|
[] |
no_license
|
samKM/hangman_game
|
7af8f642e53a328a5f034b0a05a420ced802a7d3
|
11af044c47b19124df4ea0ffa80b8228943cd6fb
|
refs/heads/master
| 2022-11-24T11:50:18.057021
| 2020-07-19T15:16:49
| 2020-07-19T15:16:49
| 280,496,833
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,017
|
py
|
# Inspired by the Hangman game.
#
# Author: Samuel K.
import random
import urllib.request
# Maps command codes the player may type to human-readable descriptions.
game_options = {"nw": "guess a New Word", "qg": "Quit Game", "he": "show Help message", "sw": "show Secret Word"}
# Initialize a new game.
def new_game(secret_word: str, number_of_tries: int, word_progress: str) -> list:
    """Start a fresh round: fetch a new secret word and reset progress.

    NOTE(review): all three parameters are overwritten immediately, so the
    passed-in values are ignored; the signature only mirrors the
    [secret_word, number_of_tries, word_progress] triple that is returned.
    """
    secret_word = get_secret_word()
    # One underscore per letter of the hidden word.
    word_progress = "".join(["_"] * len(secret_word))
    print("Guess the word: ", word_progress, end="\n")
    number_of_tries = 0
    return [secret_word, number_of_tries, word_progress]
# Verify User guess input is not empty or more than one character (except for game option commands).
def valid_input(guess: str) -> bool:
    """Return True when *guess* is a single character or a known command key."""
    if len(guess) < 1:
        print("Did you guess a character?")
        return False
    if len(guess) > 1 and guess not in game_options.keys():
        print("You entered too many characters.")
        return False
    return True
# Replace underscores with Guessed character
def update_progress(guess_word: str, guess: str, word_progress: str) -> str:
w_progress = list(word_progress)
for i, c in enumerate(guess_word):
if (c == guess) and (w_progress[i] == "_"):
w_progress[i] = guess
break
return "".join(w_progress)
# Check if User has guessed the secret word correctly and that there are no blank (underscores) spaces.
def completed(w_progress: str) -> bool:
    """Return True when no blank ("_") placeholders remain in the progress."""
    return "_" not in w_progress
# Generate a random Secret Word
def get_secret_word() -> str:
    """Download the FreeBSD word list and return one word chosen at random.

    Propagates urllib errors if the dictionary URL is unreachable.
    """
    words_url = "http://svnweb.freebsd.org/csrg/share/dict/words?view=co&content-type=text/plain"
    # `with` closes the HTTP response; the original leaked it.
    with urllib.request.urlopen(words_url) as response:
        words = response.read().decode().splitlines()
    # Bug fix: random.randint(0, len(words)) is inclusive on both ends, so
    # the original could index one past the end and raise IndexError.
    return random.choice(words)
# Display the Help Message
def help_msg():
    """Print the table of in-game commands and what each one does."""
    print("Enter a command from list bellow")
    # The second line below is part of the string literal, so it is left
    # unindented on purpose.
    help_header = """Command:\tDescription:
--------\t------------"""
    print(help_header, end="\n")
    # One row per command key, tab-separated to line up under the header.
    for key in game_options.keys():
        print("{}\t\t\t{}".format(key, game_options.get(key)), end="\n")
# Show at the end of each word play (or word reveal) to allow the user to exit or continue playing the game.
def exit_option(secret_word="", number_of_tries=0, word_progress=""):
    """Ask whether to play again; start a new round on yes, exit otherwise.

    Returns the fresh [secret_word, number_of_tries, word_progress] state
    produced by new_game() when the player continues.
    """
    answer = input("New Game? [y/n]").strip().lower()
    if answer in ("y", "yes"):
        return new_game(secret_word, number_of_tries, word_progress)
    exit(0)
if __name__ == '__main__':
    # Display the welcome message on game start
    initial_message = """
Welcome to the Py-Hangman Game.
For you to guess correctly, please enter one character at a time.
"""
    # Displayed when user guesses the secret word correctly.
    winning_message = """
CONGRATULATIONS, YOU GUESSED CORRECTLY!!!
"""
    print(initial_message, end="\n")
    print("Enter `he` for help.")
    # Initialize the Secret word.
    secret_word = get_secret_word()
    # Generate blank/underscore placeholders for each character in secret word
    # The `word_progress` variable will track user progress in guessing the secret word correctly.
    word_progress = "".join(["_"] * len(secret_word))
    print("Guess the word: ", word_progress, end="\n")
    # Statistics to show user how many attempts they made before getting the correct secret word.
    number_of_tries = 0
    # Main game play: outer loop restarts rounds, inner loop runs one round
    # until every blank in word_progress has been filled in.
    while True:
        while completed(word_progress) is False:
            guess = input("Your Guess: ").strip().lower()
            while not valid_input(guess):
                print(word_progress)
                guess = input("Your Guess: ").strip().lower()
            # Multi-character input is a game command, not a letter guess;
            # confirm it with the user before acting on it.
            if len(guess) > 1:
                user_affirmation = input("Do you want to {} ? [y/n]".format(game_options.get(guess))).strip().lower()
                if user_affirmation == "y" or user_affirmation == "yes":
                    if guess == "he":  # Show the Help Message
                        help_msg()
                    elif guess == "qg":  # Quit the Game
                        exit(0)
                    elif guess == "nw":  # Get a new Secret Word
                        secret_word, number_of_tries, word_progress = new_game(secret_word, number_of_tries,
                                                                               word_progress)
                        continue
                    elif guess == "sw":  # Show the Secret Word and give user option of continuing the game.
                        print("The Secret Word is '{}'; Number of tries = {}".format(secret_word, number_of_tries))
                        secret_word, number_of_tries, word_progress = exit_option()
                        continue
                    else:
                        pass
                else:
                    pass
                # Update number of tries to exclude help command entries
                # (this pre-decrement cancels the unconditional increment below).
                number_of_tries -= 1
            # update user guess progress
            word_progress = update_progress(secret_word.lower(), guess, word_progress)
            print(word_progress)
            number_of_tries += 1
        print(winning_message,
              "Secret word = {}; Your guess = {}; Number of tries = {}; Word Length = {}".format(secret_word,
                                                                                                word_progress,
                                                                                                number_of_tries,
                                                                                                len(secret_word)),
              end="\n")
        print("")
        # Allow the user to continue with the game after successfully guessing the secret word.
        secret_word, number_of_tries, word_progress = exit_option()
|
[
"karanjasamuel49@gmail.com"
] |
karanjasamuel49@gmail.com
|
424005c2fa2abfd76faec29f6fc8fdec1ca62428
|
7ac2139aad3bae34cd81236401c5d10051dde01e
|
/bikeshare.py
|
64212035267cf5743ef7c57860ee1ef8e622dc3c
|
[] |
no_license
|
tschwartz11/pdsnd_github
|
3df2c45b46f9f51d038ea7ffee42e9072c117f5e
|
8e1daa75227e164041343090fd51237580246876
|
refs/heads/master
| 2022-06-05T15:42:01.972702
| 2020-04-30T06:45:29
| 2020-04-30T14:58:06
| 260,051,455
| 0
| 0
| null | 2020-04-29T21:37:55
| 2020-04-29T21:37:54
| null |
UTF-8
|
Python
| false
| false
| 8,216
|
py
|
import time
import pandas as pd
import numpy as np
CITY_DATA = { 'chicago': 'chicago.csv',
'new york city': 'new_york_city.csv',
'washington': 'washington.csv' }
def get_filters():
    """
    Asks user to specify a city, month, and day to analyze.

    Returns:
        (str) city - name of the city to analyze
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    """
    print('Hello! Let\'s explore some US bikeshare data!')
    # Loop until the user names one of the three supported cities.
    while True:
        city = input('Which city would you like to filter the data by: chicago, new york city, or washington? ').lower()
        if city not in ("chicago", "new york city", "washington"):
            print("Please choose one of the three cities mentioned above.")
            continue
        else:
            break
    # Loop until the user picks "all" or a valid month name.
    # Prompt fixed: it previously said "day of the month" while listing months.
    while True:
        month = input("Which month would you like to filter by: all, january, february, march, april, may, june, july, august, september, october, november, december: ").lower()
        if month not in ("all", "january", "february", "march", "april", "may", "june", "july", "august", "september", "october", "november", "december"):
            print("Please choose one of the selections using the right format.")
            continue
        else:
            break
    # Loop until the user picks "all" or a valid day-of-week name.
    while True:
        day = input("Which day of the week would you like to filter by: all, monday, tuesday, wednesday, thursday, friday, saturday, sunday: ").lower()
        if day not in ("all", "monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"):
            print("Please choose one of the days of the week or all days of the week with the proper format. ")
            continue
        else:
            break
    print('-'*40)
    return city, month, day
def load_data(city, month, day):
    """
    Loads data for the specified city and filters by month and day if applicable.

    Args:
        (str) city - name of the city to analyze
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    Returns:
        df - Pandas DataFrame containing city data filtered by month and day
    """
    df = pd.read_csv(CITY_DATA[city])

    # convert the Start Time column to datetime
    df['Start Time'] = pd.to_datetime(df['Start Time'])

    # extract month and day of week from Start Time to create new columns
    df['month'] = df['Start Time'].dt.month
    # dt.day_name() replaces dt.weekday_name, which was removed from pandas.
    df['day_of_week'] = df['Start Time'].dt.day_name()

    # filter by month if applicable
    if month != 'all':
        # use the index of the months list to get the corresponding int
        months = ['january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december']
        month = months.index(month) + 1
        # filter by month to create the new dataframe
        df = df[df['month'] == month]

    # filter by day of week if applicable
    if day != 'all':
        # Bug fix: the filtered frame used to be assigned to a throwaway
        # variable `f`, so the day filter never actually took effect.
        df = df[df['day_of_week'] == day.title()]

    return df
def time_stats(df):
    """Displays statistics on the most frequent times of travel."""
    print('\nCalculating The Most Frequent Times of Travel...\n')
    start_time = time.time()

    # Most common month, from the precomputed 'month' column.
    print("The most common month:", df['month'].mode()[0])

    # Most common day of the week.
    print("The most common day of week:", df['day_of_week'].mode()[0])

    # Most common start hour; note this adds an 'hour' column to df as a
    # side effect, exactly like the original implementation.
    df['hour'] = df['Start Time'].dt.hour
    print("The most common start hour:", df['hour'].mode()[0])

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-'*40)
def station_stats(df):
    """Displays statistics on the most popular stations and trip."""
    print('\nCalculating The Most Popular Stations and Trip...\n')
    start_time = time.time()

    # Most commonly used start and end stations.
    print('The most common starting station:', df['Start Station'].mode()[0])
    print('The most common ending station:', df['End Station'].mode()[0])

    # Most frequent start/end pair: join the two names with '*' so each
    # pair counts as a single value, then split it back apart for display.
    paired = df['Start Station'] + "*" + df['End Station']
    top_pair = paired.value_counts().idxmax()
    print('Most frequent used combinations are:\n{} \nto\n{}'.format(top_pair.split('*')[0], top_pair.split('*')[1]))

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-'*40)
def trip_duration_stats(df):
    """Displays statistics on the total and average trip duration."""
    print('\nCalculating Trip Duration...\n')
    start_time = time.time()

    # Total travel time, reported in days (Trip Duration is in seconds).
    # Fix: the original re-assigned start_time here, resetting the timer so
    # the elapsed time printed below excluded the header work.
    total_travel = df['Trip Duration'].sum()
    print('The total travel time is:', total_travel/86400, 'days.')

    # Mean travel time, reported in minutes.
    mean_travel = df['Trip Duration'].mean()
    print('The average travel time is:', mean_travel/60, 'minutes.')

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-'*40)
def user_stats(df):
    """Displays statistics on bikeshare users."""
    print('\nCalculating User Stats...\n')
    start_time = time.time()

    # Counts of each user type; NaN entries get their own bucket.
    user_types = df['User Type'].value_counts(dropna=False)
    print('The following are the user count: {}\n'.format(user_types))

    # Gender counts -- some city files have no Gender column, so a missing
    # column is reported rather than raised.
    try:
        print('These are the gender counts: {}\n'.format(df['Gender'].value_counts()))
    except KeyError:
        print('There is no gender types supported with this selection.')

    # Earliest, most recent and most common birth years, each guarded the
    # same way against a missing 'Birth Year' column.
    try:
        print('The earliest year is:', df['Birth Year'].min())
    except KeyError:
        print('The earliest year is not available for this month.')
    try:
        print('The most recent year is:', df['Birth Year'].max())
    except KeyError:
        print('Most recent year is not available for this month.')
    try:
        print('Most common year is:', df['Birth Year'].mode()[0])
    except KeyError:
        print('There is not a most common year for this month.')

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-'*40)
def raw_view(df):
    """Show raw data five rows at a time, for as long as the user answers yes.

    Args:
        df - Pandas DataFrame to page through
    """
    raw_count = 0
    # Prompt typos fixed ("wan" -> "want", "linees" -> "lines").
    show_raw = input('Do you want to see five lines of raw data? Enter yes or no.\n ').lower()
    while show_raw == 'yes':
        print(df.iloc[raw_count : raw_count + 5])
        raw_count += 5
        show_raw = input('Would you like to see five more lines? Enter yes or no. ').lower()
def main():
    """
    Drive the interactive session: gather filters, load the data, print
    every statistics section, then offer a restart until the user declines.
    """
    while True:
        city, month, day = get_filters()
        df = load_data(city, month, day)

        # Same call order as before: time, station, duration, user, raw view.
        for section in (time_stats, station_stats, trip_duration_stats, user_stats, raw_view):
            section(df)

        restart = input('\nWould you like to restart? Enter yes or no.\n')
        if restart.lower() != 'yes':
            break


if __name__ == "__main__":
    main()
|
[
"ts2083@att.com"
] |
ts2083@att.com
|
0661e0edba59217cb141f3c37b121546ca3d25c9
|
80fd53a27d0c4b4053671a24678aeb67e7c67d04
|
/Digit_recognition_sci_kit_learn_Test.py
|
663d288c8a26a91d2bb193c7cec6ed1fd83bd111
|
[] |
no_license
|
S-shubham/Sci_Kit_learn-
|
39b065d03b99b52823b9d0aa69ca9406ac6d322a
|
9d1a98ee79b2913be5f8606175b58dff0dd4b2ea
|
refs/heads/master
| 2020-04-03T05:46:06.644526
| 2018-11-26T07:49:54
| 2018-11-26T07:49:54
| 155,055,781
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 367
|
py
|
#!/usr/bin/env python
# coding: utf-8
# Jupyter-notebook export: train an SVM classifier on the scikit-learn
# digits data set and predict the label of the final image.

# In[1]:
# Scratch cell left over from the notebook; has no effect on the model.
2+3

# In[3]:
from sklearn import datasets as ds

# In[5]:
# 8x8 grayscale digit images with their target labels.
digit=ds.load_digits()

# In[6]:
from sklearn import svm

# In[7]:
# Support-vector classifier with hand-picked gamma/C hyperparameters.
clf=svm.SVC(gamma=0.001,C=100.)

# In[10]:
# Train on every sample except the last one.
clf.fit(digit.data[:-1],digit.target[:-1])

# In[12]:
# Predict the held-out final sample...
clf.predict(digit.data[-1:])

# In[14]:
# ...and show its true label for comparison.
digit.target[-1:]

# In[ ]:
|
[
"noreply@github.com"
] |
noreply@github.com
|
430d6f6cc8946f4f5860ce89df1dfa1f2c8701bd
|
048b7b25e9fb1df918260bc716b9a81a26ee2797
|
/test/integration/stomp/publisher/create_request_test.py
|
bfcd29b588c3af44977f8425aff0621ed7d53043
|
[
"MIT"
] |
permissive
|
devopstoday11/beer-garden
|
996ae2ae167a22664a458a86004114bad6290cae
|
566fa3cd00d931fa18cbc399aa4f8af6177dd17d
|
refs/heads/master
| 2023-04-21T07:03:26.295320
| 2021-04-23T19:11:33
| 2021-04-23T19:11:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,334
|
py
|
import signal
import stomp
from brewtils.models import Operation, Request
from brewtils.schema_parser import SchemaParser
import pytest
import time
import json
from brewtils.errors import ValidationError
try:
from helper import delete_plugins
from helper.assertion import assert_system_running
from helper.plugin import (create_plugin, start_plugin, stop_plugin,
TestPluginV1, TestPluginV2,
TestPluginV1BetterDescriptions)
except:
from ...helper import delete_plugins
from ...helper.assertion import assert_system_running
from ...helper.plugin import (create_plugin, start_plugin, stop_plugin,
TestPluginV1, TestPluginV2,
TestPluginV1BetterDescriptions)
@pytest.fixture(scope="class")
def system_spec():
    # Class-scoped fixture: the system/command coordinates that every
    # generated request in this module targets.
    return {'system': 'echo', 'system_version': '3.0.0.dev0', 'instance_name': 'default',
            'command': 'say'}
class MessageListener(object):
    """Stomp listener that records whether a REQUEST_* Operation event arrived."""

    # Flipped to True once an Operation whose model name starts with
    # "REQUEST" is observed on the subscribed destination.
    create_event_captured = False

    def on_error(self, headers, message):
        """Log broker error frames."""
        print('received an error %s' % headers)

    def on_message(self, headers, message):
        """Inspect an incoming frame and flag REQUEST_* operation events."""
        try:
            if headers['model_class'] == 'Operation':
                parsed = SchemaParser.parse_operation(message, from_string=True)
                if parsed.model and parsed.model.name:
                    if parsed.model.name.startswith("REQUEST"):
                        print(message)
                        self.create_event_captured = True
                    else:
                        print(parsed.model.name)
            elif headers['model_class'] == 'error_message':
                print("Error Message Returned:", message)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; parse failures are still just logged.
            print("Error: unable to parse message:", message)
class TestPublisher(object):
    # Integration tests: a REQUEST_CREATE operation published on one
    # transport (HTTP or STOMP) must become observable on the other.

    def create_stomp_connection(self):
        """Creates the Connection class and closes when completed"""
        host_and_ports = [("localhost", 61613)]
        # 10 s outgoing heartbeat; no heartbeat expected from the broker.
        conn = stomp.Connection(host_and_ports=host_and_ports, heartbeats=(10000, 0))
        conn.connect(
            "beer_garden", "password", wait=True, headers={"client-id": "beer_garden"}
        )
        return conn
        # if conn.is_connected():
        #     conn.disconnect()

    def create_request(self, function):
        # Tag the generated request with the originating test name so
        # test_publish_create_request can find it again among all requests.
        request_model = self.request_generator.generate_request(parameters={"message": "test_string", "loud": True})
        request_model['metadata'] = {"generated-by": function}
        return request_model

    @pytest.mark.usefixtures('easy_client', 'request_generator')
    def test_listen_create_request(self):
        """Published the Request over HTTP and verifies of STOMP"""
        stomp_connection = self.create_stomp_connection()
        request_model = self.create_request("test_listen_create_request")
        sample_operation_request = Operation(
            operation_type="REQUEST_CREATE",
            model=request_model,
            model_type="Request",
        )
        listener = MessageListener()
        stomp_connection.set_listener('', listener)
        # Durable multicast subscription to the events topic.
        stomp_connection.subscribe(destination='Beer_Garden_Events', id='event_listener', ack='auto',
                                   headers={'subscription-type': 'MULTICAST',
                                            'durable-subscription-name': 'events'})
        # Publish over HTTP ...
        self.easy_client.forward(sample_operation_request)
        # ... then give the broker time to deliver the event over STOMP.
        time.sleep(10)
        assert listener.create_event_captured
        if stomp_connection.is_connected():
            stomp_connection.disconnect()

    @pytest.mark.usefixtures('easy_client', 'request_generator')
    def test_publish_create_request(self):
        """Published the Request over STOMP and verifies of HTTP"""
        stomp_connection = self.create_stomp_connection()
        request_model = self.create_request("test_publish_create_request")
        sample_operation_request = Operation(
            operation_type="REQUEST_CREATE",
            model=request_model,
            model_type="Request",
        )
        listener = MessageListener()
        stomp_connection.set_listener('', listener)
        stomp_connection.subscribe(destination='Beer_Garden_Events', id='event_listener', ack='auto',
                                   headers={'subscription-type': 'MULTICAST',
                                            'durable-subscription-name': 'events'})
        # Publish the serialized operation over STOMP.
        stomp_connection.send(
            body=SchemaParser.serialize_operation(sample_operation_request, to_string=True),
            headers={
                "model_class": sample_operation_request.__class__.__name__,
            },
            destination="Beer_Garden_Operations",
        )
        time.sleep(10)
        # Verify over HTTP: the tagged request must now exist server-side.
        requests = self.easy_client.find_requests()
        found_request = False
        print(len(requests))
        for request in requests:
            print(SchemaParser.serialize_request(request, to_string=True))
            if "generated-by" in request.metadata and request.metadata["generated-by"] == "test_publish_create_request":
                found_request = True
                break
        assert found_request
        assert listener.create_event_captured
        if stomp_connection.is_connected():
            stomp_connection.disconnect()
|
[
"5104941+TheBurchLog@users.noreply.github.com"
] |
5104941+TheBurchLog@users.noreply.github.com
|
c0d7b5c19139292f18d5335e11240ff1d3223dbe
|
3acabf7db0dd4bf7eb45343f9d08a39b22d03e2a
|
/practice/BST.py
|
e2eba25a077a39719d20b4a1fb375c4aa3cef095
|
[] |
no_license
|
kanishkaverma/python_data_science
|
f6b8c9d7a21f6b7219c043bce03a76e3ec7ba725
|
7f9e22319e03658e0d65f1629953a549f41a7087
|
refs/heads/main
| 2023-02-17T08:08:38.931941
| 2021-01-14T20:50:02
| 2021-01-14T20:50:02
| 325,627,998
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 761
|
py
|
class BST:
    """Minimal binary search tree; `head` is the root node."""

    def __init__(self, head):
        self.head = head

    def inorder(self, node):
        """Print the subtree under *node* in sorted (left, root, right) order."""
        if not node:
            return
        self.inorder(node.left)
        print(node.data, end=" ")
        self.inorder(node.right)

    def insert(self, node, val):
        """Insert *val* under *node*; return the (possibly new) subtree root.

        Values smaller than the node go left, everything else goes right.
        """
        if not node:
            return Node(val)
        if val < node.data:
            node.left = self.insert(node.left, val)
        else:
            node.right = self.insert(node.right, val)
        return node
class Node:
    """Single tree node holding a value and optional left/right children."""

    def __init__(self, data=None, left=None, right=None):
        self.data, self.left, self.right = data, left, right
# Demo: build a tree rooted at 5 with children 4 and 6, then print it
# in order (expected output: "4 5 6 ").
tree = BST(Node(5))
tree.insert(tree.head, 4)
tree.insert(tree.head, 6)
tree.inorder(tree.head)
|
[
"kanishkaverma@users.noreply.github.com"
] |
kanishkaverma@users.noreply.github.com
|
57adbfd2865b7cf8540897ff6ca3685bbaf4dfb0
|
164457b943d0b426e9a5e2eb57779e4e37f2d1bb
|
/the_tale/accounts/workers/accounts_manager.py
|
84d4482f67e8a0b8ffab01b81c7cb415dffd6c34
|
[
"BSD-2-Clause-Views"
] |
permissive
|
lshestov/the-tale
|
64334fd99a442ad736d9e8a38e8f0fb52d0ebab6
|
6229edfec6420307975269be9926c68ecdefb930
|
refs/heads/master
| 2021-01-18T08:38:44.147294
| 2015-10-27T18:43:10
| 2015-10-27T18:43:10
| 50,228,827
| 0
| 0
| null | 2016-01-23T07:38:54
| 2016-01-23T07:38:54
| null |
UTF-8
|
Python
| false
| false
| 3,166
|
py
|
# coding: utf-8
import time
import datetime
from dext.settings import settings
from the_tale.common.utils.workers import BaseWorker
from the_tale.common import postponed_tasks
from the_tale.accounts.prototypes import AccountPrototype, RandomPremiumRequestPrototype
from the_tale.accounts.conf import accounts_settings
class Worker(BaseWorker):
    """Accounts-manager worker: runs postponed tasks, premium-expiry
    notifications and random premium request processing off the command
    queue."""

    # Seconds to wait for the next command before process_no_cmd() runs.
    GET_CMD_TIMEOUT = 60

    def clean_queues(self):
        # On top of the base cleanup, also purge this worker's stop queue.
        super(Worker, self).clean_queues()
        self.stop_queue.queue.purge()

    def initialize(self):
        self.initialized = True
        # Re-queue any postponed tasks left over from a previous run.
        postponed_tasks.PostponedTaskPrototype.reset_all()
        self.logger.info('ACCOUNT_MANAGER INITIALIZED')

    def process_no_cmd(self):
        # is send premium expired notifications needed:
        # at most once per ~23.5 h, and only within the configured hour window.
        if (time.time() - float(settings.get(accounts_settings.SETTINGS_PREV_PREIMIUM_EXPIRED_NOTIFICATION_RUN_TIME_KEY, 0)) > 23.5*60*60 and
            accounts_settings.PREMIUM_EXPIRED_NOTIFICATION_RUN_TIME <= datetime.datetime.now().hour <= accounts_settings.PREMIUM_EXPIRED_NOTIFICATION_RUN_TIME+1):
            settings[accounts_settings.SETTINGS_PREV_PREIMIUM_EXPIRED_NOTIFICATION_RUN_TIME_KEY] = str(time.time())
            self.run_send_premium_expired_notifications()
            return

        self.run_random_premium_requests_processing()

    def run_send_premium_expired_notifications(self):
        AccountPrototype.send_premium_expired_notifications()

    def run_random_premium_requests_processing(self):
        # Drain unprocessed random premium requests until none remain, or
        # stop early when one fails to process.
        while True:
            request = RandomPremiumRequestPrototype.get_unprocessed()
            if request is None:
                return

            self.logger.info('process random premium request %d' % request.id)

            if not request.process():
                self.logger.info('request %d not processed' % request.id)
                return
            else:
                self.logger.info('request %d processed' % request.id)

    def cmd_task(self, task_id):
        # Enqueue a 'task' command for this worker to process.
        return self.send_cmd('task', {'task_id': task_id})

    def process_task(self, task_id):
        task = postponed_tasks.PostponedTaskPrototype.get_by_id(task_id)
        task.process(self.logger)
        task.do_postsave_actions()

    def cmd_run_account_method(self, account_id, method_name, data):
        # Enqueue a request to call method_name(**data) on one account.
        return self.send_cmd('run_account_method', {'account_id': account_id,
                                                    'method_name': method_name,
                                                    'data': data})

    def process_run_account_method(self, account_id, method_name, data):
        if account_id is not None:
            account = AccountPrototype.get_by_id(account_id)
            getattr(account, method_name)(**data)
            account.save()
        else:
            # here we can process classmethods, if they appear in future
            pass

    def cmd_stop(self):
        return self.send_cmd('stop')

    def process_stop(self):
        self.initialized = False
        self.stop_required = True
        # Acknowledge the stop on the dedicated stop queue before exiting.
        self.stop_queue.put({'code': 'stopped', 'worker': 'accounts_manager'}, serializer='json', compression=None)
        self.logger.info('ACCOUNTS MANAGER STOPPED')
|
[
"a.eletsky@gmail.com"
] |
a.eletsky@gmail.com
|
0ca21397231f4da1931b6ef1d6d598b2818e374d
|
9cf16029d98395866ca28858881cccee41579e5d
|
/shrink_machine_ros/scripts/get_transform.py
|
9805053f85f55e4e011ec468165a53abfdeac63e
|
[] |
no_license
|
marek-kolodziejczak98/ProjektTS2021
|
52f7b7ebce8c5cad40b4e30553a038b521871830
|
1f6b0f634afeb1142d57175f6b55b81cd00b4f2c
|
refs/heads/main
| 2023-04-16T02:35:17.357725
| 2021-04-20T21:37:33
| 2021-04-20T21:37:33
| 344,146,089
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 716
|
py
|
#!/usr/bin/env python
"""Continuously look up and print the /base_link -> /gripper transform."""
import roslib
#roslib.load_manifest('tf_tutorial')
import rospy
import math
import tf
import geometry_msgs.msg
import tf2_msgs.msg
import turtlesim.srv

if __name__ == '__main__':
    rospy.init_node('tf_listener')
    listener = tf.TransformListener()
    # Block (up to 4 s) until the base_link -> gripper transform exists.
    listener.waitForTransform('/base_link', '/gripper', rospy.Time(), rospy.Duration(4.0))
    rate = rospy.Rate(10.0)  # poll at 10 Hz
    while not rospy.is_shutdown():
        try:
            # rospy.Time(0) requests the latest available transform.
            (trans,rot) = listener.lookupTransform('/base_link', '/gripper', rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            # Transform momentarily unavailable; retry on the next tick.
            continue
        print(trans)
        print(rot)
        rate.sleep()
|
[
"radekgoralewski@gmail.com"
] |
radekgoralewski@gmail.com
|
7cf3515e7f6034a2c7c8f4d75546e29fa79cc092
|
1e58c8aaff5bb1273caaa73c49c07fd61ebd4439
|
/wavencoder/__init__.py
|
ff0dd47ac2d71605c97213e27e6d38be784f8314
|
[
"MIT"
] |
permissive
|
samsudinng/wavencoder
|
9870d6dd86cb126b170c9a6af93acee4acbbd633
|
a64e16444ed25b5491fd2ba0c9f1409671e12e5e
|
refs/heads/master
| 2023-03-01T22:42:42.477643
| 2021-02-08T11:23:00
| 2021-02-08T11:23:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,024
|
py
|
__version__ = '0.0.6'
from wavencoder.models.wav2vec import Wav2Vec
from wavencoder.models.wav2vec2 import Wav2Vec2Model
from wavencoder.models.sincnet import SincNet, SincConvLayer
from wavencoder.models.lstm_classifier import LSTM_Classifier
from wavencoder.models.lstm_classifier import LSTM_Attn_Classifier
from wavencoder.models.baseline import CNN1d
from wavencoder.models.attention import DotAttention, SoftAttention
from wavencoder.models.rawnet import RawNet2Model
from wavencoder.trainer.classification_trainer import train
from wavencoder.trainer.classification_trainer import test_predict_classifier
from wavencoder.trainer.classification_trainer import test_evaluate_classifier
from wavencoder.transforms.noise import AdditiveNoise
from wavencoder.transforms.speed import SpeedChange
from wavencoder.transforms.clip import Clipping
from wavencoder.transforms.pad_crop import Pad, Crop, PadCrop
from wavencoder.transforms.reverberation import Reverberation
from wavencoder.transforms.compose import Compose
|
[
"shangethrajaa@gmail.com"
] |
shangethrajaa@gmail.com
|
ac47410c081854dcc9bc0251f7925ae5e152c61f
|
24fe1f54fee3a3df952ca26cce839cc18124357a
|
/servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/fv/poddhcpserverinfo.py
|
578744b8cd584e1c2bc24ce6e7cb39c73bd5bc04
|
[] |
no_license
|
aperiyed/servicegraph-cloudcenter
|
4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff
|
9eb7975f2f6835e1c0528563a771526896306392
|
refs/heads/master
| 2023-05-10T17:27:18.022381
| 2020-01-20T09:18:28
| 2020-01-20T09:18:28
| 235,065,676
| 0
| 0
| null | 2023-05-01T21:19:14
| 2020-01-20T09:36:37
|
Python
|
UTF-8
|
Python
| false
| false
| 7,213
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class PodDhcpServerInfo(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.fv.PodDhcpServerInfo")
meta.moClassName = "fvPodDhcpServerInfo"
meta.rnFormat = "podDhcpServerInfo-%(nodeId)s"
meta.category = MoCategory.REGULAR
meta.label = "Dhcp Server info of the current POD"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.fv.PodConnPDef")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Def")
meta.rnPrefixes = [
('podDhcpServerInfo-', True),
]
prop = PropMeta("str", "PodDhcpServerDn", "PodDhcpServerDn", 47391, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("PodDhcpServerDn", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dhcpIssues", "dhcpIssues", 47392, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("incorrect-pod-dhcp-server-configuration", "nodeid-of-fabricpoddhcpserver-configured-is-not-a-vtor", 1)
prop._addConstant("none", "none", 0)
meta.props.add("dhcpIssues", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "nodeId", "nodeId", 44472, PropCategory.REGULAR)
prop.label = "node id of Dhcp server"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 16000)]
prop.defaultValue = 1
prop.defaultValueStr = "1"
meta.props.add("nodeId", prop)
prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerKey", prop)
prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerTag", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "serverType", "serverType", 44473, PropCategory.REGULAR)
prop.label = "Dhcp server Type Primary/Secondary"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("primary", "primary", 1)
prop._addConstant("secondary", "secondary", 2)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("serverType", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "nodeId"))
def __init__(self, parentMoOrDn, nodeId, markDirty=True, **creationProps):
    # nodeId is the naming property (isNaming in the meta above); it becomes
    # part of this managed object's relative name (rn).
    namingVals = [nodeId]
    Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"rrishike@cisco.com"
] |
rrishike@cisco.com
|
3b730197144fb7ea0165d2a58ba7c067ef881f75
|
30f22113cfb7ccf8a41d7306dd2cb4a0a1fadcfd
|
/2017/20170802 - ocr bancario - python/test_problem.py
|
2abede7592d765b822ef77c758830efc42b06939
|
[] |
no_license
|
dojorio/dojo-centro
|
bb6b120ec13c405a870053ec018006ca6fe08bc0
|
ba33a57d49b829dab7c4431aaf1294affe8e1e38
|
refs/heads/master
| 2021-05-22T11:36:38.940437
| 2020-05-16T22:20:28
| 2020-05-16T22:20:28
| 2,847,726
| 62
| 12
| null | 2020-05-16T22:20:29
| 2011-11-25T04:51:49
|
PHP
|
UTF-8
|
Python
| false
| false
| 2,185
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# http://www.dojopuzzles.com/problemas/exibe/ocr-bancario/
# _ _ _ _ _ _ _
# | _| _| |_| |_ |_ | |_| |_|
# | |_ _| | _| |_| | |_| _|
import unittest
from problem import *
class TestProblem(unittest.TestCase):
    """Bank-OCR kata tests: each digit is a 3-row block of ' ', '_' and '|'.

    ocr(source) must decode a single 3-row digit; parse(source) must split a
    multi-digit 3-row strip into per-digit 3-row blocks.
    """
    def test_1(self):
        source = [
            "   ",
            "  |",
            "  |"
        ]
        self.assertEqual(ocr(source), 1)
    def test_2(self):
        source = [
            " _ ",
            " _|",
            "|_ "
        ]
        self.assertEqual(ocr(source), 2)
    def test_3(self):
        source = [
            " _ ",
            " _|",
            " _|"
        ]
        self.assertEqual(ocr(source), 3)
    def test_4(self):
        source = [
            "   ",
            "|_|",
            "  |"
        ]
        self.assertEqual(ocr(source), 4)
    def test_5(self):
        source = [
            " _ ",
            "|_ ",
            " _|"
        ]
        self.assertEqual(ocr(source), 5)
    def test_6(self):
        source = [
            " _ ",
            "|_ ",
            "|_|"
        ]
        self.assertEqual(ocr(source), 6)
    def test_7(self):
        source = [
            " _ ",
            "  |",
            "  |"
        ]
        self.assertEqual(ocr(source), 7)
    def test_8(self):
        source = [
            " _ ",
            "|_|",
            "|_|"
        ]
        self.assertEqual(ocr(source), 8)
    def test_9(self):
        source = [
            " _ ",
            "|_|",
            " _|"
        ]
        self.assertEqual(ocr(source), 9)
    def test_0(self):
        source = [
            " _ ",
            "| |",
            "|_|"
        ]
        self.assertEqual(ocr(source), 0)
    def test_10(self):
        # Two digits ("1" then "0") in one strip: parse must split them into
        # separate 3-row blocks before ocr can be applied per digit.
        source = [
            "    _ ",
            "  | | |",
            "  | |_|"
        ]
        self.assertEqual(parse(source),
            [
                [
                    "   ",
                    "  |",
                    "  |"
                ],
                [
                    " _ ",
                    "| |",
                    "|_|"
                ]
            ]
        )
        #self.assertEqual(ocr(source), 10)
#self.assertEqual(ocr(source), 10)
# Run the suite with unittest's auto-discovery when executed as a script.
if __name__ == "__main__":
    unittest.main()
|
[
"dojo-rio@googlegroups.com"
] |
dojo-rio@googlegroups.com
|
147fa6ee422999deb6e56fd3b135c96d12daf294
|
c84a503f851b6e7b618906257107aae61580627f
|
/client.py
|
c5bf1adf5dcfd6998d42a4c8124fb311ac8308f4
|
[] |
no_license
|
PetrutiuPaul/ConferenceRoomManager
|
f20e88fbf007a3bfae094a00721f166b3efa71be
|
abf506bed01d3b5defc998b36a0019457bd5266c
|
refs/heads/master
| 2021-08-15T01:53:46.215588
| 2017-11-17T06:18:19
| 2017-11-17T06:18:19
| 108,638,129
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,394
|
py
|
'''
Created on Oct 28, 2016
@author: Bogdan Boboc (bogdanboboc97@gmail.com) && Florin Tamas (tamasflorin@live.com)
'''
import requests
import RPi.GPIO as GPIO
import time
import sys
import subprocess
from socket import *
import Adafruit_DHT as dht
# Input pin assignments.
INPUT_GPIO = 37  # PIR motion sensor (GPIO.BOARD numbering, see setmode below)
INPUT_TEMPERATURE_GPIO = 23  # AM2302 sensor pin for Adafruit_DHT -- NOTE(review): Adafruit_DHT uses BCM numbering; confirm pin
# GPIO configuration
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
GPIO.setup(INPUT_GPIO, GPIO.IN)
#GPIO.setup(INPUT_TEMPERATURE_GPIO, GPIO.IN)
# How long (seconds) after the last motion the room still counts as occupied.
DEFAULT_TIME_OUT = 120 # in seconds
# Room name, filled in from argv in the __main__ block below.
ROOM_NAME = ''
# Server endpoint that receives occupancy updates.
CONNECTION_STRING= 'http://10.5.5.25:5000/update_movement'
# VIDEO_PATH
#VIDEO_PATH = "my_video.avi"
# Weather helper added late in the project; kept for the final demo.
def get_weather():
    """Return (humidity, temperature) read from the AM2302 sensor.

    Delegates to Adafruit_DHT's read_retry, which retries until it gets a
    valid reading (or gives up and returns Nones).
    """
    return dht.read_retry(dht.AM2302, INPUT_TEMPERATURE_GPIO)
def get_image_data():
    """Capture a webcam snapshot with fswebcam and return it base64-encoded.

    Blocks until fswebcam has finished writing 1.jpg in the working
    directory, then reads and encodes the file.
    """
    p = subprocess.Popen(("fswebcam", "-q", "-r 640x480", "1.jpg"))
    p.wait()
    # Context manager guarantees the handle is closed even if read() raises
    # (the original left the file open on error).
    with open("1.jpg", "rb") as f:
        byteData = f.read()
    # NOTE: str.encode('base64', ...) is Python 2 only, consistent with the
    # rest of this script.
    byteData = str(byteData).encode('base64', 'strict')
    return byteData
def send_message(status):
    """POST the room's occupancy status plus sensor data to the server.

    status: occupancy flag (0 free / 1 taken); sent as a string along with
    humidity, temperature and a base64 webcam frame.
    """
    h, t = get_weather()
    humidity = str(h)
    temperature = str(t)
    image = get_image_data()
    dictionaryToSend = {'name': ROOM_NAME, 'status': str(status),
                        'image': str(image), 'humidity': humidity,
                        'temperature': temperature
                        }
    sendResult = requests.post(CONNECTION_STRING, json=dictionaryToSend)
    # Fixed typo in the log message ("Reponse" -> "Response").
    print('Response from server:', sendResult.text)
def send_movement():
    """Poll the PIR sensor forever and report occupancy changes to the server.

    The room counts as occupied while motion was detected within the last
    DEFAULT_TIME_OUT seconds; the current state is also re-sent at least
    every 5 seconds as a heartbeat.
    """
    lastMovement = 0
    lastRoomState = 0
    start = time.time()
    send_message(0)  # initially report the room as free
    while True:
        isMoving = GPIO.input(INPUT_GPIO)
        if isMoving == 1:
            lastMovement = time.time()
        isTaken = (time.time() - lastMovement) <= DEFAULT_TIME_OUT
        if lastRoomState != isTaken or time.time() - start >= 5:
            start = time.time()
            lastRoomState = isTaken
            try:
                send_message(int(isTaken))
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit and made the loop impossible to stop with Ctrl-C.
            except Exception:
                print("Got an exception!Retrying...")
if __name__ == "__main__":
    # Exactly one CLI argument is required: the room name to report under.
    if len(sys.argv) != 2:
        print("python client.py -ROOM_NAME")
    else:
        ROOM_NAME = sys.argv[1]
        send_movement()
|
[
"paul970410@gmail.com"
] |
paul970410@gmail.com
|
8bea41c46829027576c6822593b56edbc8748d46
|
739373ca3a5fe5bc9b495b040a96ec5653a1ba45
|
/Introdução a Ciencia da Computação com Python - PII/w3_Pytest_Cubo.py
|
1c8a31d0d81f80833211a98301f05bfee6d7a3f9
|
[] |
no_license
|
emanuelgustavo/pythonscripts
|
e7e9999ad91b4c97d13ade81d2907f596479863f
|
4ac7c1522407602406ce0b875493daefdcacc8b9
|
refs/heads/master
| 2020-06-25T11:54:05.638587
| 2020-03-05T10:18:04
| 2020-03-05T10:18:04
| 199,301,121
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 421
|
py
|
import pytest
import w3_Cubo
class Test_Cubo:
    """Pytest suite for w3_Cubo.Cubo.calcula_cubo (cube of an integer)."""
    @pytest.fixture
    def cubo(self):
        # Fresh Cubo instance injected into each test invocation.
        return w3_Cubo.Cubo()
    # Each (entrada, esperado) pair runs as its own test case.
    @pytest.mark.parametrize('entrada, esperado', [
        (0,0),
        (1,1),
        (2,8),
        (-1,-1),
        (-2, -8),
        (10, 1000),
        (-3, -27)
    ])
    def test_cubo(self, cubo, entrada, esperado):
        assert cubo.calcula_cubo(entrada) == esperado
|
[
"emanuelgustavovierne@gmail.com"
] |
emanuelgustavovierne@gmail.com
|
9c7e7b63521c3bf5d1e627c65dd8859b9c9aa682
|
60991b8e8a4a4d09f27cd45b77a1fa087edabf83
|
/sz_c6_svm.py
|
cac0fb0d3cd26ba38a022ce776434770de9d540f
|
[] |
no_license
|
yueqiyisheng/-_-
|
20474877cdcbea46620054cd8bbffccf983003d7
|
fa9e9c8c7e6eebc70c9bad9ed8fc426ec03f8245
|
refs/heads/master
| 2021-04-15T15:26:11.446435
| 2018-03-06T05:41:37
| 2018-03-06T05:41:37
| 126,168,520
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,692
|
py
|
# 机器学习实战 学习记录
# Chapter 6 支持向量机 SVM
# SVM本身是二分类问题,对多类问题需要修改代码
# coding='UTF-8'
'''
基于最大间隔进行分类
思路:先找到具有最小间隔的数据点,然后对该间隔进行最大化
'''
'''
SVM的主要任务就是找到那些使得目标函数成立的alpha
SMO(序列最小化,坐标上升法的进步版)就是完成上述任务的算法,即主要来训练SVM
1)简化版SMO:选择alpha对时,首先在数据集上遍历每一个alpha,然后在剩下的alpha集合中随机选择另一个alpha
2)完整版:外循环确定要优化的最佳alpha对
'''
# 简化版SMO
## SMO辅助函数
#读入数据
from numpy import *
def loaddataset(filename):
    """Read a tab-separated data file: two feature columns, then the label.

    Returns (dataset, labelset) where dataset is a list of [x1, x2] float
    pairs and labelset a list of float labels.
    """
    dataset = []; labelset = []
    # 'with' closes the handle even if a line fails to parse (the original
    # leaked the open file).
    with open(filename) as fr:
        for line in fr.readlines():
            items = line.strip().split('\t')
            dataset.append([float(items[0]), float(items[1])])
            labelset.append(float(items[2]))
    return dataset, labelset
# Random index helper for the simplified SMO's second-alpha pick.
def selectrand(i, m):
    """Return a random integer index in [0, m) that differs from i."""
    while True:
        candidate = int(random.uniform(0, m))
        if candidate != i:
            return candidate
# Alpha values live in the box constraint 0 <= alpha <= C.
def clipalpha(aj, H, L):
    """Clamp the candidate alpha value aj to the feasible interval [L, H]."""
    if aj > H:
        return H
    if aj < L:
        return L
    return aj
'''
简化版SMO伪代码:
创建一个alpha向量,将其初始化为0向量
当迭代次数小于最大迭代次数(外循环):
对数据集中的每个数据向量(内循环):【?什么数据向量】
如果该数据向量可以被优化:
随机选择另外一个数据向量
同时优化这两个向量
如果这两个向量均不能被优化,退出内循环
如果所有向量都没被优化,增加迭代次数,继续下一次循环
'''
# 简化版SMO算法【???理论推导??】
def amosimple(dataset,labelset,C,toler,maxiter): # inputs: data, labels, box constant C, KKT tolerance, max passes
    """Simplified SMO: return (b, alpha) for a linear soft-margin SVM.

    Stops after `maxiter` consecutive full passes over the data during
    which no alpha pair was changed.
    """
    datamat = mat(dataset);labelmat = mat(labelset).transpose() # to numpy matrices; labelmat is an m x 1 column
    b=0;m,n=shape(datamat)
    alpha = mat(zeros((m,1))) # one Lagrange multiplier per training sample, all start at 0
    iter = 0 # consecutive full passes with no alpha change
    while iter < maxiter: # outer loop
        alphapairchanged = 0 # did any alpha pair get optimized during this pass?
        for i in range(m): # sequential sweep over the samples
            fxi = float(multiply(alpha,labelmat).T*(datamat*datamat[i,:].T))+b # prediction f(xi) = w.T*xi + b, w = sum(alpha*y*x)
            ei = fxi - float(labelmat[i]) # prediction error for sample i
            # Optimize alpha_i only if it violates the KKT conditions by more
            # than toler AND is not already clipped to the boundary (0 or C),
            # since boundary alphas cannot move further in that direction.
            if ((labelmat[i]*ei < -toler) and (alpha[i]<C)) or \
               ((labelmat[i]*ei>toler) and (alpha[i]>0)):
                # Pick the second alpha uniformly at random (the "simplified" part)
                j = selectrand(i,m)
                fxj = float(multiply(alpha,labelmat).T*(datamat*datamat[j,:].T))+b
                ej = fxj - labelmat[j] # error for the partner sample j
                alphaiold = alpha[i].copy(); alphajold = alpha[j].copy() # keep old values (copy, not alias)
                # Box bounds keeping alpha_j in [0, C] given the equality constraint
                if (labelmat[i] != labelmat[j]):
                    L = max(0,alpha[j]-alpha[i])
                    H = min(C,C+alpha[j]-alpha[i])
                else:
                    L = max(0,alpha[j]+alpha[i]-C)
                    H = min(C,alpha[j]+alpha[i])
                if L == H: print('L = H');continue # degenerate box: skip to the next i
                # eta is the second derivative of the objective along the constraint line
                eta = 2.0*datamat[i,:]*datamat[j,:].T-datamat[i,:]*datamat[i,:].T-datamat[j,:]*datamat[j,:].T # 2xixj-xi^2-xj^2
                if eta >= 0: print('eta>=0');continue
                alpha[j] -= labelmat[j]*(ei - ej)/eta
                alpha[j] = clipalpha(alpha[j],H,L)
                if (abs(alpha[j]-alphajold) < 0.00001): print('j not moving enough'); continue # step too small to matter
                alpha[i] += labelmat[j]*labelmat[i]*(alphajold-alpha[j]) # alpha_i moves by the same amount, opposite direction
                b1 = b-ei-labelmat[i]*(alpha[i]-alphaiold)*datamat[i,:]*datamat[i,:].T - labelmat[j]*(alpha[j]-alphajold)*datamat[i,:]*datamat[j,:].T
                b2 = b-ej-labelmat[i]*(alpha[i]-alphaiold)*datamat[i,:]*datamat[j,:].T - labelmat[j]*(alpha[j]-alphajold)*datamat[j,:]*datamat[j,:].T
                if (0 < alpha[i]) and (C > alpha[i]): b=b1
                elif (0 < alpha[j]) and (C > alpha[j]): b= b2
                else: b =(b1+b2)/2.0
                # Reaching here without a `continue` means a pair was changed
                alphapairchanged += 1
                print('iter: %d i: %d, pairs changed %d' % (iter,i,alphapairchanged))
        if alphapairchanged == 0: iter +=1
        else: iter = 0 # any change resets the convergence counter
        print('iteration number: %d' % (iter))
    return b,alpha
'''
import time
start = time.clock()
filename = r'.\data\testSet.txt'
dataset,labelset = sz_c5_svm.loaddataset(filename)
b,alpha = sz_c5_svm.amosimple(dataset,labelset,0.6,0.001,40)
end = time.clock()
print('running time: %s seconds.' % (end-start))
'''
# alpha中大多数为0,仅有几个是不为0的数 alpha[alpha>0] 仅对numpy类型有用的过滤实例
# 不为0的alpha个数即为支持向量的个数
'''
完整版与简化版的主要不同:
(1)第一个alpha的选取:在两种方式之间交替,① 是在所有数据集上进行单遍扫描;② 是在非边界【即不为0或C】alpha中实现单遍扫描
②中需要建立非边界alpha的列表,同时跳过那些已知不会改变的alpha值
(2)第二个alpha的选取:通过“最大化步长”来选取,建立一个全局的缓存来保存alpha_j 对应的误差值,从中选择使得步长或者说ei-ej最大的值
'''
# 完整版SMO的支持函数
# 作为一个数据结构来使用对象,将值传给函数时,可以通过将所有数据移到一个结构中实现
# 全局的一个结构,函数之间不需要传
# Shared state for the full Platt SMO: bundling everything in one object
# avoids threading many parameters through every helper function.
class optstruct:
    def __init__(self,datamat,labelmat,C,toler,ktup):
        """Hold data, hyperparameters, multipliers, error cache and kernel matrix.

        ktup: kernel spec tuple, e.g. ('lin', 0) or ('rbf', sigma).
        """
        self.x = datamat          # m x n training matrix
        self.label = labelmat     # m x 1 label column (+1/-1)
        self.c = C                # box constraint
        self.tol = toler          # KKT violation tolerance
        self.m = list(shape(datamat))[0]
        self.alpha = mat(zeros((self.m,1)))
        self.b = 0
        self.ecache = mat(zeros((self.m,2))) # error cache: col 0 = valid flag, col 1 = cached error value
        self.K = mat(zeros((self.m,self.m)))
        # Precompute the full kernel matrix column by column.
        for i in range(self.m):
            self.K[:,i] = kerneltrans(self.x,self.x[i,:],ktup)
# Raw (non-cached, non-kernel) error computation.
def calcek(os, k):
    """Return f(x_k) - y_k: the SVM's prediction error for sample k."""
    prediction = float(multiply(os.alpha, os.label).T * (os.x * os.x[k, :].T)) + os.b
    return prediction - float(os.label[k])
# Heuristic choice of the second alpha for the inner loop.
def selectj(i,os,ei):
    """Pick j maximizing |ei - ej| (max step size); random on the first pass.

    Returns (j, ej). Also marks sample i's error-cache entry valid.
    """
    maxk = -1; maxdeltae = 0; ej = 0
    os.ecache[i] = [1,ei]
    validecachelist = nonzero(os.ecache[:,0].A)[0] # indices with a valid cached error (.A converts matrix -> array)
    if (len(validecachelist)) > 1:
        # Scan the cache for the j with the largest error gap to i.
        for k in validecachelist:
            if k == i: continue
            # ek = calcek(os,k)
            ek = calcekk(os,k)
            deltae = abs(ei-ek)
            if (deltae > maxdeltae):
                maxk = k; maxdeltae = deltae; ej = ek
        return maxk,ej
    else: # first pass: no usable cache yet, fall back to a random j
        j = selectrand(i,os.m)
        # ej = calcek(os,j)
        ej = calcekk(os,j)
        return j,ej
def updateek(os,k):
    """Recompute sample k's error and mark its cache entry as valid."""
    # ek = calcek(os,k)
    ek = calcekk(os,k)
    os.ecache[k] = [1,ek] # flag 1 = valid; refreshed whenever alpha_k changes
# Inner loop: optimize one alpha pair (non-kernel variant; innerlk below is
# the kernel version actually used by smoP).
def innerl(i,os):
    """Try to optimize the pair (alpha_i, alpha_j); return 1 if changed, else 0."""
    # ei = calcek(os,i)
    ei = calcekk(os,i)
    # Proceed only if alpha_i violates KKT beyond tolerance and is not clipped
    # to the boundary (0 or C).
    if ((os.label[i]*ei < -os.tol) and (os.alpha[i] < os.c)) or ((os.label[i]*ei > os.tol) and (os.alpha[i] > 0)):
        j,ej = selectj(i,os,ei)
        alphaiold = os.alpha[i].copy(); alphajold = os.alpha[j].copy(); # .copy() is essential: without it we would only alias the matrix entry
        # Box bounds keeping alpha_j in [0, C] under the equality constraint
        if (os.label[i] != os.label[j]):
            L = max(0,os.alpha[j] - os.alpha[i])
            H = min(os.c, os.c+os.alpha[j] - os.alpha[i])
        else:
            L = max(0,os.alpha[j] + os.alpha[i] - os.c)
            H = min(os.c, os.alpha[j] + os.alpha[i])
        if L == H: print('L=H'); return 0
        eta = 2.0*os.x[i,:]*os.x[j,:].T-os.x[i,:]*os.x[i,:].T-os.x[j,:]*os.x[j,:].T # 2xixj-xi^2-xj^2
        if eta >= 0: print('eta>=0');return 0
        os.alpha[j] -= os.label[j]*(ei-ej)/eta
        os.alpha[j] = clipalpha(os.alpha[j],H,L)
        updateek(os,j) # alpha_j changed, so its cached error must be refreshed
        if (abs(os.alpha[j]-alphajold) < 0.00001):
            print('j not moving enough'); return 0
        os.alpha[i] += os.label[j]*os.label[i]*(alphajold-os.alpha[j]) # alpha_i moves by the same amount, opposite direction
        b1 = os.b-ei-os.label[i]*(os.alpha[i]-alphaiold)*os.x[i,:]*os.x[i,:].T - os.label[j]*(os.alpha[j]-alphajold)*os.x[i,:]*os.x[j,:].T
        b2 = os.b-ej-os.label[i]*(os.alpha[i]-alphaiold)*os.x[i,:]*os.x[j,:].T - os.label[j]*(os.alpha[j]-alphajold)*os.x[j,:]*os.x[j,:].T
        if (0 < os.alpha[i]) and (os.c > os.alpha[i]): os.b=b1
        elif (0 < os.alpha[j]) and (os.c > os.alpha[j]): os.b= b2
        else: os.b =(b1+b2)/2.0
        return 1
    else: return 0
# Outer loop of the full Platt SMO.
def smoP(dataset,labelset,C,toler,maxiter,ktup=('lin',0)):
    """Full Platt SMO: alternate full-dataset and non-bound-alpha sweeps.

    Returns (b, alpha). ktup selects the kernel, e.g. ('lin', 0) or
    ('rbf', sigma).
    """
    os = optstruct(mat(dataset),mat(labelset).transpose(),C,toler,ktup) # bundle all state in one structure
    iter = 0
    entireset = True # start with a full pass over every sample
    alphapairchanged = 0
    while (iter < maxiter) and ((alphapairchanged >0) or (entireset)):
        # Loop invariant: after a full pass with zero changes the non-bound
        # sweep would also find nothing, so the while condition exits.
        alphapairchanged = 0
        if entireset: # sweep every sample
            for i in range(os.m):
                # alphapairchanged += innerl(i,os)
                alphapairchanged += innerlk(i,os) # kernel-aware inner loop
                print('fullset, iter: %d i: %d, pairs changed %d' % (iter,i,alphapairchanged))
            iter += 1
        else: # sweep only the non-bound alphas (0 < alpha < C)
            nonbound = nonzero((os.alpha.A > 0)*(os.alpha.A < C))[0]
            for i in nonbound:
                # alphapairchanged += innerl(i,os)
                alphapairchanged += innerlk(i,os) # kernel-aware inner loop
                print('non-bound, iter: %d, i: %d, pairs changed %d' % (iter,i,alphapairchanged))
            iter += 1
        if entireset: entireset = False # just finished a full pass
        elif (alphapairchanged == 0): entireset = True # non-bound sweep stalled: go back to full passes
        print('iteration number: %d' % (iter))
    return os.b,os.alpha
'''
import time
start = time.clock()
filename = r'.\data\testSet.txt'
dataset,labelset = sz_c5_svm.loaddataset(filename)
b,alpha = sz_c5_svm.smoP(dataset,labelset,0.6,0.001,40)
end = time.clock()
print('running time: %s seconds.' % (end-start))
'''
# 常数C一方面保证所有样例的间隔不小于1.0;一方面要使得分类间隔尽可能大;两方平衡
# 若C很大,则分类器尽可能将所有样例分类正确
# datamat = mat(dataset);labelmat = mat(labelset).transpose()
# w = multiply(alpha,labelmat).T*datamat #超平面参数
def calw(alpha, dataset, labelset):
    """Recover the hyperplane weights w = sum_i alpha_i * y_i * x_i.

    Returns an n x 1 array; only support vectors (alpha_i > 0) contribute.
    """
    datamat = mat(dataset)
    labelmat = mat(labelset).transpose()
    sample_count, feature_count = shape(datamat)
    w = zeros((feature_count, 1))
    for row in range(sample_count):
        w += multiply(labelmat[row] * alpha[row], datamat[row, :].T)
    return w
# w1 = sz_c5_svm.calw(alpha,dataset,labelset)
# Kernel evaluation helper.
def kerneltrans(x, a, ktup):
    """Kernel values between every row of matrix x and the single row a.

    ktup: ('lin', _) for the plain inner product, or ('rbf', sigma) for the
    Gaussian kernel exp(-||x_j - a||^2 / sigma^2). Returns an m x 1 matrix.
    """
    m = shape(x)[0]
    kind = ktup[0]
    if kind == 'lin':
        return x * a.T
    if kind == 'rbf':
        k = mat(zeros((m, 1)))
        for row in range(m):
            diff = x[row, :] - a
            k[row] = diff * diff.T
        return exp(k / (-1 * ktup[1] ** 2))
    raise NameError('Houston We Have a Problem: the Kerlnel is not recognized')
# 修改下列两个函数内的部分语句,适用于使用kernel
# !!!注意将其他引用此函数的地方改了!!!
def innerlk(i,os):
    """Kernel-aware inner loop: optimize (alpha_i, alpha_j) via os.K; 1 if changed.

    Identical to innerl except every x_i . x_j inner product is replaced by
    the precomputed kernel entry os.K[i,j].
    """
    # ei = calcek(os,i)
    ei = calcekk(os,i)
    # Proceed only if alpha_i violates KKT beyond tolerance and is not clipped
    # to the boundary (0 or C).
    if ((os.label[i]*ei < -os.tol) and (os.alpha[i] < os.c)) or ((os.label[i]*ei > os.tol) and (os.alpha[i] > 0)):
        j,ej = selectj(i,os,ei)
        alphaiold = os.alpha[i].copy(); alphajold = os.alpha[j].copy(); # .copy() is essential: otherwise we alias the matrix entry
        # Box bounds keeping alpha_j in [0, C] under the equality constraint
        if (os.label[i] != os.label[j]):
            L = max(0,os.alpha[j] - os.alpha[i])
            H = min(os.c, os.c+os.alpha[j] - os.alpha[i])
        else:
            L = max(0,os.alpha[j] + os.alpha[i] - os.c)
            H = min(os.c, os.alpha[j] + os.alpha[i])
        if L == H: print('L=H'); return 0
        eta = 2.0*os.K[i,j]-os.K[i,i]-os.K[j,j] # 2xixj-xi^2-xj^2, via the kernel matrix
        if eta >= 0: print('eta>=0');return 0
        os.alpha[j] -= os.label[j]*(ei-ej)/eta
        os.alpha[j] = clipalpha(os.alpha[j],H,L)
        updateek(os,j) # alpha_j changed, so its cached error must be refreshed
        if (abs(os.alpha[j]-alphajold) < 0.00001):
            print('j not moving enough'); return 0
        os.alpha[i] += os.label[j]*os.label[i]*(alphajold-os.alpha[j]) # alpha_i moves by the same amount, opposite direction
        b1 = os.b-ei-os.label[i]*(os.alpha[i]-alphaiold)*os.K[i,i] - os.label[j]*(os.alpha[j]-alphajold)*os.K[i,j]
        b2 = os.b-ej-os.label[i]*(os.alpha[i]-alphaiold)*os.K[i,j] - os.label[j]*(os.alpha[j]-alphajold)*os.K[j,j]
        if (0 < os.alpha[i]) and (os.c > os.alpha[i]): os.b=b1
        elif (0 < os.alpha[j]) and (os.c > os.alpha[j]): os.b= b2
        else: os.b =(b1+b2)/2.0
        return 1
    else: return 0
def calcekk(os, k):
    """Prediction error for sample k using the precomputed kernel column os.K[:, k]."""
    prediction = float(multiply(os.alpha, os.label).T * os.K[:, k] + os.b)
    return prediction - float(os.label[k])
def testrbf(k1 = 1.3): # k1 is the Gaussian kernel width sigma
    """Train an RBF-kernel SVM on testSetRBF.txt; print train/test error rates."""
    filename = r'.\data\testSetRBF.txt'
    dataset,labelset = loaddataset(filename)
    b,alpha = smoP(dataset,labelset,200,0.0001,10000,('rbf',k1))
    datamat = mat(dataset);labelmat = mat(labelset).transpose()
    # Keep only the support vectors: samples with alpha > 0.
    svind = nonzero(alpha.A > 0)[0] # support-vector indices
    svs = datamat[svind] # the support vectors themselves
    svlabel = labelmat[svind]
    print('there are %d support vectors' % (shape(svind)[0]))
    m,n = shape(datamat)
    errorcount = 0
    for i in range(m):
        ker = kerneltrans(svs,datamat[i,:],('rbf',k1)) # only support vectors contribute to the decision function
        predict = ker.T*multiply(svlabel,alpha[svind])+b
        if sign(predict) != sign(labelset[i]): errorcount += 1
    # NOTE(review): under Python 2, errorcount/m is integer division and would
    # always print 0.000000 -- confirm this file targets Python 3.
    print('the training error rate is: %f' % (float(errorcount/m)))
    # Evaluate on the held-out set.
    filename = r'.\data\testSetRBF2.txt'
    dataset,labelset = loaddataset(filename)
    datamat = mat(dataset);labelmat = mat(labelset).transpose()
    errorcount = 0
    m,n = shape(datamat)
    for i in range(m):
        ker = kerneltrans(svs,datamat[i,:],('rbf',k1)) # again, support vectors only
        predict = ker.T*multiply(svlabel,alpha[svind])+b
        if sign(predict) != sign(labelset[i]): errorcount += 1
    print('the testing error rate is: %f' % (float(errorcount/m)))
'''
示例:手写识别问题
KNN每次需要使用所有训练样本来分类,占用内存过大,而SVM只需要将支持向量保存即可
'''
# Convert a 32x32 text image (one character per pixel) into a 1x1024 vector.
def img2vector(filename):
    """Flatten the first 32 lines x 32 columns of a digit file into (1, 1024).

    Each of the first 32 lines must contain at least 32 characters, each a
    digit ('0'/'1').
    """
    vec = zeros((1, 1024))
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(filename) as fr:
        for i in range(32):
            line = fr.readline()
            for j in range(32):
                vec[0, 32 * i + j] = int(line[j])  # one char at a time: the row cannot be int()'d as a whole
    return vec
# 读入训练样本
def loadimage(dirname): # path of the directory holding the digit files
    """Load 32x32 digit text files from dirname into (dataset, hwlabel).

    Digit 9 is labelled -1 and every other digit +1, because the plain SVM
    here is binary.
    """
    from os import listdir,path
    hwlabel = []
    trainfilelist = listdir(dirname)
    # NOTE(review): listdir order is filesystem-dependent, so sample order is
    # not deterministic across machines.
    m = len(trainfilelist)
    dataset = zeros((m,1024))
    # Build the training matrix, one flattened image per row.
    for i in range(m):
        # The class digit is the first character of the file name (e.g. "9_12.txt").
        filenamestr = trainfilelist[i]
        classnum = int(filenamestr[0]) #classnum = int((filenamestr.split('.')[0]).split('_')[0])
        if classnum == 9: hwlabel.append(-1)
        else: hwlabel.append(1)
        # Binary problem: 9 vs everything else.
        filename = path.join(dirname,filenamestr)
        dataset[i,:] = img2vector(filename)
    return dataset,hwlabel
# 训练并测试分类器
def testdigit(ktup=('rbf',10)):
    """Train and evaluate the binary SVM (digit 9 vs rest) on handwriting data."""
    dataset,labelset = loadimage(r'trainingDigits')
    b,alpha = smoP(dataset,labelset,200,0.0001,10000,ktup)
    datamat = mat(dataset);labelmat = mat(labelset).transpose()
    # Keep only the support vectors -- unlike kNN we need not retain the
    # whole training set for classification.
    svind = nonzero(alpha.A > 0)[0] # support-vector indices
    svs = datamat[svind] # the support vectors themselves
    svlabel = labelmat[svind]
    print('there are %d support vectors' % (shape(svind)[0]))
    m,n = shape(datamat)
    errorcount = 0
    for i in range(m):
        ker = kerneltrans(svs,datamat[i,:],ktup) # kernel values against support vectors only
        predict = ker.T*multiply(svlabel,alpha[svind])+b
        if sign(predict) != sign(labelset[i]): errorcount += 1
    print('the training error rate is: %f' % (float(errorcount/m)))
    # Held-out evaluation.
    dataset,labelset = loadimage(r'testDigits')
    datamat = mat(dataset);labelmat = mat(labelset).transpose()
    m,n = shape(datamat)
    errorcount = 0
    for i in range(m):
        ker = kerneltrans(svs,datamat[i,:],ktup) # again, support vectors only
        predict = ker.T*multiply(svlabel,alpha[svind])+b
        if sign(predict) != sign(labelset[i]): errorcount += 1
    print('the test error rate is: %f' % (float(errorcount/m)))
# 之后的重点是要搞懂如何选择sigma值和C值!!!!
|
[
"zhangyue9405@buaa.edu.cn"
] |
zhangyue9405@buaa.edu.cn
|
fb41380c2348b8810a81495d373016be0606ee17
|
e4f97f0e9bb48fde6c25e1dd628d79b2cf905111
|
/setup.py
|
a2d6e8503f371d24394168159549bf9a049e4af7
|
[
"MIT"
] |
permissive
|
DarthThomas/poolvr.py
|
7a167f3c35b1d3d86b5f56cc4cf5770dd9bf2bd6
|
2e983c8c48995eeece82ea4abbda4628cb23285c
|
refs/heads/master
| 2020-12-03T21:30:48.057200
| 2019-08-24T23:36:23
| 2019-08-24T23:36:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,389
|
py
|
#!/bin/env python
from setuptools import setup
from codecs import open
from os import path, listdir
here = path.dirname(path.abspath(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='poolvr.py',
version='0.0.1',
description='Python VR pool simulator',
packages=['poolvr'],
long_description=long_description,
url='https://github.com/jzitelli/poolvr.py',
author='Jeffrey Zitelli',
author_email='jeffrey.zitelli@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='virtual reality vr pool billiards pyopenvr openvr event based physics cue table pocket bumper rail',
install_requires=['openvr', 'numpy', 'pyopengl', 'pillow'], # 'cyglfw3'],
extras_require={},
package_data={
'poolvr': [path.join('shaders', filename) for filename in listdir(path.join('poolvr', 'shaders'))],
},
data_files=[],
scripts=[path.join('scripts', 'gen_assets.py')],
entry_points={
'console_scripts': [
'poolvr = poolvr.__main__:main'
]
}
)
|
[
"jeffrey.zitelli@gmail.com"
] |
jeffrey.zitelli@gmail.com
|
7ad34a71cf548ff1303f903e8c1a5ba7ad27e6e8
|
631b074ba6b901ba5fb709f8e24acb84a596e777
|
/cinder/tests/api/openstack/volume/test_volumes.py
|
9563989a91bfa3d21b06cacf38d01659d5bf1120
|
[
"Apache-2.0"
] |
permissive
|
matiu2/cinder
|
5ee188a834eea06883103ab97cee50a9ee3a21bb
|
1c52fb3041df5661756246705942c60b4b1448d5
|
refs/heads/master
| 2021-01-18T13:54:34.159533
| 2012-05-04T04:45:20
| 2012-05-04T04:45:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,490
|
py
|
# Copyright 2013 Josh Durgin
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
import webob
from cinder.api.openstack.volume import volumes
from cinder import flags
from cinder import test
from cinder.tests.api.openstack import fakes
from cinder.volume import api as volume_api
FLAGS = flags.FLAGS  # global cinder flag/config registry
NS = '{http://docs.openstack.org/volume/api/v1}'  # v1 XML namespace in Clark notation, as lxml reports tags
class VolumeApiTest(test.TestCase):
    """Tests for the v1 /volumes REST controller (create/list/show/delete).

    The volume API layer is replaced with deterministic stubs from
    cinder.tests.api.openstack.fakes.
    """
    def setUp(self):
        super(VolumeApiTest, self).setUp()
        self.controller = volumes.VolumeController()
        # Stub out the real volume API calls the controller delegates to.
        self.stubs.Set(volume_api.API, 'get_all', fakes.stub_volume_get_all)
        self.stubs.Set(volume_api.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(volume_api.API, 'delete', fakes.stub_volume_delete)
    def test_volume_create(self):
        self.stubs.Set(volume_api.API, "create", fakes.stub_volume_create)
        vol = {"size": 100,
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = fakes.HTTPRequest.blank('/v1/volumes')
        res_dict = self.controller.create(req, body)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'Volume Test Desc',
                               'availability_zone': 'zone1:host1',
                               'display_name': 'Volume Test Name',
                               'attachments': [{'device': '/',
                                                'server_id': 'fakeuuid',
                                                'id': '1',
                                                'volume_id': '1'}],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 100}}
        self.assertEqual(res_dict, expected)
    def test_volume_create_no_body(self):
        # A missing "volume" body must be rejected as unprocessable.
        body = {}
        req = fakes.HTTPRequest.blank('/v1/volumes')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create,
                          req,
                          body)
    def test_volume_list(self):
        req = fakes.HTTPRequest.blank('/v1/volumes')
        res_dict = self.controller.index(req)
        expected = {'volumes': [{'status': 'fakestatus',
                                 'display_description': 'displaydesc',
                                 'availability_zone': 'fakeaz',
                                 'display_name': 'displayname',
                                 'attachments': [{'device': '/',
                                                  'server_id': 'fakeuuid',
                                                  'id': '1',
                                                  'volume_id': '1'}],
                                 'volume_type': 'vol_type_name',
                                 'snapshot_id': None,
                                 'metadata': {},
                                 'id': '1',
                                 'created_at': datetime.datetime(1, 1, 1,
                                                                 1, 1, 1),
                                 'size': 1}]}
        self.assertEqual(res_dict, expected)
    def test_volume_list_detail(self):
        # NOTE(review): despite the /detail URL this calls index(), not
        # detail(), so it duplicates test_volume_list -- confirm intent.
        req = fakes.HTTPRequest.blank('/v1/volumes/detail')
        res_dict = self.controller.index(req)
        expected = {'volumes': [{'status': 'fakestatus',
                                 'display_description': 'displaydesc',
                                 'availability_zone': 'fakeaz',
                                 'display_name': 'displayname',
                                 'attachments': [{'device': '/',
                                                  'server_id': 'fakeuuid',
                                                  'id': '1',
                                                  'volume_id': '1'}],
                                 'volume_type': 'vol_type_name',
                                 'snapshot_id': None,
                                 'metadata': {},
                                 'id': '1',
                                 'created_at': datetime.datetime(1, 1, 1,
                                                                 1, 1, 1),
                                 'size': 1}]}
        self.assertEqual(res_dict, expected)
    def test_volume_show(self):
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        res_dict = self.controller.show(req, 1)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'displaydesc',
                               'availability_zone': 'fakeaz',
                               'display_name': 'displayname',
                               'attachments': [{'device': '/',
                                                'server_id': 'fakeuuid',
                                                'id': '1',
                                                'volume_id': '1'}],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 1}}
        self.assertEqual(res_dict, expected)
    def test_volume_show_no_attachments(self):
        # A detached volume must serialize with an empty attachments list.
        def stub_volume_get(self, context, volume_id):
            return fakes.stub_volume(volume_id, attach_status='detached')
        self.stubs.Set(volume_api.API, 'get', stub_volume_get)
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        res_dict = self.controller.show(req, 1)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'displaydesc',
                               'availability_zone': 'fakeaz',
                               'display_name': 'displayname',
                               'attachments': [],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 1}}
        self.assertEqual(res_dict, expected)
    def test_volume_show_no_volume(self):
        self.stubs.Set(volume_api.API, "get", fakes.stub_volume_get_notfound)
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          req,
                          1)
    def test_volume_delete(self):
        # Delete is asynchronous: the API acknowledges with 202 Accepted.
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        resp = self.controller.delete(req, 1)
        self.assertEqual(resp.status_int, 202)
    def test_volume_delete_no_volume(self):
        self.stubs.Set(volume_api.API, "get", fakes.stub_volume_get_notfound)
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.delete,
                          req,
                          1)
class VolumeSerializerTest(test.TestCase):
def _verify_volume_attachment(self, attach, tree):
for attr in ('id', 'volume_id', 'server_id', 'device'):
self.assertEqual(str(attach[attr]), tree.get(attr))
def _verify_volume(self, vol, tree):
self.assertEqual(tree.tag, NS + 'volume')
for attr in ('id', 'status', 'size', 'availability_zone', 'created_at',
'display_name', 'display_description', 'volume_type',
'snapshot_id'):
self.assertEqual(str(vol[attr]), tree.get(attr))
for child in tree:
print child.tag
self.assertTrue(child.tag in (NS + 'attachments', NS + 'metadata'))
if child.tag == 'attachments':
self.assertEqual(1, len(child))
self.assertEqual('attachment', child[0].tag)
self._verify_volume_attachment(vol['attachments'][0], child[0])
elif child.tag == 'metadata':
not_seen = set(vol['metadata'].keys())
for gr_child in child:
self.assertTrue(gr_child.tag in not_seen)
self.assertEqual(str(vol['metadata'][gr_child.tag]),
gr_child.text)
not_seen.remove(gr_child.tag)
self.assertEqual(0, len(not_seen))
def test_volume_show_create_serializer(self):
serializer = volumes.VolumeTemplate()
raw_volume = dict(
id='vol_id',
status='vol_status',
size=1024,
availability_zone='vol_availability',
created_at=datetime.datetime.now(),
attachments=[dict(
id='vol_id',
volume_id='vol_id',
server_id='instance_uuid',
device='/foo')],
display_name='vol_name',
display_description='vol_desc',
volume_type='vol_type',
snapshot_id='snap_id',
metadata=dict(
foo='bar',
baz='quux',
),
)
text = serializer.serialize(dict(volume=raw_volume))
print text
tree = etree.fromstring(text)
self._verify_volume(raw_volume, tree)
def test_volume_index_detail_serializer(self):
serializer = volumes.VolumesTemplate()
raw_volumes = [dict(
id='vol1_id',
status='vol1_status',
size=1024,
availability_zone='vol1_availability',
created_at=datetime.datetime.now(),
attachments=[dict(
id='vol1_id',
volume_id='vol1_id',
server_id='instance_uuid',
device='/foo1')],
display_name='vol1_name',
display_description='vol1_desc',
volume_type='vol1_type',
snapshot_id='snap1_id',
metadata=dict(
foo='vol1_foo',
bar='vol1_bar',
),
),
dict(
id='vol2_id',
status='vol2_status',
size=1024,
availability_zone='vol2_availability',
created_at=datetime.datetime.now(),
attachments=[dict(
id='vol2_id',
volume_id='vol2_id',
server_id='instance_uuid',
device='/foo2')],
display_name='vol2_name',
display_description='vol2_desc',
volume_type='vol2_type',
snapshot_id='snap2_id',
metadata=dict(
foo='vol2_foo',
bar='vol2_bar',
),
)]
text = serializer.serialize(dict(volumes=raw_volumes))
print text
tree = etree.fromstring(text)
self.assertEqual(NS + 'volumes', tree.tag)
self.assertEqual(len(raw_volumes), len(tree))
for idx, child in enumerate(tree):
self._verify_volume(raw_volumes[idx], child)
|
[
"mordred@inaugust.com"
] |
mordred@inaugust.com
|
3dd165507fdace1d24cfa246d10ef7add40abda8
|
1d287bf5657eaaaad2c40a7fa9be8f3422cd5705
|
/web/basic_app/migrations/0003_auto_20190613_2322.py
|
7a8eec02dee61037ffabb91603f0aa8a7ba7e6ce
|
[] |
no_license
|
chibike/gclipboard
|
fd2c93d4c4f553880b44e552d929e55ee4e32139
|
2a0dea280478a96b2a561e5c34b22a8ec85a7ac1
|
refs/heads/master
| 2022-12-26T22:24:23.049325
| 2019-09-01T19:14:07
| 2019-09-01T19:14:07
| 191,410,753
| 0
| 0
| null | 2022-12-09T07:40:53
| 2019-06-11T16:33:54
|
CSS
|
UTF-8
|
Python
| false
| false
| 446
|
py
|
# Generated by Django 2.1 on 2019-06-13 23:22
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Make Clipboard.time_stamp default to the current (timezone-aware) time."""
    dependencies = [
        ('basic_app', '0002_auto_20190613_2302'),
    ]
    operations = [
        migrations.AlterField(
            model_name='clipboard',
            name='time_stamp',
            # Pass the callable itself (not now()) so the default is evaluated
            # at row-creation time.
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
|
[
"cpraise@cryptoquantique.com"
] |
cpraise@cryptoquantique.com
|
a7308f24b2582950c56f5093db7eb9097e99654b
|
e3b9758687890727898876e1d12ca5b5b6db74be
|
/auth_public.py
|
6d8059b44ff4265be6fcc6a29fe0c5c25445d25a
|
[
"MIT"
] |
permissive
|
tinemakovecki/Drinks-and-things
|
00af19ac0192f77be25d2f77869b09ae9c9a3956
|
b6e0db38586b4261b192c5847ca7cde6d3d3785e
|
refs/heads/master
| 2020-03-07T09:47:07.867902
| 2018-08-26T11:17:03
| 2018-08-26T11:17:03
| 127,415,810
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 90
|
py
|
# Public, read-only connection settings for the course database.
# These credentials are intentionally published ("javnost" = public).
db = 'sem2018_tinem'
host = 'baza.fmf.uni-lj.si'
user = 'javnost'
password = 'javnogeslo'
|
[
"tine.makovecki@gmail.com"
] |
tine.makovecki@gmail.com
|
60737ba91eebed515655c9cabd5b4b3f2cb24da1
|
ce14ad881f246e7598b44bd480abea63f0bc715d
|
/gymtest.py
|
7a23d4be40257e67b53d943f7299cebc2b5f961c
|
[] |
no_license
|
KyubumShin/RLtutorial
|
f30586f5f315c5804a895b7eb2d6f27eff22505c
|
a107b233e93ea3e84f54316c4c012c4eb176d054
|
refs/heads/master
| 2022-12-10T06:11:59.789031
| 2020-09-29T12:31:21
| 2020-09-29T12:31:21
| 299,609,055
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 76
|
py
|
import os
# Silence TensorFlow's C++ logging; must be set before TF is (transitively)
# imported to take effect.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import torch
import gym
|
[
"kbum0617@naver.com"
] |
kbum0617@naver.com
|
e0e67a92b613b2c4a25170960c968539324316a0
|
c0b9727943ed437377bd672af6604256cd6c0f1e
|
/gen/datagen.py
|
e0fc2c912fe27881f9c629e89f24b9af08e402c9
|
[] |
no_license
|
phongngtuan/play-bank
|
249b2f01ecbe2f7d0346231439292f76224354ac
|
ab999fcdeb4bf13d22a5a6141347780df0e530ae
|
refs/heads/master
| 2020-06-20T03:57:35.733072
| 2019-07-14T14:46:07
| 2019-07-15T11:30:00
| 196,984,076
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,986
|
py
|
#!/usr/bin/python
import sys
import argparse
import random
import re

parser = argparse.ArgumentParser(description='Generate random data samples')
parser.add_argument('-n', "--lines" , type=int, default=100, help="number of transactions")
# Fix: the help text promises "default = no overdrafts", but the original
# default of 0 forced an overdraft on the very first transaction (loop()
# only disables overdrafts for negative positions).  Use -1 to disable.
parser.add_argument('--overdraft', type=int, default=-1, help="position of overdraft transaction (default = no overdrafts)")
parser.add_argument('--print-balance', action="store_true", help="print balance together with the transaction")
parser.add_argument('--file-size', default="", help="estimated file size in this format ^(\d+)\s*(KB|MB|GB)")
args = parser.parse_args()

# Shared generator state mutated by loop() below.
max_deposit = 100   # largest single deposit amount
balance = 0         # running balance; randint(-balance, ...) keeps it >= 0
line_count = 0      # transactions emitted so far
byte_count = 0      # output bytes emitted so far
def parse_file_size(s):
    """Parse a human-readable size such as "5KB", "12 MB" or "1GB" into bytes.

    Units are decimal (KB = 1000 bytes).  Matching is case-insensitive and
    ignores whitespace between the number and the unit.

    Raises:
        ValueError: if *s* does not start with "<digits><KB|MB|GB>".
        (The original returned an AttributeError from the failed match.)
    """
    pattern = r"^(\d+)\s*(KB|MB|GB)"
    groups = re.search(pattern, s.upper())
    if groups is None:
        raise ValueError("invalid file size %r; expected e.g. '5KB', '12 MB', '1GB'" % s)
    value = int(groups.group(1))
    unit = groups.group(2)
    # Decimal multipliers, as in the original implementation.
    factor = {"KB": 1000, "MB": 1000 ** 2, "GB": 1000 ** 3}[unit]
    return value * factor
def loop():
    """Emit one random transaction line and update the running totals.

    Output format is "[D|W] <positive_number>" (deposit / withdrawal),
    optionally followed by the balance when --print-balance is set.
    """
    # FIXME: relies on module-level state instead of parameters.
    global balance
    global byte_count
    global line_count

    forced_overdraft = args.overdraft >= 0 and line_count == args.overdraft
    if forced_overdraft:
        # Withdraw more than the balance to force the account negative.
        amount = -(balance + random.randint(0, max_deposit))
    else:
        # Bounded below so the balance never goes negative on its own.
        amount = random.randint(-balance, max_deposit)
    balance += amount

    if amount > 0:
        line = 'D' + ' ' + str(amount)
    else:
        line = 'W' + ' ' + str(-amount)
    if args.print_balance:
        line = line + ' ' + str(balance)
    print(line)
    byte_count += len(line)
    line_count += 1
# main work: emit transactions until either the line budget or (optional)
# byte budget is exhausted.
max_byte_count = 0
if args.file_size:
    max_byte_count = parse_file_size(args.file_size)
print("BEGIN")
while True:
    loop()
    # Stop after the requested number of lines, or once the byte budget is spent.
    if line_count >= args.lines or (max_byte_count and byte_count >= max_byte_count):
        break
print("END")
|
[
"phongnt.ptnk@gmail.com"
] |
phongnt.ptnk@gmail.com
|
0e70c6c78066e70de78e483e1497a1ffa9cf16f7
|
b9b54e9c5b6f9cdb3e8eb449f359d4eda3c4f8c9
|
/博物馆新闻采集分析子系统/museum_news_spider/museum_news_spider/middlewares.py
|
fbf6be80b16bbf7256b2085aada8da756aa69b5f
|
[] |
no_license
|
Ice-Jeffrey/BUCTCS1703SECD
|
ac6fca3179a785df4247daff1e1729dea7b4b200
|
30808a6210e420969a1007c377fcf9a3a081fcbe
|
refs/heads/master
| 2021-05-19T08:18:39.184517
| 2020-05-26T15:19:11
| 2020-05-26T15:19:11
| 251,601,687
| 0
| 1
| null | 2020-05-26T12:22:38
| 2020-03-31T12:51:35
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 4,114
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
import random
class MyUserAgentMiddleware(UserAgentMiddleware):
    """Downloader middleware that rotates the User-Agent on every request.

    The pool of agent strings comes from the MY_USER_AGENT project setting.
    """

    def __init__(self, user_agent):
        # Sequence of User-Agent strings to choose from.
        self.user_agent = user_agent

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: build the middleware from project settings.
        return cls(user_agent=crawler.settings.get('MY_USER_AGENT'))

    def process_request(self, request, spider):
        # Overwrite the header with a randomly selected agent.
        request.headers['User-Agent'] = random.choice(self.user_agent)
class MuseumNewsSpiderSpiderMiddleware(object):
    """Default Scrapy spider middleware (generated boilerplate).

    Every hook below is a pass-through: leaving a method undefined would
    make Scrapy behave exactly the same way.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this to construct the middleware and wire up signals.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Runs for each response entering the spider; None means "continue".
        return None

    def process_spider_output(self, response, result, spider):
        # Runs on the spider's results; forward every item/request unchanged.
        for item in result:
            yield item

    def process_spider_exception(self, response, exception, spider):
        # An exception escaped the spider or process_spider_input(); ignore it
        # (returning None lets other middlewares handle it).
        pass

    def process_start_requests(self, start_requests, spider):
        # Like process_spider_output(), but for the spider's initial requests
        # (there is no response yet).  Must yield only requests.
        for request in start_requests:
            yield request

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class MuseumNewsSpiderDownloaderMiddleware(object):
    """Default Scrapy downloader middleware (generated boilerplate).

    All hooks are pass-throughs; Scrapy would behave identically if the
    methods were simply not defined.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this to construct the middleware and wire up signals.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_request(self, request, spider):
        # Runs for each outgoing request.  Returning None continues normal
        # processing; a Response/Request return or IgnoreRequest would
        # short-circuit the download.
        return None

    def process_response(self, request, response, spider):
        # Runs on each downloaded response; hand it back untouched.
        return response

    def process_exception(self, request, exception, spider):
        # A download handler or earlier process_request() raised; returning
        # None lets the remaining exception middleware run.
        pass

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
|
[
"735429132@qq.com"
] |
735429132@qq.com
|
2d1f1b95797134d0c07deb8b229840725d5ccbfb
|
7e5c4bb68fe9e4f1eb03938e93d82434985664ff
|
/fed/crawl/crawl.py
|
ac1fa84858c8e8fee31e7a83102d3f13556342df
|
[] |
no_license
|
LongHouTin/Data533_Project
|
3ddc9bebd6256eb86dc09411df755f84e39aee49
|
60769a38709138a9194dbeb42f54a79de2a83aa3
|
refs/heads/master
| 2022-03-24T08:30:55.558859
| 2019-12-11T23:50:28
| 2019-12-11T23:50:28
| 226,991,765
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,672
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[15]:
def webcrawl():
    """Scrape the latest observation of four Fed/market series from ALFRED
    and append them as one new row (date + four values) to an Excel workbook.

    Reads ``packagedata.xls`` and saves the augmented copy as
    ``packagedata2.xls``.  Returns None.
    """
    import urllib.request
    from bs4 import BeautifulSoup as bs
    # lxml/html5lib are never referenced directly; presumably imported so a
    # good BeautifulSoup parser is guaranteed installed — TODO confirm before
    # removing.
    import lxml, html5lib
    import time
    from xlrd import open_workbook
    from xlutils.copy import copy
    import xlrd, xlwt  # fix: the original imported xlrd twice ("xlrd,xlrd,xlwt")

    # Series names (documentation only) and their ALFRED pages, same order.
    varname = ["FEDPURCHASE", "FEDSALE", "CBOE_VIX", "NASDAQ"]
    linkname = ["https://alfred.stlouisfed.org/series?seid=RPONTTLD",
                "https://alfred.stlouisfed.org/series?seid=RRPONTTLD",
                "https://alfred.stlouisfed.org/series?seid=VIXCLS",
                "https://alfred.stlouisfed.org/series?seid=NASDAQCOM"]
    toadd = []
    for i in range(len(linkname)):
        st = urllib.request.urlopen(linkname[i])
        ct = st.read().decode('utf-8')
        soup = bs(ct)
        soup.prettify()
        cc = soup.find_all('td')  # all <td> cells: dates and values
        date = cc[0].string[0:10]  # most recent observation date (first 10 chars)
        if i == 3:
            # NASDAQ values carry a thousands separator (e.g. "9,123.45");
            # drop the character at index 1 (the comma) before parsing.
            # NOTE(review): assumes exactly one comma at position 1 — breaks
            # outside the 1,000–9,999 range; verify.
            value = float(cc[1].string[0] + cc[1].string[2:])
        else:
            value = float(cc[1].string)
        toadd.append(value)
        time.sleep(0.1)  # small delay between requests

    # xlwt cannot edit a workbook in place: copy it, write the new row into
    # the copy, and save under a different file name.
    rb = open_workbook("packagedata.xls", formatting_info=True)
    wb = copy(rb)
    book = xlrd.open_workbook("packagedata.xls", formatting_info=True)
    sheet = book.sheet_by_index(0)
    s = wb.get_sheet(0)  # the writable copy of sheet 0
    # Append one row: the four values in columns 1..4, the (last fetched)
    # date in column 0.  The unused per-iteration xlwt date style from the
    # original was removed.
    for i in range(len(linkname)):
        s.write(sheet.nrows, i + 1, toadd[i])
    s.write(sheet.nrows, 0, date)
    wb.save('packagedata2.xls')
    print("successfully crawling data from Fed! The most recent date is:{0}".format(date))
    return None
|
[
"lsam8910@gmail.com"
] |
lsam8910@gmail.com
|
36e595e072cea6da8c645f8f5f5746541a76a31f
|
5a7dbdc62ca61cdd6c588de30153067f2825acc7
|
/littlebrother/test/__init__.py
|
e22c44b1d769151e73a90636b1aac847505a0194
|
[
"MIT"
] |
permissive
|
kxz/littlebrother
|
87bf12b74990c249dfb3d10af2aa71fa2f5dc86b
|
af9ec9af5c0de9a74796bb7e16a6b836286e8b9f
|
refs/heads/master
| 2020-12-24T07:52:25.897158
| 2016-06-02T06:09:44
| 2016-06-02T06:09:44
| 39,976,488
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 37
|
py
|
"""Unit tests for Little Brother."""
|
[
"blankplacement@gmail.com"
] |
blankplacement@gmail.com
|
cf6c588bc60fd68481a1cf1759277191155b510b
|
08ab013d0cad5d18481498f5361314e72955a331
|
/tetrisboard.py
|
3f533fbb90dc528867234135ce846ae78bee9853
|
[] |
no_license
|
gil9red/tetris-py
|
0d56b9fafd245899ccf7fd8f835eefa04e957053
|
fbd7bc16a0def25f5447d7388a84f87925684846
|
refs/heads/master
| 2022-06-26T16:55:58.958731
| 2022-05-30T14:04:03
| 2022-05-30T14:04:03
| 32,170,613
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,121
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from PySide.QtGui import *
from PySide.QtCore import *
from tetrispiece import TetrisPiece, TetrisShape
# TODO: закончить портирование
class TetrisBoard(QFrame):
    """Playing-field widget for the Tetris game (port of Qt's "tetrix" example).

    The board is a BOARD_WIDTH x BOARD_HEIGHT grid of TetrisShape values,
    stored row-major in ``self.board`` with row 0 at the *bottom* of the
    screen.  Fixes applied to the original port: Qt signals declared as class
    attributes (instance assignment does not create usable signals), signals
    emitted via .emit() instead of being called, ``timerId`` called instead of
    compared as a bound method, integer division for all pixel math,
    ``range()`` around BOARD_WIDTH in removeFullLines, ``getShapeAt`` used
    consistently, QColor wrapping of raw RGB ints, and cur/next piece
    aliasing removed in newPiece().
    """

    BOARD_WIDTH = 10
    BOARD_HEIGHT = 22

    # Signals must be class attributes so Qt's meta-object machinery creates
    # a bound signal per instance (fix: the original assigned Signal(int) to
    # instance attributes inside __init__, which does not work in PySide).
    scoreChanged = Signal(int)
    levelChanged = Signal(int)
    linesRemovedChanged = Signal(int)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        self.setFocusPolicy(Qt.StrongFocus)
        self.timer = QBasicTimer()
        self.nextPieceLabel = None  # optional preview QLabel, see setNextPieceLabel()
        self.isStarted = False
        self.isPaused = False
        # True during the short flash after removing full lines, before the
        # next piece appears.
        self.isWaitingAfterLine = False
        self.curPiece = TetrisPiece()
        self.nextPiece = TetrisPiece()
        self.curX = 0
        self.curY = 0
        self.numLinesRemoved = 0
        self.numPiecesDropped = 0
        self.score = 0
        self.level = 0
        self.board = [TetrisShape.NoShape
                      for i in range(TetrisBoard.BOARD_WIDTH * TetrisBoard.BOARD_HEIGHT)]

        self.clearBoard()
        self.nextPiece.setRandomShape()

    def setNextPieceLabel(self, label):
        """Attach the QLabel used to preview the upcoming piece."""
        self.nextPieceLabel = label

    def sizeHint(self):
        return QSize(TetrisBoard.BOARD_WIDTH * 15 + self.frameWidth() * 2,
                     TetrisBoard.BOARD_HEIGHT * 15 + self.frameWidth() * 2)

    def minimumSizeHint(self):
        return QSize(TetrisBoard.BOARD_WIDTH * 5 + self.frameWidth() * 2,
                     TetrisBoard.BOARD_HEIGHT * 5 + self.frameWidth() * 2)

    def start(self):
        """Reset all game state and start a new game (no-op while paused)."""
        if self.isPaused:
            return

        self.isStarted = True
        self.isWaitingAfterLine = False
        self.numLinesRemoved = 0
        self.numPiecesDropped = 0
        self.score = 0
        self.level = 1
        self.clearBoard()

        self.linesRemovedChanged.emit(self.numLinesRemoved)
        self.scoreChanged.emit(self.score)
        self.levelChanged.emit(self.level)

        self.newPiece()
        self.timer.start(self.timeoutTime(), self)

    def pause(self):
        """Toggle pause; the drop timer is stopped while paused."""
        if not self.isStarted:
            return

        self.isPaused = not self.isPaused
        if self.isPaused:
            self.timer.stop()
        else:
            self.timer.start(self.timeoutTime(), self)
        self.update()

    def paintEvent(self, event):
        """Draw the settled board contents and the falling piece."""
        super().paintEvent(event)

        painter = QPainter(self)
        rect = self.contentsRect()

        if self.isPaused:
            painter.drawText(rect, Qt.AlignCenter, "Pause")
            return

        # Pixel y of the board's top edge; board row 0 is at the bottom.
        boardTop = rect.bottom() - TetrisBoard.BOARD_HEIGHT * self.squareHeight()

        for i in range(TetrisBoard.BOARD_HEIGHT):
            for j in range(TetrisBoard.BOARD_WIDTH):
                shape = self.getShapeAt(j, TetrisBoard.BOARD_HEIGHT - i - 1)
                if shape != TetrisShape.NoShape:
                    self.drawSquare(painter, rect.left() + j * self.squareWidth(),
                                    boardTop + i * self.squareHeight(), shape)

        if self.curPiece.shape() != TetrisShape.NoShape:
            for i in range(4):
                x = self.curX + self.curPiece.x(i)
                y = self.curY - self.curPiece.y(i)
                self.drawSquare(painter, rect.left() + x * self.squareWidth(),
                                boardTop + (self.BOARD_HEIGHT - y - 1) * self.squareHeight(),
                                self.curPiece.shape())

    def keyPressEvent(self, event):
        """Game controls: arrows move/rotate, Space hard-drops, D soft-drops."""
        if not self.isStarted or self.isPaused or self.curPiece.shape() == TetrisShape.NoShape:
            super().keyPressEvent(event)
            return

        key = event.key()
        if key == Qt.Key_Left:
            self.tryMove(self.curPiece, self.curX - 1, self.curY)
        elif key == Qt.Key_Right:
            self.tryMove(self.curPiece, self.curX + 1, self.curY)
        elif key == Qt.Key_Down:
            self.tryMove(self.curPiece.rotatedRight(), self.curX, self.curY)
        elif key == Qt.Key_Up:
            self.tryMove(self.curPiece.rotatedLeft(), self.curX, self.curY)
        elif key == Qt.Key_Space:
            self.dropDown()
        elif key == Qt.Key_D:
            self.oneLineDown()
        else:
            super().keyPressEvent(event)

    def timerEvent(self, event):
        # Fix: timerId is a method; the original compared against the bound
        # method object itself, so this branch never matched.
        if event.timerId() == self.timer.timerId():
            if self.isWaitingAfterLine:
                # The "line removed" flash is over: spawn the next piece and
                # resume the normal drop cadence.
                self.isWaitingAfterLine = False
                self.newPiece()
                self.timer.start(self.timeoutTime(), self)
            else:
                self.oneLineDown()
        else:
            super().timerEvent(event)

    def setShapeAt(self, x, y, value):
        """Store *value* at board column x, row y (row 0 = bottom)."""
        self.board[(y * TetrisBoard.BOARD_WIDTH) + x] = value

    def getShapeAt(self, x, y):
        """Return the shape at board column x, row y (row 0 = bottom)."""
        return self.board[(y * TetrisBoard.BOARD_WIDTH) + x]

    def timeoutTime(self):
        # Milliseconds between automatic drops; integer for Qt timers
        # (fix: the original returned a float from true division).
        return 1000 // (1 + self.level)

    def squareWidth(self):
        # Integer division (fix): these values feed Qt pixel APIs.
        return self.contentsRect().width() // TetrisBoard.BOARD_WIDTH

    def squareHeight(self):
        return self.contentsRect().height() // TetrisBoard.BOARD_HEIGHT

    def clearBoard(self):
        """Empty every cell of the board."""
        for i in range(TetrisBoard.BOARD_WIDTH * TetrisBoard.BOARD_HEIGHT):
            self.board[i] = TetrisShape.NoShape

    def dropDown(self):
        """Hard drop: move the piece straight down as far as it will go."""
        dropHeight = 0
        newY = self.curY
        while newY > 0:
            if not self.tryMove(self.curPiece, self.curX, newY - 1):
                break
            newY -= 1
            dropHeight += 1
        self.pieceDropped(dropHeight)

    def oneLineDown(self):
        """Soft drop one row; lock the piece if it cannot move."""
        if not self.tryMove(self.curPiece, self.curX, self.curY - 1):
            self.pieceDropped(0)

    def pieceDropped(self, dropHeight):
        """Fix the current piece into the board and score the drop."""
        for i in range(4):
            x = self.curX + self.curPiece.x(i)
            y = self.curY - self.curPiece.y(i)
            self.setShapeAt(x, y, self.curPiece.shape())

        self.numPiecesDropped += 1
        if self.numPiecesDropped % 25 == 0:
            # Speed up every 25 pieces.
            self.level += 1
            self.timer.start(self.timeoutTime(), self)
            self.levelChanged.emit(self.level)  # fix: emit, not call

        self.score += dropHeight + 7
        self.scoreChanged.emit(self.score)  # fix: emit, not call
        self.removeFullLines()

        if not self.isWaitingAfterLine:
            self.newPiece()

    def removeFullLines(self):
        """Remove all full rows, shifting everything above down one row."""
        numFullLines = 0
        # Scan every row from the top down (fix: the original iterated
        # range(H - 1), skipping the top row, and iterated the bare int
        # BOARD_WIDTH in the inner loop).
        for i in reversed(range(TetrisBoard.BOARD_HEIGHT)):
            lineIsFull = True
            for j in range(TetrisBoard.BOARD_WIDTH):
                # fix: the original called the non-existent self.shapeAt.
                if self.getShapeAt(j, i) == TetrisShape.NoShape:
                    lineIsFull = False
                    break

            if lineIsFull:
                numFullLines += 1
                # Shift every row above row i down by one...
                for k in range(i, TetrisBoard.BOARD_HEIGHT - 1):
                    for j in range(TetrisBoard.BOARD_WIDTH):
                        self.setShapeAt(j, k, self.getShapeAt(j, k + 1))
                # ...and clear the (new) top row.
                for j in range(TetrisBoard.BOARD_WIDTH):
                    self.setShapeAt(j, TetrisBoard.BOARD_HEIGHT - 1, TetrisShape.NoShape)

        if numFullLines > 0:
            self.numLinesRemoved += numFullLines
            self.score += 10 * numFullLines
            self.linesRemovedChanged.emit(self.numLinesRemoved)  # fix: emit
            self.scoreChanged.emit(self.score)                   # fix: emit

            # Brief pause so the removal is visible before the next piece.
            self.timer.start(500, self)
            self.isWaitingAfterLine = True
            self.curPiece.setShape(TetrisShape.NoShape)
            self.update()

    def newPiece(self):
        """Promote the preview piece to the falling piece; end the game if it
        has no room to spawn."""
        # Fix: the original assigned to *local* curPiece/curX/curY, so the
        # widget state was only updated as a side effect of tryMove() and the
        # game-over branch mutated a throwaway local.  Also give the preview
        # a brand-new TetrisPiece so cur/next never alias the same object.
        self.curPiece = self.nextPiece
        self.nextPiece = TetrisPiece()
        self.nextPiece.setRandomShape()
        self.showNextPiece()

        self.curX = TetrisBoard.BOARD_WIDTH // 2 + 1
        self.curY = TetrisBoard.BOARD_HEIGHT - 1 + self.curPiece.minY()

        if not self.tryMove(self.curPiece, self.curX, self.curY):
            # No room to spawn: game over.
            self.curPiece.setShape(TetrisShape.NoShape)
            self.timer.stop()
            self.isStarted = False

    def showNextPiece(self):
        """Render the upcoming piece into the preview label, if one is set."""
        if not self.nextPieceLabel:
            return

        dx = self.nextPiece.maxX() - self.nextPiece.minX() + 1
        dy = self.nextPiece.maxY() - self.nextPiece.minY() + 1

        pixmap = QPixmap(dx * self.squareWidth(), dy * self.squareHeight())
        painter = QPainter(pixmap)
        painter.fillRect(pixmap.rect(), self.nextPieceLabel.palette().background())

        for i in range(4):
            x = self.nextPiece.x(i) - self.nextPiece.minX()
            y = self.nextPiece.y(i) - self.nextPiece.minY()
            self.drawSquare(painter, x * self.squareWidth(), y * self.squareHeight(),
                            self.nextPiece.shape())

        # Fix: finish painting before handing the pixmap to the label; in
        # Python the QPainter is not destroyed (flushed) at scope exit the
        # way the C++ original relied on.
        painter.end()
        self.nextPieceLabel.setPixmap(pixmap)

    def tryMove(self, newPiece, newX, newY):
        """Move/rotate to (newX, newY) if every cell is free and in bounds.

        Returns True and commits the move (updating curPiece/curX/curY and
        repainting) on success, False otherwise.
        """
        for i in range(4):
            x = newX + newPiece.x(i)
            y = newY - newPiece.y(i)
            if x < 0 or x >= TetrisBoard.BOARD_WIDTH or y < 0 or y >= TetrisBoard.BOARD_HEIGHT:
                return False
            if self.getShapeAt(x, y) != TetrisShape.NoShape:
                return False

        self.curPiece = newPiece
        self.curX = newX
        self.curY = newY
        self.update()
        return True

    def drawSquare(self, painter, x, y, shape):
        """Paint one cell at pixel position (x, y) in the colour of *shape*."""
        COLORTABLE = [
            0x000000, 0xCC6666, 0x66CC66, 0x6666CC,
            0xCCCC66, 0xCC66CC, 0x66CCCC, 0xDAAA00
        ]
        # Fix: wrap the raw RGB int in QColor — a plain int has no
        # light()/dark() and cannot be passed to setPen().
        color = QColor(COLORTABLE[shape.value])

        painter.fillRect(x + 1, y + 1, self.squareWidth() - 2, self.squareHeight() - 2, color)

        # Light top/left edges, dark bottom/right edges for a 3D bevel.
        painter.setPen(color.light())
        painter.drawLine(x, y + self.squareHeight() - 1, x, y)
        painter.drawLine(x, y, x + self.squareWidth() - 1, y)

        painter.setPen(color.dark())
        painter.drawLine(x + 1, y + self.squareHeight() - 1,
                         x + self.squareWidth() - 1, y + self.squareHeight() - 1)
        painter.drawLine(x + self.squareWidth() - 1, y + self.squareHeight() - 1,
                         x + self.squareWidth() - 1, y + 1)
|
[
"ip1992@inbox.ru"
] |
ip1992@inbox.ru
|
e09138090d3aa5f88c738829adae8bed852b4eb4
|
fec19fc5d71eed972ab620c50df64252e29e6f32
|
/marquote/shakespeare/views.py
|
61155f2941bf96448e56a0a0a89df4a182303bd7
|
[] |
no_license
|
rixx/marquote-frontend
|
8468b8f67a356be804fc906b3bd4bf1a646b7ac0
|
70f86f2ffe9d9963dd662ec84aeec0d86bc23f16
|
refs/heads/master
| 2021-01-21T13:26:11.924620
| 2016-05-19T23:20:21
| 2016-05-20T23:20:21
| 50,210,297
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
from django.shortcuts import render
from django.views.generic import View
from shakespeare.models import ShakespeareSequence
class IndexView(View):
    """Render the Shakespeare index page with its (lazily created) project."""

    template_name = 'shakespeare/index.html'

    def get(self, request):
        # Fetch — or create on first access — the singleton project.
        project = ShakespeareSequence.get_or_create_project()
        return render(request, self.template_name, {'project': project})
|
[
"tobias.kunze@ax-semantics.com"
] |
tobias.kunze@ax-semantics.com
|
283ddf0cecb6580e015b84c826afada6ef5234d6
|
a0f579bc6b1dd310b232982240e15b3aeda8c80f
|
/apps.py
|
729d0bb5df7cb1ae7664be531fccf28a02613b39
|
[] |
no_license
|
megala18/task
|
886e944fc1b59dbd6fbb2485a38ae4bc00dee186
|
1bbe3b27f4a42be57507b34b3fccbf04a2ae02e5
|
refs/heads/main
| 2023-08-18T23:39:53.532379
| 2021-10-06T10:56:01
| 2021-10-06T10:56:01
| 414,173,117
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 152
|
py
|
from django.apps import AppConfig
class CrudappConfig(AppConfig):
    """Django application configuration for the crudApp app."""

    name = 'crudApp'
    # 64-bit auto-incrementing primary keys by default (Django 3.2+).
    default_auto_field = 'django.db.models.BigAutoField'
|
[
"noreply@github.com"
] |
noreply@github.com
|
e1ffaef3f1c5494dae189261a3623efe91eafab0
|
b2ba5f361d4ec5e08110ae2db62b8adb6c7eee9a
|
/blog/urls.py
|
e02a111da4c881ff28c53768395ed7741d0f1b2a
|
[] |
no_license
|
hariomvyas/simpleblogwebapp
|
b9622fbfaa3c440e170efa0512db42c5318e0e42
|
dae3acd10cf0a808888ec86bfd385a6ad1f898dc
|
refs/heads/master
| 2022-12-14T19:46:04.139128
| 2020-09-09T10:05:07
| 2020-09-09T10:05:07
| 294,068,407
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 659
|
py
|
from django.urls import path
from .views import (
    PostListView,
    PostDetailView,
    PostCreateView,
    PostUpdateView,
    PostDeleteView,
    UserPostListView
)
from . import views

# URL routes for the blog app.  Class-based views cover the post CRUD pages
# and per-user listings; the plain function view serves the "about" page.
urlpatterns = [
    path('', PostListView.as_view(),name='blog-home'),
    path('user/<str:username>', UserPostListView.as_view(),name='user-posts'),
    path('post/<int:pk>/', PostDetailView.as_view(),name='post-detail'),
    path('post/new/', PostCreateView.as_view(),name='post-create'),
    path('post/<int:pk>/update/', PostUpdateView.as_view(),name='post-update'),
    path('post/<int:pk>/delete/', PostDeleteView.as_view(),name='post-delete'),
    path('about/',views.about,name='blog-about'),
]
|
[
"hariomkvyas@gmail.com"
] |
hariomkvyas@gmail.com
|
2d3681ee37212f46320404bae0172762348ebf5f
|
ae2113a464cde44de13849ca92e316b93e9fdbb9
|
/GSL/main_supervised_ilab.py
|
9ad83699aefa0d3ead7b564d19a2efa302adfd9a
|
[] |
no_license
|
gyhandy/GSL-Video
|
7002e250fbab5eaee0f2ffca2a3583054fca3539
|
1a8d7e55c72a1758dce6c0db4c1d88f98531b223
|
refs/heads/master
| 2022-12-19T00:36:03.603444
| 2020-09-28T18:30:15
| 2020-09-28T18:30:15
| 290,236,604
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,090
|
py
|
"""main.py"""
import argparse
import numpy as np
import torch
from solver_supervised_ilab import Solver
from utils import str2bool
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
def main(args):
    """Seed every RNG for reproducibility, then train or traverse.

    Runs Solver.train() when args.train is truthy, Solver.traverse()
    otherwise.
    """
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    np.random.seed(args.seed)

    solver = Solver(args)
    if args.train:
        solver.train()
    else:
        solver.traverse()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='toy Beta-VAE')
parser.add_argument('--train', default=True, type=str2bool, help='train or traverse')
parser.add_argument('--seed', default=1, type=int, help='random seed')
parser.add_argument('--cuda', default=True, type=str2bool, help='enable cuda')
parser.add_argument('--max_iter', default=1e7, type=float, help='maximum training iteration')
parser.add_argument('--batch_size', default=5, type=int, help='batch size')
# model params
parser.add_argument('--crop_size', type=int, default=208, help='crop size for the ilab dataset')
parser.add_argument('--image_size', type=int, default=128, help='crop size for the ilab dataset')
parser.add_argument('--c_dim', type=int, default=6, help='dimension of domain labels (1st dataset)')
parser.add_argument('--g_conv_dim', type=int, default=64, help='number of conv filters in the first layer of G')
parser.add_argument('--d_conv_dim', type=int, default=64, help='number of conv filters in the first layer of D')
# parser.add_argument('--g_repeat_num', type=int, default=2, help='number of residual blocks in G for encoder and decoder')
parser.add_argument('--g_repeat_num', type=int, default=1,
help='number of residual blocks in G for encoder and decoder')
parser.add_argument('--d_repeat_num', type=int, default=6, help='number of strided conv layers in D')
parser.add_argument('--d_pose_repeat_num', type=int, default=2, help='number of strided conv layers in D')
parser.add_argument('--lambda_combine', type=float, default=1, help='weight for lambda_combine')
parser.add_argument('--lambda_unsup', default=0, type=float, help='lambda_recon')
parser.add_argument('--lambda_GAN', default=1, type=float, help='lambda_recon')
parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization')
# parser.add_argument('--z_dim', default=1000, type=int, help='dimension of the representation z')
parser.add_argument('--z_dim', default=100, type=int, help='dimension of the representation z')
'''
the weight for pose and background
'''
parser.add_argument('--z_pose_dim', default=20, type=int, help='dimension of the pose/background in z')
parser.add_argument('--z_unknow_dim', default=20, type=int, help='dimension of the pose/background in z')
parser.add_argument('--beta', default=4, type=float, help='beta parameter for KL-term in original beta-VAE')
parser.add_argument('--objective', default='H', type=str, help='beta-vae objective proposed in Higgins et al. or Burgess et al. H/B')
parser.add_argument('--model', default='H', type=str, help='model proposed in Higgins et al. or Burgess et al. H/B')
parser.add_argument('--gamma', default=1000, type=float, help='gamma parameter for KL-term in understanding beta-VAE')
parser.add_argument('--C_max', default=25, type=float, help='capacity parameter(C) of bottleneck channel')
parser.add_argument('--C_stop_iter', default=1e5, type=float, help='when to stop increasing the capacity')
parser.add_argument('--lr', default=1e-4, type=float, help='learning rate')
parser.add_argument('--beta1', default=0.9, type=float, help='Adam optimizer beta1')
parser.add_argument('--beta2', default=0.999, type=float, help='Adam optimizer beta2')
parser.add_argument('--dset_dir', default='data', type=str, help='dataset directory')
parser.add_argument('--dataset', default='ilab_unsup_threeswap', type=str, help='dataset name')
# parser.add_argument('--image_size', default=64, type=int, help='image size. now only (64,64) is supported')
parser.add_argument('--num_workers', default=32, type=int, help='dataloader num_workers')
parser.add_argument('--viz_on', default=True, type=str2bool, help='enable visdom visualization')
parser.add_argument('--viz_name', default='ilab_unsup_threeswap_changeZdim', type=str, help='visdom env name')
parser.add_argument('--viz_port', default=8097, type=str, help='visdom port number')
parser.add_argument('--save_output', default=True, type=str2bool, help='save traverse images and gif')
parser.add_argument('--output_dir', default='outputs', type=str, help='output directory')
'''
save model
'''
# parser.add_argument('--model_save_dir', default='checkpoints', type=str, help='output directory')
parser.add_argument('--model_save_dir', default='checkpoints', type=str, help='output directory')
parser.add_argument('--resume_iters', type=int, default=0, help='resume training from this step')
parser.add_argument('--gather_step', default=1000, type=int, help='numer of iterations after which data is gathered for visdom')
parser.add_argument('--display_step', default=1000, type=int, help='number of iterations after which loss data is printed and visdom is updated')
parser.add_argument('--save_step', default=1000, type=int, help='number of iterations after which a checkpoint is saved')
parser.add_argument('--ckpt_dir', default='checkpoints', type=str, help='checkpoint directory')
parser.add_argument('--ckpt_name', default='last', type=str, help='load previous checkpoint. insert checkpoint filename')
parser.add_argument('--use_server', default='False', type=str2bool,
help='use server to train the model need change the data location')
parser.add_argument('--which_server', default='15', type=str,
help='use which server to train the model 15 or 21')
args = parser.parse_args()
main(args)
|
[
"andy@ilab34.usc.edu"
] |
andy@ilab34.usc.edu
|
870491e6e7fdfe32d0764417af358bf3738cdaf5
|
d34b9e080458861b0851c402eae49680a43b78fa
|
/codejam/2018/qualifier/cubic-ufo/cubic-ufo.py
|
e00a7ef2d4cedd7a7d9b00fce505f7cec5f045cb
|
[] |
no_license
|
bambielli/jspractice
|
92cd3853a45d931cdb6c6178474f62e6eea086a7
|
ae1afa37b2e38ad80f0e812ce9f6ab4ea75e8e7e
|
refs/heads/master
| 2021-01-17T06:39:42.655067
| 2019-02-03T20:27:16
| 2019-02-03T20:27:16
| 55,311,945
| 0
| 0
| null | 2017-06-14T04:07:30
| 2016-04-02T19:16:26
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,093
|
py
|
"""
Problem
A mysterious cubic alien ship has appeared in the sky over Toronto! In this problem, Toronto is a plane in three-dimensional space that is parallel to the xz plane at y = -3 km. The alien ship is a solid cube with side length 1 km, centered at (0 km, 0 km, 0 km), with its eight corners at (+/- 0.5 km, +/- 0.5 km, +/- 0.5 km). The ship is casting an ominous shadow onto the plane; formally, the shadow is the orthogonal projection of the cube onto the plane. (We consider the sun to be a point infinitely far above the Toronto plane along the y-axis.)
The military is willing to tolerate the ship as long as the aliens meet their bureaucratic demand: the shadow must cover an area of the plane that is acceptably close to A km2 (see the Output section for a precise definition). They have hired you, a geometric linguistics expert, to convey this demand to the aliens. In your communications so far, you have learned that the ship cannot change size, and the center of the ship cannot move, but the ship is able to rotate arbitrarily in place.
Please find a way that the aliens can rotate the ship so that the shadow's area is close to A. Express your rotation using three points: the centers of any three non-pairwise-opposing faces.
Input
The first line of the input gives the number of test cases, T. T test cases follow; each consists of one line with a rational A, the desired area of the shadow, in km2, with exactly six digits after the decimal point.
It is guaranteed that there is always a way to rotate the ship in the desired manner for the values of A allowed in this problem.
Output
For each test case, first output one line containing Case #x:, where x is the test case number (starting from 1). Then, output three more lines with three rational values each: the x, y, and z coordinates of one of your three provided face-centers, as described above. You are welcome to use decimal (e.g., 0.000123456) or scientific notation (e.g., 1.23456e-4).
Your answer will be considered correct if and only if all of the following are true:
The distance (in km) from each point to the origin must be between 0.5 - 10-6 and 0.5 + 10-6, inclusive.
The angles (in radians) between segments connecting the origin to each point must be between π/2 - 10-6 and π/2 + 10-6, inclusive.
The area of the shadow (in km2), computed by projecting all 8 vertices onto the y = -3 plane and finding the area of the convex hull of those projected points, must be between A - 10-6 and A + 10-6, inclusive. We will compute the vertices as +/- p1 +/- p2 +/- p3 (that is, for each pi we add either pi or -pi to the total using vector addition), where p1, p2, and p3 are the face-centers that you provide.
Please note that you might need to output more than 6 digits after the decimal point to safely pass the checks mentioned above. If there are multiple acceptable answers, you may output any one of them.
Limits
1 ≤ T ≤ 100.
Time limit: 30 seconds per test set.
Memory limit: 1GB.
Test set 1 (Visible)
1.000000 ≤ A ≤ 1.414213
Test set 2 (Hidden)
1.000000 ≤ A ≤ 1.732050
"""
|
[
"brian.ambielli@gmail.com"
] |
brian.ambielli@gmail.com
|
a729aa764e5639efa79e31ee6bdf1885b4467c45
|
b9d3ec9c6bc976c5a05f4fbd32a56b42ed6db624
|
/modules/PaloAltoNetworks/Panorama/Templates/Network_Templates/GlobalProtect/GlobalProtect.py
|
d2d257dd9c4563888771341f84f56edfcc4f2ac4
|
[] |
no_license
|
nachieket/Phantom
|
207a07dad5460564f81e0c80d05d575751420b3c
|
2bd0cdac563bbe8d7e12b9a801ea2ce0e35cce84
|
refs/heads/master
| 2022-08-22T03:24:20.894297
| 2020-05-19T12:07:13
| 2020-05-19T12:07:13
| 176,566,019
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 503
|
py
|
#!/usr/bin/env /Library/Frameworks/Python.framework/Versions/3.6/bin/python3
from .Portals.Portals import GlobalProtectPortal
from .Gateways.Gateways import GlobalProtectGateway
class GlobalProtect(GlobalProtectPortal, GlobalProtectGateway):
    """
    GlobalProtect Class

    Combines the Portal and Gateway base classes for a Panorama appliance,
    initialising both with the same connection parameters.
    """
    def __init__(self, panorama_ip, api_key):
        # print('++++ GlobalProtect Class')
        # Initialise the first base in the MRO (GlobalProtectPortal)...
        super().__init__(panorama_ip, api_key)
        # ...then explicitly initialise the class that follows
        # GlobalProtectPortal in the MRO (GlobalProtectGateway).
        # NOTE(review): this manual double-call suggests the bases do not
        # cooperatively call super().__init__() themselves — confirm before
        # refactoring to a single super() chain.
        super(GlobalProtectPortal, self).__init__(panorama_ip, api_key)
        # print('---- GlobalProtect Class')
|
[
"nachiketj@outlook.com"
] |
nachiketj@outlook.com
|
9f948546652be4760b5d078808483f504f292773
|
d1ab86d9a87c0f22a8a266455e130ce41c7bce30
|
/codekata/string.py
|
329d461f9f5528dd7ebf412dc116fa174b624b09
|
[] |
no_license
|
madhankmr/guvi
|
0a4cca09aa449d352feedbfc8f2312b2e164de2b
|
eea3ff1e3f02fe2cd003432417924a9548f12cb9
|
refs/heads/master
| 2020-06-03T08:24:22.988728
| 2019-07-09T10:32:25
| 2019-07-09T10:32:25
| 191,355,228
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 65
|
py
|
# Read two whitespace-separated strings from stdin and print the
# lexicographically larger one (the first wins on a tie).
S1, S2 = input().split()
print(S1 if S1 >= S2 else S2)
|
[
"noreply@github.com"
] |
noreply@github.com
|
cda8f45c33a9727867c324be9b68dea6af8eccae
|
db3a007479d33725e1944bed3dc78c101c825c1f
|
/6DoF_CNN/data_preprocessing.py
|
e21cf85067c1adffe2c316cf0bd4aaf70c21891b
|
[] |
no_license
|
tse-hou/tomm20_TMIV
|
1079d03b2028bab1da44b2ee19a7342b4f730d7d
|
9cf28add29828ce5d0dd591295a2247edf621a72
|
refs/heads/main
| 2023-04-12T00:44:21.906256
| 2021-05-03T06:04:44
| 2021-05-03T06:04:44
| 360,378,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,485
|
py
|
# This script is used to produce traning and testing dataset/label
# training (testing) dataset compose of:
# source view (texture) : 7
# source view (depth) : 7
# orientation diff. (Yaw) : 7
# orientation diff. (Pitch) : 7
# orientation diff. (Row) : 7
# position diff. (x) : 7
# position diff. (y) : 7
# position diff. (z) : 7
# -------------------------------
# total: 56
import pickle
import numpy as np
import pandas
import os
import csv
import sys
class dataset:
    """One sequence's data: pickled source views, depth maps and camera
    parameters, plus per-configuration quality scores from a CSV file.

    Expected pickle keys (see __init__):
        'depth'     - numpy array of per-view depth maps
        'imgs'      - numpy array of per-view texture images
        'c_para'    - pandas DataFrame of camera parameters
        'fn_frames' - list of frame identifiers
        'fn_sv'     - numpy array of source-view indices
    Assumes 7 source views x 5 frames of 256x256 images -- TODO confirm.
    """
    def __init__(self, pkl_path='', csvFilePath=''):
        if(pkl_path==''):
            print("error: pkl_path is empty")
            # NOTE(review): __init__ must return None; returning -1 makes
            # Python raise TypeError at instantiation instead of signalling
            # the error cleanly -- consider raising ValueError here.
            return -1
        # NOTE(review): the pickle file handle is never closed (no 'with').
        f=open(pkl_path,'rb')
        data=pickle.load(f)
        self.csvFilePath = csvFilePath
        self.pkl_depth = data['depth'] # numpy array
        self.pkl_imgs = data['imgs'] # numpy array
        self.pkl_camera_para = data['c_para'] # pandas dataframe
        self.pkl_idx_frames = data['fn_frames'] # list
        self.pkl_idx_sourceView = data['fn_sv'] # numpy array
        # 'traning' typo is part of the public attribute name read by
        # produce_data(); renaming it would break callers.
        self.traning_label = []
        # Derive labels from the CSV and normalize the raw arrays up front.
        self.get_opt_label()
        self.gray_scale_imgs()
        self.depth2Distance_depthmaps()
    # ---------------------------------------------------------------
    # get label data from .csv file
    def get_opt_label(self):
        """Scan the CSV in groups of 63 consecutive rows; for each group
        append [Frame, Synthesized.View, p1, p2, p3] of the row with the
        highest 'CEL' score to self.traning_label.  (63 is presumably the
        number of pass configurations per frame/view -- TODO confirm.)
        """
        idx = 0
        opt_numofPass = [0,0,0,0,0]
        max_CEL = -999999.0
        with open(self.csvFilePath, newline='') as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                if(float(row['CEL'])>max_CEL):
                    opt_numofPass[0] = row['Frame']
                    opt_numofPass[1] = row['Synthesized.View']
                    opt_numofPass[2] = row['p1']
                    opt_numofPass[3] = row['p2']
                    opt_numofPass[4] = row['p3']
                    max_CEL = max(float(row['CEL']),max_CEL)
                idx += 1
                if(idx == 63):
                    self.traning_label.append(opt_numofPass)
                    idx = 0
                    opt_numofPass = [0,0,0,0,0]
                    max_CEL = -999999
    # ---------------------------------------------------------------
    # reshape depth map
    def depth2Distance_depthmaps(self):
        """Keep only channel 0 of every depth pixel and reshape the stack
        to (views=7, frames=5, 256, 256)."""
        self.pkl_depth = np.delete(self.pkl_depth,[1,2],axis=-1).reshape((7, 5, 256, 256))
    # ---------------------------------------------------------------
    # convert imgs to gray scale
    def gray_scale(self, img, width, height):
        """Average the channel axis of *img* and return a (256, 256) array.

        NOTE(review): *width*/*height* are accepted but ignored -- the
        reshape is hard-coded to 256x256.
        """
        img = np.sum(img,axis=2)/3
        return img.reshape(256,256)
    # gray scale all of imgs in dataset
    def gray_scale_imgs(self):
        """Replace self.pkl_imgs with a grayscale copy, shape (7, 5, 256, 256)."""
        new_imgs = np.zeros((7, 5, 256, 256))
        for view in range(7):
            for frame in range(5):
                new_img = self.gray_scale(self.pkl_imgs[view][frame],256,256)
                new_imgs[view][frame] = new_img
        self.pkl_imgs = new_imgs
    # ---------------------------------------------------------------
    # map generator
    def para_map_generator(self, source_para, target_para, width, height):
        """Return a (width, height) map filled with target_para - source_para."""
        para_diff = target_para - source_para
        return np.full((width,height), para_diff)
    # ---------------------------------------------------------------
    # output training data
    def output_training_data(self):
        """Assemble one (56, 256, 256) sample per label row and return them
        stacked as (n_samples, 56, 256, 256).

        Channel layout per sample:
            0-6    grayscale source views (for the labelled frame)
            7-13   depth maps
            14-34  position-difference maps (3 axes x 7 source views)
            35-55  rotation-difference maps (3 axes x 7 source views)
        """
        datas = []
        for row in self.traning_label:
            single_data = np.zeros((56, 256, 256))
            idx = 0
            frameIdx = -1
            # Map the label's frame identifier back to its pickle index.
            for i in range(len(self.pkl_idx_frames)):
                if(str(row[0])==self.pkl_idx_frames[i]):
                    frameIdx = i
            # Target view name looks like 'v3' -> index 3.
            targetViewIdx = int(row[1].replace("v",""))
            # imgs
            for view in range(7):
                single_data[idx] = self.pkl_imgs[view][frameIdx]
                idx+=1
            # depth
            for view in range(7):
                single_data[idx] = self.pkl_depth[view][frameIdx]
                idx+=1
            # position
            for sourceViewIdx in self.pkl_idx_sourceView:
                for para_idx in range(1,4):
                    source_para = self.pkl_camera_para[f'Position{para_idx}'][int(sourceViewIdx)]
                    target_para = self.pkl_camera_para[f'Position{para_idx}'][targetViewIdx]
                    single_data[idx] = self.para_map_generator(source_para, target_para, 256, 256)
                    idx+=1
            # orientation
            for sourceViewIdx in self.pkl_idx_sourceView:
                for para_idx in range(1,4):
                    source_para = self.pkl_camera_para[f'Rotation{para_idx}'][int(sourceViewIdx)]
                    target_para = self.pkl_camera_para[f'Rotation{para_idx}'][targetViewIdx]
                    single_data[idx] = self.para_map_generator(source_para, target_para, 256, 256)
                    idx+=1
            datas.append(single_data)
        T_data = np.zeros((len(datas), 56, 256, 256))
        for i in range(0,len(datas)):
            T_data[i] = datas[i]
        return T_data
def produce_data(dataset_name_list,T_data_path,T_label_path, pklFolderPath, csvFolderPath):
    """Build the CNN input tensor and label matrix for the named sequences
    and save both as .npy files.

    dataset_name_list: sequence names; '<name>.pkl' / '<name>.csv' must
        exist under pklFolderPath / csvFolderPath respectively.
    T_data_path / T_label_path: output paths handed to np.save().

    Saved shapes: data (N, 256, 256, 56) after the channels-last transpose,
    labels (N, 3).
    """
    allofdataset = []
    alloflabel = []
    for i in range(len(dataset_name_list)):
        print(f"producing dataset'{dataset_name_list[i]}'")
        dataset_temp = dataset(pkl_path = f"{pklFolderPath}/{dataset_name_list[i]}.pkl",
                               csvFilePath = f"{csvFolderPath}/{dataset_name_list[i]}.csv")
        dataset_temp_T_data = dataset_temp.output_training_data()
        allofdataset.append(dataset_temp_T_data)
        # Label: (p1, p2-p1, p3-p2) divided by 7 -- presumably cumulative
        # pass counts normalized by the 7 source views; verify against the
        # CSV producer.
        for row in dataset_temp.traning_label:
            temp = np.zeros((3))
            temp[0] = (int(row[2])/7)
            temp[1] = ((int(row[3])-int(row[2]))/7)
            temp[2] = ((int(row[4])-int(row[3]))/7)
            alloflabel.append(temp)
    # combine all of data
    for i in range(1,len(allofdataset)):
        allofdataset[0] = np.concatenate((allofdataset[0],allofdataset[i]),axis=0)
    T_data = allofdataset[0]
    # combine all of lable
    T_label = np.zeros((len(alloflabel), 3))
    for i in range(len(alloflabel)):
        T_label[i] = alloflabel[i]
    # Channels-last layout for the CNN: (N, H, W, C).
    T_data = np.transpose(T_data,(0,2,3,1))
    print(type(T_data))
    print(T_data.shape)
    print(type(T_label))
    print(T_label.shape)
    np.save(T_data_path, T_data)
    np.save(T_label_path,T_label)
    print("done")
if __name__ == "__main__":
    # First CLI argument selects the projection family of the sequences:
    # "PTP" or "ERP" (names taken from the on-disk folder layout).
    dataset_type = sys.argv[1]
    if(dataset_type == "PTP"):
        pklFolderPath = "code_silver/prepare_datasets_PTP/pickle"
        csvFolderPath = "code_silver/raw_datasets/states/PTP"
        dataset_list = ["IntelFrog","OrangeKitchen","PoznanFencing","PoznanStreet","TechnicolorPainter","PoznanCarpark","PoznanHall"]
        # Leave-one-out: each sequence in turn is held out as the test set
        # and a training file is produced from the remaining six.
        for i in range(len(dataset_list)):
            # training
            testing_data = dataset_list[i]
            dataset_name_list = dataset_list.copy()
            dataset_name_list.remove(testing_data)
            T_data_path = f'CNN_dataset/Training/Training_data_{testing_data}.npy'
            T_label_path = f'CNN_dataset/Training/Training_label_{testing_data}.npy'
            produce_data(dataset_name_list,T_data_path,T_label_path, pklFolderPath, csvFolderPath)
        # testing: all sequences together form the test tensor.
        produce_data(dataset_list,"CNN_dataset/Testing/Testing_data.npy","CNN_dataset/Testing/Testing_label.npy", pklFolderPath, csvFolderPath)
    elif(dataset_type == "ERP"):
        pklFolderPath = "code_silver/prepare_datasets_ERP/pickle"
        dataset_list = ["ClassroomVideo","TechnicolorHijack","TechnicolorMuseum"]
        # training (disabled; kept for reference)
        # csvFolderPath = "code_silver/raw_datasets/states/ERP/train"
        # T_data_path = 'CNN_dataset/Training/Training_data_NSV.npy'
        # T_label_path = 'CNN_dataset/Training/Training_label_NSV.npy'
        # produce_data(dataset_list,T_data_path,T_label_path, pklFolderPath, csvFolderPath)
        # testing
        csvFolderPath = "code_silver/raw_datasets/states/ERP/test"
        T_data_path = 'CNN_dataset/Testing/Testing_data_RND.npy'
        T_label_path = 'CNN_dataset/Testing/Testing_label_RND.npy'
        produce_data(dataset_list,T_data_path,T_label_path, pklFolderPath, csvFolderPath)
|
[
"zackl852l@gmail.com"
] |
zackl852l@gmail.com
|
6c8b062cee0cd780d80f6ae73dd3510bd18e1fcc
|
6eb097cccbc0e040eb940663f85ce7eacb2be95b
|
/Desafio061.py
|
fe389e9b9bf333e9d84a854ccc59923d3a836ef6
|
[] |
no_license
|
figueiredorodrigo/Exercicios-Guanabara
|
c7cdb534b3f7c2db0e2bffc2b4376af035213b3a
|
621000882ab3aa080415bb04336fd1713ab85b5d
|
refs/heads/main
| 2023-06-02T07:10:22.555624
| 2021-06-15T16:33:26
| 2021-06-15T16:33:26
| 376,381,603
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 228
|
py
|
# Print the first 10 terms of an arithmetic progression, separated by
# " -> ", given the first term and the common difference.
p1 = int(input('Primeeiro termo: '))
rz = int(input('Informe a razão da P.A: '))
tr = p1
for _ in range(10):
    print(f' {tr} -> ', end='')
    tr += rz
|
[
"noreply@github.com"
] |
noreply@github.com
|
640d43fb3cede03c64b09ac5e1455b53828ac1db
|
94967d680586b2a727fc93483d32d38d1ed5a30f
|
/wed_test/post/urls.py
|
00dd243ffcc7164f0f0987dde8c5b1e74f53ecc1
|
[] |
no_license
|
gangbok119/wed_test
|
93379695fc62cfd6b6012022a342c9a64adf139f
|
08a46402b6d414a29dcd8d321f7ad898bba81b54
|
refs/heads/master
| 2021-07-13T22:05:14.907466
| 2017-10-18T02:59:02
| 2017-10-18T02:59:02
| 107,340,791
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 363
|
py
|
from django.conf.urls import url
from .views import post_list, post_detail, post_create, post_delete
# URLconf for the post app: list, detail, create and delete routes.
urlpatterns = [
    url(r'post/$', post_list, name='post_list'),
    url(r'post/(?P<pk>\d+)/$',post_detail, name='post_detail'),
    # NOTE(review): no trailing '/$' anchor, so this also matches any path
    # that merely starts with 'post/create' -- confirm that is intended.
    url(r'post/create',post_create,name='post_create'),
    url(r'post/delete/(?P<pk>\d+)/$', post_delete, name='post_delete'),
]
|
[
"gangbok119@gmail.com"
] |
gangbok119@gmail.com
|
70456061b62a6c44867abca2486de5c1e3cbbd30
|
2316ce8a21d44a5d09284968ef42530633dc10d2
|
/sample_code/ep264/rev04/t.py
|
bb11896bd80b2ae0441665c1f237b272afbbc397
|
[] |
no_license
|
AlexanderWinkelmeier/explains
|
160de2c41fc5fc0156b482b41f89644dc585c4f3
|
d47ec53e384e4303a2d8e71fab9073a1a8d2d6bc
|
refs/heads/master
| 2023-07-30T04:55:31.234482
| 2021-09-15T02:59:42
| 2021-09-15T02:59:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 297
|
py
|
import os.path
import tempfile

SOME_FILE = 'foo.txt'


def _atomic_write(path, contents):
    """Write *contents* to *path* atomically.

    Stage the data in a temp file created in the target's directory (so the
    final rename cannot cross filesystems), then rename over the target;
    readers never observe a partially written file.  On any failure the
    temp file is removed and the error re-raised.
    """
    fd, temp_path = tempfile.mkstemp(dir=os.path.dirname(path))
    try:
        with open(fd, 'w') as staging:
            staging.write(contents)
        os.replace(temp_path, path)
    except BaseException:
        os.remove(temp_path)
        raise


_atomic_write(SOME_FILE, 'these are the new contents\n')
|
[
"int3l@users.noreply.github.com"
] |
int3l@users.noreply.github.com
|
965f17a1aaf3ff1e2f6716cbbb9d4fe6820df128
|
b906e5e26dfd67ce3a2ede2b4ae0163d166c7ff4
|
/test/test_systems.py
|
3f1d159e20688b7e6058eb1c1ed03bd670dca24c
|
[] |
no_license
|
DPCN-US/dpcn-config
|
f7513a68d1e8ca4ad297c5122d4d8da092ec5c96
|
00781f2f89f450da5f06c2b11b739156871dbc53
|
refs/heads/master
| 2020-12-22T00:08:04.837572
| 2020-01-29T23:48:54
| 2020-01-29T23:48:54
| 236,610,006
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,297
|
py
|
import json
import os
import unittest
from const import GROUP_CALL, PRIVATE_ID_MAX, PRIVATE_CALL, GROUP_ID_MAX, SYSTEM_DIR, CONTACT_TYPES
from util import gen_id
class TestSystems(unittest.TestCase):
    """Validates every system-definition JSON file in SYSTEM_DIR: contact ID
    ranges and uniqueness, contact types, channel site references, and the
    required top-level keys (name/type/sites/contacts)."""
    def setUp(self):
        # Load every regular file in SYSTEM_DIR as JSON; self.fail() (rather
        # than letting the exception propagate) names the offending file.
        self.systems = []
        systems = [f for f in os.listdir(SYSTEM_DIR) if os.path.isfile(os.path.join(SYSTEM_DIR, f))]
        for system in systems:
            with open(os.path.join(SYSTEM_DIR, system)) as json_file:
                try:
                    self.systems.append(json.load(json_file))
                except json.decoder.JSONDecodeError:
                    self.fail(f"Problem with system file: {system}")
    def test_systems_loaded(self):
        # Guard against an empty/missing SYSTEM_DIR silently passing everything.
        self.assertGreater(len(self.systems), 0)
    def check_range(self, type, max):
        # Helper: every explicit contact ID of the given call type must lie
        # in [1, max].  NOTE(review): parameters shadow builtins type/max.
        for system in self.systems:
            for contact in system['contacts']:
                c = system['contacts'][contact]
                if 'id' in c and c['type'] == type:
                    self.assertGreaterEqual(c['id'], 1)
                    self.assertLessEqual(c['id'], max)
    def test_private_id(self):
        self.check_range(PRIVATE_CALL, PRIVATE_ID_MAX)
    def test_group_id(self):
        self.check_range(GROUP_CALL, GROUP_ID_MAX)
    def test_duplicate_id(self):
        # IDs may be explicit or derived via gen_id(); either way they must
        # be globally unique across all systems.
        id_list = []
        for system in self.systems:
            for contact in system['contacts']:
                if 'id' in system['contacts'][contact]:
                    id = system['contacts'][contact]['id']
                else:
                    id = gen_id(contact, system['contacts'][contact])['id']
                if id:
                    id_list.append(id)
        self.assertEqual(len(id_list), len(set(id_list)), "System does not have unique IDs!")
    def test_type(self):
        # Every declared contact type must come from the CONTACT_TYPES set.
        for system in self.systems:
            for contact in system['contacts']:
                if 'type' in system['contacts'][contact]:
                    type = system['contacts'][contact]['type']
                    if type not in CONTACT_TYPES:
                        self.fail(f"{system['name']} {contact} unsupported type: {type}")
    def test_contact_channel_sites(self):
        # Each channel site reference must exist in the system's site table
        # (site 0 is treated as a wildcard and skipped).
        for system in self.systems:
            for contact in system['contacts']:
                if 'channel' in system['contacts'][contact]:
                    for site_id in system['contacts'][contact]['channel']['sites']:
                        if site_id != 0 and str(site_id) not in system['sites'].keys():
                            self.fail(f"Site ID {site_id} not in {system['sites'].keys()}")
    def test_system_name(self):
        for system in self.systems:
            if 'name' not in system:
                self.fail(f"No 'name' in system {system}")
    def test_system_type(self):
        for system in self.systems:
            if 'type' not in system:
                self.fail(f"No 'type' in system {system}")
    def test_system_sites(self):
        for system in self.systems:
            if 'sites' not in system:
                self.fail(f"No 'sites' in system {system}")
    def test_system_contacts(self):
        for system in self.systems:
            if 'contacts' not in system:
                self.fail(f"No 'contacts' in system {system}")
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
[
"p.rosenberg-watt@cablelabs.com"
] |
p.rosenberg-watt@cablelabs.com
|
5ff54b552765c5b276453da88a68b974732c1129
|
61d6d68f098f9f2f177227ef8e4ab3c526d007a2
|
/pony/social/oauth.py
|
dcf078c899f76e8bb6201bd1d7432e12a385dc2b
|
[] |
no_license
|
skotcarruth/pony
|
4185471222884825e09fa362f98a23735ebde213
|
e2ed5690b8ec0642b821492b87d57cfe2a5f8197
|
refs/heads/master
| 2021-01-25T09:00:17.351117
| 2010-12-05T02:10:32
| 2010-12-05T02:10:32
| 1,038,517
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,873
|
py
|
import json
import oauth2 as oauth
import urllib
import urlparse
from django.conf import settings
class HandlerException(Exception):
    """Base class for all OAuth handler errors."""


class MissingRequestToken(HandlerException):
    """An operation needed a request token that was never fetched."""


class MissingAccessToken(HandlerException):
    """An operation needed an access token that was never fetched."""


class ServiceError(HandlerException):
    """The remote service returned an error response."""
class TwitterOAuth(object):
    """Common interface for OAuth and talking to social services.

    Python 2 code. Implements the three-legged OAuth 1.0a dance against
    Twitter (request token -> user authorization -> access token) plus a
    generic REST resource accessor.
    """
    REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
    ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
    AUTHORIZE_URL = 'https://api.twitter.com/oauth/authorize'
    # Replace with (category, resource)
    RESOURCE_URL = 'http://api.twitter.com/1/%s/%s.json'
    def __init__(self, oauth_token=None, oauth_token_secret=None):
        # Pass a previously obtained token pair to call access_resource()
        # without redoing the authorization dance.
        self.request_token = None
        self.oauth_token = oauth_token
        self.oauth_token_secret = oauth_token_secret
    def get_request_token(self, callback_url=None):
        """Step 1: fetch a temporary request token; stored on self.request_token."""
        consumer = oauth.Consumer(settings.TWITTER_KEY, settings.TWITTER_SECRET)
        client = oauth.Client(consumer)
        body = None
        if callback_url:
            body = urllib.urlencode({'oauth_callback': callback_url})
        resp, content = client.request(self.REQUEST_TOKEN_URL, 'POST', body=body)
        self.request_token = dict(urlparse.parse_qsl(content))
    def get_authorize_url(self):
        """Step 2: build the URL the user must visit to authorize the app."""
        if not self.request_token:
            raise MissingRequestToken('You must request a request token first.')
        authorize_url = '%s?oauth_token=%s' % (
            self.AUTHORIZE_URL,
            self.request_token['oauth_token'],
        )
        return authorize_url
    def get_access_token(self, oauth_token, oauth_verifier):
        """Step 3: exchange the authorized request token + verifier for a
        long-lived access token.  Raises ServiceError on a non-200 reply."""
        # Query twitter for the access token
        consumer = oauth.Consumer(settings.TWITTER_KEY, settings.TWITTER_SECRET)
        # NOTE(review): the second argument is the token *secret*; passing the
        # consumer secret (TWITTER_SECRET) instead of the request-token secret
        # looks wrong -- confirm against the oauth2 library docs.
        token = oauth.Token(oauth_token, settings.TWITTER_SECRET)
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)
        resp, content = client.request(self.ACCESS_TOKEN_URL, 'POST')
        # Test for success or error response
        if resp['status'] != '200' or '<error>' in content:
            raise ServiceError(content)
        self.access_token = dict(urlparse.parse_qsl(content))
        return self.access_token
    def access_resource(self, category, resource, params={}, method='GET'):
        """Call a REST endpoint ('category/resource') signed with the stored
        access token and return the decoded JSON response.

        NOTE(review): mutable default for *params* -- harmless here since it
        is never mutated, but prefer params=None.
        """
        # Construct the url to query
        url = self.RESOURCE_URL % (category, resource)
        resource = '%s?%s' % (url, urllib.urlencode(params))
        consumer = oauth.Consumer(settings.TWITTER_KEY, settings.TWITTER_SECRET)
        token = oauth.Token(key=self.oauth_token, secret=self.oauth_token_secret)
        client = oauth.Client(consumer, token)
        # Query the API and parse the json
        resp, content = client.request(resource, method)
        return json.loads(content)
|
[
"jmschenck@gmail.com"
] |
jmschenck@gmail.com
|
d87f665c92a1532d0fdb4ce5b3a9363650a05057
|
dc828d51874752625cb40466c870a613333dbb73
|
/advanced_section/advcbv/basic_app/migrations/0001_initial.py
|
dbd08956b8c886e11dbebca4767b1399c85cbdf8
|
[] |
no_license
|
Pyot/django-udemy
|
55c4907f75fa5fb05219880bf59e62ac82a59b6b
|
2401a7231c681d33f5fd231392b487e15253e54f
|
refs/heads/master
| 2020-12-30T10:11:47.282402
| 2017-08-22T14:15:24
| 2017-08-22T14:15:24
| 99,236,318
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,191
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-08-21 13:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the School and Student
    tables; Student carries a CASCADE foreign key to School exposed on the
    School side as `.students`.  Do not edit by hand beyond comments."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='School',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('principal', models.CharField(max_length=256)),
                ('location', models.CharField(max_length=256)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('age', models.PositiveIntegerField()),
                ('school', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='students', to='basic_app.School')),
            ],
        ),
    ]
|
[
"piotr.a.ludwig@gmail.com"
] |
piotr.a.ludwig@gmail.com
|
47c2380c0dfc291c77d5e3ba1266a146f6eb86eb
|
3c4f5b7a01638ba2e7a18567634c5998120354d8
|
/source/test_build.py
|
03e80315df6080d19501ec82b58974201e7c2320
|
[] |
no_license
|
zakf/dehr
|
d97ebaafa44388cb105b104bd271e1058dea6619
|
18d6bd6b0cb06d4ac02ff21c5ccbfbf5c450b008
|
refs/heads/master
| 2021-01-20T22:29:34.952985
| 2016-06-23T16:45:35
| 2016-06-23T16:45:35
| 61,582,071
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,525
|
py
|
# File test_build.py
import unittest
from build import *
class InitialTest(unittest.TestCase):
    """Smoke test: the build module accepts the current platform versions."""
    def test_platform_versions(self):
        check_versions()
class AllPageDataTest(unittest.TestCase):
    """Exercises AllPageDataPart / AllPageData: read-only enforcement,
    title sorting, case-insensitive alias->URL lookup, and serialization."""
    def test_all_page_data_part_init(self):
        # Constructor requires an explicit boolean read_only flag.
        with self.assertRaisesRegexp(BuildError, 'NEEDS a Boolean'):
            AllPageDataPart(None)
    def test_apd_read_only(self):
        # add_title must be rejected while read_only, and titles come back
        # sorted regardless of insertion order.
        apdp = AllPageDataPart(True)
        self.assertEqual(apdp.read_only, True)
        with self.assertRaisesRegexp(BuildError, 'READ ONLY'):
            apdp.add_title('Escitalopram (Lexapro)', 'lexapro.html')
        apdp.read_only = False
        apdp.add_title('Escitalopram (Lexapro)', 'lexapro.html')
        apdp.add_title('Sertraline (Zoloft)', 'zoloft.html')
        apdp.add_title('Aaron Sorkin', 'zyzzylvaria.html')
        self.assertEqual(len(apdp.titles), 3)
        self.assertEqual(apdp.get_titles(), [
            'Aaron Sorkin',
            'Escitalopram (Lexapro)',
            'Sertraline (Zoloft)'])
    def test_apd_find_url(self):
        # Lookup is case-insensitive; unknown aliases raise UrlLookupError.
        apd = AllPageData()
        apd.add_alias('LEXApro', 'lexapro.html')
        apd.add_alias('escitalopram', 'lexapro.html')
        self.assertEqual(len(apd.next.aliases), 2)
        apd.prior.aliases = apd.next.aliases.copy()
        self.assertEqual(apd.find_url('lexapro'), 'lexapro.html')
        self.assertEqual(apd.find_url('lexaPRO'), 'lexapro.html')
        self.assertEqual(apd.find_url('escitaLOPram'), 'lexapro.html')
        with self.assertRaisesRegexp(UrlLookupError, 'all_page_data.py'):
            apd.find_url('not_in_the_dict')
    def test_next_to_str(self):
        # Serialized form: titles keep their case, aliases are lowercased,
        # both emitted as OrderedDict literals under the given prefix.
        apd = AllPageData()
        apd.add_title('Olanzapine (Zyprexa)', 'zyprexa.html')
        apd.add_title('Ziprasidone (Geodon)', 'geodon.html')
        apd.add_alias('Olanzapine', 'zyprexa.html')
        apd.add_alias('Zyprexa', 'zyprexa.html')
        apd.add_alias('Ziprasidone', 'geodon.html')
        apd.add_alias('Geodon', 'geodon.html')
        self.assertEqual(
            apd.next_to_str('bunny_ocean'),
            """bunny_ocean_titles = OrderedDict([
    ('Olanzapine (Zyprexa)', 'zyprexa.html'),
    ('Ziprasidone (Geodon)', 'geodon.html'),
])

bunny_ocean_aliases = OrderedDict([
    ('olanzapine', 'zyprexa.html'),
    ('zyprexa', 'zyprexa.html'),
    ('ziprasidone', 'geodon.html'),
    ('geodon', 'geodon.html'),
])
""")
#============================== If Name Is Main ===============================#

if __name__ == '__main__':
    unittest.main()
|
[
"zakf@mit.edu"
] |
zakf@mit.edu
|
0457cef64ea7b68406f6e46a7669f6fc1dce58d8
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2518/49823/278350.py
|
2cb366ad69dc313778213eda8c71db7c66cfe53f
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 407
|
py
|
def al(a, b):
    """Print the largest distance from any element of a to its nearest
    element in b (both lists are integer sequences)."""
    a = sorted(a)
    b = sorted(b)
    j = 0
    worst = 0
    for x in a:
        # Advance j to the first element of b that is >= x (or stop at the
        # last element); since a is sorted, j never needs to move back.
        while b[j] < x and j < len(b) - 1:
            j += 1
        if j == 0:
            gap = abs(b[j] - x)
        else:
            # Nearest neighbour is either the first element >= x or the one
            # just below it.
            gap = min(abs(x - b[j - 1]), abs(b[j] - x))
        worst = max(worst, gap)
    print(worst)
# Read two comma-separated integer lists from stdin and run the search.
if __name__ == '__main__':
    al([int(i) for i in input().split(',')],[int(i) for i in input().split(',')])
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
a0f4e9d01aa2e0ae53881a6ab6188054d056db7c
|
c39b768f9b1d4b63ba90e9d6f5f0f8644ab2cee8
|
/problem4.py
|
b13e27b8f428518a6e5fa16e64c8d2c5559c70ff
|
[
"MIT"
] |
permissive
|
doubrtom/project-euler-solutions
|
9f46c65bb1c2f5571878c43ffdc553ef1364c8ad
|
1c6e97d1d519d96419c924150196cbc5d6198547
|
refs/heads/master
| 2021-09-12T11:07:40.320153
| 2018-04-16T08:29:20
| 2018-04-16T08:29:20
| 113,043,992
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,570
|
py
|
import time
def is_palindrom(x):
    """Return True when the non-negative integer x equals its digit reversal."""
    reversed_value = int(str(x)[::-1])
    return x == reversed_value
def find_palindrom_version_a():
    """Return the largest palindrome that is a product of two 3-digit numbers.

    Basic brute-force version: every ordered factor pair is examined.
    (The palindrome test is inlined; the cheap `> best` comparison runs
    first so the string check only fires for candidate improvements.)
    """
    best = 0
    for a in range(999, 99, -1):
        for b in range(999, 99, -1):
            product = a * b
            if product > best and str(product) == str(product)[::-1]:
                best = product
    return best
def find_palindrom_version_b():
    """Return the largest palindrome that is a product of two 3-digit numbers.

    Optimized search:
      - only pairs with b <= a are tried (multiplication is commutative);
      - the inner loop's floor is raised to the b that produced the last
        palindrome, pruning factors that cannot improve the maximum.
    """
    best = 0
    inner_floor = 99  # inner loop counts down to (but not including) this
    for a in range(999, 99, -1):
        for b in range(a, inner_floor, -1):
            product = a * b
            if product > best and str(product) == str(product)[::-1]:
                best = product
                inner_floor = b
                break
    return best
def time_versions():
    """Run both search versions and print their results and wall-clock times."""
    results = {}
    timings = {}
    for label, finder in (('a', find_palindrom_version_a),
                          ('b', find_palindrom_version_b)):
        started = time.time()
        results[label] = finder()
        timings[label] = time.time() - started

    print("Max palindrome is a = {}, b = {}".format(
        results['a'], results['b'])
    )
    print("Time a = {} s, b = {} s".format(
        timings['a'], timings['b'])
    )
# time_versions()  # uncomment to benchmark version a against version b
print("Max palindrome is {}".format(find_palindrom_version_b()))
|
[
"doubravskytomas@gmail.com"
] |
doubravskytomas@gmail.com
|
e75108296d253c026ab723812805fca3e0782f7f
|
388edd237d63c25851327d6dbd1b47ade07592f2
|
/benchcloud/drivers/dropbox_driver.py
|
83aacd177485816763dc93f99a4ac91346790ca1
|
[
"Apache-2.0"
] |
permissive
|
zenja/benchmarking-cloud-storage-systems
|
d63db5d0d6e3746a5ef998755787c87bcdf231ec
|
d5df1760c66aee2c641af7c777922dff2bb7cbbd
|
refs/heads/master
| 2022-08-21T18:19:53.253620
| 2014-10-12T07:26:17
| 2014-10-12T07:28:01
| 18,504,616
| 2
| 0
|
Apache-2.0
| 2022-07-07T22:55:42
| 2014-04-07T02:33:00
|
Python
|
UTF-8
|
Python
| false
| false
| 3,602
|
py
|
import dropbox
from driver import Driver
class DropboxDriver(Driver):
    """Dropbox backend for the benchmark suite (Python 2 / dropbox SDK v1).

    Credentials (app key/secret, access tokens) are read from and written
    back to the INI config exposed by the Driver base class via
    self.parser -- TODO confirm base-class contract.
    """
    def __init__(self):
        super(DropboxDriver, self).__init__()
        self.app_key = self.parser.get("dropbox", "app_key")
        self.app_secret = self.parser.get("dropbox", "app_secret")
    def connect(self, include_guest=False):
        """Create API client(s) from stored access tokens.

        include_guest: also create self.guest_client from the guest token
        (needed by share()).  Errors are printed, not raised.
        """
        try:
            self.access_token = self.parser.get("dropbox", "access_token")
            self.client = dropbox.client.DropboxClient(self.access_token)
            #print 'linked account: ', client.account_info()
            if include_guest:
                self.guest_access_token = self.parser.get("dropbox", "guest_access_token")
                self.guest_client = dropbox.client.DropboxClient(self.guest_access_token)
        except Exception as e:
            print type(e), e
    def acquire_access_token(self, guest=False):
        """Interactive OAuth2 flow: prompt the user for an authorization
        code, then persist the resulting token into the config file (under
        guest_* keys when guest=True)."""
        flow = dropbox.client.DropboxOAuth2FlowNoRedirect(self.app_key, self.app_secret)
        authorize_url = flow.start()
        print '1. Go to: ' + authorize_url
        print '2. Click "Allow" (you might have to log in first)'
        print '3. Copy the authorization code.'
        code = raw_input("Enter the authorization code here: ").strip()

        # This will fail if the user enters an invalid authorization code
        access_token, user_id = flow.finish(code)

        if not guest:
            self.parser.set('dropbox', 'access_token', access_token)
            self.parser.set('dropbox', 'user_id', user_id)
        else:
            self.parser.set('dropbox', 'guest_access_token', access_token)
            self.parser.set('dropbox', 'guest_user_id', user_id)
        with open(self.config_path, "w") as f:
            self.parser.write(f)
    def upload(self, local_filename, remote_filename):
        """Upload a local file; returns the SDK response dict, or None on
        error (errors are printed, not raised)."""
        try:
            with open(local_filename, 'rb') as f:
                response = self.client.put_file(remote_filename, f)
                return response
        except Exception as e:
            print e
    def download(self, remote_filename, local_filename):
        """Download a remote file to disk; returns the SDK file object, or
        None on error (errors are printed, not raised)."""
        try:
            f, metadata = self.client.get_file_and_metadata(remote_filename)
            with open(local_filename, 'wb') as out:
                out.write(f.read())
            return f
        except Exception as e:
            print e
    def share(self, host_filename, guest_filename):
        """Copy a host-account file into the guest account via a copy ref
        (requires connect(include_guest=True))."""
        copy_ref = self.client.create_copy_ref(host_filename)['copy_ref']
        self.guest_client.add_copy_ref(copy_ref, guest_filename)
    def list_files(self, remote_dir='/'):
        """ List all files in a remote directory
        Return:
            A map containing the metadata of all files in the directory.
            Structure of the resulting map:
            [{'path': file1_path, 'size': file1_size, 'is_dir': is_file1_a_directory},
             {'path': file2_path, 'size': file2_size, 'is_dir': is_file2_a_directory},
             ...,
            ]
        """
        result = []
        folder_metadata = self.client.metadata(remote_dir)
        for file_metadata in folder_metadata['contents']:
            item = {}
            item['path'] = file_metadata['path']
            item['size'] = file_metadata['size']
            item['is_dir'] = file_metadata['is_dir']
            result.append(item)
        return result
# Ad-hoc manual test harness; the commented calls show typical usage.
if __name__ == "__main__":
    dbox = DropboxDriver()
    dbox.connect(include_guest=True)
    #dbox.share(host_filename="/CV_CL/Xing_CV.txt", guest_filename="/Xing_CV.txt")
    #dbox.download(remote_filename="/CV_CL/Xing_CV.pdf", local_filename="./cv.pdf")
    #print dbox.list_files(remote_dir='/CV_CL')
|
[
"zenja.wang@gmail.com"
] |
zenja.wang@gmail.com
|
675e01ef775c7565dc433efce7f5f97e1b532ad7
|
d5966d109ace494481513304439a0bd738565dc9
|
/tornado/test/web_test.py
|
137516fb99023461f1167ad5f9d98a13df903e2f
|
[
"Apache-2.0"
] |
permissive
|
nottombrown/tornado
|
33dc9ed845ae9288e2226d06f8a872f4880eb596
|
d2b05aea0b68338ab21279ced867cb637df0ffae
|
refs/heads/master
| 2021-01-17T22:04:25.014115
| 2011-08-04T18:08:44
| 2011-08-04T18:08:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,433
|
py
|
from tornado.escape import json_decode, utf8, to_unicode, recursive_unicode, native_str
from tornado.iostream import IOStream
from tornado.template import DictLoader
from tornado.testing import LogTrapTestCase, AsyncHTTPTestCase
from tornado.util import b, bytes_type
from tornado.web import RequestHandler, _O, authenticated, Application, asynchronous, url, HTTPError
import binascii
import logging
import re
import socket
import sys
class CookieTestRequestHandler(RequestHandler):
    """Minimal RequestHandler stand-in for exercising the secure-cookie
    helpers without a real application or HTTP connection."""
    # stub out enough methods to make the secure_cookie functions work
    def __init__(self):
        # don't call super.__init__
        self._cookies = {}
        self.application = _O(settings=dict(cookie_secret='0123456789'))

    def get_cookie(self, name):
        return self._cookies.get(name)

    def set_cookie(self, name, value, expires_days=None):
        self._cookies[name] = value
class SecureCookieTest(LogTrapTestCase):
    """Tests for set_secure_cookie/get_secure_cookie signing: round trip
    and rejection of a payload/timestamp-shifting tampering attack."""
    def test_round_trip(self):
        handler = CookieTestRequestHandler()
        handler.set_secure_cookie('foo', b('bar'))
        self.assertEqual(handler.get_secure_cookie('foo'), b('bar'))

    def test_cookie_tampering_future_timestamp(self):
        handler = CookieTestRequestHandler()
        # this string base64-encodes to '12345678'
        handler.set_secure_cookie('foo', binascii.a2b_hex(b('d76df8e7aefc')))
        cookie = handler._cookies['foo']
        match = re.match(b(r'12345678\|([0-9]+)\|([0-9a-f]+)'), cookie)
        assert match
        timestamp = match.group(1)
        sig = match.group(2)
        self.assertEqual(handler._cookie_signature('foo', '12345678',
                                                   timestamp), sig)
        # shifting digits from payload to timestamp doesn't alter signature
        # (this is not desirable behavior, just confirming that that's how it
        # works)
        self.assertEqual(
            handler._cookie_signature('foo', '1234', b('5678') + timestamp),
            sig)
        # tamper with the cookie
        handler._cookies['foo'] = utf8('1234|5678%s|%s' % (timestamp, sig))
        # it gets rejected
        assert handler.get_secure_cookie('foo') is None
class CookieTest(AsyncHTTPTestCase, LogTrapTestCase):
    """End-to-end cookie tests over HTTP: Set-Cookie encoding for str/
    unicode/bytes values, cookie parsing, domain/path args, and quoting of
    special characters (=, ;, ")."""
    def get_app(self):
        class SetCookieHandler(RequestHandler):
            def get(self):
                # Try setting cookies with different argument types
                # to ensure that everything gets encoded correctly
                self.set_cookie("str", "asdf")
                self.set_cookie("unicode", u"qwer")
                self.set_cookie("bytes", b("zxcv"))

        class GetCookieHandler(RequestHandler):
            def get(self):
                self.write(self.get_cookie("foo"))

        class SetCookieDomainHandler(RequestHandler):
            def get(self):
                # unicode domain and path arguments shouldn't break things
                # either (see bug #285)
                self.set_cookie("unicode_args", "blah", domain=u"foo.com",
                                path=u"/foo")

        class SetCookieSpecialCharHandler(RequestHandler):
            def get(self):
                self.set_cookie("equals", "a=b")
                self.set_cookie("semicolon", "a;b")
                self.set_cookie("quote", 'a"b')

        return Application([
                ("/set", SetCookieHandler),
                ("/get", GetCookieHandler),
                ("/set_domain", SetCookieDomainHandler),
                ("/special_char", SetCookieSpecialCharHandler),
                ])

    def test_set_cookie(self):
        response = self.fetch("/set")
        self.assertEqual(response.headers.get_list("Set-Cookie"),
                         ["str=asdf; Path=/",
                          "unicode=qwer; Path=/",
                          "bytes=zxcv; Path=/"])

    def test_get_cookie(self):
        response = self.fetch("/get", headers={"Cookie": "foo=bar"})
        self.assertEqual(response.body, b("bar"))

        response = self.fetch("/get", headers={"Cookie": 'foo="bar"'})
        self.assertEqual(response.body, b("bar"))

    def test_set_cookie_domain(self):
        response = self.fetch("/set_domain")
        self.assertEqual(response.headers.get_list("Set-Cookie"),
                         ["unicode_args=blah; Domain=foo.com; Path=/foo"])

    def test_cookie_special_char(self):
        response = self.fetch("/special_char")
        headers = response.headers.get_list("Set-Cookie")
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0], 'equals="a=b"; Path=/')
        # python 2.7 octal-escapes the semicolon; older versions leave it alone
        self.assertTrue(headers[1] in ('semicolon="a;b"; Path=/',
                                       'semicolon="a\\073b"; Path=/'),
                        headers[1])
        self.assertEqual(headers[2], 'quote="a\\"b"; Path=/')

        data = [('foo=a=b', 'a=b'),
                ('foo="a=b"', 'a=b'),
                ('foo="a;b"', 'a;b'),
                #('foo=a\\073b', 'a;b'), # even encoded, ";" is a delimiter
                ('foo="a\\073b"', 'a;b'),
                ('foo="a\\"b"', 'a"b'),
                ]
        for header, expected in data:
            logging.info("trying %r", header)
            response = self.fetch("/get", headers={"Cookie": header})
            self.assertEqual(response.body, utf8(expected))
class AuthRedirectRequestHandler(RequestHandler):
    """Handler whose login URL is injected via initialize(), used to test
    the @authenticated redirect for relative and absolute login URLs."""
    def initialize(self, login_url):
        self.login_url = login_url

    def get_login_url(self):
        return self.login_url

    @authenticated
    def get(self):
        # we'll never actually get here because the test doesn't follow redirects
        self.send_error(500)
class AuthRedirectTest(AsyncHTTPTestCase, LogTrapTestCase):
    """@authenticated must 302 to the login URL with a 'next' parameter,
    for both relative and absolute login URLs."""
    def get_app(self):
        return Application([('/relative', AuthRedirectRequestHandler,
                             dict(login_url='/login')),
                            ('/absolute', AuthRedirectRequestHandler,
                             dict(login_url='http://example.com/login'))])

    def test_relative_auth_redirect(self):
        self.http_client.fetch(self.get_url('/relative'), self.stop,
                               follow_redirects=False)
        response = self.wait()
        self.assertEqual(response.code, 302)
        self.assertEqual(response.headers['Location'], '/login?next=%2Frelative')

    def test_absolute_auth_redirect(self):
        self.http_client.fetch(self.get_url('/absolute'), self.stop,
                               follow_redirects=False)
        response = self.wait()
        self.assertEqual(response.code, 302)
        self.assertTrue(re.match(
            'http://example.com/login\?next=http%3A%2F%2Flocalhost%3A[0-9]+%2Fabsolute',
            response.headers['Location']), response.headers['Location'])
class ConnectionCloseHandler(RequestHandler):
    """Async handler that never responds; it reports readiness and
    connection-close events back to the owning test case."""
    def initialize(self, test):
        self.test = test

    @asynchronous
    def get(self):
        self.test.on_handler_waiting()

    def on_connection_close(self):
        self.test.on_connection_close()
class ConnectionCloseTest(AsyncHTTPTestCase, LogTrapTestCase):
    """Opens a raw socket, issues a request, then closes the client side
    while the handler is still pending -- on_connection_close must fire."""
    def get_app(self):
        return Application([('/', ConnectionCloseHandler, dict(test=self))])

    def test_connection_close(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        s.connect(("localhost", self.get_http_port()))
        self.stream = IOStream(s, io_loop=self.io_loop)
        self.stream.write(b("GET / HTTP/1.0\r\n\r\n"))
        self.wait()

    def on_handler_waiting(self):
        logging.info('handler waiting')
        self.stream.close()

    def on_connection_close(self):
        logging.info('connection closed')
        self.stop()
class EchoHandler(RequestHandler):
    """Echoes the request path and arguments as JSON, asserting the
    documented type contract along the way (bytes in raw arguments,
    unicode from get_arguments and in the captured path)."""
    def get(self, path):
        # Type checks: web.py interfaces convert argument values to
        # unicode strings (by default, but see also decode_argument).
        # In httpserver.py (i.e. self.request.arguments), they're left
        # as bytes.  Keys are always native strings.
        for key in self.request.arguments:
            assert type(key) == str, repr(key)
            for value in self.request.arguments[key]:
                assert type(value) == bytes_type, repr(value)
            for value in self.get_arguments(key):
                assert type(value) == unicode, repr(value)
        assert type(path) == unicode, repr(path)
        self.write(dict(path=path,
                        args=recursive_unicode(self.request.arguments)))
class RequestEncodingTest(AsyncHTTPTestCase, LogTrapTestCase):
    """Url-escaped path components must decode the same way query args do."""

    def get_app(self):
        return Application([("/(.*)", EchoHandler)])

    def _fetch_decoded(self, request_url):
        # Small helper: fetch a url and json-decode the echoed body.
        return json_decode(self.fetch(request_url).body)

    def test_question_mark(self):
        # Ensure that url-encoded question marks are handled properly
        self.assertEqual(self._fetch_decoded('/%3F'),
                         dict(path='?', args={}))
        self.assertEqual(self._fetch_decoded('/%3F?%3F=%3F'),
                         dict(path='?', args={'?': ['?']}))

    def test_path_encoding(self):
        # Path components and query arguments should be decoded the same way
        self.assertEqual(self._fetch_decoded('/%C3%A9?arg=%C3%A9'),
                         {u"path": u"\u00e9",
                          u"args": {u"arg": [u"\u00e9"]}})
class TypeCheckHandler(RequestHandler):
    """Records a type error for every framework value whose runtime type is wrong.

    A successful request writes an empty dict; any mismatch shows up as a
    name -> "expected X, got Y" entry in the response.
    """

    def prepare(self):
        self.errors = {}
        self.check_type('status', self.get_status(), int)
        # get_argument is an exception from the general rule of using
        # type str for non-body data mainly for historical reasons.
        self.check_type('argument', self.get_argument('foo'), unicode)
        # NOTE(review): dict.keys()[0] / .values()[0] only work on Python 2
        # (views are not subscriptable on py3); consistent with this module's
        # use of `unicode` and b().
        self.check_type('cookie_key', self.cookies.keys()[0], str)
        self.check_type('cookie_value', self.cookies.values()[0].value, str)
        # secure cookies
        self.check_type('xsrf_token', self.xsrf_token, bytes_type)
        self.check_type('xsrf_form_html', self.xsrf_form_html(), str)
        self.check_type('reverse_url', self.reverse_url('typecheck', 'foo'), str)
        self.check_type('request_summary', self._request_summary(), str)

    def get(self, path_component):
        # path_component uses type unicode instead of str for consistency
        # with get_argument()
        self.check_type('path_component', path_component, unicode)
        self.write(self.errors)

    def post(self, path_component):
        self.check_type('path_component', path_component, unicode)
        self.write(self.errors)

    def check_type(self, name, obj, expected_type):
        # Exact type comparison (not isinstance): subclasses must not pass.
        actual_type = type(obj)
        if expected_type != actual_type:
            self.errors[name] = "expected %s, got %s" % (expected_type,
                                                         actual_type)
class DecodeArgHandler(RequestHandler):
    """Overrides decode_argument to decode raw bytes with a caller-chosen charset.

    When an ``encoding`` query argument is present, path and query arguments
    are decoded with it; otherwise the raw bytes pass through unchanged.
    """

    def decode_argument(self, value, name=None):
        assert type(value) == bytes_type, repr(value)
        # use self.request.arguments directly to avoid recursion
        if 'encoding' in self.request.arguments:
            return value.decode(to_unicode(self.request.arguments['encoding'][0]))
        else:
            return value

    def get(self, arg):
        def describe(s):
            # Report both the python type and a round-trippable rendering of
            # the value (hex for bytes) so tests can assert the exact content.
            if type(s) == bytes_type:
                return ["bytes", native_str(binascii.b2a_hex(s))]
            elif type(s) == unicode:
                return ["unicode", s]
            raise Exception("unknown type")
        self.write({'path': describe(arg),
                    'query': describe(self.get_argument("foo")),
                    })
class LinkifyHandler(RequestHandler):
    # Renders a template whose only content is the linkify UI module.
    def get(self):
        self.render("linkify.html", message="http://example.com")
class UIModuleResourceHandler(RequestHandler):
    # Renders a page that embeds a module twice, to exercise per-module
    # css/js resource injection (see WebTest.test_uimodule_resources).
    def get(self):
        self.render("page.html", entries=[1, 2])
class OptionalPathHandler(RequestHandler):
    # The capturing group in its url spec is optional, so path may be None.
    def get(self, path):
        self.write({"path": path})
class FlowControlHandler(RequestHandler):
    # These writes are too small to demonstrate real flow control,
    # but at least it shows that the callbacks get run.
    @asynchronous
    def get(self):
        self.write("1")
        self.flush(callback=self.step2)

    def step2(self):
        self.write("2")
        self.flush(callback=self.step3)

    def step3(self):
        self.write("3")
        self.finish()
class WebTest(AsyncHTTPTestCase, LogTrapTestCase):
    """End-to-end checks: argument typing/decoding, UI modules, flush callbacks."""

    def get_app(self):
        # In-memory templates: linkify exercises an unescaped module;
        # page/entry exercise per-module resource injection (css/js/head/body).
        loader = DictLoader({
            "linkify.html": "{% module linkify(message) %}",
            "page.html": """\
<html><head></head><body>
{% for e in entries %}
{% module Template("entry.html", entry=e) %}
{% end %}
</body></html>""",
            "entry.html": """\
{{ set_resources(embedded_css=".entry { margin-bottom: 1em; }", embedded_javascript="js_embed()", css_files=["/base.css", "/foo.css"], javascript_files="/common.js", html_head="<meta>", html_body='<script src="/analytics.js"/>') }}
<div class="entry">...</div>""",
        })
        urls = [
            url("/typecheck/(.*)", TypeCheckHandler, name='typecheck'),
            url("/decode_arg/(.*)", DecodeArgHandler),
            url("/decode_arg_kw/(?P<arg>.*)", DecodeArgHandler),
            url("/linkify", LinkifyHandler),
            url("/uimodule_resources", UIModuleResourceHandler),
            url("/optional_path/(.+)?", OptionalPathHandler),
            url("/flow_control", FlowControlHandler),
        ]
        return Application(urls,
                           template_loader=loader,
                           autoescape="xhtml_escape")

    def fetch_json(self, *args, **kwargs):
        # Fetch and decode the body, raising if the request itself failed.
        response = self.fetch(*args, **kwargs)
        response.rethrow()
        return json_decode(response.body)

    def test_types(self):
        # TypeCheckHandler returns a dict of per-name type errors; empty
        # means every runtime type matched expectations.
        response = self.fetch("/typecheck/asdf?foo=bar",
                              headers={"Cookie": "cook=ie"})
        data = json_decode(response.body)
        self.assertEqual(data, {})

        response = self.fetch("/typecheck/asdf?foo=bar", method="POST",
                              headers={"Cookie": "cook=ie"},
                              body="foo=bar")

    def test_decode_argument(self):
        # These urls all decode to the same thing
        urls = ["/decode_arg/%C3%A9?foo=%C3%A9&encoding=utf-8",
                "/decode_arg/%E9?foo=%E9&encoding=latin1",
                "/decode_arg_kw/%E9?foo=%E9&encoding=latin1",
                ]
        for url in urls:
            response = self.fetch(url)
            response.rethrow()
            data = json_decode(response.body)
            self.assertEqual(data, {u'path': [u'unicode', u'\u00e9'],
                                    u'query': [u'unicode', u'\u00e9'],
                                    })

        # Without an explicit encoding, the raw percent-decoded bytes come back.
        response = self.fetch("/decode_arg/%C3%A9?foo=%C3%A9")
        response.rethrow()
        data = json_decode(response.body)
        self.assertEqual(data, {u'path': [u'bytes', u'c3a9'],
                                u'query': [u'bytes', u'c3a9'],
                                })

    def test_uimodule_unescaped(self):
        # Module output bypasses autoescape, so the anchor tag survives intact.
        response = self.fetch("/linkify")
        self.assertEqual(response.body,
                         b("<a href=\"http://example.com\">http://example.com</a>"))

    def test_uimodule_resources(self):
        # Resources registered by the entry module must be injected into
        # head/body exactly once even though the module renders twice.
        response = self.fetch("/uimodule_resources")
        self.assertEqual(response.body, b("""\
<html><head><link href="/base.css" type="text/css" rel="stylesheet"/><link href="/foo.css" type="text/css" rel="stylesheet"/>
<style type="text/css">
.entry { margin-bottom: 1em; }
</style>
<meta>
</head><body>


<div class="entry">...</div>


<div class="entry">...</div>

<script src="/common.js" type="text/javascript"></script>
<script type="text/javascript">
//<![CDATA[
js_embed()
//]]>
</script>
<script src="/analytics.js"/>
</body></html>"""))

    def test_optional_path(self):
        self.assertEqual(self.fetch_json("/optional_path/foo"),
                         {u"path": u"foo"})
        self.assertEqual(self.fetch_json("/optional_path/"),
                         {u"path": None})

    def test_flow_control(self):
        self.assertEqual(self.fetch("/flow_control").body, b("123"))
class ErrorResponseTest(AsyncHTTPTestCase, LogTrapTestCase):
    """Covers the error-rendering paths: default page, write_error,
    legacy get_error_html, and a write_error that itself raises."""

    def get_app(self):
        class DefaultHandler(RequestHandler):
            # No overrides: uses the framework's built-in error page.
            def get(self):
                if self.get_argument("status", None):
                    raise HTTPError(int(self.get_argument("status")))
                1/0

        class WriteErrorHandler(RequestHandler):
            # Overrides the write_error() hook.
            def get(self):
                if self.get_argument("status", None):
                    self.send_error(int(self.get_argument("status")))
                else:
                    1/0

            def write_error(self, status_code, **kwargs):
                self.set_header("Content-Type", "text/plain")
                if "exc_info" in kwargs:
                    self.write("Exception: %s" % kwargs["exc_info"][0].__name__)
                else:
                    self.write("Status: %d" % status_code)

        class GetErrorHtmlHandler(RequestHandler):
            # Overrides the legacy get_error_html() hook.
            def get(self):
                if self.get_argument("status", None):
                    self.send_error(int(self.get_argument("status")))
                else:
                    1/0

            def get_error_html(self, status_code, **kwargs):
                self.set_header("Content-Type", "text/plain")
                if "exception" in kwargs:
                    self.write("Exception: %s" % sys.exc_info()[0].__name__)
                else:
                    self.write("Status: %d" % status_code)

        class FailedWriteErrorHandler(RequestHandler):
            # write_error raising should still yield a 500 with an empty body.
            def get(self):
                1/0

            def write_error(self, status_code, **kwargs):
                raise Exception("exception in write_error")

        return Application([
            url("/default", DefaultHandler),
            url("/write_error", WriteErrorHandler),
            url("/get_error_html", GetErrorHtmlHandler),
            url("/failed_write_error", FailedWriteErrorHandler),
        ])

    def test_default(self):
        response = self.fetch("/default")
        self.assertEqual(response.code, 500)
        self.assertTrue(b("500: Internal Server Error") in response.body)

        response = self.fetch("/default?status=503")
        self.assertEqual(response.code, 503)
        self.assertTrue(b("503: Service Unavailable") in response.body)

    def test_write_error(self):
        response = self.fetch("/write_error")
        self.assertEqual(response.code, 500)
        self.assertEqual(b("Exception: ZeroDivisionError"), response.body)

        response = self.fetch("/write_error?status=503")
        self.assertEqual(response.code, 503)
        self.assertEqual(b("Status: 503"), response.body)

    def test_get_error_html(self):
        response = self.fetch("/get_error_html")
        self.assertEqual(response.code, 500)
        self.assertEqual(b("Exception: ZeroDivisionError"), response.body)

        response = self.fetch("/get_error_html?status=503")
        self.assertEqual(response.code, 503)
        self.assertEqual(b("Status: 503"), response.body)

    def test_failed_write_error(self):
        response = self.fetch("/failed_write_error")
        self.assertEqual(response.code, 500)
        self.assertEqual(b(""), response.body)
|
[
"ben@bendarnell.com"
] |
ben@bendarnell.com
|
abe2ef7281f30eb1f7a5c1cf37d8dbdf6f438ee4
|
a0b0eb383ecfeaeed3d2b0271657a0c32472bf8e
|
/51nod/1124.py
|
d33a8352381c7a94d03f500bb8556591843573aa
|
[
"Apache-2.0"
] |
permissive
|
tangjz/acm-icpc
|
45764d717611d545976309f10bebf79c81182b57
|
f1f3f15f7ed12c0ece39ad0dd044bfe35df9136d
|
refs/heads/master
| 2023-04-07T10:23:07.075717
| 2022-12-24T15:30:19
| 2022-12-26T06:22:53
| 13,367,317
| 53
| 20
|
Apache-2.0
| 2022-12-26T06:22:54
| 2013-10-06T18:57:09
|
C++
|
UTF-8
|
Python
| false
| false
| 383
|
py
|
# 51nod 1124: for each of t queries, read n and print a single digit.
# NOTE(review): the arithmetic below is the standard "last non-zero digit
# of n!" trick via CRT on mod 5 and mod 2 — confirm against the problem
# statement; the inference is from the code itself, not external docs.
t = int(input())
for tt in range(t) :
    n = int(input())
    if n == 1 :
        print(1)
        continue
    # r: running residue mod 5; e: exponent bookkeeping (kept mod 4,
    # since powers mod 5 cycle with period 4).
    r, e = 1, 0
    while n > 0 :
        # Multiply in the tail block 1..(n mod 5), working mod 5.
        for i in range(n % 5) :
            r = r * (i + 1) % 5
        n = n // 5
        # Each complete block of five contributes 4! ≡ -1 (mod 5), so an
        # odd number of blocks flips the sign of r.
        if n & 1 :
            r = 5 - r
        # Accumulate the exponent adjustment contributed at this level.
        e = (e + n) & 3
    # 3 ≡ 2^-1 (mod 5); apply the accumulated power — presumably
    # compensating the factors of 2 paired off with the stripped 5s
    # (TODO confirm the exact derivation).
    for i in range(e) :
        r = r * 3 % 5
    # The answer digit is even for n >= 2; lift the mod-5 residue to the
    # unique even digit congruent to it (odd r -> r + 5).
    if r & 1 :
        r = r + 5
    print(r)
|
[
"t251346744@gmail.com"
] |
t251346744@gmail.com
|
d624f0f31475e20cf326c3801557efd950c56e71
|
616af66b01ecf3f605a5b37ccc2551029abce4fd
|
/env/bin/django-admin
|
253eb2edd74a48e005b3fb5d4fbf10e6e16ce1c0
|
[] |
no_license
|
akshatconsult/django
|
63702391ea9fd7a313d814a9a031fc1525315823
|
92f57a93a9443fd2e5b180e03d0350483cff8106
|
refs/heads/main
| 2023-08-31T16:53:12.756726
| 2021-08-03T06:56:07
| 2021-08-03T06:56:07
| 391,746,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 313
|
#!/Users/dota/Documents/python-training-master/django/env/bin/python
# -*- coding: utf-8 -*-
"""Console entry point for the ``django-admin`` command (pip-generated stub)."""
import re
import sys

from django.core.management import execute_from_command_line

if __name__ == '__main__':
    # Strip a setuptools "-script.pyw" / ".exe" suffix from argv[0] so usage
    # messages show the plain command name (relevant for Windows launchers).
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
|
[
"87183864+akshatconsult@users.noreply.github.com"
] |
87183864+akshatconsult@users.noreply.github.com
|
|
8babad0b3861b25d13449ece6a04d57d9d8be14c
|
e4129c12d4a23759db0f3f0dcf52f837459fdc96
|
/Taller.py
|
8c99ee21017724369fbd6e6f670fea29f9a24d59
|
[] |
no_license
|
ddizoya/taller
|
f442ffb6727013a7cd47861b67e80c99e90df04a
|
9401696b9460278bfb23f004fd709335891ef363
|
refs/heads/master
| 2020-05-29T11:45:06.842021
| 2016-04-04T20:34:02
| 2016-04-04T20:34:02
| 55,224,494
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,120
|
py
|
import sqlite3 as dbapi
from gi.repository import Gtk, Gdk
import Generarpdf as pdf
class Taller:
    """
    Workshop (taller) client manager.

    Holds the sqlite queries, the treeview that mirrors the ``taller``
    table, and the handler methods wired to the Glade UI buttons.

    Fixes over the original version:

    * All SQL statements now use parameterized queries instead of string
      concatenation; the old code interpolated form fields straight into
      the SQL text and was vulnerable to SQL injection.
    * ``str.isdigit`` is now *called*; the original tested the bound
      method itself (always truthy), so non-numeric kilometros/telefono
      values were silently accepted.
    """
    # Schema reference -- the table was created once with:
    # cursor.execute("CREATE TABLE taller (matricula VARCHAR(7) PRIMARY KEY NOT NULL,"
    #                "vehiculo VARCHAR(20),"
    #                "kilometros INT,"
    #                "fecha VARCHAR(50) ,"
    #                "cliente VARCHAR(10),"
    #                "cif VARCHAR(10),"
    #                "telefono INT,"
    #                "direccion VARCHAR(10))")

    # Names of the Glade entry widgets, in table-column order.
    _CAMPOS = ("matricula", "vehiculo", "kilometros", "fecha",
               "cliente", "cifnif", "telefono", "direccion")

    def __init__(self):
        """Connect to the database, load the Glade UI and wire the signals."""
        # Result of the last form validation (set by insertar/modificar).
        self.condicion = False
        self.bd = dbapi.connect("basedatos.dat")
        self.cursor = self.bd.cursor()
        self.builder = Gtk.Builder()
        self.builder.add_from_file("Taller.glade")
        self.inicializar()
        self.ventana = self.builder.get_object("Taller")
        sinais = {"on_insertar_clicked": self.insertar,
                  "on_borrar_clicked": self.borrar,
                  "on_modificar_clicked": self.modificar,
                  "on_ayuda_clicked": self.informacion,
                  "on_imprimir_clicked": self.imprimir,
                  "delete-event": Gtk.main_quit}
        self.builder.connect_signals(sinais)
        self.ventana.set_title("Taller.")
        self.ventana.show_all()

    def inicializar(self):
        """Build the scrolled treeview and fill it from the database."""
        self.box = self.builder.get_object("box2")
        self.scroll = Gtk.ScrolledWindow()
        self.scroll.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.vista = Gtk.TreeView()
        self.box.add(self.scroll)
        self.scroll.add(self.vista)
        self.scroll.set_size_request(500, 500)
        self.scroll.show()
        self.lista = Gtk.ListStore(str, str, str, str, str, str, str, str)
        self.lista.clear()
        self.cursor.execute("select * from taller")
        for clientes in self.cursor:
            self.lista.append(clientes)
        self.vista.set_model(self.lista)
        for i, title in enumerate(["MATRICULA", "VEHICULO", "KILOMETROS",
                                   "FECHA ENTREGA", "CLIENTE", "CIF/NIF",
                                   "TELEFONO", "DIRECCION"]):
            render = Gtk.CellRendererText()
            columna = Gtk.TreeViewColumn(title, render, text=i)
            self.vista.append_column(columna)

    def _leer_campos(self):
        """Return the text of every form entry, in table-column order."""
        return [self.builder.get_object(campo).get_text()
                for campo in self._CAMPOS]

    def _datos_validos(self, kilometros, cifnif, telefono):
        """Validate numeric/length constraints of the editable form fields."""
        return (kilometros.isdigit() and len(cifnif) == 9
                and telefono.isdigit() and len(telefono) == 9)

    def informacion(self, widget):
        """Show a short usage guide for the toolbar buttons."""
        self.popup("-->Boton +:\nAñade un nuevo cliente a la base de datos\n-->Boton -:\nQuita un cliente de la base de datos\n(solo clica en el treeview la fila seleccionada)\n-->Boton Lapiz:\nEste boton permite modificar un cliente de la base, solo escribe la matricula y los campos que quieras cambiar\n-->Boton ?:\nSi no lo necesitaras no estarias aqui :D\n-->Boton Imprimir:\nGenera un pdf con la lista de clientes.")

    def borrar(self, widget):
        """Delete the row currently selected in the treeview."""
        selection = self.vista.get_selection()
        model, selec = selection.get_selected()
        if selec is not None:
            self.matricula = model[selec][0]
            self.cursor.execute("delete from taller where matricula = ?",
                                (self.matricula,))
            self.actualizar()
            self.bd.commit()
            self.popup("Borrado")

    def modificar(self, modificar):
        """Update a client row, matched by its primary key (matricula)."""
        (matricula, vehiculo, kilometros, fecha,
         cliente, cifnif, telefono, direccion) = self._leer_campos()
        if self._datos_validos(kilometros, cifnif, telefono):
            self.condicion = True
        else:
            self.popup("Datos invalidos.")
            self.condicion = False
        if self.condicion:
            try:
                self.cursor.execute(
                    "update taller set vehiculo=?, kilometros=?, fecha=?, "
                    "cliente=?, cif=?, telefono=?, direccion=? "
                    "where matricula=?",
                    (vehiculo, kilometros, fecha, cliente,
                     cifnif, telefono, direccion, matricula))
                self.popup("Modificado")
                self.bd.commit()
                self.actualizar()
            except dbapi.IntegrityError:
                self.popup("La matricula ya existe")

    def insertar(self, control):
        """Insert a new client row built from the form entries."""
        (matricula, vehiculo, kilometros, fecha,
         cliente, cifnif, telefono, direccion) = self._leer_campos()
        if self._datos_validos(kilometros, cifnif, telefono):
            self.condicion = True
        else:
            self.popup("Datos invalidos. ")
            self.condicion = False
        if self.condicion:
            try:
                self.cursor.execute(
                    "insert into taller values(?, ?, ?, ?, ?, ?, ?, ?)",
                    (matricula, vehiculo, kilometros, fecha,
                     cliente, cifnif, telefono, direccion))
                self.popup("Insertado")
                self.actualizar()
                self.bd.commit()
            except dbapi.IntegrityError:
                self.popup("La matricula ya existe")

    def actualizar(self):
        """Refresh the treeview by re-selecting the whole table."""
        self.lista.clear()
        self.cursor.execute("select * from taller")
        for fila in self.cursor:
            self.lista.append(fila)
        self.vista.set_model(self.lista)

    def cerrar(self, widget):
        """Close callback used by the popup window (see note in popup())."""
        self.ventana.destroy()

    def imprimir(self, widget):
        """Generate a pdf listing of the clients via Generarpdf."""
        obj = pdf.PDF()
        obj.pdf()

    def popup(self, texto):
        """Open a small window showing ``texto`` to the user."""
        window = Gtk.Window(title="Aviso")
        label = Gtk.Label(texto)
        label.set_padding(30, 30)
        window.add(label)
        # NOTE(review): this wires the popup's close event to self.cerrar,
        # which destroys the *main* window -- kept as in the original, but
        # it looks unintended; confirm before relying on it.
        window.connect("delete-event", self.cerrar)
        window.set_position(Gtk.BaselinePosition.CENTER)
        window.show_all()
|
[
"davidpazolopez@gmail.com"
] |
davidpazolopez@gmail.com
|
2236a5e5f32a1ba386ed2514c85ada3d8fbcf015
|
4e82810f034d3a9950c10997ba33fddfc06fcc8f
|
/MyDoom/cfg_generator.py
|
18888eb9e6867c7c80263afc548a873fb594ecfd
|
[] |
no_license
|
henriknero/DV2579_project
|
bb226e1ae27b9188cae03a4037d42741673719ae
|
68883b379c7b33416623fd41d1a38e6e071c0671
|
refs/heads/master
| 2020-12-03T19:04:48.025224
| 2020-01-07T14:03:53
| 2020-01-07T14:03:53
| 231,441,121
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 510
|
py
|
import angr
import logging

# Silence angr's very chatty default logging.
logging.getLogger('angr').setLevel('ERROR')

# Address whose backward slice we want -- presumably inside the analyzed
# MyDoom payload binary (confirm against its disassembly).
target = 0x004A3DB0

p = angr.Project('strip-girl-2.0bdcom_patches.exe', auto_load_libs=True)
print('Generating CFGEmulated')
# keep_state=True retains per-node simulation state, which the DDG needs.
cfg = p.analyses.CFGEmulated(keep_state=True)
print('Generating CDG')
cdg = p.analyses.CDG(cfg)  # control-dependence graph
print('Generating DDG')
ddg = p.analyses.DDG(cfg)  # data-dependence graph
print('Finding node')
target_node = cfg.model.get_any_node(target, anyaddr=True)
# (node, -1) targets the whole node in the slice.
back_slice = p.analyses.BackwardSlice(cfg, cdg, ddg, targets=[(target_node, -1)])
|
[
"henriknero@gmail.com"
] |
henriknero@gmail.com
|
53e96ad958d483b7b85fb9c3119b9e98031ef73c
|
922b6d67ca8dcc1573bddd0aa7193107f42b6207
|
/dms/web/base.py
|
4d5a1c7e76049df5a818300081908190a9e6437b
|
[
"MIT"
] |
permissive
|
zhmsg/dms
|
40c91ea3945fd8dfcd0b056f4bcf324774c4e88c
|
a1ae1430893d9dde8f45bba0e50818f0224fcd8a
|
refs/heads/master
| 2023-09-03T23:00:44.615748
| 2023-08-25T00:13:59
| 2023-08-25T00:13:59
| 42,572,830
| 0
| 2
|
MIT
| 2022-07-06T19:54:32
| 2015-09-16T07:50:17
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,419
|
py
|
# !/usr/bin/env python
# coding: utf-8
from flask import Blueprint, g, Response, jsonify, redirect
from flask_login import login_required
import functools
from flask_helper.view import View as OView
from dms.utils.log import getLogger
from dms.utils.manager import Explorer
__author__ = 'zhouhenglc'
class RegisterData(object):
    """Process-wide key/value registry implemented as a singleton.

    Bug fixes vs. the original:

    * ``object.__new__(cls, *args)`` raises ``TypeError`` on Python 3 when
      extra constructor arguments are passed; ``super().__new__(cls)`` is
      used instead.
    * ``__init__`` reset ``_dict`` on *every* instantiation, wiping the
      shared state the singleton exists to keep.  The store is now created
      exactly once, in ``__new__``.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            instance = super().__new__(cls)
            # Create the backing store once, here rather than in __init__,
            # so repeated RegisterData() calls cannot clear it.
            instance._dict = {}
            cls._instance = instance
        return cls._instance

    def __init__(self):
        # Intentionally empty: state is initialized in __new__ (see above).
        pass

    def get(self, key, default=None):
        """Return the value stored under ``key``, or ``default`` if absent."""
        return self._dict.get(key, default)

    def set(self, key, value):
        """Store ``value`` under ``key``, overwriting any previous value."""
        self._dict[key] = value

    def set_default(self, key, default):
        """Store ``default`` under ``key`` only if the key is absent."""
        if key not in self._dict:
            self._dict[key] = default

    def append(self, key, value):
        """Append ``value`` to the list stored under ``key``, creating it
        (any falsy existing value is replaced by a fresh list, as before)."""
        _values = self.get(key)
        if not _values:
            _values = []
        _values.append(value)
        self.set(key, _values)

    def update(self, key, **kwargs):
        """Merge ``kwargs`` into the dict stored under ``key``, creating it
        (any falsy existing value is replaced by a fresh dict, as before)."""
        _values = self.get(key)
        if not _values:
            _values = {}
        _values.update(**kwargs)
        self.set(key, _values)
# Module-level shared instances used by every view in this package.
REGISTER_DATA = RegisterData()
explorer = Explorer.get_instance()
class View(OView):
    """View base class adding login enforcement, missing-config redirects,
    and automatic serialization of handler return values."""

    def __init__(self, name, import_name, *args, **kwargs):
        # auth_required: protect every route of this view behind login.
        # required_resource: config resources that must be configured; if one
        # is missing, requests are redirected to /config to fill it in.
        self.auth_required = kwargs.pop('auth_required', True)
        self.required_resource = kwargs.pop('required_resource', [])
        super().__init__(name, import_name, *args, **kwargs)
        if self.auth_required:
            @self.before_request
            @login_required
            def before_request():
                for rr in self.required_resource:
                    if rr in explorer.missing_config:
                        redirect_url = "/config?keys=%s" % \
                                       ",".join(explorer.missing_config[rr])
                        return redirect(redirect_url)

    def get_global_endpoint(self, endpoint=None, view_func=None):
        # Build "<view name>.<endpoint>" from the explicit endpoint or the
        # function's name; returns None when neither is given.
        # NOTE(review): view_func.func_name is Python 2 only; on Python 3
        # this path would raise AttributeError (__name__ is the py3 spelling).
        if endpoint:
            sub_endpoint = endpoint
        elif view_func:
            sub_endpoint = view_func.func_name
        else:
            return None
        g_endpoint = "%s.%s" % (self.name, sub_endpoint)
        return g_endpoint

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        # Wrap the view function so plain python return values (bool, dict,
        # list, model objects) are converted into proper responses.
        if view_func:
            @functools.wraps(view_func)
            def inner(*args, **kwargs):
                r = view_func(*args, **kwargs)
                if isinstance(r, Response):
                    return r
                elif isinstance(r, bool):
                    return 'True' if r else 'False'
                elif isinstance(r, dict):
                    return jsonify(r)
                elif isinstance(r, list):
                    # Serialize list items via to_dict() when available.
                    rs = []
                    for item in r:
                        if hasattr(item, 'to_dict'):
                            rs.append(item.to_dict())
                        else:
                            rs.append(item)
                    return jsonify(rs)
                elif hasattr(r, 'to_json'):
                    return r.to_json()
                elif hasattr(r, 'to_dict'):
                    return jsonify(r.to_dict())
                return r
            OView.add_url_rule(self, rule, endpoint, inner, **options)
        else:
            OView.add_url_rule(self, rule, endpoint, view_func, **options)
|
[
"zhouhenglc@inspur.com"
] |
zhouhenglc@inspur.com
|
87d5ac6dbfe5558297a98172e06f7a77e461a57f
|
cb56e1554f43ef93b470019e5a36ddc26680d837
|
/DjangoAyushh/first_site/first_site/wsgi.py
|
2ae3b1b5a0a4a9042a3af49b19677cd72ff23d53
|
[] |
no_license
|
Ayush900/initiating-django
|
6790ed4fde82a18af661922a7e3f7165a6d10b98
|
ea7a2c3f3467dc92f229468fb3de274e1143a3c8
|
refs/heads/master
| 2020-07-05T20:52:05.169025
| 2019-08-16T18:14:49
| 2019-08-16T18:14:49
| 202,770,951
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 397
|
py
|
"""
WSGI config for first_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'first_site.settings')
application = get_wsgi_application()
|
[
"ayush.mehrotra900@gmail.com"
] |
ayush.mehrotra900@gmail.com
|
573df83ec52893cb5a1e6a6100cf8d39238f3f64
|
b85899190bb2d3cea186547c9b5f6fb0557fa1b0
|
/monster.py
|
e0209ffc62525c32b7877fd44c00a17058849328
|
[] |
no_license
|
chansen22/dnd
|
0ac80ee5159f3f710033a9e0def3fdff5b017349
|
375f1366b82d8f0825da6e72506da76300da4447
|
refs/heads/master
| 2021-01-10T19:40:29.544369
| 2012-05-09T04:06:31
| 2012-05-09T04:06:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 293
|
py
|
class Monster:
    """A single monster record built from a 3-field data row."""

    def __init__(self, data):
        # Row layout: [id, type, hit count].
        self.monsterId = data[0]
        self.monsterType = data[1]
        self.monsterHits = data[2]

    def display(self):
        """Print a one-line human-readable summary of this monster."""
        print("Type:", self.monsterType, "Id:", self.monsterId,
              "Hits:", self.monsterHits)

    def displayId(self):
        """Return this monster's id."""
        return self.monsterId
|
[
"moosekid5@gmail.com"
] |
moosekid5@gmail.com
|
5f5ff7ca8f54dedb5109c21d04fda2d9c944d44b
|
717e6e71a173c41e6d5ee7f7644a999f68905be1
|
/src/dialogue_system/disease_classifier.py
|
980adb7b8ba2ef640cf29b283714bd26ea790a1b
|
[] |
no_license
|
york2210/MedicalChatbot-HRL
|
30d2b4ea4731a13f2e883dd1037bc84d6c6ac696
|
73004d3dbda3b9ead972db125fda53a788cf1157
|
refs/heads/master
| 2022-09-25T02:50:00.385905
| 2020-06-02T08:44:55
| 2020-06-02T08:44:55
| 267,846,504
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,139
|
py
|
import torch
import torch.nn.functional
import os
import numpy as np
from collections import namedtuple
import pickle
import copy
import random
class Model(torch.nn.Module):
    """
    DQN model with one hidden fully connected layer, written in pytorch.

    Architecture: Linear -> Dropout(0.3) -> LeakyReLU -> Linear.
    """

    def __init__(self, input_size, hidden_size, output_size):
        """
        :param input_size: width of the input feature vector
        :param hidden_size: width of the hidden layer
        :param output_size: number of output values (one per action/class)
        """
        super(Model, self).__init__()
        self.policy_layer = torch.nn.Sequential(
            torch.nn.Linear(input_size, hidden_size, bias=True),
            torch.nn.Dropout(0.3),
            torch.nn.LeakyReLU(),
            torch.nn.Linear(hidden_size, output_size, bias=True)
        )

    def forward(self, x):
        """Return the q-values for input batch ``x`` (cast to float).

        Bug fix: ``Tensor.cuda()`` returns a *new* tensor; the original
        discarded the result, so ``x`` never actually moved to the GPU.
        """
        if torch.cuda.is_available():
            x = x.cuda()
        q_values = self.policy_layer(x.float())
        return q_values
class dl_classifier(object):
    """Supervised disease classifier trained on one-hot symptom vectors.

    Data comes from pickled ``goal_set``/``slot_set``/``disease_symptom``
    files whose paths are supplied via the ``parameter`` config dict.
    """

    def __init__(self, input_size, hidden_size, output_size, parameter):
        # parameter: experiment config dict (file paths, batch_size, ...).
        self.parameter = parameter
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model = Model(input_size=input_size, hidden_size=hidden_size, output_size=output_size).to(self.device)
        # Split weights and biases so L2 regularization only hits weights.
        weight_p, bias_p = [], []
        for name, p in self.model.named_parameters():
            if 'bias' in name:
                bias_p.append(p)
            else:
                weight_p.append(p)
        self.optimizer = torch.optim.Adam([
            {'params': weight_p, 'weight_decay': 0.001},  # with L2 regularization
            {'params': bias_p, 'weight_decay': 0}  # no L2 regularization.
        ], lr=0.0004)
        #], lr=parameter.get("dqn_learning_rate"))
        self.criterion = torch.nn.CrossEntropyLoss()
        named_tuple = ("slot", "disease")
        self.Transition = namedtuple('Transition', named_tuple)

    def train(self, batch):
        """Run one optimization step on a list of (slot_vector, disease_id)
        pairs and return the scalar loss."""
        batch = self.Transition(*zip(*batch))
        slot = torch.LongTensor(batch.slot).to(self.device)
        disease = torch.LongTensor(batch.disease).to(self.device)
        out = self.model.forward(slot)
        loss = self.criterion(out, disease)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()
        return {"loss": loss.item()}

    def predict(self, slots):
        """Return (raw logits, argmax class indices) for a batch of vectors.

        Dropout is disabled for the forward pass and re-enabled afterwards.
        """
        self.model.eval()
        slots = torch.LongTensor(slots).to(self.device)
        Ys = self.model.forward(slots)
        max_index = np.argmax(Ys.detach().cpu().numpy(), axis=1)
        self.model.train()
        return Ys, max_index

    def train_dl_classifier(self, epochs):
        """Train for ``epochs`` random mini-batches drawn from the train split."""
        batch_size = self.parameter.get("batch_size")
        total_batch = self.create_data(train_mode=True)
        for iter in range(epochs):
            batch = random.sample(total_batch, batch_size)
            loss = self.train(batch)
            if iter % 100 == 0:
                print('epoch:{},loss:{:.4f}'.format(iter, loss["loss"]))

    def test_dl_classifier(self):
        """Evaluate accuracy on the test split and print it."""
        self.model.eval()
        self.test_batch = self.create_data(train_mode=False)
        batch = self.Transition(*zip(*self.test_batch))
        slot = torch.LongTensor(batch.slot).to(self.device)
        disease = batch.disease
        Ys, pred = self.predict(slot)
        # NOTE(review): comma makes this a two-argument print, not %-format.
        print("the test accuracy is %f", num_correct / len(self.test_batch))
        self.model.train()

    def test(self, test_batch):
        """Return accuracy on an externally supplied (slot, disease) batch."""
        batch = self.Transition(*zip(*test_batch))
        slot = torch.LongTensor(batch.slot).to(self.device)
        disease = batch.disease
        Ys, pred = self.predict(slot.cpu())
        num_correct = len([1 for i in range(len(disease)) if disease[i] == pred[i]])
        test_acc = num_correct / len(test_batch)
        return test_acc

    def create_data(self, train_mode):
        """Build (one-hot explicit-symptom vector, disease id) pairs from the
        pickled goal set; ``train_mode`` selects the train or test split."""
        goal_set = pickle.load(open(self.parameter.get("goal_set"), 'rb'))
        self.slot_set = pickle.load(open(self.parameter.get("slot_set"), 'rb'))
        disease_symptom = pickle.load(open(self.parameter.get("disease_symptom"), 'rb'))
        self.disease2id = {}
        for disease, v in disease_symptom.items():
            self.disease2id[disease] = v['index']
        # 'disease' is not a symptom slot; drop it from the feature space.
        self.slot_set.pop('disease')
        disease_y = []
        if train_mode == True:
            total_set = copy.deepcopy(goal_set["train"])
        else:
            total_set = copy.deepcopy(goal_set["test"])
        total_batch = []
        for i, dialogue in enumerate(total_set):
            slots_exp = [0] * len(self.slot_set)
            tag = dialogue['disease_tag']
            disease_y.append(tag)
            goal = dialogue['goal']
            explicit = goal['explicit_inform_slots']
            for exp_slot, value in explicit.items():
                slot_id = self.slot_set[exp_slot]
                if value == True:
                    slots_exp[slot_id] = 1
            if sum(slots_exp) == 0:
                # Warn loudly when a dialogue has no explicit symptoms at all.
                print("############################")
            total_batch.append((slots_exp, self.disease2id[tag]))
        return total_batch

    def save_model(self, model_performance, episodes_index, checkpoint_path):
        """Save model weights, encoding the performance metrics in the name."""
        if os.path.isdir(checkpoint_path) == False:
            os.makedirs(checkpoint_path)
        agent_id = self.parameter.get("agent_id").lower()
        disease_number = self.parameter.get("disease_number")
        success_rate = model_performance["success_rate"]
        average_reward = model_performance["average_reward"]
        average_turn = model_performance["average_turn"]
        average_match_rate = model_performance["average_match_rate"]
        average_match_rate2 = model_performance["average_match_rate2"]
        model_file_name = os.path.join(checkpoint_path, "model_d" + str(disease_number) + str(agent_id) + "_s" + str(
            success_rate) + "_r" + str(average_reward) + "_t" + str(average_turn) \
            + "_mr" + str(average_match_rate) + "_mr2-" + str(
            average_match_rate2) + "_e-" + str(episodes_index) + ".pkl")
        torch.save(self.model.state_dict(), model_file_name)

    def restore_model(self, saved_model):
        """
        Restoring the trained parameters for the model. Both current and target net are restored from the same parameter.

        Args:
            saved_model (str): the file name which is the trained model.
        """
        print("loading trained model", saved_model)
        if torch.cuda.is_available() is False:
            # Remap CUDA-saved tensors onto the CPU.
            map_location = 'cpu'
        else:
            map_location = None
        self.model.load_state_dict(torch.load(saved_model, map_location=map_location))

    def eval_mode(self):
        # Switch off dropout for evaluation.
        self.model.eval()
|
[
"lkeb2210@gmail.com"
] |
lkeb2210@gmail.com
|
3b368f4bde0884863f26ff8302e96f97e0254648
|
5897a1b176f9c404fe421c61342f20820f685c07
|
/RIS/OSL/zbrush/normalMap.py
|
0d231757d9fd9b6a6e7b2b9114f427cc59a996c7
|
[] |
no_license
|
NCCA/Renderman
|
d3269e0c7b5e58a69d8744051100013339516ad0
|
ebf17298e0ee56899a3288a3ff8eb4c4a0f806e8
|
refs/heads/master
| 2023-06-09T07:51:29.251270
| 2023-05-25T11:13:37
| 2023-05-25T11:13:37
| 51,373,926
| 17
| 11
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,215
|
py
|
#!/usr/bin/python
import prman
# import the python functions
import sys
sys.path.append("../../common")
from functions import drawTeapot
import Obj
from Camera import Camera
from Vec4 import Vec4
from Transformation import *
import random
ri = prman.Ri()  # create an instance of the RenderMan interface
filename = "__render"  # NOTE(review): unused — ri.Begin() below uses the literal directly
# this is the beginning of the rib archive generation; we can only
# make RI calls after this function else we get a core dump
ri.Begin("__render")
# now we add the display element using the usual elements
# FILENAME DISPLAY Type Output format
ri.Display("zbrush.exr", "it", "rgba")
ri.Format(1024, 720, 1)
# setup the raytrace / integrators
ri.Hider("raytrace", {"int incremental": [1]})
ri.PixelVariance(0.01)
ri.ShadingRate(0.1)
ri.Integrator("PxrPathTracer", "integrator")
# now set the projection to perspective
ri.Projection(ri.PERSPECTIVE, {ri.FOV: 30})
# Simple translate for our camera
cam = Camera(Vec4(0, 0.9, -3.9), Vec4(0, 0.2, 0), Vec4(0, 1, 0))
cam.place(ri)
# now we start our world
ri.WorldBegin()
# Lighting: emissive dome light driven by an environment map
ri.TransformBegin()
ri.AttributeBegin()
ri.Declare("domeLight", "string")
lightTx = Transformation()
lightTx.setPosition(0, 1, 0)
lightTx.setRotation(90, 0, 0)
lightTx.setScale(1, 1, 1)
ri.ConcatTransform(lightTx.getMatrix())
ri.Light("PxrDomeLight", "domeLight", {"float exposure": [1.0], "string lightColorMap": ["../../disney/studio2.tx"]})
ri.AttributeEnd()
ri.TransformEnd()
# load mesh and bind its colour / specular / normal-map textures as patterns
troll = Obj.Obj("../../meshes/troll.obj")
tx = Transformation()
ri.Pattern("PxrTexture", "TrollColour", {"string filename": "../../meshes/TrollColour.tx"})
ri.Pattern("PxrTexture", "TrollSpecular", {"string filename": "../../meshes/TrollSpec.tx"})
ri.Pattern("PxrTexture", "TrollNMap", {"string filename": "../../meshes/TrollNormal.tx"})
ri.Pattern("PxrNormalMap", "TrollBump", {"string filename": "../../meshes/TrollNormal.tx", "float bumpScale": [2]})
# Disney BxDF wired to the texture patterns above via "reference" parameters
ri.Bxdf(
    "PxrDisney",
    "bxdf",
    {
        "reference color baseColor": ["TrollColour:resultRGB"],
        "reference color subsurfaceColor": ["TrollSpecular:resultRGB"],
        "float subsurface": [0.4],
        "reference normal bumpNormal": ["TrollBump:resultN"],
        "float metallic": [0.1],
        "float specular": [0.1],
        "float roughness": [0.3],
    },
)
# three copies of the troll mesh at different positions / rotations
ypos = 0.55
ri.TransformBegin()
tx.setPosition(-1, ypos, 0)
tx.setRotation(0, -45, 0)
ri.ConcatTransform(tx.getMatrix())
troll.Polygon(ri)
ri.TransformEnd()
ri.TransformBegin()
tx.setPosition(0, ypos, 0)
tx.setRotation(0, 45, 0)
ri.ConcatTransform(tx.getMatrix())
troll.Polygon(ri)
ri.TransformEnd()
ri.TransformBegin()
tx.setPosition(1, ypos, 0)
tx.setRotation(0, 200, 0)
ri.ConcatTransform(tx.getMatrix())
troll.Polygon(ri)
ri.TransformEnd()
# floor: a plain white bilinear patch
ri.TransformBegin()
ri.Bxdf(
    "PxrDisney",
    "bxdf",
    {
        "color baseColor": [1, 1, 1],
        "float roughness": [0.2],
    },
)
# ri.Bxdf( "PxrDiffuse","bxdf", { "reference color diffuseColor" : ["colourChecker:resultRGB"] })
s = 12.0
face = [-s, 0, -s, s, 0, -s, -s, 0, s, s, 0, s]
ri.Patch("bilinear", {"P": face})
ri.TransformEnd()
# end our world
ri.WorldEnd()
# and finally end the rib file
ri.End()
|
[
"jmacey@bournemouth.ac.uk"
] |
jmacey@bournemouth.ac.uk
|
ffe3990f711621b831344228f34dd8c71b7f39c8
|
bc4c1b3388b9bcc7fdab65699c5ae1f6dbc5531f
|
/jtVAE/molvae/pretrain.py
|
8f3e19fc8ffc56b18063dd65cc8948877933ee3a
|
[
"MIT"
] |
permissive
|
JennyW5/ComparisonsDGM
|
fd7235fc5a5e49424a3bc39df483fbafa0582d3d
|
0a26e9f9d9aba7df0525a22da6eeab376464d19d
|
refs/heads/master
| 2023-01-07T05:00:27.958540
| 2020-09-16T16:14:35
| 2020-09-16T17:15:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,935
|
py
|
import torch
import torch.nn as nn
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from torch.utils.data import DataLoader
from torch.autograd import Variable
import math, random, sys
from optparse import OptionParser
from collections import deque
from jtnn import *
import rdkit
# NOTE: this script is Python 2 (print statements, xrange).
lg = rdkit.RDLogger.logger()
lg.setLevel(rdkit.RDLogger.CRITICAL)  # silence RDKit warnings
parser = OptionParser()
parser.add_option("-t", "--train", dest="train_path")
parser.add_option("-v", "--vocab", dest="vocab_path")
parser.add_option("-s", "--save_dir", dest="save_path")
parser.add_option("-b", "--batch", dest="batch_size", default=40)
parser.add_option("-w", "--hidden", dest="hidden_size", default=200)
parser.add_option("-l", "--latent", dest="latent_size", default=56)
parser.add_option("-d", "--depth", dest="depth", default=3)
opts,args = parser.parse_args()
# vocabulary file: one token per line
vocab = [x.strip("\r\n ") for x in open(opts.vocab_path)]
vocab = Vocab(vocab)
batch_size = int(opts.batch_size)
hidden_size = int(opts.hidden_size)
latent_size = int(opts.latent_size)
depth = int(opts.depth)
model = JTNNVAE(vocab, hidden_size, latent_size, depth)
# Xavier init for weight matrices, zeros for 1-D params (biases).
for param in model.parameters():
    if param.dim() == 1:
        nn.init.constant(param, 0)
    else:
        nn.init.xavier_normal(param)
model = model.cpu()
print "Model #Params: %dK" % (sum([x.nelement() for x in model.parameters()]) / 1000,)
optimizer = optim.Adam(model.parameters(), lr=1e-3)
scheduler = lr_scheduler.ExponentialLR(optimizer, 0.9)
scheduler.step()
dataset = MoleculeDataset(opts.train_path)
MAX_EPOCH = 3
PRINT_ITER = 20
for epoch in xrange(MAX_EPOCH):
    # collate_fn is identity: each batch is the raw list of mol trees
    dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=4, collate_fn=lambda x:x, drop_last=True)
    word_acc,topo_acc,assm_acc,steo_acc = 0,0,0,0
    for it, batch in enumerate(dataloader):
        for mol_tree in batch:
            for node in mol_tree.nodes:
                # ensure the ground-truth label is always among the candidates
                if node.label not in node.cands:
                    node.cands.append(node.label)
                    node.cand_mols.append(node.label_mol)
        model.zero_grad()
        # beta=0: pretraining without the KL term of the VAE objective
        loss, kl_div, wacc, tacc, sacc, dacc = model(batch, beta=0)
        loss.backward()
        optimizer.step()
        word_acc += wacc
        topo_acc += tacc
        assm_acc += sacc
        steo_acc += dacc
        if (it + 1) % PRINT_ITER == 0:
            # running averages over the last PRINT_ITER iterations, in percent
            word_acc = word_acc / PRINT_ITER * 100
            topo_acc = topo_acc / PRINT_ITER * 100
            assm_acc = assm_acc / PRINT_ITER * 100
            steo_acc = steo_acc / PRINT_ITER * 100
            print "KL: %.1f, Word: %.2f, Topo: %.2f, Assm: %.2f, Steo: %.2f" % (kl_div, word_acc, topo_acc, assm_acc, steo_acc)
            word_acc,topo_acc,assm_acc,steo_acc = 0,0,0,0
            sys.stdout.flush()
    scheduler.step()
    print "learning rate: %.6f" % scheduler.get_lr()[0]
    torch.save(model.state_dict(), opts.save_path + "/model.iter-" + str(epoch))
|
[
"davider1994@gmail.com"
] |
davider1994@gmail.com
|
82913e44e04e182fb860e0677d0f68fddef97a1d
|
a0d6a944cb47024bffe439ad233b1f07cb4830d0
|
/main.py
|
116384fe249e2c17bfb76f3c0b7a53f728273b7f
|
[] |
no_license
|
ImtiazVision/Calculator
|
3b8e5b4d1844a5889b9d7175a38293438b120b4e
|
d4570cf121b06ca9a74a1db084b8c4ca43e27bfb
|
refs/heads/master
| 2023-01-24T00:53:21.820121
| 2020-12-06T18:04:58
| 2020-12-06T18:04:58
| 319,098,222
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
# If the bill was $150.00, split between 5 people, with 12% tip,
# each person should pay (150.00 / 5) * 1.12 = 33.6,
# displayed with exactly two decimal places (e.g. 33.60).
print("Welcome to the tip calculator!")
bill = float(input("What was the total bill? $"))
tip = int(input("How much tip would you like to give? 10, 12, or 15? "))
people = int(input("How many people to split the bill?"))
tip_as_percent = tip / 100
total_tip_amount = bill * tip_as_percent
total_bill = bill + total_tip_amount
bill_per_person = total_bill / people
# Fixed: the original rounded bill_per_person and then immediately overwrote
# the result with the formatted string. Formatting alone both rounds and
# keeps trailing zeros, so the dead assignment is removed.
final_amount = "{:.2f}".format(bill_per_person)
print(f"Each person should pay: ${final_amount}")
|
[
"ahmedimtiaz.ahmed@gmail.com"
] |
ahmedimtiaz.ahmed@gmail.com
|
6dd5e90c13cbc8921188a2a55e954bfeb8c45d71
|
21b5ad37b812ed78799d4efc1649579cc83d32fb
|
/pro/migrations/0007_auto_20200222_1157.py
|
4b4329dc3c21c4faddc276aeb4688a4472386e24
|
[] |
no_license
|
SaifulAbir/django-js-api
|
b6f18c319f8109884e71095ad49e08e50485bb25
|
fbf174b9cde2e7d25b4898f511df9c6f96d406cf
|
refs/heads/master
| 2023-02-12T16:09:21.508702
| 2021-01-14T09:05:15
| 2021-01-14T09:05:15
| 329,713,528
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 642
|
py
|
# Generated by Django 3.0.3 on 2020-02-22 11:57
from django.db import migrations, models
import p7.validators
class Migration(migrations.Migration):
    # Renames professional.user_id -> professional.user and tightens the
    # password field: custom validity check plus a minimum length of 8.

    dependencies = [
        ('pro', '0006_merge_20200222_1117'),
    ]
    operations = [
        migrations.RenameField(
            model_name='professional',
            old_name='user_id',
            new_name='user',
        ),
        migrations.AlterField(
            model_name='professional',
            name='password',
            field=models.CharField(max_length=255, validators=[p7.validators.check_valid_password, p7.validators.MinLengthValidator(8)]),
        ),
    ]
|
[
"rashed@ishraak.com"
] |
rashed@ishraak.com
|
e7c95327204dbb298ab53f6b4b0716bf482e9e08
|
c031e29827db8c9794059a5839ab151959de5bef
|
/csvreader.py
|
a1e676678debb7dade709ec8616487aef4247419
|
[] |
no_license
|
Sebelino/InstantBoss2
|
f97d0f6205f91b1b4c1f4e4b2d25a169c415972f
|
0c58cddf8d72defaa29e6f50b5794336838cf49b
|
refs/heads/master
| 2016-09-06T17:51:34.338951
| 2014-11-09T11:41:38
| 2014-11-09T11:41:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,121
|
py
|
#!/bin/python2
import sys,time,thread,os,argparse,select,csv,datetime,re
""" :returns A processed representation of the csv file. """
def readcsv(path):
with open(path,'r') as csvfile:
reader = csv.reader(csvfile)
fmt = "%Y-%m-%dT%H:%M:%S"
totime = lambda t: datetime.datetime.fromtimestamp(time.mktime(time.strptime(t,fmt)))
return [(totime(a),totime(b),s) for (a,b,s) in reader]
parser = argparse.ArgumentParser()
parser.add_argument("-i","--input",type=str,metavar='file',default='schedule.csv',
help="The file which is to be read. If not specified, the output will\
be written to ./dat/schedule.csv.")
parser.add_argument("-t","--topic",type=str,metavar='topic',
help="The topic to filter on.")
parser.add_argument("-p","--period",type=str,metavar='period',
help="The time interval. Format: \" A:B\", where the first day is today+A and the last\
day is today+B. By default A=-Inf, B=+Inf")
parser.add_argument("-s","--sum",action='store_true',
help="Sum the results.")
args = parser.parse_args()
working_dir = os.path.dirname(os.path.realpath(__file__))
data_dir = "dat"
path = os.path.join(data_dir,args.input)
intervals = readcsv(path)
if args.period:
if args.period:
m = re.match('(-?\d*):(-?\d*)',args.period.strip())
if not m:
raise Exception("Incorrect format for argument \"period\".")
forever = 36500
first = int(m.group(1)) if m.group(1) else -forever
last = int(m.group(2)) if m.group(2) else forever
intervals = [(a,t,s) for (a,t,s) in intervals
if datetime.datetime.now().date()+datetime.timedelta(days=first) <= a.date()
and t.date() <= datetime.datetime.now().date()+datetime.timedelta(days=last)
]
intervals = [(a.ctime(),b-a,s) for (a,b,s) in intervals]
if args.topic:
intervals = [(a,t,s) for (a,t,s) in intervals if s == args.topic]
datesum = datetime.timedelta(0)
for (a,t,s) in intervals:
datesum += t
output = [[datesum]] if args.sum else intervals
for row in output:
print ' | '.join([str(cell) for cell in row])
|
[
"sebelino@hotmail.com"
] |
sebelino@hotmail.com
|
9cb6004c3307089e00b452b54ce70d0c4b4dbc6f
|
2a3a7b50e02677417994b8784e2d21b92acf7ecc
|
/best-time-to-buy-and-sell-stock/best-time-to-buy-and-sell-stock.py
|
921b289eb6205f9dd669ddfcbbaf4e82b0322c1b
|
[] |
no_license
|
yiswang/LeetCode
|
0c263e8b5a22b8542582b48e02eecd77b18bd365
|
c349a4da002071d43a4c4230d1ac776289488f0e
|
refs/heads/master
| 2021-01-11T20:20:39.772212
| 2017-03-26T17:16:17
| 2017-03-26T17:16:17
| 78,953,041
| 1
| 0
| null | 2017-01-14T17:25:45
| 2017-01-14T16:51:29
| null |
UTF-8
|
Python
| false
| false
| 2,200
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
###############################################################################
#
# Author: Yishan Wang <wangys8807@gmail.com>
# File: best-time-to-buy-and-sell-stock.py
# Create Date: 2017-02-06
# Usage: best-time-to-buy-and-sell-stock.py
# Description:
#
# LeetCode problem 121. Best Time to Buy and Sell Stock
#
# Difficulty: Easy
#
# Say you have an array for which the ith element is the price of a given stock
# on day i.
#
# If you were only permitted to complete at most one transaction (ie, buy one
# and sell one share of the stock), design an algorithm to find the maximum
# profit.
#
# Example 1: Input: [7, 1, 5, 3, 6, 4] Output: 5
#
# max. difference = 6-1 = 5 (not 7-1 = 6, as selling price needs to be larger
# than buying price)
#
# Example 2:
# Input: [7, 6, 4, 3, 1]
# Output: 0
#
# In this case, no transaction is done, i.e. max profit = 0.
#
###############################################################################
#
# Just traverse the price list one time. Record the current minimum price and
# maximum profit; compute prices[i] - minimum price and compare it with the
# current maximum profit in each loop. The time complexity is O(n).
#
# Run time on LeetCode: 42ms, beat 94.67%; 45ms, beat 88.63%
#
class Solution(object):
    def maxProfit(self, prices):
        """Return the maximum profit from one buy followed by one later sell.

        Single O(n) pass: track the minimum price seen so far and the best
        profit obtainable by selling at the current price. Returns 0 for an
        empty list or when no profitable transaction exists.

        :type prices: List[int]
        :rtype: int
        """
        # Fixed: the original indexed with Python-2-only xrange; iterating
        # the list directly works on both Python 2 and 3, avoids repeated
        # prices[i] lookups, and makes the empty-list guard unnecessary.
        max_profit = 0
        min_price = None
        for price in prices:
            if min_price is None or price < min_price:
                min_price = price
            elif price - min_price > max_profit:
                max_profit = price - min_price
        return max_profit
return max_profit
if __name__ == "__main__":
import time
test_cases = [
[],
[1],
[7, 1, 5, 3, 6, 4],
[7, 6, 4, 3],
[1, 2, 3, 4, 5],
]
solu = Solution()
start_t = time.clock()
for prices in test_cases:
print prices
res = solu.maxProfit(prices)
print res
print ""
end_t = time.clock()
print "cpu time: %s" % (end_t - start_t)
print ""
|
[
"wangys8807@gmail.com"
] |
wangys8807@gmail.com
|
0663ca2468470dd94deb42af8ca3dab1a2cc3333
|
8e97cb7c8668a9061683ea3ba893dab32029fac9
|
/pytorch_toolkit/person_reidentification/data/datamanager.py
|
75b80c905990d162e028c8e00d6e2abce522f5de
|
[
"Apache-2.0"
] |
permissive
|
DmitriySidnev/openvino_training_extensions
|
e01703bea292f11ffc20d50a1a06f0565059d5c7
|
c553a56088f0055baba838b68c9299e19683227e
|
refs/heads/develop
| 2021-06-14T06:32:12.373813
| 2020-05-13T13:25:15
| 2020-05-13T13:25:15
| 180,546,423
| 0
| 1
|
Apache-2.0
| 2019-04-15T13:39:48
| 2019-04-10T09:17:55
|
Python
|
UTF-8
|
Python
| false
| false
| 5,895
|
py
|
"""
MIT License
Copyright (c) 2018 Kaiyang Zhou
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch
from torchreid.data.datamanager import DataManager
from torchreid.data.datasets import __image_datasets
from .datasets.globalme import GlobalMe
from .transforms import build_transforms
from .sampler import build_train_sampler
__image_datasets['globalme'] = GlobalMe
def init_image_dataset(name, **kwargs):
    """Look up *name* in the dataset registry and instantiate it.

    Raises ValueError when the name is not registered.
    """
    available = list(__image_datasets.keys())
    if name in available:
        return __image_datasets[name](**kwargs)
    raise ValueError('Invalid dataset name. Received "{}", '
                     'but expected to be one of {}'.format(name, available))
class ImageDataManagerWithTransforms(DataManager):
    """torchreid DataManager for image datasets that plugs in this package's
    own transform and train-sampler builders (see .transforms / .sampler)."""

    data_type = 'image'

    def __init__(self, root='', sources=None, targets=None, height=256, width=128, transforms='random_flip',
                 norm_mean=None, norm_std=None, use_gpu=True, split_id=0, combineall=False,
                 batch_size_train=32, batch_size_test=32, workers=4, num_instances=4, train_sampler='',
                 cuhk03_labeled=False, cuhk03_classic_split=False, market1501_500k=False, apply_masks_to_test=False):
        # transforms=None in the super call: the base class must not build its
        # own transforms — we construct ours with build_transforms just below.
        super(ImageDataManagerWithTransforms, self).__init__(
            sources=sources, targets=targets, height=height, width=width,
            transforms=None, norm_mean=norm_mean, norm_std=norm_std, use_gpu=use_gpu
        )
        self.transform_tr, self.transform_te = build_transforms(
            self.height, self.width, transforms=transforms,
            norm_mean=norm_mean, norm_std=norm_std,
            apply_masks_to_test=apply_masks_to_test
        )
        print('=> Loading train (source) dataset')
        # Build each source dataset and merge them into one training set.
        trainset = []
        for name in self.sources:
            trainset_ = init_image_dataset(
                name,
                transform=self.transform_tr,
                mode='train',
                combineall=combineall,
                root=root,
                split_id=split_id,
                cuhk03_labeled=cuhk03_labeled,
                cuhk03_classic_split=cuhk03_classic_split,
                market1501_500k=market1501_500k
            )
            trainset.append(trainset_)
        # NOTE(review): sum() assumes the dataset class supports __add__/__radd__
        # for concatenation (torchreid convention) — confirm against the class.
        trainset = sum(trainset)
        self._num_train_pids = trainset.num_train_pids
        self._num_train_cams = trainset.num_train_cams
        train_sampler = build_train_sampler(
            trainset.train, train_sampler,
            batch_size=batch_size_train,
            num_instances=num_instances
        )
        self.trainloader = torch.utils.data.DataLoader(
            trainset,
            sampler=train_sampler,
            batch_size=batch_size_train,
            shuffle=False,  # ordering is delegated to the sampler
            num_workers=workers,
            pin_memory=self.use_gpu,
            drop_last=True
        )
        print('=> Loading test (target) dataset')
        # Per-target query/gallery loaders, plus the raw splits for evaluation.
        self.testloader = {name: {'query': None, 'gallery': None} for name in self.targets}
        self.testdataset = {name: {'query': None, 'gallery': None} for name in self.targets}
        for name in self.targets:
            # build query loader
            queryset = init_image_dataset(
                name,
                transform=self.transform_te,
                mode='query',
                combineall=combineall,
                root=root,
                split_id=split_id,
                cuhk03_labeled=cuhk03_labeled,
                cuhk03_classic_split=cuhk03_classic_split,
                market1501_500k=market1501_500k
            )
            self.testloader[name]['query'] = torch.utils.data.DataLoader(
                queryset,
                batch_size=batch_size_test,
                shuffle=False,
                num_workers=workers,
                pin_memory=self.use_gpu,
                drop_last=False
            )
            # build gallery loader (verbose=False: stats were already printed)
            galleryset = init_image_dataset(
                name,
                transform=self.transform_te,
                mode='gallery',
                combineall=combineall,
                verbose=False,
                root=root,
                split_id=split_id,
                cuhk03_labeled=cuhk03_labeled,
                cuhk03_classic_split=cuhk03_classic_split,
                market1501_500k=market1501_500k
            )
            self.testloader[name]['gallery'] = torch.utils.data.DataLoader(
                galleryset,
                batch_size=batch_size_test,
                shuffle=False,
                num_workers=workers,
                pin_memory=self.use_gpu,
                drop_last=False
            )
            self.testdataset[name]['query'] = queryset.query
            self.testdataset[name]['gallery'] = galleryset.gallery
        print('\n')
        print('  **************** Summary ****************')
        print('  train            : {}'.format(self.sources))
        print('  # train datasets : {}'.format(len(self.sources)))
        print('  # train ids      : {}'.format(self.num_train_pids))
        print('  # train images   : {}'.format(len(trainset)))
        print('  # train cameras  : {}'.format(self.num_train_cams))
        print('  test             : {}'.format(self.targets))
        print('  *****************************************')
        print('\n')
|
[
"48012821+AlexanderDokuchaev@users.noreply.github.com"
] |
48012821+AlexanderDokuchaev@users.noreply.github.com
|
0534417e3ceeed9e4e7b1c8de0b0730066e5cbe3
|
a81844069bce15b04bde230c245e1cbd03ceb066
|
/remind/TxtConvert.py
|
92d60aabba901202a74fd627ab25ddcee2cf773a
|
[] |
no_license
|
haichaoyuan/python
|
612632323af2044414c9261dedc524dcb7c24283
|
030078b187decf36cfce922cd4d829f2f962b06b
|
refs/heads/master
| 2020-12-30T14:20:22.072388
| 2017-08-31T14:56:57
| 2017-08-31T14:56:57
| 91,312,403
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 723
|
py
|
import codecs # 用于读取文件
import re
# Read a text file line by line; for every line that contains the '---'
# marker, write everything after the marker to a second file.
file2Read = codecs.open("res/dataForConvert", 'r', 'utf-8')
file2Write = codecs.open("res/dataForConvert2", 'w', 'utf-8')
try:
    for seg in file2Read.readlines():
        # strip CR / LF / tab characters anywhere in the line
        seg = seg.replace('\r', '').replace('\n', '').replace('\t', '')
        if seg:
            print(seg)
            indexValue = '---'
            markerPos = seg.find(indexValue)
            if markerPos != -1:
                tail = seg[markerPos + 3:]
                if tail:
                    file2Write.write(tail)
                    file2Write.write('\r\n')
finally:
    # always release both file handles, even on failure mid-way
    file2Read.close()
    file2Write.close()
|
[
"944951213@qq.com"
] |
944951213@qq.com
|
f7fd2302cfd2960183ff0937e126697563b69460
|
5292096eb43fd55513c3e834ad491876a1ddddcf
|
/chapter10/github.py
|
4e4f9ed5cf32a88de8ff75ed3c0c529fe4103b23
|
[] |
no_license
|
gabearwin/spider
|
1c4da0dc0629bd31512de53c00bdf63a77f1fb39
|
94260f03e6fc0c08b5c890ebcf1946e6dbc3b75e
|
refs/heads/master
| 2020-05-03T10:22:39.472486
| 2019-11-07T16:13:40
| 2019-11-07T16:13:40
| 178,577,824
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,359
|
py
|
# -*- coding: UTF-8 -*-
import requests
from lxml import etree
class Login(object):
    """Log in to GitHub with requests + lxml and print the feed and profile."""

    def __init__(self):
        self.headers = {
            'Referer': 'https://github.com/',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
            'Host': 'github.com'
        }
        self.login_url = 'https://github.com/login'
        self.post_url = 'https://github.com/session'
        self.feed_url = 'https://github.com/dashboard-feed'
        self.profile_url = 'https://github.com/settings/profile'
        # one Session so cookies persist across the whole login flow
        self.session = requests.Session()

    def token(self):
        """Fetch the login page and extract the authenticity (CSRF) token."""
        response = self.session.get(self.login_url, headers=self.headers)
        selector = etree.HTML(response.text)
        # grab the value attribute of the second <input> on the page
        token = selector.xpath('//div//input[2]/@value')
        print('token:', token)
        return token

    def login(self, email, password):
        """POST the credentials, then fetch and print feed + profile pages."""
        post_data = {
            'commit': 'Sign in',
            'utf8': '✓',
            'authenticity_token': self.token()[0],
            'login': email,
            'password': password
        }
        self.session.post(self.post_url, data=post_data, headers=self.headers)
        # request the dashboard feed now that the session is authenticated
        response = self.session.get(self.feed_url, headers=self.headers)
        if response.status_code == 200:
            self.dynamics(response.text)
        response = self.session.get(self.profile_url, headers=self.headers)
        if response.status_code == 200:
            self.profile(response.text)

    def dynamics(self, html):
        """Print each dashboard feed entry as one cleaned-up line."""
        selector = etree.HTML(html, etree.HTMLParser(encoding='utf-8'))
        for item in selector.xpath('//body/div/div'):
            item = item.xpath('.//div[@class="d-flex flex-column width-full"]/div[1]//text()')
            news = ' '.join([s.replace('\n', '').replace(' ', '') for s in item]).strip()
            print(news)

    def profile(self, html):
        """Print the profile name and the non-empty e-mail options."""
        selector = etree.HTML(html)
        name = selector.xpath('//input[@id="user_profile_name"]/@value')[0]
        email = selector.xpath('//select[@id="user_profile_email"]/option[@value!=""]/text()')
        print(name, email)
if __name__ == "__main__":
login = Login()
login.login(email='gabear@outlook.com', password='password')
|
[
"gaxiong@ebay.com"
] |
gaxiong@ebay.com
|
14b3e84322e0a33ba7d78e86df79a56d7711239c
|
56c8655e0d08fec848376b841f655b79765d19e0
|
/sample-config/_etc_kangaroo/kangaroo-api-gunicorn.py
|
b16eae33a455a691f2272bef3de2782ff50e4950
|
[] |
no_license
|
nikitamarchenko/kangaroo
|
37215109599ba9434ce8a1dca6c746affd5e8733
|
6100dad9757fd64c7f1d5c880982a19979b5e62f
|
refs/heads/master
| 2021-01-10T06:22:19.538758
| 2015-11-09T18:04:44
| 2015-11-09T18:04:44
| 45,856,586
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 104
|
py
|
# Gunicorn server configuration.
host = '0.0.0.0'  # listen on every interface
port = '8080'
workers = 1       # single synchronous worker
#worker_class = 'gevent'
bind = ":".join((host, port))  # e.g. "0.0.0.0:8080"
|
[
"nmarchenko@mirantis.com"
] |
nmarchenko@mirantis.com
|
3959a8164572c0798f563048759f116819dcb744
|
f167eb1ea15ffb0f0809fab7e6cd08492c109b32
|
/爬虫反爬/7.1.12selenium前进后退.py
|
23f1e3fa98b34c0bb727562fe1aed34129ea594f
|
[] |
no_license
|
miaoJianKang-cyber/2021
|
92ad3846e8a546d096acc50a1ce10897a2b08e2b
|
5ece3ecfec5ee0cf814b9c90bec5ac954d7fc580
|
refs/heads/master
| 2023-03-22T08:38:45.716011
| 2021-03-02T03:41:21
| 2021-03-02T03:41:21
| 341,480,328
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 264
|
py
|
# coding:utf-8
import time
from selenium import webdriver
browser = webdriver.Chrome()
# Visit three pages so the browser history has entries to navigate.
browser.get("https://www.zhihu.com")
browser.get("https://www.taobao.com")
browser.get("https://www.python.com")
# Go back one page in the history, wait, then move forward again.
browser.back()
time.sleep(5)
browser.forward()
browser.close()
|
[
"miaojiankang"
] |
miaojiankang
|
fd57ba8957343492c0b209bc9b119fae7100d90b
|
6ee9cb1649a50a8877e73fe94d5e05f62cfd941d
|
/main/settings.py
|
d1c29127fb6bb698cbea7fce07ce52e9a9fc0381
|
[] |
no_license
|
jaechoi15/myapp
|
9fd56364cd3082e190c4c62714ee602179294dd7
|
af87f597cd39f8f056251a9a25bbcdac17589f07
|
refs/heads/master
| 2021-04-15T08:03:25.095770
| 2018-03-25T04:48:10
| 2018-03-25T04:48:10
| 126,645,269
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,109
|
py
|
"""
Django settings for main project.
Generated by 'django-admin startproject' using Django 1.11.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# Project root: two directories above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository — rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'y%z(i@5nb77umfcc*yz^hskshft@6=^u=cn^z&fmt^_mr(1gck'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'apps.my_app'  # project-local application
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'main.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'main.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"jchoi625@gmail.com"
] |
jchoi625@gmail.com
|
cf1e3075185cefc817f86f6636ba6ca84b9a73ae
|
2ff7e53d5e512cd762217ca54317982e07a2bb0c
|
/eve/devtools/script/behaviortools/clientdebugadaptors.py
|
2a22a85a0875ed2b83664cddb9e4a59eb4130b2b
|
[] |
no_license
|
nanxijw/Clara-Pretty-One-Dick
|
66d3d69426642b79e8fd4cc8e0bec23adeeca6d6
|
50de3488a2140343c364efc2615cf6e67f152be0
|
refs/heads/master
| 2021-01-19T09:25:07.555284
| 2015-02-17T21:49:33
| 2015-02-17T21:49:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,637
|
py
|
#Embedded file name: eve/devtools/script/behaviortools\clientdebugadaptors.py
import logging
from brennivin.messenger import Messenger
import eve.common.script.net.eveMoniker as moniker
from eve.devtools.script.behaviortools.debugwindow import BehaviorDebugWindow
import uthread2
logger = logging.getLogger(__name__)
# Notify-event names used by the behavior-debugging machinery below.
EVENT_BEHAVIOR_DEBUG_UPDATE = 'OnBehaviorDebugUpdate'
EVENT_BEHAVIOR_DEBUG_CONNECT_REQUEST = 'OnBehaviorDebugConnectRequest'
EVENT_BEHAVIOR_DEBUG_DISCONNECT_REQUEST = 'OnBehaviorDebugDisconnectRequest'
class UpdateListener(object):
    """Routes behavior-debug notify events to per-entity debuggers.

    NOTE: `sm` (the notify/service manager) is a global provided by the game
    framework, not defined in this module.
    """

    def __init__(self):
        self.messenger = Messenger()
        # itemID -> ClientBehaviorDebugger for every connected debugger
        self.behaviorDebuggersByItemId = {}
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_UPDATE)
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_CONNECT_REQUEST)
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_DISCONNECT_REQUEST)

    def AddObserverForItemId(self, itemId, handler):
        # Clear any existing subscribers first so `handler` becomes the sole
        # observer for this item.
        if itemId in self.messenger.signalsByMessageName:
            self.messenger.signalsByMessageName[itemId].clear()
        self.messenger.SubscribeToMessage(itemId, handler)

    def RemoveObserverForItemId(self, itemId, handler):
        try:
            self.messenger.UnsubscribeFromMessage(itemId, handler)
        except:  # best-effort cleanup; failure is only logged
            logger.error('Failed to remove observer itemID=%s handler=%s', itemId, handler)

    def OnBehaviorDebugUpdate(self, itemID, *args, **kwargs):
        # Fan the server update out to whoever registered for this entity.
        self.messenger.SendMessage(itemID, *args, **kwargs)

    def TryConnectDebugger(self, itemID):
        try:
            debugger = ClientBehaviorDebugger(itemID)
            debugger.Connect()
            self.behaviorDebuggersByItemId[itemID] = debugger
        except:
            logger.exception('failed to connect to debugger for itemID=%s', itemID)

    def OnBehaviorDebugConnectRequest(self, itemIDs):
        # Connect in deterministic (sorted) order.
        itemIDs = sorted(itemIDs)
        for itemID in itemIDs:
            self.TryConnectDebugger(itemID)

    def TryDisconnectDebugger(self, itemID):
        try:
            debugger = self.behaviorDebuggersByItemId.pop(itemID)
            debugger.Disconnect()
        except:
            logger.exception('failed to disconnect to debugger for itemID=%s', itemID)

    def OnBehaviorDebugDisconnectRequest(self, itemIDs):
        for itemID in itemIDs:
            self.TryDisconnectDebugger(itemID)

    def HasDebugger(self, itemID):
        # True when a debugger is currently connected for this entity.
        return itemID in self.behaviorDebuggersByItemId
# Module-level singleton: starts listening for debug events at import time.
updateListener = UpdateListener()
class ClientBehaviorDebugger(object):
    """Client-side debugger for one entity's behavior tree.

    Opens a debug window, subscribes to server updates via the module-level
    updateListener, and tears everything down on disconnect.
    """

    def __init__(self, itemID):
        self.itemID = itemID
        self.tree = []
        self.treeMap = {}
        self.events = []
        self.debugWindow = None
        self.isConnected = False

    def Connect(self):
        """Enable server-side debugging for the entity and open the UI."""
        logger.debug('Debugger connecting to behavior of entity %s', self.itemID)
        updateListener.AddObserverForItemId(self.itemID, self.OnBehaviorDebugUpdate)
        entityLocation = moniker.GetEntityLocation()
        treeData = entityLocation.EnableBehaviorDebugging(self.itemID)
        self.isConnected = True
        # Build the window on a tasklet so Connect() itself does not block.
        uthread2.StartTasklet(self.SetupDebugTree, treeData)

    def Disconnect(self):
        """Stop server-side debugging and close the window (best-effort)."""
        logger.debug('Debugger disconnecting from behavior of entity %s', self.itemID)
        try:
            updateListener.RemoveObserverForItemId(self.itemID, self.OnBehaviorDebugUpdate)
            entityLocation = moniker.GetEntityLocation()
            entityLocation.DisableBehaviorDebugging(self.itemID)
            self.isConnected = False
            if self.debugWindow is not None:
                self.debugWindow.Close()
            sm.UnregisterForNotifyEvent(self, 'OnSessionChanged')
        except:
            logger.exception('Failed while disconnecting :(')

    def OnBehaviorDebugUpdate(self, events, taskStatuses, tasksSeen, blackboards, *args, **kwargs):
        # Updates may arrive before SetupDebugTree has created the window.
        if self.debugWindow is None:
            return
        self.debugWindow.LoadEvents(events)
        self.debugWindow.UpdateStatuses(taskStatuses)
        self.debugWindow.UpdateTasksSeen(tasksSeen)
        self.debugWindow.LoadBlackboard(blackboards)

    def SetupDebugTree(self, treeData):
        """Create the debug window for this entity and load the tree data."""
        self.debugWindow = BehaviorDebugWindow.Open(windowID='BehaviorDebugWindow_%d' % self.itemID)
        self.debugWindow.SetController(self)
        self.debugWindow.LoadBehaviorTree(treeData)
        sm.RegisterForNotifyEvent(self, 'OnSessionChanged')

    def IsConnected(self):
        return self.isConnected

    def OnSessionChanged(self, isRemote, sess, change):
        # Close the window when the player changes solar system.
        if 'solarsystemid2' in change:
            if self.debugWindow is not None:
                self.debugWindow.Close()
|
[
"billchang.e@gmail.com"
] |
billchang.e@gmail.com
|
b456868220f259dd731baa03574cdb1f809808e0
|
c446814f3eb63b24b33e0de31a5a401fcc18b771
|
/bs_converter/data_converter.py
|
a5898d2eda2f3869982e9b68d1735a63fb399b9e
|
[] |
no_license
|
ScriptonBasestar-toolbox/sb-tools-py2
|
c7a23215b6a392d0b156965329fa627d01378f03
|
ddc2cc6ce0eb34025e1911b66c39a7bd3743f570
|
refs/heads/master
| 2021-05-27T02:13:55.985419
| 2014-03-13T14:10:41
| 2014-03-13T14:10:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 284
|
py
|
__author__ = 'archmagece'
def to_number(num_str):
    """Convert *num_str* to a number.

    Returns 0 for empty or whitespace-only input, an int when the string
    parses as an integer, a float when it parses as a float, and None
    when it is not numeric at all.
    """
    # Bug fix: the original tested len(str().strip()) -- the length of a
    # freshly-created empty string -- which is always 0, so the function
    # unconditionally returned 0 and never looked at num_str.
    if len(str(num_str).strip()) == 0:
        return 0
    try:
        return int(num_str)
    except (TypeError, ValueError):
        try:
            return float(num_str)
        except (TypeError, ValueError):
            return None
|
[
"archmagece@gmail.com"
] |
archmagece@gmail.com
|
6383995e35ee51c384da1285d358de91724811e2
|
2432996ac1615cd36d61f0feeff8a359d2b438d8
|
/env/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-rdflib.py
|
1ef29499af98b492b37a7bc902fb9532e1abc901
|
[
"GPL-1.0-or-later",
"GPL-2.0-or-later",
"GPL-2.0-only",
"Apache-2.0"
] |
permissive
|
Parveshdhull/AutoTyper
|
dd65d53ece7c13fbc1ead7ce372947483e05e2e3
|
7fabb30e15b770d790b69c2e4eaf9bbf5a4d180c
|
refs/heads/main
| 2023-05-08T14:10:35.404160
| 2023-05-07T20:43:15
| 2023-05-07T20:43:15
| 315,415,751
| 26
| 18
|
Apache-2.0
| 2023-05-07T20:43:16
| 2020-11-23T19:13:05
|
Python
|
UTF-8
|
Python
| false
| false
| 539
|
py
|
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
from PyInstaller.utils.hooks import collect_submodules
# rdflib loads its parser/serializer plugins dynamically at runtime, so
# PyInstaller's static analysis cannot see them; collect every submodule
# of rdflib.plugins explicitly so they ship with the frozen app.
hiddenimports = collect_submodules('rdflib.plugins')
|
[
"parvesh.dhullmonu@gmail.com"
] |
parvesh.dhullmonu@gmail.com
|
e8c8c284cf95755e802be1ad7317c1c03acd6ab9
|
a5ef36079edf4571d954878558f231b624bbc3d8
|
/ch5_ifStatements/tryItYourself/2_tryitYourSelf_5-34567.py
|
426f6ad394e1d55dd4fc0ccfb04942e6e0240d44
|
[] |
no_license
|
kingzLoFitness/pythonCrashCourse
|
da82817a51103de4f5e9937eea25e8df3adbd7a5
|
fb27d31ab3b85e7b727dc17f7ce430f856ddbd79
|
refs/heads/main
| 2023-05-28T13:52:38.365000
| 2023-03-14T11:59:39
| 2023-03-14T11:59:39
| 116,973,174
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,075
|
py
|
# Try It Yourself -- Chapter 5 exercises (if statements).
# Fix-ups in this revision: corrected typos in the user-facing messages
# ("Congrate(s)" -> "Congrats", "poaints" -> "points", "pints" -> "points",
# mis-encoded "mango(R)") and replaced `a <= x and x < b` with chained
# comparisons; program flow and output structure are unchanged.
'''
5-3. Alien Colors #1: Imagine an alien was just shot down in a game. Create a variable called alien_color and assign it a value of 'green', 'yellow', or 'red'.
- Write an if statement to test whether the alien's color is green. If it is, print a message that the player just earned 5 points.
- Write one version of this program that passes the if test and another that fails. (The version that fails will have no output)
'''
alien_color = 'green'
if alien_color == 'green':
    print("Congrats player, you earned 5 points.")
if alien_color != 'red':
    print("Too bad you didn't get the red version of the Alien. That would've earned you more points.")
if alien_color == 'yellow':
    print("Wow, you just earned even more points for hitting up the yellow side of the Alien.")
print()
'''
5-4. Alien Color #2: Choose a color for an alien as you did in Exercise 5-3, and write an if-else chain.
- If the alien's color is green, print a statement that the player just earned 5 points for shooting the alien.
- If the alien color isn't green, print a statement that the player just earned 10 points.
- Write the version of this program that runs the if block and another that runs the else block.
'''
alien_color = 'red'
if alien_color == 'green':
    print("Congrats player, you earned 5 points for shooting the alien.")
else:
    print("You just earned 10 points on that red color shot of the alien.")
print()
alien_color = 'green'
if alien_color == 'green':
    print("Congrats player, you earned 5 points for shooting the alien.")
else:
    print("You just earned 10 points on that red color shot of the alien.")
'''
5-5. Alien Color #3: Turn your if-else chain from Exercise 5-4 into an if-elif-else chain.
- If the alien is green, print a message that the player earned 5 points.
- If the alien is yellow, print a message that the player earned 10 points.
- If the alien is red, print a message that the player earned 15 points.
- Write three versions of this program, making sure each message is printed for the appropriate color alien.
'''
print()
alien_color = 'red'
if alien_color == 'green':
    print("Congrats player, you earned 5 points for shooting the alien.")
elif alien_color == 'yellow':
    print("You just earned 10 points on that red color shot of the alien.")
elif alien_color == 'red':
    print('Hey Player, you just earned a whopping 15 points.')
print()
alien_color = 'yellow'
if alien_color == 'green':
    print("Congrats player, you earned 5 points for shooting the alien.")
elif alien_color == 'yellow':
    print("You just earned 10 points on that yellow color shot of the alien.")
elif alien_color == 'red':
    print('Hey Player, you just earned a whopping 15 points.')
print()
alien_color = 'green'
if alien_color == 'green':
    print("Congrats player, you earned 5 points for shooting the alien that was green.")
elif alien_color == 'yellow':
    print("You just earned 10 points on that yellow color shot of the alien.")
elif alien_color == 'red':
    print('Hey Player, you just earned a whopping 15 points on that red alien.')
print()
'''
5-6. Stage of Life: Write an if-elif-else chain that determines a person's stage of life. Set a value for the variable age, and then:
- If the person is less than 2 years old, print a message that the person is a baby.
- If the person is at least 2 years old but less than 4, print a message that the person is a toddler.
- If the person is at least 4 years old but less than 13, print a message that the person is a kid.
- If the person is at least 13 years old but less than 20, print a message that the person is a teenager.
- If the person is at least 20 years old but less than 65, print a message that the person is an adult.
- If the person is age 65 or older, print a message that the person is an elder.
'''
age = 42
if age < 2:
    print("Aww, you're just a baby.")
elif 2 <= age < 4:
    print("Hey little toddler.")
elif 4 <= age < 13:
    print("Hello, I see you're the age of a kid.")
elif 13 <= age < 20:
    print("Hey, I see you're a teenager.")
elif 20 <= age < 65:
    print("Hey person, I see you're at adult age.")
elif age >= 65:
    print("Welcome, elder.")
print()
'''
5-7. Favorite Fruit: Make a list of your favorite fruits, and then write a series of independent if statements that check for certain fruits in your list.
- Make a list of your three favorite fruits, and call it favorite_fruits
- Write five if statements. Each should check whether a certain kind of fruit is in your list. If the fruit is in your list, the if block should print a statement, such as You really like bananas!
'''
favorite_fruit = ['apple', 'orange', 'plum']
if 'apple' in favorite_fruit:
    print("Wow, your favorite fruit is apple.")
if 'orange' in favorite_fruit:
    print("Wow, your favorite fruit is orange.")
if 'plum' in favorite_fruit:
    print("Wow, your favorite fruit is plum.")
if 'pear' in favorite_fruit:
    print("Wow, your favorite fruit is pear.")
if 'mango' in favorite_fruit:
    print("Wow, your favorite fruit is mango.")
|
[
"kingzlofitness@gmail.com"
] |
kingzlofitness@gmail.com
|
688a128e075f30a70a6ea08efe7cec90924ed7f7
|
8a42f54d67b564f86c49bedebcda16f9612ef537
|
/nycc
|
7ff8147f8fbb5e99e78fbc66c6fa9238b73f6130
|
[] |
no_license
|
franzbertani/yeelightController
|
69c1818e8a4c02423bbae20d9fd0f5b0ae116f17
|
a59d8f85bad6e11d57744c4c6beba7fecc5b98ff
|
refs/heads/master
| 2020-03-14T11:36:52.833967
| 2018-06-08T11:51:20
| 2018-06-08T11:51:20
| 131,593,978
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,080
|
#!/usr/bin/env python3
import getopt
import json
import os.path
import sys
from os.path import expanduser, dirname
from os import listdir, makedirs
from yeelight import *
def usage():
    """Print command-line usage for the yeelight controller, one option per line."""
    option_lines = (
        "Usage:",
        "-i --init IP_ADDRESS",
        "-c --color HEX_COLOR or -t --temperature COLOR_TEMPERATURE; color settings override temperature",
        "-b --brightness BRIGHTNESS[0-100]",
        "-o --off",
        "-l --list-scenes list all scene presets",
        "-s --scene SCENE_NAME apply scene preset",
        "-j --save-json SCENE_NAME save current settings as new scene",
        "-r --reset",
    )
    for line in option_lines:
        print(line)
def create_default_file(ip_addr):
    """Write the default settings JSON for the bulb at *ip_addr*.

    Fills the module-level ``properties`` dict with the default
    brightness / color-temperature / RGB values, creates the config and
    scene directories if missing, and saves the defaults to
    DEFAULT_JSON_NAME.
    """
    import errno  # Bug fix: errno was referenced below but never imported anywhere in the file.
    properties['brightness'] = DEFAULT_BRIGHTNESS
    properties['color_rgb'] = DEFAULT_COLOR_RGB
    properties['color_temp'] = DEFAULT_COLOR_TEMP
    properties['ip_addr'] = ip_addr
    # The two directory-creation stanzas were identical except for the path;
    # fold them into one loop.
    for folder in (YEELIGHT_CONFIG_FOLDER, DEFAULT_SCENE_LOCATION):
        if not os.path.exists(os.path.dirname(folder)):
            try:
                os.makedirs(os.path.dirname(folder))
            except OSError as exc:
                # Another process may create the directory between the
                # exists() check and makedirs(); ignore only that race.
                if exc.errno != errno.EEXIST:
                    raise
    with open(DEFAULT_JSON_NAME, "w+") as outFile:
        json.dump(properties, outFile)
def reset():
    """Clear all user overrides by writing an empty user-settings JSON file."""
    global properties
    properties = {}
    with open(USER_JSON_NAME, "w") as outFile:
        json.dump(properties, outFile)

def save_user_file():
    """Persist the current user overrides to USER_JSON_NAME."""
    with open(USER_JSON_NAME, "w") as outFile:
        json.dump(user_properties, outFile)

def read_properties():
    """Load defaults then overlay user overrides into the module globals.

    Exits the process (status 2) if the default settings file is missing;
    a missing user file just means the defaults apply unchanged.
    """
    global properties
    global user_properties
    try:
        with open(DEFAULT_JSON_NAME, "r") as infile:
            properties = json.load(infile)
    except IOError:
        print("default settings file missing, baaad!\nRun with -i IP_ADDRESS")
        sys.exit(2)
    try:
        with open(USER_JSON_NAME, "r") as infile:
            user_properties = json.load(infile)
            # User values win over defaults, key by key.
            for k, v in user_properties.items():
                properties[k] = v
                print("Found user settings for " + k)
    except IOError:
        print("Default settings applied")

def read_scene(scene_name):
    """Overlay the named scene preset's values onto the current properties."""
    global properties
    try:
        with open(DEFAULT_SCENE_LOCATION + scene_name + ".json", "r") as infile:
            scene_properties = json.load(infile)
            for k, v in scene_properties.items():
                properties[k] = v
                print("Applied scene setting for " + k)
    except IOError:
        print("Scene file missing, no scene called " + scene_name)

def list_scenes():
    """Print every saved scene preset as "name - {settings}"."""
    for f in listdir(DEFAULT_SCENE_LOCATION):
        with open(DEFAULT_SCENE_LOCATION + f, 'r') as infile:
            scene_properties = json.load(infile)
            print(f.split('.')[0] + ' - ' + json.dumps(scene_properties))

def save_scene(scene_name):
    """Save the current properties as a named scene preset.

    Asks for confirmation before overwriting an existing scene file.
    """
    global properties
    scene_filename = DEFAULT_SCENE_LOCATION + scene_name + ".json"
    overwrite = True
    if os.path.isfile(scene_filename):
        answer = input("Scene " + scene_name + " already there, overwrite? y/n\n").lower()
        overwrite = answer == 'y' or answer == 'yes'
    if overwrite:
        try:
            with open(DEFAULT_SCENE_LOCATION + scene_name + ".json", "w") as outfile:
                json.dump(properties, outfile)
        except IOError:
            print("Error in saving the new scene JSON")

def apply_properties(bulb):
    """Push the current properties to the bulb: turn on, set brightness, then color.

    An explicit RGB color takes precedence; color temperature is used
    only when no RGB value is set.
    """
    bulb.turn_on()
    bulb.set_brightness(int(properties['brightness']))
    if properties['color_rgb']:
        bulb.set_rgb(properties['color_rgb'][0], properties[
            'color_rgb'][1], properties['color_rgb'][2])
    else:
        bulb.set_color_temp(int(properties['color_temp']))
def main():
    """Parse CLI options, apply them to the saved properties, and drive the bulb.

    Two passes over the options: the first handles mode-style flags
    (help / init / reset / list-scenes) that may exit early; the second
    applies setting overrides (color, temperature, brightness, scene)
    before the combined properties are pushed to the bulb.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:c:t:b:s:j:rol", [
            "help", "color", "temperature", "brightness", "scene", "save-json", "off", "reset", "list-scenes"])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(2)
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--init"):
            create_default_file(a)
        elif o in ("-r", "--reset"):
            reset()
        elif o in ("-l", "--list-scenes"):
            list_scenes()
            return
    read_properties()
    bulb = Bulb(properties['ip_addr'])
    updateUserFile = False
    for o, a in opts:
        if o in ("-c", "--color"):
            # Hex color like "ff8800" -> (r, g, b) components.
            red = int(a[0:2], 16)
            green = int(a[2:4], 16)
            blue = int(a[4:6], 16)
            user_properties['color_rgb'] = (red, green, blue)
            properties['color_rgb'] = (red, green, blue)
            updateUserFile = True
        elif o in ("-t", "--temperature"):
            user_properties['color_temp'] = a
            properties['color_temp'] = a
            updateUserFile = True
        elif o in ("-b", "--brightness"):
            user_properties['brightness'] = a
            properties['brightness'] = a
            updateUserFile = True
        elif o in ("-s", "--scene"):
            read_scene(a)
        elif o in ("-j", "--save-json"):
            save_scene(a)
            return
        elif o in ("-o", "--off"):
            bulb.turn_off()
            return
    if updateUserFile:
        print("Saving new user properties")
        save_user_file()
    apply_properties(bulb)

if __name__ == "__main__":
    # Default bulb settings and config-file locations; the directories are
    # created on demand by create_default_file().
    DEFAULT_BRIGHTNESS = "100"
    DEFAULT_COLOR_TEMP = "4411"
    DEFAULT_COLOR_RGB = []
    home = expanduser("~")
    YEELIGHT_CONFIG_FOLDER = home + "/.config/yeelight_controller/"
    DEFAULT_SCENE_LOCATION = YEELIGHT_CONFIG_FOLDER + "yeelight_scene/"
    DEFAULT_JSON_NAME = YEELIGHT_CONFIG_FOLDER + "default_yeelight_properties.json"
    USER_JSON_NAME = YEELIGHT_CONFIG_FOLDER + "user_yeelight_properties.json"
    properties = {}
    user_properties = {}
    main()
|
[
"franz.bertani@gmail.com"
] |
franz.bertani@gmail.com
|
|
a8530152d2ad2eb77244acfea224f23b463e6157
|
07ecd4aa29475b7fff8249e00666d3bd4474f6fb
|
/venv/bin/fonttools
|
953d52a3f7001fd88c40338e43e81ce0275574ce
|
[] |
no_license
|
davelab6/overlapping-axis-test
|
756d5e2cb40a6a1a01980b1f7e6ccca572ce3c2c
|
a7564b70e36c97141f3ef896c3412c93b3f406cb
|
refs/heads/master
| 2022-07-18T05:41:29.037691
| 2020-05-21T20:13:25
| 2020-05-21T20:13:25
| 266,013,570
| 1
| 0
| null | 2020-05-22T03:57:53
| 2020-05-22T03:57:52
| null |
UTF-8
|
Python
| false
| false
| 273
|
#!/Users/stephennixon/type-repos/overlapping-axis-test/venv/bin/python3
# -*- coding: utf-8 -*-
# Console-script shim generated by pip/setuptools: normalizes argv[0]
# (strips any "-script.py"/".exe" launcher suffix) and delegates to the
# fontTools command-line entry point.
import re
import sys
from fontTools.__main__ import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"arrowtypeco@gmail.com"
] |
arrowtypeco@gmail.com
|
|
5fa49bd76440f4bf39b4a40212e258923dd20e15
|
033c99e6c2dfdbb86ad45c156a94b2ac83672174
|
/com/maoyan/study/basic/basic5_if.py
|
b07be7b0823bd1af09e893dca7ab7867e091e9cb
|
[] |
no_license
|
dlnu123/python_study
|
6c326ee57349efd72fd40a46632d1e5ae07d27db
|
15b8bbe768a37d7138e210ed95d652fb59345dca
|
refs/heads/master
| 2020-07-08T04:18:08.907097
| 2019-09-26T07:33:10
| 2019-09-26T07:33:10
| 203,562,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
# Prompt for a birth year and classify it relative to the year 2000.
birth = input('birth: ')
if int(birth) < 2000:
    print('00前')  # born before 2000
else:
    print('00后')  # born in or after 2000
# Compute body-mass index (kg / m^2) and print the weight category.
height = 1.75
weight = 80.5
bmi = weight / height**2
if bmi < 18.5:
    print("过轻")  # underweight
elif 18.5 <= bmi < 25:
    print("正常")  # normal
elif bmi >= 25 and bmi < 28:
    print("过重")  # overweight
elif bmi >= 28 and bmi < 32:
    print("肥胖")  # obese
elif bmi >= 32:
    print("严重肥胖")  # severely obese
|
[
"wangliang30@maoyan.com"
] |
wangliang30@maoyan.com
|
4d407f6e2bc77a4f4273d0d427cba9b0bc1a9305
|
565ae0e4b864a79d661c7fa476c9f470b0662f89
|
/python/hw10/1000_rolls.py
|
4f38e8ebe140705cf4b898c98426afdf7070f9c9
|
[] |
no_license
|
chjlarson/Classwork
|
4a01e46bd22fb0aee1d58f2132e66d8306ebe39f
|
59bef084c0eeb643c24f6b4b2d5b536fdf788cf1
|
refs/heads/master
| 2021-07-11T22:44:50.048804
| 2017-10-10T20:51:10
| 2017-10-10T20:51:10
| 106,466,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 702
|
py
|
# Christopher Larson
# CSCI 238 Homework #10, Problem #2
# 1000_rolls.py
# 11/03/13
#
# This program rolls 2 die objects NUM_ROLLS times and counts how many times
# a sum of 2, 3, or 12 is rolled.
import dice

NUM_ROLLS = 1000


def main():
    """Roll a pair of dice NUM_ROLLS times and report how often the sum is 2, 3, or 12."""
    print('This program rolls a pair of dice repeatedly %d times'
          % NUM_ROLLS)
    total_rolls = 0
    # Bug fix: the loop used a literal 1000 instead of NUM_ROLLS.
    for _ in range(NUM_ROLLS):
        die1 = dice.Die()
        die2 = dice.Die()
        # Bug fix: the original statements were "die1.roll" / "die2.roll"
        # (no parentheses), which referenced the bound method without ever
        # calling it -- the dice were never rolled.
        die1.roll()
        die2.roll()
        # Renamed from "sum" to avoid shadowing the builtin.
        # NOTE(review): assumes dice.Die supports int() conversion to read
        # the face value -- confirm against the dice module.
        roll_sum = int(die1) + int(die2)
        if roll_sum in (2, 3, 12):
            total_rolls += 1
    print('Total number of rolls that equal 2, 3, or 12 within the %d rolls: %d'
          % (NUM_ROLLS, total_rolls))


main()
|
[
"chjlarson@gmail.com"
] |
chjlarson@gmail.com
|
18e808289e997474a40dcc860d3943fab7f4d180
|
4b73f0ac2f227d6b1f4482b381fd725b0b522f9f
|
/ansible_playbook_wrapper/__init__.py
|
faf1eee54fcc9868cf1cba6e1f660ea8d418466c
|
[
"MIT"
] |
permissive
|
succhiello/ansible-playbook-wrapper
|
c78fe76f9863adb4ff9dedf518dae1b67672dd20
|
6bb7181a30f9818d86b50ce2d916a13c673c0929
|
refs/heads/master
| 2020-05-20T10:59:16.258976
| 2013-09-01T13:29:46
| 2013-09-01T13:29:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 556
|
py
|
# -*- coding: utf-8 -*-
from argparse import ArgumentParser
from ansible_playbook_wrapper.command.play import PlayCommand
def main():
    """CLI entry point: parse arguments and dispatch to the chosen sub-command.

    Only the ``play`` sub-command is registered.  Each sub-parser stores a
    ``command_class`` default so the selected command can be instantiated
    and run generically from the parsed namespace.
    """
    parser = ArgumentParser()
    sub_parsers = parser.add_subparsers(help='commands')
    play_parser = sub_parsers.add_parser('play', help='play playbook')
    # Each ARGUMENT_INFO entry is a (positional_args, keyword_args) pair
    # for ArgumentParser.add_argument.
    for arg_info in PlayCommand.ARGUMENT_INFO:
        play_parser.add_argument(*(arg_info[0]), **(arg_info[1]))
    play_parser.set_defaults(command_class=PlayCommand)
    parsed_args = parser.parse_args()
    parsed_args.command_class(parsed_args).run()
|
[
"satoshi_ebihara@xica.net"
] |
satoshi_ebihara@xica.net
|
e4d908501733dc04ceeb22eb54cc350fb16fdbe7
|
37a6915af48fbd2f00bb61a2445a998bdbba0ba3
|
/homework03_B05505004.py
|
68cb1d1e1b2b1b85e41a31034654260a0648b986
|
[] |
no_license
|
bdalan/ESOE-CS101-2016
|
7e926f7054b59c83af64adb31ed3dc7935ab108a
|
fcfd9fe3567258aa41d3178dff10679b607fe045
|
refs/heads/master
| 2021-01-18T05:27:36.595688
| 2017-01-06T19:27:01
| 2017-01-06T19:27:01
| 68,806,006
| 1
| 0
| null | 2016-09-21T10:25:16
| 2016-09-21T10:25:16
| null |
UTF-8
|
Python
| false
| false
| 4,230
|
py
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# 繳交日期:2016.10.17
# 作業內容:
# 1. 請閱讀 Wikipedia 維基百科 IEEE754 條目 (https://zh.wikipedia.org/wiki/IEEE_754)
# 2. 請試玩 http://armorgames.com/play/17826/logical-element
# 3. 請利用以下空白範本設計一支程式。程式可輸入一段字串,並自動計算出字串中包括空白字元出現的機率。
# 並由高排到低。
from operator import itemgetter #operator module
def charFreqLister(string):
    """Return ``[(frequency, char), ...]`` for every distinct character in *string*,
    sorted by frequency from high to low (whitespace counts as a character).

    If *string* is empty, the user is prompted for one, preserving the
    original interactive behavior.
    """
    # Bug fix: the original unconditionally overwrote its parameter with
    # input(), so any argument passed by the caller was ignored.  Prompt
    # only when no string was supplied.
    if not string:
        string = input("請輸入一個字串:")
        print("確認輸入:", string)
    resultLIST = []
    string_length = len(string)
    for ch in string:
        resultLIST.append((string.count(ch) / string_length, ch))
    # Deduplicate repeated characters, then sort by descending frequency.
    resultLIST = list(set(resultLIST))
    resultLIST.sort(key=itemgetter(0), reverse=True)
    return resultLIST
string = ""
# Bug fix: the original printed charFreqLister(string).frequency, but the
# function returns a plain list, which has no .frequency attribute and
# would raise AttributeError at runtime.
print(charFreqLister(string))
''''
# 3.1 加分題 (有做有加分,沒做不扣分):請用課堂中提到的「霍夫曼編碼]
# (https://zh.wikipedia.org/wiki/霍夫曼編碼) 為你之前設計的
# 程式加上轉碼壓縮的功能。
# e.g.,
def huffmanTranslater(string):
inputlist=charFreqLister(string)
x=0
a=[]
b=[]
c=[]
for x in (range(len(inputlist))-1):
while len(inputlist)>0:
a[x]=min(inputlist,key=itemgetter(0))
inputlist.remove(a[x])
b[x]=min(inputlist,key=itemgetter(0))
inputlist.remove(a[x])
c[x]=
resultLIST = [(freq, char, code), (freq, char, code), (freq, char, code),...]
return resultLIST
# 4 請參考以下 condNOT() 的例子,設計四個 func() 依以下條件,能算出 condition02 ~ 04 的值
#condition00 not condition01
def condNOT(inputSTR_X):
outputSTR = ""
for i in inputSTR_X:
if i == "0":
outputSTR = outputSTR + "1"
else:
outputSTR = outputSTR + "0"
return outputSTR
#condition00 and condition02
def condAND(inputSTR_X, inputSTR_Y):
x=len(inputSTR_X)
a=0
outputSTR=""
while a <= (x-1):
if inputSTR_X[a] =="1" and inputSTR_Y[a]=="1":
outputSTR=outputSTR+"1"
else:
outputSTR=outputSTR+"0"
a+=1
return outputSTR
#condition00 or condition03
def condOR(inputSTR_X, inputSTR_Y):
outputSTR=""
for n in range(len(inputSTR_X)):
if int(inputSTR_X[n])+int(inputSTR_Y[n]) >= 1:
outputSTR=outputSTR+"1"
else:
outputSTR=outputSTR+"0"
return outputSTR
#condition00 xor condition04
def conXOR(inputSTR_X, inputSTR_Y):
outputSTR=""
for i in range(len(inputSTR_X)):
if int(inputSTR_X[i])+int(inputSTR_Y[i])>=1:
outputSTR=outputSTR+"0"
else:
outputSTR=outputSTR+"1"
return outputSTR
if __name__== "__main__":
condition00X = "010111001010100001100011"
condition00Y = "010000110001011100101001"
condition01 = condNOT(condition00X)
print(condition01)
# 5 請完成以下課本習題並將答案以字串型 (str or unicode) 填入。
print("Ans:")
Ch3P3_20a = "0100 0000 1110 0110 0000 0000 0000 0000"
Ch3P3_20b = "1100 0001 0100 1010 0100 0000 0000 0000"
Ch3P3_20c = "0100 0000 1100 1101 0000 0000 0000 0000"
Ch3P3_20d = "1011 1110 1100 0000 0000 0000 0000 0000"
print("========")
Ch3P3_28a = "234"
Ch3P3_28b = "overflow"
Ch3P3_28c = "874"
Ch3P3_28d = "888"
print("========")
Ch3P3_30a = "234"
Ch3P3_30b = "overflow"
Ch3P3_30c = "875"
Ch3P3_30d = "889"
print("========")
Ch4P4_3a = ""
Ch4P4_3b = ""
Ch4P4_3c = ""
Ch4P4_3d = ""
print("========")
Ch4P4_4a = ""
Ch4P4_4b = ""
Ch4P4_4c = ""
Ch4P4_4d = ""
print("========")
Ch4P4_13a = ""
Ch4P4_13b = ""
Ch4P4_13c = ""
Ch4P4_13d = ""
print("========")
Ch4P4_15a = ""
Ch4P4_15b = ""
Ch4P4_15c = ""
Ch4P4_15d = ""
print("========")
Ch4P4_16a = ""
Ch4P4_16b = ""
Ch4P4_16c = ""
Ch4P4_16d = ""
'''''
|
[
"james60708@gmail.com"
] |
james60708@gmail.com
|
bad0141d830a8379daeaf2e3cb693b8206344569
|
f539c6b009b9e7964e8cd7b44963afef7b36b2e5
|
/scripts/pose_aruco.py
|
e8378930bbe156d0c2d4ada6bf65384575e65d47
|
[] |
no_license
|
danielmessi13/pose_msgs
|
8144eaf1c838a3178babd2ddfc8327a38863a7b7
|
990f18a9737e740d13adf6d40f955bac94298724
|
refs/heads/master
| 2020-08-22T14:16:51.892139
| 2019-10-20T19:12:05
| 2019-10-20T19:12:05
| 216,413,082
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,690
|
py
|
#!/usr/bin/env python3
from pose_msgs.msg import TransformStampedCertainty
import rospy
from std_msgs.msg import Float64
from tf2_msgs.msg import TFMessage
import math
l_pose_x = 0
l_pose_y = 0
pose_x = 0
pose_y = 0
theta = 0
def callback(data):
    """Handle one /tf message and publish distance / steering commands.

    Transforms tagged "marker_id0" update the leader pose (and heading);
    any other frame updates the follower pose.  After every message the
    leader-follower distance is published on ``front_back`` and a
    proportional heading error on ``left_right``.
    """
    # print("ID: " + data.transforms[0].child_frame_id)
    # print("X: " + str(data.transforms[0].transform.translation.x))
    # print("Y: " + str(data.transforms[0].transform.translation.y))
    # print("W (Theta): " + str(data.transforms[0].transform.rotation.w))
    global l_pose_x
    global pose_x
    global l_pose_y
    global pose_y
    global theta
    if data.transforms[0].child_frame_id == "marker_id0":
        l_pose_x = data.transforms[0].transform.translation.x
        l_pose_y = data.transforms[0].transform.translation.y
        orientation = data.transforms[0].transform.rotation
        # theta is the yaw (Z), returned in radians by quaternion_to_euler.
        (roll, pitch, theta) = quaternion_to_euler(orientation.x, orientation.y, orientation.z, orientation.w)
        # Flip the sign of y -- presumably converts between the camera and
        # robot coordinate conventions; TODO confirm.
        if l_pose_y > 0:
            l_pose_y = (-1 * l_pose_y)
        else:
            l_pose_y = abs(l_pose_y)
    else:
        pose_x = data.transforms[0].transform.translation.x
        pose_y = data.transforms[0].transform.translation.y
        if pose_y > 0:
            pose_y = (-1 * pose_y)
        else:
            pose_y = abs(pose_y)
    # Proportional gain applied to the heading error before publishing.
    K_ANGLE = 20
    pub = rospy.Publisher('front_back', Float64, queue_size=512)
    pub2 = rospy.Publisher('left_right', Float64, queue_size=512)
    # Euclidean distance between leader and follower.
    distancia = abs(math.sqrt((l_pose_x - pose_x) ** 2 + (l_pose_y - pose_y) ** 2))
    # Bearing from follower to leader, in radians.
    arc_to_move = math.atan2(l_pose_y - pose_y, l_pose_x - pose_x)
    angle = (arc_to_move - theta) * K_ANGLE
    # print("Arco a se mover: " + str(arc_to_move))
    # print("Theta: " + str(theta))
    # print("Angulo: " + str(angle))
    # print(theta)
    pub.publish(Float64(distancia))
    pub2.publish(Float64(angle))
def listener_joy():
    """Initialize the ROS node, subscribe to /tf, and block until shutdown."""
    # Fix: the original assigned l_pose_x / l_pose_y / pose_x / pose_y here,
    # but without a `global` statement those were dead local variables --
    # the module-level globals used by callback() were never touched, and
    # they are already initialized to 0 at module scope.  The no-op
    # assignments are removed.
    rospy.init_node('pose_aruco', anonymous=True)
    rospy.Subscriber("/tf", TFMessage, callback)
    rospy.spin()
def quaternion_to_euler(x, y, z, w):
    """Convert a quaternion (x, y, z, w) to Euler angles.

    Returns (roll in degrees, pitch in degrees, yaw in RADIANS).
    NOTE(review): the mixed units are preserved deliberately -- callback()
    compares the yaw directly against math.atan2 output, which is radians.
    """
    # Removed the redundant function-local `import math`; the module
    # already imports math at the top.
    t0 = 2.0 * (w * x + y * z)
    t1 = 1.0 - 2.0 * (x * x + y * y)
    roll = math.degrees(math.atan2(t0, t1))
    t2 = 2.0 * (w * y - z * x)
    # Clamp to [-1, 1] so floating-point drift cannot push asin out of its domain.
    t2 = max(-1.0, min(1.0, t2))
    pitch = math.degrees(math.asin(t2))
    t3 = 2.0 * (w * z + x * y)
    t4 = 1.0 - 2.0 * (y * y + z * z)
    yaw = math.atan2(t3, t4)
    return roll, pitch, yaw
if __name__ == '__main__':
    try:
        listener_joy()
    except rospy.ROSInterruptException:
        # Raised by rospy when the node is shut down (e.g. Ctrl-C); exit quietly.
        pass
|
[
"noreply@github.com"
] |
noreply@github.com
|
10b3183e7d5be00c176ded1f3607756e2cd3966a
|
52fc2d772dd76ea0b727a81b27213bddd4a0efc8
|
/forum/settings.py
|
0fab39257bf33c0cd99b86311ec8f86cac579112
|
[] |
no_license
|
sqab/forum
|
a149524dc8dc38512bda13398fba45979026154a
|
d5c3a3e8728170603fd2e93cf6e6a2f10c0f4595
|
refs/heads/master
| 2020-04-02T00:49:41.980494
| 2018-10-19T18:11:18
| 2018-10-19T18:11:18
| 153,823,619
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,601
|
py
|
"""
Django settings for forum project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-)enmp@a3y^@@jb8c_0@k)g!-(b(kz@e*ggu)kwu6#7oy67cou'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'avatar',
'posts'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'forum.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'forum.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# Directory containing this settings module; static/media paths below are
# resolved relative to it rather than BASE_DIR.
_PATH = os.path.abspath(os.path.dirname(__file__))
# Where `collectstatic` gathers files for deployment.
STATIC_ROOT = os.path.join(_PATH, 'files', 'static')
STATIC_URL = '/static/'
# Additional source directories searched for static assets.
STATICFILES_DIRS = (
    os.path.join(_PATH, 'static'),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# User-uploaded files (e.g. avatars).
MEDIA_ROOT = os.path.join(_PATH, 'files', 'media')
MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Post-login / post-logout redirect targets for django.contrib.auth views.
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/accounts/login/'
|
[
"sqab169@gmail.com"
] |
sqab169@gmail.com
|
3a024094c06a4a9fd5e957e56d10e8ca361b9331
|
d6eb6561c706e374e34f950f7daba24e9cda97bc
|
/src/cnn_models/models/gan_models/gan_mnist.py
|
10b83086ba645901420124b4f998d1fd52634b49
|
[] |
no_license
|
filip141/ML_Models
|
8c472d0bb0d531b8b9487c8ef5ba08c5358eabe5
|
69527eca80108776341cd6eeffc8f73f0e2970c3
|
refs/heads/master
| 2021-09-05T09:13:45.645496
| 2018-01-25T23:49:36
| 2018-01-25T23:49:36
| 103,442,226
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,188
|
py
|
import logging
import numpy as np
from simple_network.models import GANScheme
from simple_network.layers import DeconvolutionLayer, FullyConnectedLayer, ConvolutionalLayer, ReshapeLayer, \
LeakyReluLayer, BatchNormalizationLayer, DropoutLayer, Flatten, TanhLayer, LinearLayer, MiniBatchDiscrimination, \
SpatialDropoutLayer, GlobalAveragePoolingLayer
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class GANNetwork(GANScheme):
    """DCGAN-style generator/discriminator pair for 28x28x1 (MNIST) images.

    Subclasses simple_network's GANScheme: this class only describes the
    two network architectures; training and optimization are handled by
    the scheme base class.
    """

    def __init__(self, generator_input_size, discriminator_input_size, log_path, batch_size, labels="none",
                 labels_size=10, use_dropout=False, global_avg=False):
        super(GANNetwork, self).__init__(generator_input_size, discriminator_input_size, log_path, batch_size,
                                         labels=labels, labels_size=labels_size)
        # use_dropout: insert (spatial) dropout layers in both networks.
        self.use_dropout = use_dropout
        # global_avg: use global average pooling instead of flattening
        # before the discriminator's dense head.
        self.global_avg = global_avg
        self.batch_size = batch_size

    def build_generator(self, generator):
        """Assemble the generator: dense projection to 7x7x256, then two
        stride-2 deconvolutions up to the 28x28x1 output with tanh activation."""
        generator.add(FullyConnectedLayer(out_neurons=12544, initializer="xavier",
                                          name='fully_connected_g_1'))
        generator.add(LeakyReluLayer(alpha=0.2, name="leaky_relu_g_1"))
        if self.use_dropout:
            generator.add(DropoutLayer(percent=0.5))
        # 12544 = 7 * 7 * 256
        generator.add(ReshapeLayer(output_shape=[7, 7, 256], name="reshape_g_1"))
        # Deconvolution 1
        generator.add(DeconvolutionLayer([4, 4, 64], output_shape=[14, 14, 64], initializer="xavier",
                                         name='deconv_layer_g_2', stride=2, batch_size=self.batch_size))
        generator.add(BatchNormalizationLayer(name="batch_normalization_g_2"))
        generator.add(LeakyReluLayer(alpha=0.2, name="leaky_relu_g_2"))
        if self.use_dropout:
            generator.add(SpatialDropoutLayer(percent=0.4, name="dropout_g_2"))
        # Deconvolution 2
        generator.add(DeconvolutionLayer([4, 4, 1], output_shape=self.generator_output_size, initializer="xavier",
                                         name='deconv_layer_g_3', stride=2, batch_size=self.batch_size))
        generator.add(TanhLayer(name="tanh_g_3"))

    def build_discriminator(self, discriminator):
        """Assemble the discriminator: two stride-2 convolutions, then a dense
        head ending in a single linear logit (loss applies its own sigmoid)."""
        # Convolutional 1
        discriminator.add(ConvolutionalLayer([4, 4, 64], initializer="xavier", name='convo_layer_d_1', stride=2))
        discriminator.add(BatchNormalizationLayer(name="batch_normalization_d_1"))
        discriminator.add(LeakyReluLayer(alpha=0.2, name="leaky_relu_d_1"))
        if self.use_dropout:
            discriminator.add(SpatialDropoutLayer(percent=0.4, name="dropout_d_1"))
        # Convolutional 2
        discriminator.add(ConvolutionalLayer([4, 4, 128], initializer="xavier", name='convo_layer_d_2', stride=2))
        discriminator.add(BatchNormalizationLayer(name="batch_normalization_d_2"))
        discriminator.add(LeakyReluLayer(alpha=0.2, name="leaky_relu_d_2"))
        if self.use_dropout:
            discriminator.add(SpatialDropoutLayer(percent=0.4, name="dropout_d_2"))
        # Dense 1
        if self.global_avg:
            discriminator.add(GlobalAveragePoolingLayer(name="global_avg_d"))
        else:
            discriminator.add(Flatten(name="flatten_d"))
        discriminator.add(FullyConnectedLayer(out_neurons=256, initializer="xavier", name='fully_connected_d_3'))
        discriminator.add(LeakyReluLayer(alpha=0.2, name="leaky_relu_d_3"))
        if self.use_dropout:
            discriminator.add(DropoutLayer(percent=0.5))
        # discriminator.add(MiniBatchDiscrimination(batch_size=self.batch_size))
        # Dense 2
        discriminator.add(FullyConnectedLayer(out_neurons=1, initializer="xavier", name='fully_connected_d_4'))
        discriminator.add(LinearLayer(name="linear_d_4"))
if __name__ == '__main__':
    from cnn_models.iterators.mnist import MNISTDataset
    # Grid search over dropout, GAN loss variant, and pooling choice; each
    # combination trains a fresh model and logs under its own directory.
    for drop_arg in [(True, "Drop"), (False, "NoDrop")]:
        for divergence_arg in [("kl-qp-loss", "KLQP"), ("feature-matching", "FMatch")]:
            for global_avg_arg in [(True, "GAvg"), (False, "NGAvg")]:
                # Skip the one combination that was presumably already trained.
                if drop_arg[1] == "Drop" and divergence_arg[1] == "KLQP" and global_avg_arg[1] == "GAvg":
                    continue
                mnist = MNISTDataset("/home/filip/Datasets", resolution="28x28", one_hot=True)
                gan = GANNetwork(generator_input_size=100, discriminator_input_size=[28, 28, 1],
                                 log_path="/home/filip/tensor_logs/GAN_MNIST_EXP/{}_BN_LSmooth_NonCond_{}_{}".format(
                                     divergence_arg[1], drop_arg[1], global_avg_arg[1]),
                                 batch_size=128, use_dropout=drop_arg[0], global_avg=global_avg_arg[0])
                gan.set_discriminator_optimizer("Adam", beta_1=0.5)
                gan.set_generator_optimizer("Adam", beta_1=0.5)
                gan.set_loss(divergence_arg[0], label_smooth=True)
                gan.model_compile(generator_learning_rate=0.0002, discriminator_learning_rate=0.00005)
                gan.train(mnist, generator_steps=1, discriminator_steps=1, train_step=128, epochs=15,
                          sample_per_epoch=391)
|
[
"filip141@gmail.com"
] |
filip141@gmail.com
|
e60c607287bab75ad3c8bd40437cacd67838444e
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/streamanalytics/v20200301/input.py
|
3a86e730433d5039270923b5be2f82279ac23e72
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924
| 2021-07-19T20:57:53
| 2021-07-19T20:57:53
| 387,815,163
| 0
| 0
|
Apache-2.0
| 2021-07-20T14:18:29
| 2021-07-20T14:18:28
| null |
UTF-8
|
Python
| false
| false
| 10,175
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['InputInitArgs', 'Input']
@pulumi.input_type
class InputInitArgs:
    # NOTE: generated by the Pulumi SDK Generator (see file header); hand
    # edits will be lost on regeneration.
    def __init__(__self__, *,
                 job_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 input_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]] = None):
        """
        The set of arguments for constructing a Input resource.
        :param pulumi.Input[str] job_name: The name of the streaming job.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] input_name: The name of the input.
        :param pulumi.Input[str] name: Resource name
        :param pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']] properties: The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        pulumi.set(__self__, "job_name", job_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional arguments are only recorded when supplied, so the engine
        # can distinguish "omitted" from an explicit None.
        if input_name is not None:
            pulumi.set(__self__, "input_name", input_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
    # Each property below is a getter/setter pair backed by pulumi.get/set;
    # @pulumi.getter(name=...) maps the snake_case attribute onto the
    # camelCase field name used by the Azure API.
    @property
    @pulumi.getter(name="jobName")
    def job_name(self) -> pulumi.Input[str]:
        """
        The name of the streaming job.
        """
        return pulumi.get(self, "job_name")
    @job_name.setter
    def job_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "job_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="inputName")
    def input_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the input.
        """
        return pulumi.get(self, "input_name")
    @input_name.setter
    def input_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "input_name", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]]:
        """
        The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        return pulumi.get(self, "properties")
    @properties.setter
    def properties(self, value: Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]]):
        pulumi.set(self, "properties", value)
class Input(pulumi.CustomResource):
    # NOTE: generated by the Pulumi SDK Generator (see file header); hand
    # edits will be lost on regeneration.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 input_name: Optional[pulumi.Input[str]] = None,
                 job_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        An input object, containing all information associated with the named input. All inputs are contained under a streaming job.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] input_name: The name of the input.
        :param pulumi.Input[str] job_name: The name of the streaming job.
        :param pulumi.Input[str] name: Resource name
        :param pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]] properties: The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: InputInitArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        An input object, containing all information associated with the named input. All inputs are contained under a streaming job.
        :param str resource_name: The name of the resource.
        :param InputInitArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    # Runtime dispatcher for the two typing overloads above: accepts either a
    # single InputInitArgs bundle or the flat keyword form, then forwards to
    # _internal_init.
    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(InputInitArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 input_name: Optional[pulumi.Input[str]] = None,
                 job_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": properties are then
        # read from the provider, so explicit __props__ are only valid there.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = InputInitArgs.__new__(InputInitArgs)
            __props__.__dict__["input_name"] = input_name
            if job_name is None and not opts.urn:
                raise TypeError("Missing required property 'job_name'")
            __props__.__dict__["job_name"] = job_name
            __props__.__dict__["name"] = name
            __props__.__dict__["properties"] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["type"] = None
        # Aliases keep state continuity with the same resource registered
        # under older package names / API versions.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:streamanalytics/v20200301:Input"), pulumi.Alias(type_="azure-native:streamanalytics:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics:Input"), pulumi.Alias(type_="azure-native:streamanalytics/v20160301:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics/v20160301:Input"), pulumi.Alias(type_="azure-native:streamanalytics/v20170401preview:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics/v20170401preview:Input")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Input, __self__).__init__(
            'azure-native:streamanalytics/v20200301:Input',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Input':
        """
        Get an existing Input resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Output properties are initialized to None; actual values are
        # populated by the engine from the looked-up resource state.
        __props__ = InputInitArgs.__new__(InputInitArgs)
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["type"] = None
        return Input(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[Optional[str]]:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output[Any]:
        """
        The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type
        """
        return pulumi.get(self, "type")
|
[
"noreply@github.com"
] |
noreply@github.com
|
e563ba0992313e308df0576b38087655e601304c
|
3a18da31e4870d660dbf18ff6fc56413e5a1239c
|
/syntaxerror.py
|
11a0bcdb9049143d9d8b0e0bb5d7045187e962a4
|
[] |
no_license
|
suvarnalaxmi/Industrialtraining_10Aug2021
|
04c5406275759271fd2a211aab232a55d5ade855
|
3ed06f3eaff1d6920c11c4132c052e98c21789f8
|
refs/heads/main
| 2023-07-28T20:59:29.457329
| 2021-09-15T12:46:38
| 2021-09-15T12:46:38
| 394,552,770
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
# Demonstration of handling a SyntaxError.
#
# Bug in the original: the malformed `if` statement (missing colon) was
# written directly in the file, so Python raised SyntaxError at *parse* time,
# before the try/except could ever run -- the whole script simply failed to
# load.  To actually catch a SyntaxError at runtime, the faulty code must be
# compiled dynamically (here via exec on a source string).
a = 10000
try:
    # Intentionally malformed: the `if` header lacks its trailing colon.
    exec('if(a > 2000)\n    print(a, "is greater than 2000")')
except SyntaxError:  # narrow handler instead of the original bare `except`
    print("This is syntax error")
|
[
"suvarnalaxmi25@gmail.com"
] |
suvarnalaxmi25@gmail.com
|
5a18ee4bd9a21f0a951f474c8837dc8a9b3f6414
|
551d781ca39059a756c85297071413532a31fbb3
|
/Basic-IO.py
|
50a89c0ba038b7991bd9e46312ec8469d4cf64bb
|
[] |
no_license
|
VDLE/Test-Location
|
f9484f10d2041efdf3306c978056e48564a9d47c
|
7f62c5338a005acfd6a0316c2dc73b62923152e8
|
refs/heads/master
| 2020-03-11T23:33:11.902992
| 2018-04-20T07:41:39
| 2018-04-20T07:41:39
| 130,324,854
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27
|
py
|
import csv
# TODO: implement CSV read/write logic using the csv module.
|
[
"vdacle@gmail.com"
] |
vdacle@gmail.com
|
4ac21bfa5c8507abc4cb0dd4aba87afea74a988a
|
1da112d29c98d7ed16d1eadff407a3801f8e9085
|
/modeling/backbone_all/common.py
|
3df13a2e92ef75e0eae067c53144d0c43f16087d
|
[] |
no_license
|
Kelvin-001/Semantic-segmentation
|
80529d5167810dcf073f1611cf311a8c61f6d71f
|
3a57bb7049ba9a50729f3d3121aa079d2bc2affb
|
refs/heads/main
| 2023-03-22T12:29:51.432593
| 2021-03-17T14:21:11
| 2021-03-17T14:21:11
| 348,682,887
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,038
|
py
|
import math
from inspect import isfunction
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, ReLU, Sigmoid, Dropout, MaxPool2d, \
AdaptiveAvgPool2d, Sequential, Module
from collections import namedtuple
class Swish(nn.Module):
    """Swish activation: f(x) = x * sigmoid(x) (Ramachandran et al., 2017)."""
    def forward(self, x):
        return torch.sigmoid(x).mul(x)
class HSigmoid(nn.Module):
    """Hard sigmoid: the piecewise-linear approximation relu6(x + 3) / 6."""
    def forward(self, x):
        shifted = x + 3.0
        return F.relu6(shifted, inplace=True) / 6.0
class HSwish(nn.Module):
    """Hard swish: x * relu6(x + 3) / 6, a cheap approximation of Swish.

    Parameters
    ----------
    inplace : bool
        Whether the inner relu6 may operate in place on its temporary input.
    """
    def __init__(self, inplace=False):
        super(HSwish, self).__init__()
        self.inplace = inplace
    def forward(self, x):
        gate = F.relu6(x + 3.0, inplace=self.inplace)
        return x * gate / 6.0
def get_activation_layer(activation):
    """Instantiate an activation layer from a factory, a name, or a module.

    Parameters
    ----------
    activation : callable, str, or nn.Module
        A zero-argument factory (called to produce the layer), the name of a
        supported activation ("relu", "relu6", "swish", "hswish", "sigmoid",
        "hsigmoid", "identity"), or an already-constructed module (returned
        unchanged).

    Returns
    -------
    nn.Module
        The activation layer instance.

    Raises
    ------
    NotImplementedError
        For an unrecognized activation name.

    NOTE(review): this function is redefined later in this module with a
    different signature ``(activation, out_channels)``; the later definition
    shadows this one at import time -- confirm which one callers intend.
    """
    assert (activation is not None)
    if isfunction(activation):
        return activation()
    elif isinstance(activation, str):
        if activation == "relu":
            return nn.ReLU(inplace=True)
        elif activation == "relu6":
            return nn.ReLU6(inplace=True)
        elif activation == "swish":
            return Swish()
        elif activation == "hswish":
            return HSwish(inplace=True)
        elif activation == "sigmoid":
            return nn.Sigmoid()
        elif activation == "hsigmoid":
            return HSigmoid()
        elif activation == "identity":
            # Fix: the original returned `Identity()`, a name neither defined
            # nor imported anywhere in this module, which raised NameError.
            # nn.Identity provides the intended no-op layer.
            return nn.Identity()
        else:
            raise NotImplementedError()
    else:
        assert (isinstance(activation, nn.Module))
        return activation
class InterpolationBlock(nn.Module):
    """Resolution-changing block wrapping F.interpolate.

    Parameters
    ----------
    scale_factor : float or tuple
        Multiplier applied to the spatial dimensions.
    mode : str
        Interpolation algorithm (default "bilinear").
    align_corners : bool
        Forwarded to F.interpolate.
    """
    def __init__(self,
                 scale_factor,
                 mode="bilinear",
                 align_corners=True):
        super(InterpolationBlock, self).__init__()
        self.scale_factor = scale_factor
        self.mode = mode
        self.align_corners = align_corners
    def forward(self, x):
        return F.interpolate(input=x,
                             scale_factor=self.scale_factor,
                             mode=self.mode,
                             align_corners=self.align_corners)
    def __repr__(self):
        template = "{name}(scale_factor={scale_factor}, mode={mode}, align_corners={align_corners})"
        return template.format(name=self.__class__.__name__,
                               scale_factor=self.scale_factor,
                               mode=self.mode,
                               align_corners=self.align_corners)
    def calc_flops(self, x):
        # FLOP estimate assumes batch size 1: ~9 ops per input element for
        # bilinear, ~4 for the other modes; no MACs (no learned weights).
        assert (x.shape[0] == 1)
        per_element = 9 if self.mode == "bilinear" else 4
        return per_element * x.numel(), 0
class IBN(nn.Module):
    """Instance-Batch Normalization layer (IBN-Net style).

    Splits the channel dimension into two parts and normalizes one with
    InstanceNorm2d (affine) and the other with BatchNorm2d.

    Parameters
    ----------
    channels : int
        Total number of input channels.
    first_fraction : float
        Fraction of channels assigned to the first split.
    inst_first : bool
        If True the first split gets instance norm, otherwise batch norm.
    """
    def __init__(self,
                 channels,
                 first_fraction=0.5,
                 inst_first=True):
        super(IBN, self).__init__()
        self.inst_first = inst_first
        first_count = int(math.floor(channels * first_fraction))
        second_count = channels - first_count
        self.split_sections = [first_count, second_count]
        # Submodule creation order matches the split assignment so the
        # state_dict layout follows the data path.
        if self.inst_first:
            self.inst_norm = nn.InstanceNorm2d(
                num_features=first_count,
                affine=True)
            self.batch_norm = nn.BatchNorm2d(num_features=second_count)
        else:
            self.batch_norm = nn.BatchNorm2d(num_features=first_count)
            self.inst_norm = nn.InstanceNorm2d(
                num_features=second_count,
                affine=True)
    def forward(self, x):
        part1, part2 = torch.split(x, split_size_or_sections=self.split_sections, dim=1)
        # .contiguous() because torch.split returns views.
        if self.inst_first:
            out1 = self.inst_norm(part1.contiguous())
            out2 = self.batch_norm(part2.contiguous())
        else:
            out1 = self.batch_norm(part1.contiguous())
            out2 = self.inst_norm(part2.contiguous())
        return torch.cat((out1, out2), dim=1)
class Flatten(Module):
    """Reshape an (N, ...) input to (N, -1), keeping the batch dimension."""
    def forward(self, input):
        batch = input.size(0)
        return input.view(batch, -1)
def l2_norm(input, axis=1):
    """Scale *input* to unit L2 norm along *axis* (norm is kept broadcastable)."""
    magnitude = torch.norm(input, 2, axis, True)
    return torch.div(input, magnitude)
class SEModule(Module):
    """Squeeze-and-Excitation channel attention (Hu et al., 2018).

    Parameters
    ----------
    channels : int
        Number of input/output channels.
    reduction : int
        Bottleneck reduction ratio for the two 1x1 convolutions.
    """
    def __init__(self, channels, reduction):
        super(SEModule, self).__init__()
        self.avg_pool = AdaptiveAvgPool2d(1)
        self.fc1 = Conv2d(channels, channels // reduction, kernel_size=1, padding=0, bias=False)
        # Xavier init applied only to the squeeze conv, matching the original
        # author's choice; fc2 keeps the Conv2d default init.
        nn.init.xavier_uniform_(self.fc1.weight.data)
        self.relu = ReLU(inplace=True)
        self.fc2 = Conv2d(channels // reduction, channels, kernel_size=1, padding=0, bias=False)
        self.sigmoid = Sigmoid()
    def forward(self, x):
        # Squeeze: global average pool; excite: bottleneck MLP + sigmoid gate.
        gate = self.avg_pool(x)
        gate = self.relu(self.fc1(gate))
        gate = self.sigmoid(self.fc2(gate))
        return x * gate
class bottleneck_IR(Module):
    """Improved-residual bottleneck block (ArcFace-style) without SE.

    Parameters
    ----------
    in_channel : int
        Input channels.
    depth : int
        Output channels of the residual branch.
    stride : int
        Spatial stride applied by the second 3x3 convolution.
    """
    def __init__(self, in_channel, depth, stride=1):
        super(bottleneck_IR, self).__init__()
        # Identity-style shortcut (1x1 max-pool carries the stride) when the
        # channel counts match; otherwise a strided 1x1 projection + BN.
        if in_channel == depth:
            self.shortcut_layer = MaxPool2d(1, stride)
        else:
            self.shortcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False), BatchNorm2d(depth))
        # BN -> 3x3 conv -> PReLU -> strided 3x3 conv -> BN residual branch.
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth))
    def forward(self, x):
        identity = self.shortcut_layer(x)
        residual = self.res_layer(x)
        return residual + identity
class bottleneck_IR_SE(Module):
    """Improved-residual bottleneck with a Squeeze-and-Excitation gate on the
    residual branch (ArcFace SE-IR block).

    Parameters
    ----------
    in_channel : int
        Input channels.
    depth : int
        Output channels of the residual branch.
    stride : int
        Spatial stride of the second 3x3 convolution.
    """
    def __init__(self, in_channel, depth, stride=1):
        super(bottleneck_IR_SE, self).__init__()
        # Identity-style shortcut when shapes match, otherwise a projection.
        if in_channel == depth:
            self.shortcut_layer = MaxPool2d(1, stride)
        else:
            self.shortcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False),
                BatchNorm2d(depth))
        # Same trunk as bottleneck_IR, with SE attention appended.
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth),
            SEModule(depth, 16)
        )
    def forward(self, x):
        identity = self.shortcut_layer(x)
        residual = self.res_layer(x)
        return residual + identity
from torch.nn.modules.utils import _pair
class SplAtConv2d(nn.Module):
    """Split-Attention Conv2d (ResNeSt-style block).

    Produces ``channels * radix`` feature maps, splits them into ``radix``
    groups, derives a per-group attention from a globally pooled descriptor,
    and returns the attention-weighted sum of the groups.

    Key parameters:
      in_channels / channels -- input channels and per-radix output channels.
      radix -- number of splits; radix == 1 degenerates to sigmoid gating.
      reduction_factor -- bottleneck reduction for the attention MLP
          (width floored at 32 channels).
      rectify / rectify_avg -- use the external rfconv.RFConv2d instead of
          Conv2d when rectify is set and padding is non-zero.
      norm_layer -- normalization constructor (e.g. nn.BatchNorm2d) or None.
      dropblock_prob -- if > 0, applies DropBlock2D after the first norm.
    """
    def __init__(self, in_channels, channels, kernel_size, stride=(1, 1), padding=(0, 0),
                 dilation=(1, 1), groups=1, bias=True,
                 radix=2, reduction_factor=4,
                 rectify=False, rectify_avg=False, norm_layer=None,
                 dropblock_prob=0.0, **kwargs):
        super(SplAtConv2d, self).__init__()
        padding = _pair(padding)
        # Rectified conv only matters when there is actual padding.
        self.rectify = rectify and (padding[0] > 0 or padding[1] > 0)
        self.rectify_avg = rectify_avg
        # Attention bottleneck width, floored at 32 channels.
        inter_channels = max(in_channels*radix//reduction_factor, 32)
        self.radix = radix
        self.cardinality = groups
        self.channels = channels
        self.dropblock_prob = dropblock_prob
        if self.rectify:
            # Deferred import: rfconv is an optional external dependency.
            from rfconv import RFConv2d
            self.conv = RFConv2d(in_channels, channels*radix, kernel_size, stride, padding, dilation,
                                 groups=groups*radix, bias=bias, average_mode=rectify_avg, **kwargs)
        else:
            self.conv = Conv2d(in_channels, channels*radix, kernel_size, stride, padding, dilation,
                               groups=groups*radix, bias=bias, **kwargs)
        self.use_bn = norm_layer is not None
        if self.use_bn:
            self.bn0 = norm_layer(channels*radix)
        self.relu = ReLU(inplace=True)
        # fc1/fc2 form the grouped 1x1-conv attention MLP.
        self.fc1 = Conv2d(channels, inter_channels, 1, groups=self.cardinality)
        if self.use_bn:
            self.bn1 = norm_layer(inter_channels)
        self.fc2 = Conv2d(inter_channels, channels*radix, 1, groups=self.cardinality)
        if dropblock_prob > 0.0:
            self.dropblock = DropBlock2D(dropblock_prob, 3)
        self.rsoftmax = rSoftMax(radix, groups)
    def forward(self, x):
        x = self.conv(x)
        if self.use_bn:
            x = self.bn0(x)
        if self.dropblock_prob > 0.0:
            x = self.dropblock(x)
        x = self.relu(x)
        batch, rchannel = x.shape[:2]
        # Sum the radix splits to build the global descriptor ("gap").
        if self.radix > 1:
            splited = torch.split(x, rchannel//self.radix, dim=1)
            gap = sum(splited)
        else:
            gap = x
        gap = F.adaptive_avg_pool2d(gap, 1)
        gap = self.fc1(gap)
        if self.use_bn:
            gap = self.bn1(gap)
        gap = self.relu(gap)
        atten = self.fc2(gap)
        # rSoftMax: softmax over the radix dimension (sigmoid when radix==1).
        atten = self.rsoftmax(atten).view(batch, -1, 1, 1)
        # Attention-weighted sum of the splits.
        if self.radix > 1:
            attens = torch.split(atten, rchannel//self.radix, dim=1)
            out = sum([att*split for (att, split) in zip(attens, splited)])
        else:
            out = atten * x
        return out.contiguous()
class rSoftMax(nn.Module):
    """Radix-wise softmax used by split attention: softmax across the radix
    dimension when radix > 1, a plain sigmoid otherwise.

    Parameters
    ----------
    radix : int
        Number of feature-map splits.
    cardinality : int
        Number of convolution groups.
    """
    def __init__(self, radix, cardinality):
        super().__init__()
        self.radix = radix
        self.cardinality = cardinality
    def forward(self, x):
        batch = x.size(0)
        if self.radix == 1:
            return torch.sigmoid(x)
        # (batch, cardinality, radix, -1) -> softmax over the radix axis.
        grouped = x.view(batch, self.cardinality, self.radix, -1).transpose(1, 2)
        weights = F.softmax(grouped, dim=1)
        return weights.reshape(batch, -1)
class DropBlock2D(nn.Module):
    """DropBlock regularization (Ghiasi et al., 2018) with a linear
    drop-probability schedule.

    During training, contiguous ``block_size`` x ``block_size`` regions of the
    feature map are zeroed and the surviving activations are rescaled so the
    expected magnitude is preserved.

    Parameters
    ----------
    drop_prob : float
        Target probability of dropping an activation.
    block_size : int
        Side length of the square dropped regions.
    share_channel : bool
        If True, one spatial mask is shared across all channels.
    """
    def __init__(self, drop_prob, block_size, share_channel=False):
        super(DropBlock2D, self).__init__()
        # 'i' counts forward steps and 'drop_prob' is the current probability;
        # both are buffers so they follow .to(device) moves (but are not
        # persisted -- see _save_to_state_dict).
        self.register_buffer('i', torch.zeros(1, dtype=torch.int64))
        self.register_buffer('drop_prob', drop_prob * torch.ones(1, dtype=torch.float32))
        self.inited = False
        self.step_size = 0.0
        self.start_step = 0
        self.nr_steps = 0
        self.block_size = block_size
        self.share_channel = share_channel
    def reset(self):
        """stop DropBlock"""
        self.inited = True
        self.i[0] = 0
        # Fix: the original did `self.drop_prob = 0.0`, assigning a plain
        # float to a *registered buffer*, which raises TypeError inside
        # nn.Module.__setattr__ (buffers may only be rebound to tensors).
        # Writing in place zeroes the probability and keeps the buffer.
        self.drop_prob[0] = 0.0
    def reset_steps(self, start_step, nr_steps, start_value=0, stop_value=None):
        """Configure a linear ramp of drop_prob from start_value to stop_value
        over nr_steps forward calls, beginning after start_step."""
        self.inited = True
        stop_value = self.drop_prob.item() if stop_value is None else stop_value
        self.i[0] = 0
        self.drop_prob[0] = start_value
        self.step_size = (stop_value - start_value) / nr_steps
        self.nr_steps = nr_steps
        self.start_step = start_step
    def forward(self, x):
        # Inference (or drop_prob == 0) is a no-op.
        if not self.training or self.drop_prob.item() == 0.:
            return x
        else:
            self.step()
            # get gamma value
            gamma = self._compute_gamma(x)
            # sample mask and place on input device
            # NOTE(review): `rand(...) < gamma` yields a bool mask; recent
            # torch versions require a float cast before max_pool2d --
            # confirm against the torch version this project targets.
            if self.share_channel:
                mask = (torch.rand(*x.shape[2:], device=x.device, dtype=x.dtype) < gamma).unsqueeze(0).unsqueeze(0)
            else:
                mask = (torch.rand(*x.shape[1:], device=x.device, dtype=x.dtype) < gamma).unsqueeze(0)
            # compute block mask
            block_mask, keeped = self._compute_block_mask(mask)
            # apply block mask
            out = x * block_mask
            # scale output so the expected activation magnitude is unchanged
            out = out * (block_mask.numel() / keeped).to(out)
            return out
    def _compute_block_mask(self, mask):
        # Dilate each dropped seed into a block_size x block_size region,
        # then invert: 1 = keep, 0 = drop.
        block_mask = F.max_pool2d(mask,
                                  kernel_size=(self.block_size, self.block_size),
                                  stride=(1, 1),
                                  padding=self.block_size // 2)
        keeped = block_mask.numel() - block_mask.sum().to(torch.float32)
        block_mask = 1 - block_mask
        return block_mask, keeped
    def _compute_gamma(self, x):
        # Seed probability adjusted so the *expected* dropped fraction equals
        # drop_prob, accounting for block overlap at the borders (paper eq.).
        _, c, h, w = x.size()
        gamma = self.drop_prob.item() / (self.block_size ** 2) * (h * w) / \
            ((w - self.block_size + 1) * (h - self.block_size + 1))
        return gamma
    def step(self):
        # Advance the linear schedule by one forward call.
        assert self.inited
        idx = self.i.item()
        if idx > self.start_step and idx < self.start_step + self.nr_steps:
            self.drop_prob += self.step_size
        self.i += 1
    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        # Schedule buffers are not saved (see _save_to_state_dict), so supply
        # defaults for any that are absent from the incoming state dict.
        idx_key = prefix + 'i'
        drop_prob_key = prefix + 'drop_prob'
        if idx_key not in state_dict:
            state_dict[idx_key] = torch.zeros(1, dtype=torch.int64)
        # Fix: the original tested `idx_key not in drop_prob_key` -- a
        # substring check against the *key string*, which is always True and
        # therefore clobbered any drop_prob present in the state dict.  The
        # intended check is for the key's presence in state_dict.
        if drop_prob_key not in state_dict:
            state_dict[drop_prob_key] = torch.ones(1, dtype=torch.float32)
        super(DropBlock2D, self)._load_from_state_dict(
            state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs)
    def _save_to_state_dict(self, destination, prefix, keep_vars):
        """overwrite save method"""
        # Intentionally empty: the schedule state is transient and should not
        # be persisted in checkpoints.
        pass
    def extra_repr(self):
        return 'drop_prob={}, step_size={}'.format(self.drop_prob, self.step_size)
def reset_dropblock(start_step, nr_steps, start_value, stop_value, m):
    """Reconfigure the linear DropBlock schedule on *m* when it is a
    DropBlock2D instance; intended for use with nn.Module.apply."""
    if isinstance(m, DropBlock2D):
        m.reset_steps(start_step, nr_steps, start_value=start_value, stop_value=stop_value)
def get_activation_layer(activation, out_channels):
    """
    Create activation layer from string/function.

    Parameters
    ----------
    activation : callable or str
        A zero-argument factory (called to produce the layer) or one of the
        names "relu", "relu6", "sigmoid", "prelu".
    out_channels : int
        Channel count, used only by PReLU (one learned slope per channel).

    Returns
    -------
    nn.Module
        The activation layer instance.

    Raises
    ------
    NotImplementedError
        For an unrecognized activation name.

    NOTE(review): this definition shadows the earlier get_activation_layer
    in this module; callers of the earlier signature should be audited.
    """
    assert (activation is not None)
    # Fix: this redefinition dropped the callable branch the earlier
    # definition had, so ConvBlock and the conv*_block helpers -- whose
    # default activation is ``lambda: nn.ReLU(inplace=True)`` -- raised
    # NotImplementedError.  Accept zero-argument factories again.
    if isfunction(activation):
        return activation()
    if activation == "relu":
        return nn.ReLU(inplace=True)
    elif activation == "relu6":
        return nn.ReLU6(inplace=True)
    elif activation == "sigmoid":
        return nn.Sigmoid()
    elif activation == "prelu":
        return nn.PReLU(out_channels)
    else:
        raise NotImplementedError()
class ConvBlock(nn.Module):
    """
    Standard convolution block with Batch normalization and activation.

    Applies optional explicit zero-padding (when *padding* is a 4-tuple),
    then Conv2d -> optional BatchNorm2d -> optional activation.
    """
    def __init__(self, in_channels, out_channels, kernel_size, stride, padding,
                 dilation=1, groups=1, bias=False, use_bn=True, bn_eps=1e-5,
                 activation=(lambda: nn.ReLU(inplace=True))):
        super(ConvBlock, self).__init__()
        self.activate = (activation is not None)
        self.use_bn = use_bn
        # A 4-element padding means asymmetric padding, which Conv2d cannot
        # express directly -> use an explicit ZeroPad2d stage instead.
        self.use_pad = (isinstance(padding, (list, tuple)) and (len(padding) == 4))
        if self.use_pad:
            self.pad = nn.ZeroPad2d(padding=padding)
            padding = 0
        self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
                              stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)
        if self.use_bn:
            self.bn = nn.BatchNorm2d(num_features=out_channels, eps=bn_eps)
        if self.activate:
            self.activ = get_activation_layer(activation, out_channels)
    def forward(self, x):
        out = self.pad(x) if self.use_pad else x
        out = self.conv(out)
        if self.use_bn:
            out = self.bn(out)
        return self.activ(out) if self.activate else out
def conv1x1_block(in_channels, out_channels, stride=1, padding=0, groups=1,
                  bias=False, use_bn=True, bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """Factory for a 1x1 (pointwise) ConvBlock."""
    shared = dict(in_channels=in_channels, out_channels=out_channels,
                  stride=stride, padding=padding, groups=groups, bias=bias,
                  use_bn=use_bn, bn_eps=bn_eps, activation=activation)
    return ConvBlock(kernel_size=1, **shared)
def conv3x3_block(in_channels, out_channels, stride=1, padding=1, dilation=1, groups=1,
                  bias=False, use_bn=True, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    """Factory for a 3x3 ConvBlock."""
    shared = dict(in_channels=in_channels, out_channels=out_channels,
                  stride=stride, padding=padding, dilation=dilation, groups=groups,
                  bias=bias, use_bn=use_bn, bn_eps=bn_eps, activation=activation)
    return ConvBlock(kernel_size=3, **shared)
def dwconv_block(in_channels, out_channels, kernel_size, stride=1, padding=1, dilation=1,
                 bias=False, use_bn=True, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    """Factory for a depthwise ConvBlock (groups == out_channels)."""
    shared = dict(in_channels=in_channels, out_channels=out_channels,
                  stride=stride, padding=padding, dilation=dilation,
                  bias=bias, use_bn=use_bn, bn_eps=bn_eps, activation=activation)
    return ConvBlock(kernel_size=kernel_size, groups=out_channels, **shared)
def dwconv3x3_block(in_channels, out_channels, stride=1, padding=1, dilation=1,
                    bias=False, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    """Factory for a 3x3 depthwise ConvBlock."""
    return dwconv_block(in_channels=in_channels, out_channels=out_channels, kernel_size=3,
                        stride=stride, padding=padding, dilation=dilation, bias=bias,
                        bn_eps=bn_eps, activation=activation)
def dwconv5x5_block(in_channels, out_channels, stride=1, padding=2, dilation=1,
                    bias=False, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    """Factory for a 5x5 depthwise ConvBlock."""
    return dwconv_block(in_channels=in_channels, out_channels=out_channels, kernel_size=5,
                        stride=stride, padding=padding, dilation=dilation, bias=bias,
                        bn_eps=bn_eps, activation=activation)
class SEBlock(nn.Module):
def __init__(self, channels, reduction=16, round_mid=False, use_conv=True,
mid_activation=(lambda: nn.ReLU(inplace=True)),
out_activation=(lambda: nn.Sigmoid())):
super(SEBlock, self).__init__()
self.use_conv = use_conv
mid_channels = channels // reduction if not round_mid else round_channels(float(channels) / reduction)
self.pool = nn.AdaptiveAvgPool2d(output_size=1)
if use_conv:
self.conv1 = nn.Conv2d(in_channels=channels, out_channels=mid_channels, kernel_size=1,
stride=1, groups=1, bias=True)
else:
self.fc1 = nn.Linear(in_features=channels, out_features=mid_channels)
self.activ = nn.ReLU(inplace=True)
if use_conv:
self.conv2 = nn.Conv2d(in_channels=mid_channels, out_channels=channels, kernel_size=1,
stride=1, groups=1, bias=True)
else:
self.fc2 = nn.Linear(in_features=mid_channels, out_features=channels)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
w = self.pool(x)
if not self.use_conv:
w = w.view(x.size(0), -1)
w = self.conv1(w) if self.use_conv else self.fc1(w)
w = self.activ(w)
w = self.conv2(w) if self.use_conv else self.fc2(w)
w = self.sigmoid(w)
if not self.use_conv:
w = w.unsqueeze(2).unsqueeze(3)
x = x * w
return x
class SpatialGate(nn.Module):
    """Spatial attention gate (CBAM-style): gates every location with the
    sigmoid of a 7x7 convolution over the channel-wise max and mean maps."""
    def __init__(self):
        super(SpatialGate, self).__init__()
        self.conv = ConvBlock(in_channels=2, out_channels=1, kernel_size=7,
                              stride=1, padding=3, bias=False, use_bn=True, activation=None)
        self.sigmoid = nn.Sigmoid()
    def forward(self, x):
        # Channel-wise max and mean, stacked into a 2-channel descriptor.
        max_map = x.max(dim=1)[0].unsqueeze(1)
        mean_map = x.mean(dim=1).unsqueeze(1)
        descriptor = torch.cat((max_map, mean_map), dim=1)
        gate = self.sigmoid(self.conv(descriptor))
        return x * gate
|
[
"noreply@github.com"
] |
noreply@github.com
|
2be17deb37ca708c239c897ac25062d7ce0a84ec
|
6960dcdd51b2ff4d0e1ea5e721310b82ec0df3c5
|
/DeepBeliefNetwork/settings.py
|
9e279ea78c674a685c72d0d0100539904a229be3
|
[] |
no_license
|
WalidKhelifa/Deep-belief-network-for-pneumonia-detection
|
4f908b11c8fcffbeda6ca0b707f1aef24b9c7488
|
64046cf79c0cec0abb48a7a93d5e42303b5e04ab
|
refs/heads/master
| 2023-01-10T08:07:51.444014
| 2020-11-16T14:34:20
| 2020-11-16T14:34:20
| 313,324,940
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,334
|
py
|
"""
Django settings for DeepBeliefNetwork project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nq^*ani2r81f$u+%&c!)tve4)4m_&7)+=@d0%&_dep8rza##=#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'DbnSiteWeb.apps.DbnsitewebConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DeepBeliefNetwork.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DeepBeliefNetwork.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR,'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
MIDIA_ROOT = os.path.join(BASE_DIR,'media')
MEDIA_URL = '/media/'
|
[
"56236244+WalidKhelifa@users.noreply.github.com"
] |
56236244+WalidKhelifa@users.noreply.github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.