| column | dtype | observed range / classes |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 2–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–69 |
| license_type | string | 2 classes |
| repo_name | string | length 5–118 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | length 4–63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k–686M (nullable) |
| star_events_count | int64 | 0–209k |
| fork_events_count | int64 | 0–110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 213 classes |
| src_encoding | string | 30 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2–10.3M |
| extension | string | 246 classes |
| content | string | length 2–10.3M |
| authors | list | length 1 |
| author_id | string | length 0–212 |
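A minimal sketch of scanning rows with this schema, assuming the split has been exported to a local Parquet file; the file name and filter values below are hypothetical:

import pandas as pd

rows = pd.read_parquet("code_files.parquet")  # hypothetical export of this table
# Keep small, permissively licensed files (license_type has 2 classes).
subset = rows[(rows["license_type"] == "permissive") & (rows["length_bytes"] < 10_000)]
for _, row in subset.head(3).iterrows():
    print(row["repo_name"], row["path"], row["detected_licenses"])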
fbdf2e39230a6f0c81bf9a0a4d41242ef6b9ba23
|
a0fdea61891c67c48cff01738f7a2bb6a38635ce
|
/documentation/mcss.conf.py
|
715bc44768d6f9d0dac92c0ac630505fb3e02aa8
|
[] |
no_license
|
yunusemreyilmazz/MarkovPasswords
|
02620f8bd4a631a5bc1b8acd21110ff7ed97723d
|
4aebd0a99064308eff1360a047f4a4b6607752e5
|
refs/heads/main
| 2023-06-16T06:19:21.223307
| 2021-05-17T09:39:58
| 2021-05-17T09:39:58
| 358,263,430
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 464
|
py
|
import os
DOXYFILE = 'Doxyfile-mcss'
MAIN_PROJECT_URL = "https://github.com/FlameOfIgnis/CNG491-Project"
SHOW_UNDOCUMENTED = True
HTML_HEADER = open('documentation/includes/header.html').read()
# HTML_EXTRA_FILES is read once, so the README and every image under
# documentation/images/ must go into a single list.
x = ["images/" + item for item in os.listdir('documentation/images/')]
print(x)
HTML_EXTRA_FILES = ["../README.md"] + x
FINE_PRINT = """CNG 491 - 2020 <br>
Ata Hakçıl <br>
Ömer Yıldıztugay <br>
Celal Sahir Çetiner <br>
Yunus Emre Yılmaz"""
|
[
"ahakcil@gmail.com"
] |
ahakcil@gmail.com
|
b7bb9e2667cf24be506017cb6e95edf3f301d4ed
|
c0a72c0bda2dd0bd1b8e811622e63f337065e29f
|
/hrl/frameworks/options/option.py
|
62ec974a569d323515797eaf19d4fd7741f0942e
|
[] |
no_license
|
konichuvak/hrl
|
45337654fba87ac119ad242c314b19fc828cd0f3
|
c654c91a9cfe5d34c778723977794dfa3e213776
|
refs/heads/master
| 2020-05-16T00:46:00.664032
| 2019-10-17T03:39:32
| 2019-10-17T03:39:32
| 182,587,564
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,664
|
py
|
from typing import Callable
class Option:
def __init__(self,
termination: Callable,
policy: Callable,
initiation: Callable,
id: str = None):
self.I = initiation
self.β = termination
self.π = policy
self.id = str(id)
def initiation(self, s, *args, **kwargs):
return self.I(s, *args, **kwargs)
def termination(self, s, *args, **kwargs):
return self.β(s, *args, **kwargs)
def policy(self, s, *args, **kwargs):
return self.π(s, *args, **kwargs)
def __repr__(self):
return self.id
class MarkovOption:
def __init__(self,
option: Option,
k: int = 0,
starting_state=None,
cumulant=0):
""" An option object that is currently being executed by the agent
        :param option: a plain option to wrap the trackers around
:param k: duration of the option so far
:param starting_state: state in which the option was initiated
:param cumulant: accumulated signal so far
"""
self.k = k
self.starting_state = starting_state
self.cumulant = cumulant
self.option = option
    def __getattribute__(self, name):
        try:
            return getattr(object.__getattribute__(self, 'option'), name)
        except AttributeError:
            # Fall back to the wrapper's own attributes (k, starting_state,
            # cumulant, option); object.__getattribute__ avoids recursing back
            # into the wrapped option's __dict__ and missing them.
            return object.__getattribute__(self, name)
    def __repr__(self):
        return repr(self.option)
def reset(self):
self.k = 0
self.starting_state = None
self.cumulant = 0
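# A minimal usage sketch (not in the original file); the lambdas below are
# hypothetical stand-ins for real initiation/termination/policy functions.
if __name__ == '__main__':
    opt = Option(termination=lambda s: s > 10,
                 policy=lambda s: 'right',
                 initiation=lambda s: True,
                 id='go-right')
    running = MarkovOption(opt, starting_state=0)
    print(running.policy(0))        # delegates to the wrapped option -> 'right'
    print(running.termination(11))  # -> True
    running.reset()                 # clears duration, start state and cumulant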
|
[
"arialinvlad@gmail.com"
] |
arialinvlad@gmail.com
|
26544908ba48d80336509c2244ed04b85b7fc2ce
|
f805abde328858fee80890e09b5ecda9bf66d269
|
/vigbridge-static/paygo-drop_or_forward.py
|
5b4a88a3b7c071afc1691d8a660ad27a521913ca
|
[
"MIT"
] |
permissive
|
luispedrosa/vigor
|
a6880d6fc4f4c4b8ac91ccfac3fb986edf87a5c1
|
945990ad714bc84ce1ffa676ba1d22b79597b7dd
|
refs/heads/master
| 2022-12-11T01:25:27.001993
| 2021-07-16T23:14:24
| 2021-07-16T23:14:24
| 214,473,468
| 1
| 2
|
MIT
| 2022-01-25T19:20:26
| 2019-10-11T15:42:10
|
C
|
UTF-8
|
Python
| false
| false
| 417
|
py
|
# Vigor bridge forwarding spec: this file is consumed by Vigor's validator,
# which supplies pop_header, ether, stat_emap, dyn_emap, dyn_vals, the *c
# constructors and top-level return; it is not standalone Python.
import table_updates
h = pop_header(ether, on_mismatch=([],[]))
static_key = StaticKeyc(h.daddr, received_on_port)
if stat_emap.has(static_key):
    # Drop when the static entry blacklists the destination (-2) or the frame
    # would be sent back out of the port it arrived on.
    if stat_emap.get(static_key) == -2 or stat_emap.get(static_key) == received_on_port:
        return ([],[])
elif dyn_emap.has(h.daddr) and dyn_vals.get(dyn_emap.get(h.daddr)) == DynamicValuec(received_on_port):
    # Destination already learned on this port: nothing to do.
    return ([],[])
else:
    # Otherwise forward; the [...] port set is elided in this spec.
    return ([...],[ether(h)])
|
[
"fchamicapereira@gmail.com"
] |
fchamicapereira@gmail.com
|
12efa0c024a4d99d986aa818a7c90acda314472b
|
50f88fd9cae1a4870edae36d2a437cc9af3bd2dd
|
/fav.py
|
7e9cd408424ebee7fdd652df82403b3020b7b489
|
[] |
no_license
|
MustangNM/Anime-Potatoes-
|
2c7f27554248498732bf38edbcda8294a074ca4d
|
ddb0874c6c3cc2e8c896f2b9eba70aca2cabbfee
|
refs/heads/master
| 2021-04-30T04:07:31.088164
| 2018-02-14T17:00:49
| 2018-02-14T17:00:49
| 121,528,754
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,167
|
py
|
import mediaa
import index
# list of movies
aSilentVoice = mediaa.Movie("A Silent Voice", "Great Movie", "silent.jpg",
"https://www.youtube.com/watch?v=syqBF25r1Ak")
KimiNoNawa = mediaa.Movie("Your Name", "A love story", "Kimi.no.Na.wa.jpg",
"https://www.youtube.com/watch?v=hRfHcp2GjVI")
TheLast = mediaa.Movie("The Last", "Ninja Love", "TheLast.jpg",
"https://www.youtube.com/watch?v=tA3yE4_t6SY")
BorutoTheMovie = mediaa.Movie("Boruto The Movie", "Spoiled Kid of Hokage",
"Boruto.jpg",
"https://www.youtube.com/watch?v=Qyonn5Vbg7s")
WolfChildren = mediaa.Movie("Wolf Children", "Human Wolves", "wolf.jpg",
"https://www.youtube.com/watch?v=MZpWdYruu48")
FiveCent = mediaa.Movie("5 Centimeters per Second", "Story", "5cent.jpg",
"https://www.youtube.com/watch?v=wdM7athAem0")
# Creating Array of Movies
movies = [aSilentVoice, KimiNoNawa, TheLast, BorutoTheMovie,
WolfChildren,FiveCent]
# Opening the website
index.open_movies_page(movies)
|
[
"noreply@github.com"
] |
MustangNM.noreply@github.com
|
14a94b626bfcf3afd62e4dfc6074dbfac75ffcfb
|
0a2918121f792d63a480fd04fe45167ccf5c20b8
|
/scrapper.py
|
332a63b8430c316e92e1ed336421481f050d84c9
|
[] |
no_license
|
orionmontoyac/house_scraping
|
c61c6f717e7a10b015c4567e2506929954faaa28
|
73e07b2bcdad017f53738c5aae23201563174c48
|
refs/heads/master
| 2021-04-06T22:09:01.786225
| 2020-03-23T17:11:32
| 2020-03-23T17:11:32
| 248,618,791
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,244
|
py
|
import csv
import datetime
import logging

from requests.exceptions import HTTPError
from urllib3.exceptions import MaxRetryError

# config(), HomePage, housePage and the is_*_url regexes are expected to come
# in through this star import.
from house_page_object import *
def _save_houses(news_site_uid, houses):
now = datetime.datetime.now()
csv_headers = list(filter(lambda property: not property.startswith('_'), dir(houses[0])))
out_file_name = '{news_site_uid}_{datetime}_houses.csv'.format(news_site_uid=news_site_uid, datetime=now.strftime('%Y_%m_%d'))
with open(out_file_name, mode='w+') as f:
writer = csv.DictWriter(f, fieldnames=csv_headers)
writer.writeheader()
for house in houses:
logging.info('Save house: {}'.format(house._link))
            row = {prop: str(getattr(house, prop)) for prop in csv_headers}
writer.writerow(row)
def _build_link(host, link):
if is_well_formed_url.match(link):
return link
elif is_root_path.match(link):
return '{host}{uri}'.format(host=host, uri=link)
else:
return '{host}/{uri}'.format(host=host, uri=link)
def _fetch_house(news_site_uid, host, link):
logging.info('Start fetching house at {}'.format(link))
house = None
try:
house = housePage(news_site_uid, _build_link(host, link))
    except (HTTPError, MaxRetryError):
        logging.warning('Error while fetching house!', exc_info=False)
    if house and not house._valid_house():
        logging.warning('Invalid house.')
return None
return house
def _houses_scraper(news_site_uid):
host = config()[news_site_uid]['url']
logging.info('Beginning scraper for {}'.format(host))
logging.info('Finding links in homepage...')
house_links = _find_house_links_in_homepage(news_site_uid)
logging.info('{} house links found in homepage'.format(len(house_links)))
houses = []
for link in house_links:
house = _fetch_house(news_site_uid, host, link)
if house:
logging.info('house fetched!')
houses.append(house)
#break
_save_houses(news_site_uid, houses)
def _find_house_links_in_homepage(news_site_uid):
homepage = HomePage(news_site_uid, config()[news_site_uid]['url_search'])
return homepage.house_links
if __name__ == "__main__":
_houses_scraper('finca_raiz')
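# Worked check (not in the original file) of the csv_headers derivation in
# _save_houses: dir() plus the underscore filter keeps only public attribute
# names. _DemoHouse below is hypothetical.
class _DemoHouse:
    def __init__(self):
        self._link = 'http://example.com/1'  # underscore-prefixed: filtered out
        self.price = 100
        self.rooms = 3
# list(filter(lambda p: not p.startswith('_'), dir(_DemoHouse())))
#   -> ['price', 'rooms'] (dunders and _link are dropped)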
|
[
"solar@iMac-de-SOLAR.local"
] |
solar@iMac-de-SOLAR.local
|
5611d0b092a75ea444e63dcb6ab7086032f9af36
|
cfbb34dd860a0707a5c6937c3e9dfe8d74433e27
|
/survey/migrations/0017_remove_survey_borrower_status.py
|
09efdb8998ed1dc700e22f7f9f2c54e3ccb28f39
|
[] |
no_license
|
andamuthu/test
|
eba4876fc886877eeb458f4a37e96ddb02dbfed1
|
49db219a905ea3ebcb71120dac3f1d66821cc752
|
refs/heads/master
| 2022-12-09T13:19:31.618651
| 2020-09-07T04:10:00
| 2020-09-07T04:10:00
| 292,813,311
| 0
| 0
| null | 2020-09-07T04:10:01
| 2020-09-04T09:58:00
|
Python
|
UTF-8
|
Python
| false
| false
| 336
|
py
|
# Generated by Django 3.0.8 on 2020-07-26 12:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('survey', '0016_auto_20200726_1207'),
]
operations = [
migrations.RemoveField(
model_name='survey',
name='borrower_status',
),
]
|
[
"andamuthu.a@gmail.com"
] |
andamuthu.a@gmail.com
|
5f5772fc46a71ddeaf199e84f8036c0f5b86fbb9
|
98f3a92c7a558fa53266a6ead40e3fa6b7786f5e
|
/servers/admin.py
|
ed4e2ce02044dcc0d079630e561cdf030edc5cb6
|
[] |
no_license
|
yangkun9151/Cjit_Devops
|
e9d7b988b783ca131cc363f796ba86c8ace68122
|
7bf80cb03f1f357c5bc83b076ea1c3ad48ded0de
|
refs/heads/master
| 2023-05-21T23:47:40.200665
| 2020-04-18T05:57:48
| 2020-04-18T05:57:48
| 256,160,314
| 0
| 0
| null | 2021-06-10T22:47:06
| 2020-04-16T08:51:12
|
Python
|
UTF-8
|
Python
| false
| false
| 95
|
py
|
from django.contrib import admin
from .models import Servers
admin.site.register(Servers)
|
[
"245115113@qq.com"
] |
245115113@qq.com
|
cd43e1d10dcc78a00cd676cd6422f7108738d668
|
5e726f41a95e1fc79ed98b777ec85a386f7c7a13
|
/Scrapy/Pipeline/PipeLine.py
|
15562deb29cbaa56f299f0c3c9aee5dd71930889
|
[] |
permissive
|
825477418/XX
|
a3b43ff2061f2ec7e148671db26722e1e6c27195
|
bf46e34749394002eec0fdc65e34c339ce022cab
|
refs/heads/master
| 2022-08-02T23:51:31.009837
| 2020-06-03T13:54:09
| 2020-06-03T13:54:09
| 262,987,137
| 0
| 0
|
MIT
| 2020-06-03T13:54:10
| 2020-05-11T08:43:30
| null |
UTF-8
|
Python
| false
| false
| 4,518
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2018/8/14 16:09
import json
import os
import time
from logzero import logger
from thrift.transport import TSocket
# Assumed hbase-thrift bindings, used by HBasePipeline below.
from thrift.protocol import TBinaryProtocol
from hbase import Hbase
from hbase.ttypes import ColumnDescriptor, Mutation
import XX.DB.SqlAlchemyHelper as sa
import XX.Encrypt.EncryptHelper as enc
import XX.File.FileHelper as cf
import XX.HTML.HtmlHelper as chtml
import XX.Tools.BuiltinFunctions as bf
# File pipeline: append each item, as one JSON line, to today's file
class FilePipeline(object):
@classmethod
def from_crawler(cls, crawler):
settings = crawler.settings
cls.cacheFilePath = settings.get("FUN_CACHE_FILE_PATH")
cls.settings = settings
return cls()
def process_item(self, item, spider):
        # Process the item data
item = chtml.parseDict(item)
today = time.strftime("%Y_%m_%d", time.localtime(int(time.time())))
json_str = json.dumps(item, ensure_ascii=False)
        # Save the data to a file
file_path = FilePipeline.settings.get("ROOT_PATH_JSON") + spider.name + os.sep + today + ".json"
cf.FileHelper.save_file(file_path, json_str + "\n")
return item
# Store in the MySQL database
class MysqlPipeline(object):
@classmethod
def from_crawler(cls, crawler):
cls.settings = crawler.settings
cls.session = sa.SqlAlchemyHelper.get_session_by_cfg(cls.settings.get("MCFG"))
return cls()
def process_item(self, item, spider):
import importlib
module = importlib.import_module("Util.Json2Mysql", MysqlPipeline.settings.get("PROJECT_PATH"))
if hasattr(module, spider.name):
getattr(module, spider.name)(item, self.session)
else:
logger.info("No Json2Mysql function")
return item
# Store in the MongoDB database
class MongoPipeline(object):
def process_item(self, item, spider):
return item
# Push to the Kafka queue
class KafkaPipeline(object):
def __init__(self):
from pykafka import KafkaClient
self.client = KafkaClient(hosts="LOCALHOST" + ":6667")
def process_item(self, item, spider):
topicdocu = self.client.topics[spider.name]
producer = topicdocu.get_producer()
        # Process the item data
item = chtml.parseDict(item)
json_str = json.dumps(item, ensure_ascii=False)
producer.produce(json_str)
bf.printFromHead(spider.name + "\tAdd kafka")
return item
class HivePipeline(object):
def process_item(self, item, spider):
return item
class SparkPipeline(object):
def process_item(self, item, spider):
return item
class StormPipeline(object):
def process_item(self, item, spider):
return item
class HBasePipeline(object):
    @classmethod
    def from_crawler(cls, crawler):
        cls.settings = crawler.settings
        return cls()  # Scrapy expects from_crawler to return the pipeline instance
def __init__(self):
self.transport = TSocket.TSocket(self.settings.get("HBASE_HOST", "localhost"), self.settings.get("HBASE_PORT", 9090))
self.transport.open()
self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
self.client = Hbase.Client(self.protocol)
        # Check whether the table exists; create it if it does not
tables = self.client.getTableNames()
self.table_name = "crawl_" + self.settings.get("PROJECT_NAME", self.settings.get("BOT_NAME", "crawl"))
if self.table_name not in tables:
source = ColumnDescriptor(name='source')
data = ColumnDescriptor(name='data')
self.client.createTable(self.table_name, [source, data])
def process_item(self, item, spider):
        # Save into the crawl_<project> table: rowkey = spider_name + md5(url),
        # with the serialized item stored under column data:json
url = item.get("url")
if url:
row = spider.name + "_" + enc.Encrypt.md5(url)
mutations = list()
mutations.append(Mutation(column="data:json", value=str(json.dumps(item, ensure_ascii=False))))
self.client.mutateRow(self.table_name, row, mutations)
logger.info("Pipeline Data 2 HBase\t" + row)
else:
logger.info("No url from spider \t" + spider.name)
return item
def close_spider(self, spider):
self.transport.close()
class TestPipeline(object):
    @classmethod
    def from_crawler(cls, crawler):
        cls.settings = crawler.settings
        return cls()  # Scrapy expects from_crawler to return the pipeline instance
def process_item(self, item, spider):
print("===" * 44)
print(TestPipeline.settings)
print(dir(spider))
print(dir(self))
print("===" * 44)
|
[
"billsteve@sina.com"
] |
billsteve@sina.com
|
cf901aa9b34691b0af99752116c4af9c235fbbe8
|
d7c0c76cab2d74bd6dad63f68873b9b8a56e6909
|
/课件/第三节课/shiguangkey/shiguangkey/wsgi.py
|
6825808deb7fa0a88f9fe80dad368cab257e406d
|
[] |
no_license
|
LinearPi/ClassDoc
|
6c871b03bcd6f081c071236d1ce8d131ba45f52a
|
5408f98f76edfbfaeab392d4437725f81badd279
|
refs/heads/master
| 2022-11-30T04:29:45.879839
| 2019-06-17T06:51:51
| 2019-06-17T06:51:51
| 128,883,724
| 0
| 0
| null | 2022-11-22T02:31:50
| 2018-04-10T06:21:55
|
CSS
|
UTF-8
|
Python
| false
| false
| 400
|
py
|
"""
WSGI config for shiguangkey project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shiguangkey.settings")
application = get_wsgi_application()
|
[
"807745654@qq.com"
] |
807745654@qq.com
|
7d9f251613e25a7b8026c8f78e8ba0808bd4c7ea
|
e0c38c2c98e0ee1d206debd2ccb98635ef248020
|
/maps/foliumMaps.py
|
fece96dc896e75a634255768c6898114b3c6f1c0
|
[
"MIT"
] |
permissive
|
selinerguncu/Yelp-Spatial-Analysis
|
81f629d5b0be7ab6cea4de3421c044606b6534ff
|
befbcb927ef225bda9ffaea0fd41a88344f9693c
|
refs/heads/master
| 2020-12-30T11:02:01.419106
| 2017-09-08T14:35:27
| 2017-09-08T14:35:27
| 98,835,161
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,568
|
py
|
import folium
from folium import plugins
import numpy as np
import sqlite3 as sqlite
import os
import sys
import pandas as pd
#extract data from yelp DB and clean it:
DB_PATH = "/Users/selinerguncu/Desktop/PythonProjects/Fun Projects/Yelp/data/yelpCleanDB.sqlite"
conn = sqlite.connect(DB_PATH)
#######################################
############ organize data ############
#######################################
def organizeData(mapParameters):
business = str(mapParameters['business'])
region = str(mapParameters['region'])
price = str(mapParameters['price'])
rating = float(mapParameters['rating'])
print('mapParameters', mapParameters)
# if 'zipcode' in mapParameters.keys():
# zipcode = str(mapParameters['zipcode'])
# sql = "SELECT longitude, latitude, query_latitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count FROM Business WHERE query_category = '%s' AND city = '%s' AND zip_code = '%s' AND price = '%s' AND rating = '%r'" % (business, city, zipcode, price, rating)
# coordinates = pd.read_sql_query(sql, conn)
# else:
# sql = "SELECT longitude, latitude, query_latitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count FROM Business WHERE query_category = '%s' AND city = '%s' AND price = '%s' AND rating = '%r'" % (business, city, price, rating)
# coordinates = pd.read_sql_query(sql, conn)
# print('here')
    # Every branch selects the same column list, including query_longitude,
    # which the missing-coordinate backfill below reads.
    sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                FROM CleanBusinessData
                WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND region = '%s' ''' % (business, price, rating, region)
    if region == 'Bay Area':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND city != '%s' ''' % (business, price, rating, 'San Francisco')
    elif region == 'Peninsula':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND city != '%s' AND city != '%s' AND city != '%s' ''' % (business, price, rating, 'San Francisco', 'San Francisco - Downtown', 'San Francisco - Outer')
    elif region == 'San Francisco':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND city = '%s' ''' % (business, price, rating, 'San Francisco')
    elif region == 'Downtown SF':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND city = '%s' ''' % (business, price, rating, 'San Francisco - Downtown')
    elif region == 'Outer SF':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND city = '%s' ''' % (business, price, rating, 'San Francisco - Outer')
    elif region == 'East Bay':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND region = '%s' ''' % (business, price, rating, 'eastBay')
    elif region == 'North Bay':
        sql = '''SELECT longitude, latitude, query_longitude, query_latitude, query_category, query_price, city, zip_code, price, rating, review_count, region
                    FROM CleanBusinessData
                    WHERE query_category = '%s' AND price = '%s' AND rating = '%r' AND region = '%s' ''' % (business, price, rating, 'northBay')
coordinates = pd.read_sql_query(sql, conn)
    if len(coordinates) <= 1860:
        # Backfill rows whose geocoding is missing with the query coordinates.
        for i in range(len(coordinates)):
            if coordinates["longitude"][i] is None:
                coordinates.loc[i, "longitude"] = coordinates["query_longitude"][i]
            if coordinates["latitude"][i] is None:
                coordinates.loc[i, "latitude"] = coordinates["query_latitude"][i]
# coordinates = []
# for i in range(len(coords)): #max ~1860 coordinates
# coordinate = []
# coordinate.append(coords["latitude"][i])
# coordinate.append(coords["longitude"][i])
# coordinates.append(coordinate)
# # convert list of lists to list of tuples
# coordinates = [tuple([i[0],i[1]]) for i in coordinates]
# # print(coordinates[0:10])
return coordinates
# else:
# print("Too many data points; cannot be mapped!")
#######################################
##### visualize the coordinates #######
#######################################
def makeMarkerMap(coordinates):
# # get center of map
# meanlat = np.mean([float(i[0]) for i in coordinates])
# meanlon = np.mean([float(i[1]) for i in coordinates])
print('coordinates', len(coordinates))
meanlat = np.mean(coordinates['latitude'])
meanlon = np.mean(coordinates['longitude'])
#Initialize map
mapa = folium.Map(location=[meanlat, meanlon],
tiles='Cartodb Positron', zoom_start=10)
# add markers
for i in range(len(coordinates)):
# create popup on click
html="""
Rating: {}<br>
Popularity: {}<br>
Price: {}<br>
"""
html = html.format(coordinates["rating"][i],\
coordinates["review_count"][i],\
coordinates["price"][i])
        iframe = folium.IFrame(html=html, width=150, height=100)  # folium has no Div; IFrame renders the popup HTML
        popup = folium.Popup(iframe, max_width=2650)
# add marker to map
folium.Marker(tuple([coordinates['latitude'][i],coordinates['longitude'][i]]), popup=popup,).add_to(mapa)
return mapa.save("/Users/selinerguncu/Desktop/PythonProjects/Fun Projects/Yelp/yelp/static/foliumMarkers.html")
#######################################
####### cluster nearby points #########
#######################################
def makeClusterMap(coordinates):
from folium.plugins import MarkerCluster # for marker clusters
meanlat = np.mean(coordinates['latitude'])
meanlon = np.mean(coordinates['longitude'])
# initialize map
mapa = folium.Map(location=[meanlat, meanlon],
tiles='Cartodb Positron', zoom_start=10)
coordinatesFinal = []
for i in range(len(coordinates)):
# add marker clusters
coordinate = []
coordinate.append(coordinates["latitude"][i])
coordinate.append(coordinates["longitude"][i])
coordinatesFinal.append(coordinate)
# convert list of lists to list of tuples
coordinatesFinal = [tuple([i[0],i[1]]) for i in coordinatesFinal]
# print('coordinatesFinal', len(coordinatesFinal))
mapa.add_child(MarkerCluster(locations=coordinatesFinal))
return mapa.save("/Users/selinerguncu/Desktop/PythonProjects/Fun Projects/Yelp/yelp/static/foliumCluster.html")
#######################################
####### generate a heat map ###########
#######################################
def makeHeatmapMap(coordinates):
from folium.plugins import HeatMap
meanlat = np.mean(coordinates['latitude'])
meanlon = np.mean(coordinates['longitude'])
# initialize map
mapa = folium.Map(location=[meanlat, meanlon],
tiles='Cartodb Positron', zoom_start=10) #tiles='OpenStreetMap'
coordinatesFinal = []
    n = min(len(coordinates), 1090)  # the heat map handles at most ~1090 points
    for i in range(n):
        coordinate = []
        coordinate.append(coordinates["latitude"][i])
        coordinate.append(coordinates["longitude"][i])
        coordinatesFinal.append(coordinate)
# convert list of lists to list of tuples
coordinatesFinal = [tuple([i[0],i[1]]) for i in coordinatesFinal]
# add heat
mapa.add_child(HeatMap(coordinatesFinal))
# mapa.add_child(HeatMap((tuple([coordinates['latitude'][i],coordinates['longitude'][i]]))))
return mapa.save("/Users/selinerguncu/Desktop/PythonProjects/Fun Projects/Yelp/yelp/static/foliumHeatmap.html")
# Saving the map as an image doesn't seem to work:
# import os
# import time
# from selenium import webdriver
# from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
# # for different tiles: https://github.com/python-visualization/folium
# delay=5
# fn='foliumHeatmap.html'
# tmpurl='file:///Users/selinerguncu/Desktop/PythonProjects/Fun%20Projects/Yelp%20Project/Simulation/foliumHeatmap.html'.format(path=os.getcwd(),mapfile=fn)
# mapa.save(fn)
# firefox_capabilities = DesiredCapabilities.FIREFOX
# firefox_capabilities['marionette'] = True
# browser = webdriver.Firefox(capabilities=firefox_capabilities, executable_path='/Users/selinerguncu/Downloads/geckodriver')
# browser.get(tmpurl)
# #Give the map tiles some time to load
# time.sleep(delay)
# browser.save_screenshot('mynewmap.png')
# browser.quit()
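# A hedged usage sketch (not in the original file); the parameter values below
# are illustrative only, but the keys match what organizeData reads.
if __name__ == '__main__':
    params = {'business': 'restaurants', 'region': 'East Bay',
              'price': '$$', 'rating': 4.0}
    coords = organizeData(params)
    makeMarkerMap(coords)   # one pin per business, with a rating/price popup
    makeClusterMap(coords)  # nearby pins collapsed into clusters
    makeHeatmapMap(coords)  # density heat map (capped at 1090 points)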
|
[
"erguncu@usc.edu"
] |
erguncu@usc.edu
|
7e4b7ae698a95107a84e9fecd32a88e6c4685d6a
|
803db2cc2c44fa50983f04e5a77dc2da87c37583
|
/introduksjon_lerer/evige_lokker.py
|
803c353752b3b65c6efdf8f46a42c6af80d2f45b
|
[] |
no_license
|
NataliPotter/DAT110_eksenpler_2021_New
|
1b44f796f32b2d27f6bb288c8e14a8dcadb76fc5
|
53bfde9462a6722f6948f1dd50d0288c19224a29
|
refs/heads/main
| 2023-03-23T18:45:56.773150
| 2021-03-09T19:57:41
| 2021-03-09T19:57:41
| 346,041,291
| 0
| 0
| null | 2021-03-09T19:57:42
| 2021-03-09T14:51:33
|
Python
|
UTF-8
|
Python
| false
| false
| 273
|
py
|
# Enter a count, and the loop computes 1 + 2 + 3 + ... + count
antall = int(input("Antall tall: "))
tall = 1
resultat = 0
while tall <= antall:
resultat += tall
tall += 1
print(f"Foreløpig resultat: {resultat}")
print(f"Resultatet ble: {resultat}")
|
[
"natali.v.goncharova@gmail.com"
] |
natali.v.goncharova@gmail.com
|
b48869e3cf23fc8447b997428554d005b1fb32f1
|
d1216260d3f91d741781ccf7c0601590a6725f90
|
/agent.py
|
c709ed1a2e7a11957a84f4e1bf846ace92abf372
|
[] |
no_license
|
lornzy/smartcar-p4
|
b40821fc65fad2daac5e0b0cb25889cec6a6d227
|
1a94aca1b2afd377f72ac5917451ca8bd8398d91
|
refs/heads/master
| 2021-07-09T16:40:50.374849
| 2017-10-10T04:00:16
| 2017-10-10T04:00:16
| 105,352,732
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,894
|
py
|
import random
import math
from environment import Agent, Environment
from planner import RoutePlanner
from simulator import Simulator
class LearningAgent(Agent):
""" An agent that learns to drive in the Smartcab world.
This is the object you will be modifying. """
def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5):
        super(LearningAgent, self).__init__(env) # Set the agent in the environment
self.planner = RoutePlanner(self.env, self) # Create a route planner
self.valid_actions = self.env.valid_actions # The set of valid actions
# Set parameters of the learning agent
self.learning = learning # Whether the agent is expected to learn
self.Q = dict() # Create a Q-table which will be a dictionary of tuples
self.epsilon = epsilon # Random exploration factor
self.alpha = alpha # Learning factor
###########
## TO DO ##
###########
# Set any additional class parameters as needed
self.time_trial = 0
def reset(self, destination=None, testing=False):
""" The reset function is called at the beginning of each trial.
'testing' is set to True if testing trials are being used
once training trials have completed. """
# Select the destination as the new location to route to
self.planner.route_to(destination)
###########
## TO DO ##
###########
# Update epsilon using a decay function of your choice
# Update additional class parameters as needed
# If 'testing' is True, set epsilon and alpha to 0
self.time_trial += 1
        if not testing:
            self.epsilon = pow(0.995, self.time_trial)
        else:
            self.epsilon, self.alpha = 0, 0
return None
def build_state(self):
""" The build_state function is called when the agent requests data from the
environment. The next waypoint, the intersection inputs, and the deadline
are all features available to the agent. """
# Collect data about the environment
waypoint = self.planner.next_waypoint() # The next waypoint
inputs = self.env.sense(self) # Visual input - intersection light and traffic
deadline = self.env.get_deadline(self) # Remaining deadline
###########
## TO DO ##
###########
# Set 'state' as a tuple of relevant data for the agent
state = (waypoint, inputs['light'], inputs['oncoming'], inputs['left'], inputs['right'])
return state
def get_maxQ(self, state):
""" The get_max_Q function is called when the agent is asked to find the
maximum Q-value of all actions based on the 'state' the smartcab is in. """
###########
## TO DO ##
###########
# Calculate the maximum Q-value of all actions for a given state
Q_value = self.Q[state].values()
maxQ = max(Q_value)
return maxQ
def createQ(self, state):
""" The createQ function is called when a state is generated by the agent. """
###########
## TO DO ##
###########
# When learning, check if the 'state' is not in the Q-table
# If it is not, create a new dictionary for that state
# Then, for each action available, set the initial Q-value to 0.0
#action_dict = {}
if state not in self.Q:
self.Q[state] = {}
for action in self.valid_actions:
if action not in self.Q[state]:
self.Q[state][action] = 0.0
Q = self.Q
#print Q
return Q
def choose_action(self, state):
""" The choose_action function is called when the agent is asked to choose
which action to take, based on the 'state' the smartcab is in. """
# Set the agent state and default action
self.state = state
self.next_waypoint = self.planner.next_waypoint()
###########
## TO DO ##
###########
# When not learning, choose a random action
# When learning, choose a random action with 'epsilon' probability
# Otherwise, choose an action with the highest Q-value for the current state
if self.learning:
if random.random() <= self.epsilon:
action = random.choice(self.valid_actions)
else:
max_action = [a for a in self.Q[state] if self.Q[state][a] == self.get_maxQ(state)]
action = random.choice(max_action)
else:
action = random.choice(self.valid_actions)
return action
def learn(self, state, action, reward):
""" The learn function is called after the agent completes an action and
        receives a reward. This function does not consider future rewards
when conducting learning. """
###########
## TO DO ##
###########
# When learning, implement the value iteration update rule
# Use only the learning rate 'alpha' (do not use the discount factor 'gamma')
if self.learning:
self.Q[state][action] += self.alpha * (reward - self.Q[state][action])
return None
def update(self):
""" The update function is called when a time step is completed in the
environment for a given trial. This function will build the agent
state, choose an action, receive a reward, and learn if enabled. """
state = self.build_state() # Get current state
self.createQ(state) # Create 'state' in Q-table
action = self.choose_action(state) # Choose an action
reward = self.env.act(self, action) # Receive a reward
self.learn(state, action, reward) # Q-learn
return None
def run():
""" Driving function for running the simulation.
Press ESC to close the simulation, or [SPACE] to pause the simulation. """
##############
# Create the environment
# Flags:
# verbose - set to True to display additional output from the simulation
# num_dummies - discrete number of dummy agents in the environment, default is 100
# grid_size - discrete number of intersections (columns, rows), default is (8, 6)
env = Environment()
##############
# Create the driving agent
# Flags:
# learning - set to True to force the driving agent to use Q-learning
# * epsilon - continuous value for the exploration factor, default is 1
# * alpha - continuous value for the learning rate, default is 0.5
agent = env.create_agent(LearningAgent, learning = True)
##############
# Follow the driving agent
# Flags:
# enforce_deadline - set to True to enforce a deadline metric
env.set_primary_agent(agent, enforce_deadline = True)
##############
# Create the simulation
# Flags:
# update_delay - continuous time (in seconds) between actions, default is 2.0 seconds
# display - set to False to disable the GUI if PyGame is enabled
# log_metrics - set to True to log trial and simulation results to /logs
# optimized - set to True to change the default log file name
sim = Simulator(env, update_delay=0.01, log_metrics=True, optimized = True)
##############
# Run the simulator
# Flags:
# tolerance - epsilon tolerance before beginning testing, default is 0.05
# n_test - discrete number of testing trials to perform, default is 0
sim.run(n_test=10, tolerance = 0.05)
if __name__ == '__main__':
run()
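# A small numeric check (not in the original file) of the update rules above:
# with alpha = 0.5, Q = 0.0 and reward = 2.0, learn() moves Q to
# 0.0 + 0.5 * (2.0 - 0.0) = 1.0, and reset() decays epsilon to
# 0.995 ** 100 ~= 0.606 after 100 trials, so exploration fades gradually.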
|
[
"noreply@github.com"
] |
lornzy.noreply@github.com
|
53d11d7a9d216d841eafcd87ae7bb090a3c17a52
|
1e14d31427714787abd4a1784a59d88591be9a68
|
/IntroductionToML/decision_tree_01.py
|
0f22c652d68e38535a642cd373763a2b2f93b054
|
[] |
no_license
|
jancywen/ml_learning
|
4b6a84f1b6816e7ef6af9c0368eade47e5185d3c
|
aa3e9ee597424e9fdfe6e9b593ce9cc11392994b
|
refs/heads/master
| 2020-03-16T07:51:35.853982
| 2018-08-28T09:44:14
| 2018-08-28T09:44:14
| 132,585,004
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,182
|
py
|
# -*- coding: utf-8 -*-
__author__ = 'wangwenjie'
__data__ = '2018/5/11 11:15 AM'
__product__ = 'PyCharm'
__filename__ = 'decision_tree_01'
"""
Decision trees (compared against logistic regression, random forest and gradient boosting)
"""
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.tree import export_graphviz
cancer = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(cancer.data, cancer.target, stratify=cancer.target, random_state=42)
# Decision tree
tree = DecisionTreeClassifier(random_state=0).fit(X_train, y_train)
print('Tree Train score : %.3f' % tree.score(X_train, y_train))
print('Tree Test score: {:.3f}'.format(tree.score(X_test, y_test)))
# Logistic regression
logistics = LogisticRegression().fit(X_train, y_train)
print('Regression train score: %.3f' % logistics.score(X_train, y_train))
print('Regression test score: %.3f' % logistics.score(X_test, y_test))
# export_graphviz(tree, out_file='tree.dot', class_names=["malignant", "benign"], feature_names=cancer.feature_names, impurity=False, filled=True)
# Random forest
forest = RandomForestClassifier(n_estimators=100, random_state=0).fit(X_train, y_train)
print('RandomForestClassifier train score: %.3f' % forest.score(X_train, y_train))
print('RandomForestClassifier test score: %.3f' % forest.score(X_test, y_test))
# Gradient boosted decision trees
gbrt = GradientBoostingClassifier(random_state=0, max_depth=1, n_estimators=100, learning_rate=0.01).fit(X_train, y_train)
print('Grbt train score:{:.3f}'.format(gbrt.score(X_train, y_train)))
print('Grbt test score {:.3f}'.format(gbrt.score(X_test, y_test)))
def plot_feature_importance_cancer(model):
n_feature = cancer.data.shape[1]
plt.barh(range(n_feature), model.feature_importances_, align='center')
plt.yticks(np.arange(n_feature), cancer.feature_names)
plt.show()
plot_feature_importance_cancer(tree)
plot_feature_importance_cancer(forest)
plot_feature_importance_cancer(gbrt)
|
[
"wangwenjie@ewell.cc"
] |
wangwenjie@ewell.cc
|
cfec095fd6de03ecdee2408f43de4864cdb3a360
|
8c80a0fa64b858cb27cf19bfd880844d1379ae8a
|
/cellules.py
|
e389a108c401ade15cc692b2f4dfc41ce0e404ea
|
[] |
no_license
|
RaphaelJ/Python
|
1241a54bc8c81d931633b5dce37478157ab194d1
|
ff63214503215c175d931e18881fc55156457a23
|
refs/heads/master
| 2016-09-06T05:29:29.983225
| 2012-09-24T22:50:46
| 2012-09-24T22:50:46
| 2,337,660
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 232
|
py
|
def cellules(x, y):
    # Print y cell sizes that sum to x, spreading the remainder as evenly as possible.
avg = x // y
rem = x % y
prec_sum_avg_rem = 0
for i in range(1, y+1):
sum_avg_rem = rem * i // y
print (avg + sum_avg_rem - prec_sum_avg_rem)
prec_sum_avg_rem = sum_avg_rem
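# Worked example (not in the original file): cellules(10, 3) prints 3, 3, 4.
# The cumulative rounding rem * i // y hands out the remainder one unit at a
# time, so the printed sizes always sum to x.
if __name__ == '__main__':
    cellules(10, 3)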
|
[
"raphaeljavaux@gmail.com"
] |
raphaeljavaux@gmail.com
|
41afd00ba70b9bb4632093db934b2914019e239f
|
5056caf1eb9546dbc10e1953c58d8605d229e4a7
|
/fabfile.py
|
9a3605e6c3b67cb37d96202cb8dd098a4a127d44
|
[] |
no_license
|
ergelo/fab-setupscript
|
cf1d3725c159bdc9002e2e5d046d55df69ec843b
|
708f96fdacb7763099b043c07d6a43c109959c59
|
refs/heads/master
| 2020-12-24T15:13:48.886361
| 2011-11-08T11:13:51
| 2011-11-08T11:13:51
| 2,708,926
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| true
| false
| 763
|
py
|
import settings as s
from fab_helpers import *
env.hosts = s.hosts
def main():
"""
checks the settings and calls the appropriate setup functions
"""
if s.single_machine_setup:
dev_setup()
else:
db_setup()
dev_setup()
#################################
# #
# setup functions #
# #
#################################
def dev_setup():
"setup dev server machine"
root()
update_apt()
user_routine()
user()
apt_setup()
pip_setup()
setup_ssh()
compile_hosts()
github_egg_setup()
webserver_setup_routine()
if s.single_machine_setup:
db_setup_routine()
django_site_setup_routine()
|
[
"bruno.panara@gmail.com"
] |
bruno.panara@gmail.com
|
ace30bcdb3f821bb721e5bedc043f25384bc5d38
|
5f4af71f507ddd668ecfb8640860a40003554871
|
/BiddingSystem_Logic.py
|
53f57868f982e800fa825edc19508a7c5ad5e360
|
[] |
no_license
|
Muhaimeen92/BiddingSystem
|
eab9a2cfb5b7d7bce2c6565ed3feda4b05d8c490
|
dfc64bb69a6c3cb46dd86935ac92a90148c83413
|
refs/heads/main
| 2023-08-01T15:11:19.998331
| 2021-09-06T17:02:29
| 2021-09-06T17:02:29
| 403,695,522
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,586
|
py
|
from BiddingSystem_Static import Campaign, AdCampaign, Request, BidRequest, Generator, AddGenerator, BidGenerator, timer
import json
from collections import defaultdict
class Evaluation:
def __init__(self, ad_campaigns, bid_requests):
self.ad_campaigns = ad_campaigns
self.bid_requests = bid_requests
def load_AdCampaigns(self):
ad_table = {}
for ad_campaign in self.ad_campaigns:
if ad_campaign.domain not in ad_table:
ad_table[ad_campaign.domain] = {}
if ad_campaign.country not in ad_table[ad_campaign.domain]:
ad_table[ad_campaign.domain][ad_campaign.country] = defaultdict(list)
for dimension in ad_campaign.dimensions:
ad_table[ad_campaign.domain][ad_campaign.country][dimension] += [ad_campaign.id]
return ad_table
@timer
def evaluate(self):
"""The evaluate method returns a dictionary with the bid request id as the key and list of add campaign ids
as the values which match the criteria of bid request made"""
evaluation_results = {}
ad_table = self.load_AdCampaigns()
for bid_request in self.bid_requests:
try:
ad_ids = ad_table[bid_request.domain][bid_request.country][bid_request.dimension]
if ad_ids:
evaluation_results[bid_request.id] = ad_ids
            except KeyError:
                # no campaign matches this request's domain/country/dimension
                continue
return evaluation_results
    def evaluation_results(self):
"""This method returns a json object of existing ad campaigns to evaluate bid requests against,
evaluation results with bid request IDs and matching ad campaign IDs,
number of bid requests processed and
the time to run the evaluation"""
evaluation_results = self.evaluate()
defined_campaigns = []
for ad_campaign in self.ad_campaigns:
defined_campaigns.append(ad_campaign.create_json())
bid_requests_processed = len(self.bid_requests)
return json.dumps({
"defined campaigns": defined_campaigns,
"evaluation results": evaluation_results[0],
"bid requests processed": bid_requests_processed,
"evaluation time": evaluation_results[1]
}, indent=2)
def main():
bids = BidGenerator().generate_bids(100)
adds = AddGenerator().generate_AdCampaigns(10)
    evaluation_results = Evaluation(adds, bids).evaluation_results()
print(evaluation_results)
return evaluation_results
if __name__ == "__main__":
main()
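# A hedged sketch (not in the original file) of the lookup structure that
# load_AdCampaigns builds: domain -> country -> dimension -> [campaign ids].
# The keys below are illustrative only.
example_ad_table = {
    "example.com": {
        "US": {"300x250": [1, 7], "728x90": [3]},
    },
}
# evaluate() resolves each bid request with three dict lookups into this
# structure, so matching is O(1) per request rather than a scan of campaigns.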
|
[
"noreply@github.com"
] |
Muhaimeen92.noreply@github.com
|
52e8c2cb412b5c1812825c192dbe463a5b1e34dc
|
ecbdf14f5ce530a476223a60e2c9ab5f7894f747
|
/hackrank/min_max.py
|
9f7fd85550f29ffe1bdcf60b3c95563a3698db1c
|
[] |
no_license
|
liangshinegood/lg-Python
|
daf7243ef6cba27276f375a50b78d3781d6d4527
|
6fb1ef9412eaa2b542693944ec2013f28ad0decb
|
refs/heads/master
| 2021-04-12T11:12:03.443876
| 2018-03-23T15:15:36
| 2018-03-23T15:15:36
| 126,433,121
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 234
|
py
|
# Approach: read the input matrix, take numpy.min along each row, then apply numpy.max to the result
import numpy
N,M = map(int,input().split())
A = numpy.array([input().split() for _ in range(N)],int)
print(numpy.max(numpy.min(A,axis=1),axis=0))
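# Worked example (not in the original file), with a fixed array instead of stdin:
#   A = numpy.array([[2, 5], [3, 7]])
#   numpy.min(A, axis=1) -> array([2, 3])   (row minima)
#   numpy.max(..., axis=0) -> 3, which is what the script prints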
|
[
"liangshinegood@gmail.com"
] |
liangshinegood@gmail.com
|
8288d2ed8c9cf2c7939cfa000b9f3118cd4e900d
|
e8215b98dcf46417e720cc6ef4a0329474ae9b82
|
/Tkinter Test/venv/Scripts/easy_install-3.7-script.py
|
da9765a503b6bfc8c21745c8a60562ec4bc9965c
|
[] |
no_license
|
rgkaufmann/PythonCodes
|
2d47bab84ec851fc962598f613b1e666a14c8efd
|
a5d5cd993beabdb79897a05b35420ad82f438f51
|
refs/heads/master
| 2021-06-13T23:19:09.109162
| 2021-03-03T06:00:04
| 2021-03-03T06:00:04
| 162,771,012
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 491
|
py
|
#!"C:\Users\ryank\Desktop\Personal Files\Github\PythonCodes\Tkinter Test\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')()
)
|
[
"ryankaufmannprof@gmail.com"
] |
ryankaufmannprof@gmail.com
|
510a9f985a0efe43257e5c09347d375e3bf7a034
|
3a6d77832cf48484a42d9e993d3cbec60b449953
|
/articles/form.py
|
072bd279e74a848e466e9d3cf05d0417253ffa91
|
[] |
no_license
|
braylo41/DjangoGram
|
d10ab96d9fe18d030a000ad7d468d87b484ebb8d
|
dc4a119e005838485770d50913bc4b16d8cd59d0
|
refs/heads/master
| 2023-01-12T17:08:47.212258
| 2020-11-18T09:13:48
| 2020-11-18T09:13:48
| 303,852,085
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 175
|
py
|
from django import forms
from .models import Article
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = ['title', 'body', 'image']
|
[
"bray.logan2018@gmail.com"
] |
bray.logan2018@gmail.com
|
04de4d127e94160d5c5142b7f31b4796ded8dacb
|
0f2865ba4c4c900ccc4a993d054390e45a064822
|
/rplugin/python3/defx/source/sftp.py
|
d28a19d10678e2c5d58e74d346f7061b591f040c
|
[
"MIT"
] |
permissive
|
skt041959/defx-sftp
|
01e8763121eeae346cde27eb80c5a4dd3e1aca67
|
836ed150046691861b0332be3ca6aaa8caf13ac9
|
refs/heads/main
| 2023-04-22T09:17:04.643696
| 2021-05-03T12:42:32
| 2021-05-03T12:42:32
| 365,133,407
| 0
| 0
|
MIT
| 2021-05-07T06:16:34
| 2021-05-07T06:16:34
| null |
UTF-8
|
Python
| false
| false
| 2,941
|
py
|
from defx.util import error
from defx.context import Context
from defx.base.source import Base
from paramiko import Transport, SFTPClient, RSAKey
import re
from pathlib import Path
import site
import typing
from pynvim import Nvim
site.addsitedir(str(Path(__file__).parent.parent))
from sftp import SFTPPath # noqa: E402
class Source(Base):
def __init__(self, vim: Nvim) -> None:
super().__init__(vim)
self.name = 'sftp'
self.client: SFTPClient = None
from kind.sftp import Kind
self.kind: Kind = Kind(self.vim, self)
self.username: str = ''
self.hostname: str = ''
self.path_head: str = ''
self.vars = {
'root': None,
}
def init_client(self, hostname, username) -> None:
key_path = self.vim.vars.get('defx_sftp#key_path',
self.vim.call('expand', '~/.ssh/id_rsa'))
        transport = Transport(hostname)
rsa_private_key = RSAKey.from_private_key_file(key_path)
transport.connect(username=username, pkey=rsa_private_key)
self.client = SFTPClient.from_transport(transport)
def get_root_candidate(
self, context: Context, path: Path
) -> typing.Dict[str, typing.Any]:
self.vim.call('defx#util#print_message', str(path))
path_str = self._parse_arg(str(path))
path = SFTPPath(self.client, path_str)
word = str(path)
if word[-1:] != '/':
word += '/'
if self.vars['root']:
word = self.vim.call(self.vars['root'], str(path))
word = word.replace('\n', '\\n')
return {
'word': word,
'is_directory': True,
'action__path': path,
}
def gather_candidates(
self, context: Context, path: Path
) -> typing.List[typing.Dict[str, typing.Any]]:
path_str = self._parse_arg(str(path))
path = SFTPPath(self.client, path_str)
candidates = []
for f in path.iterdir():
candidates.append({
'word': f.name,
'is_directory': f.is_dir(),
'action__path': f,
})
return candidates
def _parse_arg(self, path: str) -> str:
head, rmt_path = SFTPPath.parse_path(path)
if head is None:
return path
m = re.match('//(.+)@(.+)', head) # include username?
if m:
username, hostname = m.groups()
else:
hostname = re.match('//(.+)', head).groups()[0]
username = ''
if (username != self.username or
hostname != self.hostname):
# TODO: error handling(cannot connect)
self.init_client(hostname, username)
self.username = username
self.hostname = hostname
if rmt_path == '':
rmt_path = '.'
return self.client.normalize(rmt_path)
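# A small check (not in the original file) of the host-spec regex in _parse_arg:
if __name__ == '__main__':
    print(re.match('//(.+)@(.+)', '//alice@example.com').groups())  # ('alice', 'example.com')
    print(re.match('//(.+)@(.+)', '//example.com'))                 # None: username defaults to ''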
|
[
"haru.matu9168@gmail.com"
] |
haru.matu9168@gmail.com
|
85ad9b0323f892ca5498766a84096ba57bbaade4
|
218c8dd2da9cc70305c2720fa9e4e13cc79f1d7e
|
/calculator/calculator.py
|
f774b32a26b7bfdfa610fe723caa614cb112ae6f
|
[] |
no_license
|
fernandoferreiratbe/python-setup
|
5dfcc9e45f6720bf4d455accd83d0cb720485d49
|
92d543017299e5aebd57ab306d6ced1107d3f73d
|
refs/heads/master
| 2023-03-11T09:45:57.799434
| 2021-03-01T12:53:12
| 2021-03-01T12:53:12
| 343,409,314
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
# _*_ encoding: utf-8 _*_
class Calculator(object):
def add(self, x, y):
return x + y
def subtract(self, x, y):
return x - y
def multiply(self, x, y):
return x * y
def divide(self, x, y):
if y == 0:
            raise ZeroDivisionError('You cannot divide a number by zero.')
return x / y
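# A minimal usage sketch (not in the original file):
if __name__ == '__main__':
    calc = Calculator()
    print(calc.add(2, 3))      # 5
    print(calc.divide(10, 4))  # 2.5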
|
[
"fernando.ferreira.tbe@gmail.com"
] |
fernando.ferreira.tbe@gmail.com
|
92828e651c90169efa925b7a8d680a79c9534dce
|
5a965b99a698bae7b2ade1bc3541380bfbe0c59e
|
/29.None.py
|
a008abd2ece7f245207819b48ac79afa4a0e062d
|
[] |
no_license
|
sanjay-3129/Python-Tutorial
|
29b03368db10140af39883e3ceef32ffe3710d64
|
ba505f0ef1e79a6190fddb7159249e28acaf8ae6
|
refs/heads/master
| 2021-10-23T22:50:11.773143
| 2021-10-23T08:00:12
| 2021-10-23T08:00:12
| 230,701,854
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,777
|
py
|
'''
The None object is used to represent the absence of a value.
It is similar to null in other programming languages.
Like other "empty" values, such as 0, [] and the empty string, it is False when converted to a Boolean variable.
When entered at the Python console, it is displayed as the empty string.
'''
#(i)
print("Jus printing None:", None) #None is a null value
print("When comparing two None: "+str(None==None)+"\n")
print("when comparing None with False:", (None==False)) #We notes that None is NoneType while False is BooleanType. So None==False returns False.
print("when comparing None with True:", (None==True)) ##We notes that None is NoneType while True is BooleanType. So None==False returns False.
print("Value of None:", bool(None))
if None:
print("None got interpreted as True")
else:
print("None got interpreted as False")
#(ii)
foo = ""
print("What?",foo);print()
'''Note: None is one of the "empty" values, like "" and 0, but it is not equal to them.'''
#(iii) The None object is returned by any function that doesn't explicitly return anything else.
def some_func():
print("Hi!")
var = some_func() # calling it prints "Hi!" first
print(var) # then prints None: some_func has no "return", so it implicitly returns None
#example:
foo = print()
if foo == None:
print(1)
else:
print(2)
print()
#example
hoo = '' # holds an empty string, which is not None
poo = print('any text') # the text is displayed, but print() itself returns None
foo = print() # likewise, foo receives nothing but None
print(hoo == None) #check the output
print(poo == None)
print(foo == None)
|
[
"noreply@github.com"
] |
sanjay-3129.noreply@github.com
|
e23225b6151127ce9730c313dafca0c89005cd87
|
a99123d0d3842cf5b3c311b0bce57aaba88dcc4f
|
/monpetitmarche/blog/admin.py
|
10908684dfd09390086438f135bba60c01c9c7a1
|
[] |
no_license
|
jeg56/django
|
20a0a2d158795c52ece0e8df136c11536ce1448f
|
6d4209e5919e8391752e23e60741cc713dcf0cf8
|
refs/heads/master
| 2020-12-23T07:48:24.968722
| 2020-01-30T11:58:34
| 2020-01-30T11:58:34
| 237,087,595
| 0
| 0
| null | 2020-01-30T11:58:35
| 2020-01-29T21:39:14
|
Python
|
UTF-8
|
Python
| false
| false
| 144
|
py
|
from django.contrib import admin
from .models import Post
from .models import TestTable
admin.site.register(Post)
admin.site.register(TestTable)
|
[
"arnaud.jegoux@gmail.com"
] |
arnaud.jegoux@gmail.com
|
541a5c56a7332e5d66bc2214e29acf49b173ffdf
|
4875939a6612f193cc94b1100c45fc0393c5421b
|
/logica/Transiciones.py
|
2fd5a25c812a56d76502d51c1f8e19a7856e226a
|
[] |
no_license
|
MaikelAndres/ProyectoCompiladores1.1
|
22359e7493112b293f49d0b46d5fcfb6d2910c79
|
cf849a7d3e03af3fd2b49674aae79e9c2e888653
|
refs/heads/master
| 2020-05-21T01:24:29.895514
| 2019-10-15T21:42:44
| 2019-10-15T21:42:44
| 185,855,675
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,239
|
py
|
import Pila
# Parent class: state flags (p, q, r) plus a stack of symbols
class Trans:
def __init__(self):
self.pila = Pila.Pila()
self.p_1 = True
self.q_2 = False
self.r_final = False
def getP_1(self):
return self.p_1
def getQ_2(self):
return self.q_2
def getR_final(self):
return self.r_final
def activaP_1(self):
self.p_1=True
self.q_2=False
self.r_final=False
def activaQ_2(self):
self.q_2=True
self.p_1=False
self.r_final=False
def activaR_final(self):
self.r_final=True
self.p_1=False
self.q_2=False
# Transitions on input b
def b_b_bb(self):
self.pila.quitar()
self.pila.apilar('b')
self.pila.apilar('b')
self.activaP_1()
def b_a_ab(self):
self.pila.quitar()
self.pila.apilar('a')
self.pila.apilar('b')
self.activaP_1()
def b_n_nb(self):
self.pila.quitar()
self.pila.apilar('#')
self.pila.apilar('b')
self.activaP_1()
# Transitions on input a
def a_b_ba(self):
self.pila.quitar()
self.pila.apilar('b')
self.pila.apilar('a')
self.activaP_1()
def a_n_na(self):
self.pila.quitar()
self.pila.apilar('#')
self.pila.apilar('a')
self.activaP_1()
def a_a_aa(self):
self.pila.quitar()
self.pila.apilar('a')
self.pila.apilar('a')
self.activaP_1()
# Transitions on input c
def c_n_n(self):
self.pila.quitar()
self.pila.apilar('#')
self.activaQ_2()
def c_b_b(self):
self.pila.quitar()
self.pila.apilar('b')
self.activaQ_2()
def c_a_a(self):
self.pila.quitar()
self.pila.apilar('a')
self.activaQ_2()
def b_b_y(self):
self.pila.quitar()
self.activaQ_2()
def a_a_y(self):
self.pila.quitar()
self.activaQ_2()
def y_n_n(self):
self.pila.quitar()
self.pila.apilar('#')
self.activaR_final()
def validar(self):
return True
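# Pila is not shown in this dump. A minimal stack consistent with the calls
# used above (Pila.Pila() constructor, apilar = push, quitar = pop) might look
# like this hypothetical sketch:
class _PilaSketch:
    def __init__(self):
        self.items = []
    def apilar(self, simbolo):
        self.items.append(simbolo)  # push a symbol
    def quitar(self):
        return self.items.pop() if self.items else None  # pop the top symbol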
|
[
"noreply@github.com"
] |
MaikelAndres.noreply@github.com
|
cfe7693a63cc509737708e430b0e01a20ab1b668
|
f7fdb86606ea1e0b7eeab4ccc216ccb5ecc6e97d
|
/apps/appointmentapp/migrations/0001_initial.py
|
67837bd3b7261d2f61d3c374c130f0cd22579658
|
[] |
no_license
|
jkaloya/python_belt_exam
|
93bc43638311edd3fecc6680db5f0f8772846402
|
bc4a22a0fdebb6bcb39843bbe6918181b55a1f6d
|
refs/heads/master
| 2020-12-31T00:52:59.724673
| 2017-01-31T23:32:19
| 2017-01-31T23:32:19
| 80,562,347
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,099
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-31 20:52
from __future__ import unicode_literals
import apps.appointmentapp.models
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('status', models.IntegerField(choices=[(1, 'Done'), (2, 'Pending'), (3, 'Missed')], default=2)),
('date', models.DateField(validators=[apps.appointmentapp.models.validate_future_date])),
('time', models.TimeField()),
('created_at', models.DateField(auto_now_add=True)),
('updated_at', models.DateField(auto_now=True)),
],
managers=[
('taskMgr', django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('email', models.CharField(max_length=255)),
('password', models.CharField(max_length=255)),
('created_at', models.DateField(auto_now_add=True)),
('updated_at', models.DateField(auto_now=True)),
],
managers=[
('Usermgr', django.db.models.manager.Manager()),
],
),
migrations.AddField(
model_name='task',
name='creator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='itemcreater', to='appointmentapp.User'),
),
]
|
[
"jkaloya@MacBook-Pro-2.local"
] |
jkaloya@MacBook-Pro-2.local
|
459a01ac146738b3a40827a2f553be5797d92b5e
|
bd4d78d90ffdb35dcf16224ff19442206b2716e9
|
/RoomModesV2.py
|
9d6b0a862b0065df55381fc7dec89f3f979a42d3
|
[] |
no_license
|
lizlgrzyb/Room-Modes
|
f2f75afc6f4493392060ce9f07c3ca313ee48f1b
|
627b96d6038cb2a2bf4006a317c2d11c2940bacb
|
refs/heads/master
| 2021-05-04T23:50:33.750726
| 2018-02-02T00:26:41
| 2018-02-02T00:26:41
| 119,425,314
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,116
|
py
|
#This program takes the dimensions of the room as
#an argument, and returns the axial room modes
#within that given area via graph.
#Importing necessary modules
import matplotlib.pyplot as plt
import numpy as np
import math
#Taking user input to determine the area being evaluated,
# the units that should be used for evaluation, and the
#number of room modes to be calculated.
length=input("Enter the length of the room: ")
width=input("Enter the width of the room: ")
height=input("Enter the height of the room: ")
harmonic=input("Number of harmonics to calculate: ")
harmonic=int(harmonic)#Value for number of harmonics
length=float(length) #Value for length as float
width=float(width) #Value for width as float
height=float(height) #Value for height as float
units=input("Is the measurement in feet or meters? (Enter f for feet, and m for meters): ")
while units not in ("f", "m"): #Re-prompt until a valid unit is entered
    print("Please enter either m or f to indicate units.")
    units=input("Is the measurement in feet or meters? (Enter f for feet, and m for meters): ")
#Setting up units for speed of sound based on user input
if units == "f":
    c=1125.33 #Feet per second (speed of sound)
else:
    c=343 #Meters per second (speed of sound)
#This section calculates the axial room modes (between parallel walls)
axialN = range(1,harmonic+1) #Sets the number of harmonics to be calculated (x axis)
axialL = [] #List of room modes created as a result of room length
axialW = [] #List of room modes created as a result of room width
axialH = [] #List of room modes created as a result of room height
axialAll = [] #List of all axial modes (for histogram)
v = c/2 #Adjusting speed of sound for equation
for i in range(1,harmonic+1):
axialL.append(v*math.sqrt(i**2/length**2)) #Calculating and appending room modes to list axialL
axialW.append(v*math.sqrt(i**2/width**2)) #Calculating and appending room modes to list axialW
axialH.append(v*math.sqrt(i**2/height**2)) #Calculating and appending room modes to list axialH
#Appending to a list to store all axial modes
for f in axialL:
axialAll.append(f)
for f in axialW:
axialAll.append(f)
for f in axialH:
axialAll.append(f)
#Generating scatterplot of axial room modes with frequency vs harmonic number
plt.scatter(axialN, axialL, label="Length ") #Plotting length modes
plt.scatter(axialN, axialW, label="Width") #Plotting width modes
plt.scatter(axialN, axialH, label="Height") #Plotting height modes
plt.title('Axial Modes')
plt.ylabel('Frequency (Hz)')
plt.xlabel('Harmonic Number')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.show()
#Generating histogram of axial room modes grouped to frequency bands
bins = np.linspace(math.ceil(min(axialAll)),
math.floor(max(axialAll)),
20) # fixed number of bins
plt.xlim([0, max(axialAll)+5])
plt.hist(axialAll, bins=bins, alpha=0.5)
plt.title('Axial Modes Per Frequency Range (fixed number of groups)')
plt.xlabel('Frequency Range (20 Groups)')
plt.ylabel('Number of Modes')
plt.show()
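#Worked check (not in the original file): the axial-mode expression
#v*math.sqrt(i**2/length**2) reduces to i*c/(2*length), so for a 20 ft room
#measured in feet (c = 1125.33) the first length mode is 1125.33/(2*20) ~= 28.1 Hz.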
|
[
"Gayle@Gayles-MacBook-Pro.local"
] |
Gayle@Gayles-MacBook-Pro.local
|
2481d85d475e22868833a6a4c1a9f10d5167e005
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/coverage-big-4079.py
|
2a05d715a81f2afdf05f6cbef244c71ef369dced
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,348
|
py
|
count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
                self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
6b63d1623978dcba7792d56ad483f38edcf4156c
|
161dad54f942177075b234475d2b41e908cde718
|
/blog/models.py
|
148d9cc4989542370dbb74a3ede2642d1318ddc8
|
[] |
no_license
|
TeYoMe/DMblog
|
a5711ab33251629a86d1519535ddc51756e6566f
|
b852e558d6cae0d18219e6e6caf6b27a4756c80f
|
refs/heads/master
| 2021-01-25T13:00:12.452373
| 2018-03-02T03:05:47
| 2018-03-02T03:05:47
| 123,522,051
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,658
|
py
|
from django.db import models
from django.core.urlresolvers import reverse
class PublishedManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(status='p')
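# Illustrative usage of the custom manager (it is attached to Post below as
# `published`): Post.published.all() returns only posts whose status is 'p'.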
class Category(models.Model):
name = models.CharField('分类名', max_length=30, unique=True)
imgurl = models.CharField('分类图片', max_length=200, blank=True)
objects = models.Manager()
class Meta:
ordering = ['name']
verbose_name = '分类'
verbose_name_plural = verbose_name
def get_absolute_url(self):
return reverse('blog:CateDetail', args=[self.name])
def __str__(self):
return self.name
class Post(models.Model):
STATUS_CHOICES = (
('d', '草稿'),
('p', '发表'),
)
COMMENT_STATUS = (
        ('o', '打开'),
('c', '关闭'),
)
title = models.CharField('标题', max_length=200, unique=True)
category = models.ForeignKey('Category', verbose_name='分类', blank=False, null=False)
tags = models.ManyToManyField('Tag', verbose_name='标签云', blank=True)
views= models.PositiveIntegerField(default=0)
body = models.TextField('正文')
created_time = models.DateTimeField('创建时间', auto_now_add=True)
last_mod_time = models.DateTimeField('修改时间', auto_now=True)
pub_time = models.DateTimeField('发布时间', blank=True, null=True)
status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES, default='p')
comment_status = models.CharField('评论状态', max_length=1, choices=COMMENT_STATUS, default='o')
objects = models.Manager()
published = PublishedManager()
class Meta:
ordering = ['-pub_time']
verbose_name = '文章'
verbose_name_plural = verbose_name
get_latest_by = 'created_time'
def increase_views(self):
self.views += 1
self.save(update_fields=['views'])
def get_absolute_url(self):
return reverse('blog:PostDetail',
args=[self.pub_time.year,
self.pub_time.strftime('%m'),
self.pub_time.strftime('%d'),
self.title])
def __str__(self):
return self.title
class Tag(models.Model):
name = models.CharField('标签', max_length=30, unique=True)
objects = models.Manager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('blog:TagDetail', args=[self.name])
class Meta:
ordering = ['id']
verbose_name = '标签'
verbose_name_plural = verbose_name
|
[
"2027598917@qq.com"
] |
2027598917@qq.com
|
147f6d1feac51fa7a3bb8519b7d88ed3884e6cda
|
e9bffc770b30fe8ed1a28c4d60cbae013a95c0a5
|
/Django/Code/mysite/library/admin.py
|
a4a5bd862c89aecce2aa6df008d68b70b14f8d27
|
[] |
no_license
|
robotautas/kursas
|
2fa0506e3473112ca0f1aa3f26c0084f28b6daa7
|
11d991b5fdd0e5c5a6d3b6e271cf9877124a659c
|
refs/heads/master
| 2023-08-31T18:56:08.184618
| 2023-08-31T08:03:57
| 2023-08-31T08:03:57
| 227,419,547
| 19
| 90
| null | 2023-05-19T11:15:04
| 2019-12-11T17:11:03
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 971
|
py
|
from django.contrib import admin
from .models import Author, Genre, Book, BookInstance
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
class BookInstanceAdmin(admin.ModelAdmin):
list_display = ('book', 'status', 'due_back', 'id')
list_editable = ('status', 'due_back')
list_filter = ('status', 'due_back')
search_fields = ('id', 'book__title')
fieldsets = (
(None, {
'fields': ('book', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'display_books')
admin.site.register(Book, BookAdmin)
admin.site.register(Author, AuthorAdmin)
admin.site.register(Genre)
admin.site.register(BookInstance, BookInstanceAdmin)
|
[
"jotautas.treigys@gmail.com"
] |
jotautas.treigys@gmail.com
|
43b67be874649b4beaad1b6d9039f7f297c766c4
|
a912f143a97ad67d61efb29a37a4bb2b36272c48
|
/scripts/putnam/Fantastic Mini-Mods/pydwarf.materialsplus.py
|
503bf892eae54323eb6bd92f6f27643ac0ab1ba8
|
[
"Zlib"
] |
permissive
|
johny5w/PyDwarf
|
bde43748f0b64c246b17edb749a68472620b2b32
|
1ac698f395d036a46b63f63a2c2dd28494759082
|
refs/heads/master
| 2021-01-20T16:47:41.095478
| 2015-05-23T12:56:28
| 2015-05-23T12:56:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,510
|
py
|
import os
import pydwarf
from raws import rawsfile, rawstoken
permitted_reactions = '''
\t[PERMITTED_REACTION:MANGANESE_STEEL_MAKING_BARS]
\t[PERMITTED_REACTION:MANGANESE_STEEL_MAKING_ORE]
\t[PERMITTED_REACTION:HIGH_SPEED_STEEL_MAKING]
\t[PERMITTED_REACTION:BERYLLIUM_MACRO_PUTNAM]
\t[PERMITTED_REACTION:KROLL_MACRO_PUTNAM]
\t[PERMITTED_REACTION:BERYLLIUM_REFINING_PUTNAM_GEMS]
\t[PERMITTED_REACTION:BERYLLIUM_REFINING_PUTNAM_BOULDER]
\t[PERMITTED_REACTION:MAKE_SULFURIC_ACID_PUTNAM]
\t[PERMITTED_REACTION:KROLL_PROCESS_BOULDER_PUTNAM]
\t[PERMITTED_REACTION:KROLL_PROCESS_GEM_PUTNAM]
\t[PERMITTED_REACTION:PIDGEON_PROCESS_PUTNAM]'''
# Utility function for putting new properties after an inorganic's USE_MATERIAL_TEMPLATE token, if it has one
# Otherwise, the property is just added after the INORGANIC object token.
def addaftertemplate(inorganic, addition):
template = inorganic.getuntil(exact_value='USE_MATERIAL_TEMPLATE', until_exact_value='INORGANIC')
addafter = template if template else inorganic
return addafter.add(addition)
@pydwarf.urist(
name = 'materials plus',
version = 'alpha',
author = ('Putnam', 'Sophie Kirschner'),
description = 'Adds a bunch of materials to the game.',
compatibility = (pydwarf.df_0_34, pydwarf.df_0_40)
)
def materialsplus(raws):
exceptions = 0
try:
for zircon in raws.all(exact_value='INORGANIC', re_args=['.* ZIRCON']):
addaftertemplate(zircon, 'MATERIAL_REACTION_PRODUCT:KROLL_PROCESS:INORGANIC:ZIRCONIUM_PUTNAM')
pydwarf.log.debug('Added reaction to zircons.')
except:
pydwarf.log.exception('Failed to add reaction to zircons.')
exceptions += 1
try:
for beryl in raws.all(exact_value='INORGANIC', re_args=['.* BERYL|HELIODOR|MORGANITE|GOSHENITE|EMERALD']):
addaftertemplate(beryl, 'REACTION_CLASS:BERYLLIUM')
pydwarf.log.debug('Added reaction to beryls.')
except:
pydwarf.log.exception('Failed to add reaction to beryls.')
exceptions += 1
try:
chromite = raws.get('INORGANIC:CHROMITE')
pyrolusite = raws.get('INORGANIC:PYROLUSITE')
addaftertemplate(chromite, '[METAL_ORE:CHROMIUM_PUTNAM:100][METAL_ORE:IRON:50]')
addaftertemplate(pyrolusite, 'METAL_ORE:MANGANESE_PUTNAM:100')
        pydwarf.log.debug('Added chromium and manganese ores.')
except:
        pydwarf.log.exception('Failed to add chromium and manganese ores.')
exceptions += 1
try:
for silicon in raws.all(exact_value='INORGANIC', re_args=['ANDESITE|OLIVINE|HORNBLENDE|SERPENTINE|ORTHOCLASE|MICROCLINE|MICA']):
addaftertemplate(silicon, 'REACTION_CLASS:SILICON')
pydwarf.log.debug('Added silicon reactions.')
except:
pydwarf.log.exception('Failed to add silicon reactions.')
exceptions += 1
try:
dolomite = raws.get('INORGANIC:DOLOMITE')
addaftertemplate(dolomite, 'REACTION_CLASS:PIDGEON_PROCESS')
pydwarf.log.debug('Added reaction to dolomite.')
except:
pydwarf.log.exception('Failed to add reaction to dolomite.')
exceptions += 1
try:
raws.get('ENTITY:MOUNTAIN').get(exact_value='PERMITTED_REACTION').add(permitted_reactions, reverse=True)
pydwarf.log.debug('Added permitted reactions.')
except:
pydwarf.log.exception('Failed to add permitted reactions.')
exceptions += 1
for root, dirs, files in os.walk(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'Materials Plus')):
for filename in files:
suffix = '_mat_plus.txt'
if filename.endswith(suffix):
path = os.path.join(root, filename)
destname = 'putnam_%s' % filename[:-len(suffix)]
rfile = raws.getfile(destname)
if rfile:
pydwarf.log.debug('Appending data to file %s from %s...' % (destname, path))
with open(path, 'rb') as matplus: rfile.add(matplus)
else:
with open(path, 'rb') as matplus: raws.addfile(rfile=rawsfile(header=destname, rfile=matplus))
pydwarf.log.debug('Adding data to new file %s.' % destname)
if exceptions == 0:
return pydwarf.success()
else:
return pydwarf.failure('Failed to complete %d operations.' % exceptions)
|
[
"sophie@thehumangeo.com"
] |
sophie@thehumangeo.com
|
2ba2eebd706830142539b0bf84840ac31b9f221e
|
06c1179ff523f2de0b2caf68cc1f93b1012ced77
|
/bot/utils/__init__.py
|
5d3a88cd3716652febc9864e3cf975a737a872be
|
[] |
no_license
|
jpark9013/Discord-Bot
|
6ab6bae3070ff9542dd862fc7fc2e732c3f8a3b1
|
290c638cf46379219ee5ac9426bf0ee98ee79776
|
refs/heads/master
| 2022-12-06T17:53:32.814677
| 2020-08-28T01:25:01
| 2020-08-28T01:25:01
| 281,536,004
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 49
|
py
|
__all__ = ["format", "paginator", "permissions"]
|
[
"jpark9013@gmail.com"
] |
jpark9013@gmail.com
|
2f3eedae276c137379edc09c54ff1f17e0e09bd2
|
b1aa8cb635a0f1488b6eb6ca4b0edc6bbcbf8673
|
/sample_question/divisible.py
|
bb03fb9ea829705dfed26e30d3c4c75a850a3c50
|
[] |
no_license
|
Tansiya/tansiya-training-prgm
|
f106f042159e9d5635ae6c6529e0e18e4afce73b
|
b24958c1453c08cd0fd902f0b6e92a3dee7ee057
|
refs/heads/master
| 2020-03-30T09:27:58.571427
| 2018-10-27T10:31:06
| 2018-10-27T10:31:06
| 151,076,739
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 570
|
py
|
""" find the factorial number and divide the 7 and but are not a multiple of 5.print the values factorial or list"""
#assign a factorial and using function
def fact(x):
if (x == 0):
return 1
else:
return x * fact(x - 1)
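#e.g. fact(5) evaluates to 5*4*3*2*1 = 120, with fact(0) as the base case returning 1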
#build the list of qualifying factorials
def maths():
l= []
    #loop over the candidate range
for i in range(900, 1000):
if (i%7==0) and (i%5!=0):
factorial = fact(i)
l.append(str(factorial))
return l
#find k value
k = maths()
#print the factorials as a comma-separated list
print ("Result is", ','.join(k))
#print ("factorial is", factorial)
|
[
"tansiyamalarkodi3396@gmail.com"
] |
tansiyamalarkodi3396@gmail.com
|
36baaf3a20ce83d3a6c8ba5ff5520dfea9c221a4
|
f6ab35c3c5f899df0c0ee074de8f8df30227ffe2
|
/main/migrations/0003_bugreport.py
|
339e86ba41ebea2281621593ccd51d79e4b43551
|
[
"MIT"
] |
permissive
|
josylad/RoomScout
|
f3614291bbfdd226110e038fb60d593ab3093c7e
|
a3d067dd67dfdd43702ea2e89064213dbd469157
|
refs/heads/master
| 2020-12-27T09:22:47.486710
| 2020-02-20T10:18:05
| 2020-02-20T10:18:05
| 237,850,614
| 0
| 0
|
MIT
| 2020-02-20T10:18:06
| 2020-02-02T23:08:10
|
Python
|
UTF-8
|
Python
| false
| false
| 646
|
py
|
# Generated by Django 2.2.4 on 2019-09-23 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20190922_2118'),
]
operations = [
migrations.CreateModel(
name='BugReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('sender', models.EmailField(default='', max_length=254)),
('subject', models.TextField(default='')),
('message', models.TextField(default='')),
('ip', models.GenericIPAddressField()),
],
),
]
|
[
"nova1104@live.com"
] |
nova1104@live.com
|
ca4769c5895b8ad278b14f94504182d144f01b95
|
13505f9ba1f90ec62de6bd3f03f3df26bee0ab09
|
/app.py
|
494df93a82231531726f732a365fcd87cb27b0a0
|
[] |
no_license
|
ohdDev/FSND-capstone
|
e5d3e6a4a0745f1abdb5125aa27e770d07c94366
|
cbe8f34d1e0857afe2f56f4fdaa69afa883129cf
|
refs/heads/master
| 2023-03-14T01:25:49.913658
| 2021-03-05T00:31:55
| 2021-03-05T00:31:55
| 344,076,472
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,139
|
py
|
import os
from flask import Flask, request, abort, jsonify
from models import setup_db, Movies, Actors
from flask_cors import CORS
from auth import AuthError, requires_auth
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(test_config=None):
app=Flask(__name__)
setup_db(app)
CORS(app)
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization,true')
response.headers.add('Access-Control-Allow-Methods', 'GET,PATCH,POST,DELETE,OPTIONS')
return response
@app.route('/')
def be_cool():
return "Be cool, man, be coooool! You're almost a FSND grad!"
@app.route('/actors', methods=['GET'])
@requires_auth('get:actors')
def get_actors(jwt):
actors = Actors.query.all()
actors_format = [actor.format() for actor in actors]
if len(actors_format) == 0:
abort(404)
else:
return jsonify({
'success':True,
'actors': actors_format
}),200
@app.route('/movies', methods=['GET'])
@requires_auth('get:movies')
def get_movies(jwt):
movies = Movies.query.all()
movies_format = [movie.format() for movie in movies]
if len(movies_format) == 0:
abort(404)
else:
return jsonify({
'success':True,
'movies': movies_format
}),200
@app.route('/actors', methods=['POST'])
@requires_auth('post:actors')
def submit_actors(jwt):
body = request.get_json()
new_name = body.get('name',None)
new_age = body.get('age',None)
new_gender = body.get('gender',None)
if new_name is None or new_age is None or new_gender is None:
abort(400)
try:
actors = Actors(name=new_name, age=new_age, gender=new_gender)
actors.insert()
return jsonify({
'success':True,
'actors':actors.format()
}),200
except:
abort(422)
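    # Illustrative request body for POST /actors (the values are made up):
    #   {"name": "Jane Doe", "age": 34, "gender": "F"}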
@app.route('/movies', methods=['POST'])
@requires_auth('post:movies')
def submit_movies(jwt):
body = request.get_json()
new_name = body.get('name',None)
new_release_date = body.get('release_date',None)
if new_name is None or new_release_date is None:
abort(400)
try:
movies = Movies(name=new_name, release_date=new_release_date)
movies.insert()
return jsonify({
'success':True,
'movies':movies.format()
}),200
except:
abort(422)
@app.route('/actors/<int:id>', methods=['PATCH'])
@requires_auth('patch:actors')
def update_actors(token, id):
try:
body = request.get_json()
actor = Actors.query.filter(Actors.id == id).one_or_none()
if actor is None:
abort(404)
if 'name' in body:
actor.name = body.get('name')
if 'age' in body:
actor.age = body.get('age')
if 'gender' in body:
actor.gender = body.get('gender')
actor.update()
return jsonify({
'success':True,
'actors': actor.format()
}), 200
except:
abort(400)
@app.route('/movies/<int:id>', methods=['PATCH'])
@requires_auth('patch:movies')
def update_movies(token, id):
try:
body = request.get_json()
movie = Movies.query.filter(Movies.id == id).one_or_none()
if movie is None:
abort(404)
if 'name' in body:
movie.name = body.get('name')
if 'release_date' in body:
movie.release_date = body.get('release_date')
movie.update()
return jsonify({
'success':True,
'movies': movie.format()
}), 200
except:
abort(400)
@app.route('/actors/<int:id>', methods=['DELETE'])
@requires_auth('delete:actors')
def delete_actors(token, id):
try:
actor = Actors.query.filter(Actors.id == id).one_or_none()
if actor is None:
abort(404)
actor.delete()
return jsonify({
'success':True,
'delete': id
}), 200
except:
abort(422)
@app.route('/movies/<int:id>', methods=['DELETE'])
@requires_auth('delete:movies')
def delete_movies(token, id):
try:
movie = Movies.query.filter(Movies.id == id).one_or_none()
if movie is None:
abort(404)
movie.delete()
return jsonify({
'success':True,
'delete': id
}), 200
except:
abort(422)
@app.errorhandler(422)
def unprocessable(error):
return jsonify({
"success": False,
"error": 422,
"message": "unprocessable"
}), 422
@app.errorhandler(404)
def not_found(error):
return jsonify({
"success": False,
"error": 404,
"message": "resource not found"
}), 404
@app.errorhandler(401)
def unauthorized(error):
return jsonify({
'success': False,
'error': 401,
'message': "unauthorized"
}), 401
@app.errorhandler(400)
def bad_request(error):
return jsonify({
"success": False,
"error": 400,
"message": "bad request"
}), 400
@app.errorhandler(AuthError)
def not_authenticated(error):
return jsonify({
"success": False,
"error": error.status_code,
"message": error.error
}), 401
return app
app = create_app()
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080, debug=True)
|
[
"w.ohd95@gmail.com"
] |
w.ohd95@gmail.com
|
59b2593d0beb148c783eff6a76d9b084c0fab175
|
d442d1f39ea86a85612d0b2014cf7a0b4aaf6e4e
|
/main.py
|
41445742e8557aeb16e38c0c992dbda64f1049f3
|
[] |
no_license
|
avaloshka/Day_29_Password_Generator
|
8f1f49df5c90ee8ccb3d12a85af3eccd036faf25
|
80f473d0355fabf5dfb4a11efdca5c05e1298bd1
|
refs/heads/main
| 2023-06-05T02:39:37.624069
| 2021-06-26T17:54:22
| 2021-06-26T17:54:22
| 378,091,056
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,837
|
py
|
import tkinter
from tkinter import END
from tkinter import messagebox
import random
import string
import pyperclip
import json
WEB = None
# ---------------------------- PASSWORD GENERATOR ------------------------------- #
def generate_password():
letters = string.ascii_lowercase
up_letters = string.ascii_uppercase
numbers = string.digits
symbols = string.punctuation
number_of_char_required_in_a_password = 16
all_char = letters + up_letters + numbers + symbols
collection = []
for _ in range(number_of_char_required_in_a_password):
n = random.choice(all_char)
collection.append(n)
random.shuffle(collection)
password = ''.join(collection)
password_entry.delete(0, END)
password_entry.insert(0, password)
pyperclip.copy(password)
# ---------------------------- SAVE PASSWORD ------------------------------- #
def save():
web = website_entry.get()
email = email_entry.get()
password = password_entry.get()
new_data = {
web: {
"email": email,
"password": password,
}
}
    if len(web) == 0 or len(password) == 0: #both fields are required
        messagebox.showinfo(message="Please enter the required info")
else:
try:
with open("data.json", "r") as data_file:
data = json.load(data_file)
except FileNotFoundError:
with open("data.json", "w") as data_file:
json.dump(new_data, data_file, indent=4)
else:
data.update(new_data)
with open("data.json", "w") as data_file:
json.dump(data, data_file, indent=4)
finally:
website_entry.delete(0, END)
password_entry.delete(0, END)
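# After a few saves, data.json is shaped like (illustrative values):
#   {"example.com": {"email": "user@example.com", "password": "..."}}
# with new entries merged in via data.update(new_data) above.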
# ---------------------------- RETRIEVE PASSWORD ------------------------------- #
def find_password():
website = website_entry.get()
try:
with open("data.json") as data_file:
data = json.load(data_file)
except FileNotFoundError:
messagebox.showinfo(title="Error", message="No Data Found")
else:
if website in data:
email = data[website]["email"]
password = data[website]["password"]
messagebox.showinfo(title="website", message=f"Email: {email}\nPassword: {password}")
else:
messagebox.showinfo(title="No Data", message="No Data Found")
# ---------------------------- UI SETUP ------------------------------- #
window = tkinter.Tk()
window.title("Password Manager")
window.config(padx=20, pady=20)
canvas = tkinter.Canvas(width=200, height=189)
logo_img = tkinter.PhotoImage(file="logo.png")
canvas.create_image(100, 100, image=logo_img)
canvas.grid(column=1, row=0)
# Website
website_label = tkinter.Label(text="Website:")
website_label.grid(column=0, row=1)
website_entry = tkinter.Entry(width=32)
website_entry.grid(column=1, row=1)
website_entry.focus()
# Email
email_label = tkinter.Label(text="Email/Username:")
email_label.grid(column=0, row=2)
email_entry = tkinter.Entry(width=51)
email_entry.grid(column=1, row=2, columnspan=2)
email_entry.insert(0, "enter your email")
# Password
password_label = tkinter.Label(text="Password:")
password_label.grid(column=0, row=3)
password_entry = tkinter.Entry(width=32)
password_entry.grid(column=1, row=3)
# "Generate Password" button
generate_password_button = tkinter.Button(text="Generate Password", command=generate_password)
generate_password_button.grid(column=2, row=3)
# "Add" button
add_button = tkinter.Button(text="Add", width=27, command=save)
add_button.grid(column=1, row=4)
# "Search" button
search_button = tkinter.Button(text="Search", width=15, command=find_password)
search_button.grid(column=2, row=1)
window.mainloop()
|
[
"noreply@github.com"
] |
avaloshka.noreply@github.com
|
373f663a57489c8b441d1942ab19631dc3ab477d
|
c1c8aa53670d25857f1fbde77c9a5afc8eb8503f
|
/weblog/migrations/0006_auto_20170828_1148.py
|
e07b448627f9f7d52f9e8605c72dfd3f8bee1009
|
[] |
no_license
|
ricale/private-block
|
177690393f5590f2d6160b24756638cf0ac18a16
|
bedb33defaaeb5e4ca35b274406f79ebce909cdf
|
refs/heads/master
| 2020-05-22T03:56:27.639483
| 2018-01-11T06:25:59
| 2018-01-11T06:25:59
| 65,257,341
| 0
| 0
| null | 2018-01-11T06:26:00
| 2016-08-09T02:51:40
|
Python
|
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 02:48
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('weblog', '0005_post_category'),
]
operations = [
migrations.AddField(
model_name='category',
name='depth',
field=models.IntegerField(default=1),
),
migrations.AddField(
model_name='category',
name='family',
field=models.IntegerField(default=1),
),
migrations.AddField(
model_name='category',
name='order_in_parent',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='category',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='weblog.Category'),
),
]
|
[
"KIM.kangseong@gmail.com"
] |
KIM.kangseong@gmail.com
|
285bd3f58ba4329c58773cb361b56b8cbc72ae46
|
e8cd62402d54dce1b6325335d4cbfa8b2394cf84
|
/blog/migrations/0001_initial.py
|
e6ae6d9a972f7cf94e10765a4ed3d3b5a359dc8c
|
[] |
no_license
|
07legion/my-first-blog
|
c603a44cd0f349459b02ce06fa1927bb84da7aac
|
8c93249b29fe01974c7d37fc5c96bd9c687c9368
|
refs/heads/master
| 2023-05-13T01:53:45.429640
| 2021-06-04T04:31:10
| 2021-06-04T04:31:10
| 373,373,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 987
|
py
|
# Generated by Django 2.2.24 on 2021-06-03 03:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateTimeField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"sharmaamish39@gmail.com"
] |
sharmaamish39@gmail.com
|
ec2323c56afae2e06a5a3139251e27471c4b6ab6
|
6a285406e7cd181c12d6a949c894bafda2d881d4
|
/scripts/check_expansion/check_expansion.py
|
36a373b14f3478c1e7b6db496cd2be0be79a3859
|
[] |
no_license
|
jngaravitoc/time-dependent-BFE
|
e26ed886fcf4eb3215e96c092f6b12cba41aa983
|
529295cec801a50de62ba8cf6b1cba530f08957a
|
refs/heads/master
| 2023-09-01T06:47:02.535033
| 2023-08-24T14:41:04
| 2023-08-24T14:41:04
| 235,460,058
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,377
|
py
|
import numpy as np
import matplotlib
#matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
plt.style.use('/mnt/home/nico/matplotlib.mplstyle')
from astropy import units as u
import gala.coordinates as gc
import gala.dynamics as gd
import gala.potential as gp
import gala.integrate as gi
from gala.units import galactic
import pynbody
import bfe
import nba
# Read data
# Plot coefficients
def hist_coefficients(coeff_matrix, figname=0):
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
im = ax.imshow(np.log10(np.abs(coeff_matrix.T)), origin='lower', cmap='viridis')
fig.colorbar(im, ax=ax)
    if figname!=0:
        #save before show(); saving after show() can produce a blank image
        plt.savefig(figname, bbox_inches='tight')
    plt.show()
    plt.close()
def particle_density_profile(snapshot):
s = pynbody.load(snapshot)
#h = s.halos()
pynbody.analysis.angmom.faceon(s)
s.physical_units()
p = pynbody.analysis.profile.Profile(s, min=0.0, max=400, nbins=256, ndim=3)
return p['rbins'], p['density']
# Plot density profiles
def density_profile(S, T, rmin, rmax, m, rs, snapshot):
pot = gp.SCFPotential(m=m*u.Msun, r_s=rs*u.kpc, Snlm=S, Tnlm=T, units=galactic)
x = np.logspace(np.log10(rmin), np.log10(rmax), 128)
xyz = np.zeros((3, len(x)))
xyz[0] = x
sims_profile = particle_density_profile(snapshot)
fig, ax = plt.subplots(1, 1)
ax.plot(x, pot.density(xyz), label='SCF')
ax.plot(sims_profile[0], sims_profile[1], label='Gadget')
ax.set_xscale('log')
ax.set_yscale('log')
ax.legend()
plt.show()
def density_contour(S, T, grid_size, m, rs, snap, ngrid=128, delta_rho=False):
S0 = np.zeros_like(S)
T0 = np.zeros_like(T)
S0[0,0,0] = S[0,0,0]
T0[0,0,0] = T[0,0,0]
circle1 = plt.Circle((0, 0), 100, color='w', fill=False, ls='--', alpha=0.7)
pot = gp.SCFPotential(m=m*u.Msun, r_s=rs*u.kpc, Snlm=S, Tnlm=T, units=galactic)
x0 = np.linspace(grid_size[0], grid_size[1], ngrid)
y0 = np.linspace(grid_size[0], grid_size[1], ngrid)
#x = np.linspace(grid_size[0]-orbit[snap,1], grid_size[1]-orbit[snap,1], ngrid)
#y = np.linspace(grid_size[0]-orbit[snap,2], grid_size[1]-orbit[snap,2], ngrid)
x = np.linspace(grid_size[0], grid_size[1], ngrid)
y = np.linspace(grid_size[0], grid_size[1], ngrid)
grid = np.meshgrid(x, y)
xyz = np.zeros((3, ngrid**2))
xyz[1] = grid[0].flatten()
xyz[2] = grid[1].flatten()
rho = pot.density(xyz)
#rho_0 = pot_0.density(xyz)
fig, ax = plt.subplots(1, 1, figsize=(8,8))
if delta_rho == False :
levels = np.linspace(np.min(np.log10(np.abs(rho.value))), np.max(np.log10(np.abs(rho.value))), 20)
ax.contourf(x0, y0, np.log10(np.abs(rho.value.reshape(ngrid, ngrid))), 20, cmap='inferno',
origin='lower', extent=[-250, 250, -250, 250])
elif delta_rho == True :
pot_0 = gp.SCFPotential(m=m*u.Msun, r_s=rs*u.kpc, Snlm=S0, Tnlm=T0, units=galactic)
rho_0 = pot_0.density(xyz)
ax.contourf(x0, y0, (rho.value/rho_0.value).reshape(128, 128)-1 , 20, cmap='inferno',
origin='lower', vmin=-0.4, vmax=0.4, extent=[-250, 250, -250, 250])
#x.scatter(orbit[snap,7], orbit[snap,8], c='w')
#ax.plot(orbit[:snap+1,7], orbit[:snap+1,8], lw='1.5', c='w', alpha=0.7)
ax.add_patch(circle1)
ax.set_xticks([])
ax.set_yticks([])
ax.plot([-150, -50], [-220, -220], c='w')
ax.text( -150, -210, r'$\rm{100\ kpc}$', c='w', fontsize=22)
ax.text( -200, 220, r'$t={:0.1f}\ $Gyr'.format(snap*0.02), c='w', fontsize=22)
plt.savefig('density_contour_{:03d}.png'.format(snap), bbox_inches='tight', dpi=300)
plt.close()
# Plot enclosed mass?
# Plot delta rho plot
if __name__ == "__main__":
for i in range(0, 100, 10):
coefficients = "/mnt/home/nico/projects/time-dependent-BFE/data/expansion/MW2_100M_beta1_vir_OM3_G4/BFE_MW_grav_MO3_nsamp_1e6_host_snap_{:03d}".format(i)
#hist_coefficients(S[:,:,0])
#density_profile(S, T, rmin=0.10, rmax=400, m=1e10, rs=expansion[2][0], snapshot=snapshot)
grid_size = [-250, 250]
expansion = bfe.ios.read_coefficients(coefficients)
S, T = expansion[0]
print(expansion[2][0])
density_contour(S, T, grid_size, m=1e10, rs=expansion[2][0], snap=i)
#particle_density_profile(snapshot)
|
[
"jngaravitoc@email.arizona.edu"
] |
jngaravitoc@email.arizona.edu
|
7a4878c76aa17420d73c206deeb3c9c60a71f66a
|
dd636f4abc446dc9a1d738d394c49884d0d305ac
|
/app/core/management/commands/wait_for_db.py
|
08fcd67c1656ba95773e34cd7f87971fd69a6cec
|
[
"MIT"
] |
permissive
|
locspoc/recipe-app-api
|
3824ccdc409623266278136bf1d6471d25b6e2bd
|
86a0e3893c27f4eee9f29c9ad68e4f8092b57b98
|
refs/heads/main
| 2023-04-16T06:35:16.345532
| 2021-05-01T00:57:01
| 2021-05-01T00:57:01
| 349,620,260
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 671
|
py
|
import time
from django.db import connections
from django.db.utils import OperationalError
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""Django command to pause execution until database is available"""
def handle(self, *args, **options):
self.stdout.write('Waiting for database...')
db_conn = None
while not db_conn:
try:
db_conn = connections['default']
except OperationalError:
                self.stdout.write('Database unavailable, waiting 1 second...')
time.sleep(1)
self.stdout.write(self.style.SUCCESS('Database available!'))
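# Typical invocation (assumed, following Django management-command conventions):
#   python manage.py wait_for_db && python manage.py migrate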
|
[
"mrlocspoc@gmail.com"
] |
mrlocspoc@gmail.com
|
63cff32c562a62ea23fc6c8cfdd282617065883a
|
529a5686f69e38527809f86c055ccd70095065cd
|
/scripts/prepare_txt_done_data_file.py
|
6c3e00ef6300dd57d6d3de652c50581a3f9bb91d
|
[
"MIT"
] |
permissive
|
AvashnaGovender/Tacotron
|
925ad68050bde7a5b3b0f6686295ff6ea0cb241d
|
b4d710ffb0f9e7ef0096d1993b8a24cae4f0d557
|
refs/heads/master
| 2023-01-14T01:23:04.539018
| 2020-11-27T08:55:53
| 2020-11-27T08:55:53
| 277,764,431
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 552
|
py
|
import os
import sys
import numpy as np
import codecs
if __name__ == "__main__":
if len(sys.argv)!=3:
print('Usage: python src/prepare_txt_done_data_file.py <meta_file> <utts.data>\n')
sys.exit(0)
meta_file = sys.argv[1]
out_file = sys.argv[2]
out_f = open(out_file,'w')
with open(meta_file, "r") as f:
content = f.readlines()
for text in content:
data = text.split("|")
t = data[2]
file_id = data[0]
out_f.write("( "+file_id+" \" "+t+" \")\n")
out_f.close()
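    # Illustrative transformation (made-up id and text): a metadata line such as
    #   utt001|utt001.wav|Hello world.
    # becomes the festival-style utts.data entry
    #   ( utt001 " Hello world. ")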
|
[
"38498073+AvashnaGovender@users.noreply.github.com"
] |
38498073+AvashnaGovender@users.noreply.github.com
|
1a001cf8f6a9d8db86f80d2e07c8aa293af28736
|
0ba0448cf8c125669f07ec1254b2ba8b0a8112b3
|
/Django/databash/datasets/urls.py
|
784d50093441a2b8c45a9fee4b123f1540bb1aa5
|
[
"MIT"
] |
permissive
|
sethkabir/Cicada-3301-Hackathon
|
e6f6d1684b57e21c2e5ecb20333ca8a77f9365e5
|
e91454b237121efa6642607e990395d2e51ff048
|
refs/heads/main
| 2023-05-08T15:46:06.173622
| 2021-05-29T07:23:28
| 2021-05-29T07:23:28
| 371,747,293
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
from django.urls import path
from django.urls.resolvers import URLPattern
from . import views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('datasets/', views.datasets , name='datasets'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
[
"sanchjain101@gmail.com"
] |
sanchjain101@gmail.com
|
2f5457ea24011bda9e4d5f8c0bfe6274053603bd
|
eccdbca906891de626be8ee7d11573d3fa74248d
|
/N-queens.py
|
422fab1bf4db460573fdf583de364436bfd89308
|
[] |
no_license
|
TerryCh1995/N-queens
|
bfb179774052183615f16aa06b1efc5eee5528a8
|
ab62c677e71973e8fe1444cbda723b560495c2ad
|
refs/heads/master
| 2021-05-11T17:32:26.698661
| 2018-01-17T07:42:06
| 2018-01-17T07:42:06
| 117,800,858
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,526
|
py
|
import random
import time
from datetime import timedelta
#Conflict check. `state` encodes each queen's position: the index is the row and the value is the column, e.g. state[0]=3 means the queen in row 1 sits in column 4
def conflict(state, nextX):
nextY = len(state)
for i in range(nextY):
        #If the next queen shares a column with an existing queen, or lies on the same diagonal, there is a conflict and it must be re-placed
if abs(state[i]-nextX) in (0, nextY-i):
return True
return False
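#Worked example (illustrative): with state=(1, 3), a queen proposed at column 2 of
#row 2 conflicts diagonally with the queen at row 1, column 3 (abs(3-2) == 2-1),
#while column 0 passes both the column and diagonal checks.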
#Generate each queen's position with a generator, recursing to place the next queen
def queens(num, state=()):
for pos in range(num):
if not conflict(state, pos):
            #Yield the current queen's position
if len(state) == num-1:
yield (pos, )
            #Otherwise, append the current queen's position to the state and pass it on to the next queen
else:
for result in queens(num, state+(pos,)):
yield (pos, ) + result
#To visualize the board, mark each queen's position with an X
def prettyprint(solution):
def line(pos, length=len(solution)):
return '. ' * (pos) + 'X ' + '. '*(length-pos-1)
for pos in solution:
print(line(pos))
if __name__ == "__main__":
start_time = time.time()
prettyprint(random.choice(list(queens(13))))
print(str(time.time()-start_time))
|
[
"624495528@qq.com"
] |
624495528@qq.com
|
9ca1ae42d2994c511aa05fb67a74159c6e6d4488
|
15fa13fad9a05a51843c3ed6cf1f8afbb33aae66
|
/examples/reference/save/save.pde
|
2b5b228b7744b1a986067a42dde526639429d72c
|
[] |
no_license
|
kazimuth/python-mode-processing
|
4ad39f18c9637206fa7c691ac328baae0fc21b1a
|
e6274f89e0464b771870327a56ce01bff629e0fb
|
refs/heads/master
| 2021-01-22T05:27:44.912530
| 2014-04-22T17:20:15
| 2014-04-22T17:20:15
| 10,946,779
| 4
| 0
| null | 2013-12-31T01:23:52
| 2013-06-25T18:40:55
|
Java
|
UTF-8
|
Python
| false
| false
| 163
|
pde
|
line(20, 20, 80, 80);
# Saves a PNG file named "diagonal.png"
save("diagonal.png");
# Saves a PNG file named "cross.png"
line(80, 20, 20, 80);
save("cross.png");
|
[
"martin_p@lineone.net"
] |
martin_p@lineone.net
|
c65e79d20c1da3b65054b30115487a08497c16c4
|
27e646d5671e1ea6f87c5f3f23d732e5208a6719
|
/rust/build03/comparedat.py
|
621795c4ea0ef28c44a47dd218e967e9163654d3
|
[] |
no_license
|
richardlford/digsim
|
950683c9bba244e447b81302d1caf77fa71e96a8
|
4da30b04be3c66762050e56f1eb5a533d6d806d7
|
refs/heads/master
| 2023-02-10T01:57:40.474901
| 2019-08-14T17:10:01
| 2019-08-14T17:10:01
| 325,349,160
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 676
|
py
|
#!/usr/bin/python3
import os
from pprint import pprint as pp
# cwd = list(os.path.split(os.getcwd()))[1]
# pp(cwd)
cwd = "build01"
canon = []
with open("../../f77/" + cwd + "/output.dat","r") as ifile:
for line in ifile:
canon.append(tuple(map(float,line.split())))
belief = []
with open("output.dat","r") as ifile:
for line in ifile:
belief.append(tuple(map(float,line.split())))
diffs = []
for i,line in enumerate(canon):
for j,x in enumerate(line):
diff = round(x - belief[i][j],4)
        if diff != 0.0: #count negative differences too, not only positive ones
diffs.append((i+1,j+1,diff))
if len(diffs):
print("Differences")
pp(diffs)
else:
print("No Differences")
|
[
"roy.crippen@archarithms.com"
] |
roy.crippen@archarithms.com
|
97960e3328b8f1cfd55840e4f352b67dd9e8d23c
|
fc81b9c13f4fa2218c0fe6e6666a737cb427bd8a
|
/mledge-master-8be9042002b67467223306dbefc7bc8e772a95d9/ICML/MultiTask/dataset_utils.py
|
a26559e579379c4b0d1cbd802977f193d180342e
|
[] |
no_license
|
WangyaqiAlly/burned_in_captions
|
21b6dcb3a8350d732b42b8dca6fd1627168fef4c
|
46dad50a8336e305ceabe2e598d2290fafb48824
|
refs/heads/master
| 2020-03-28T14:10:20.096202
| 2018-11-14T08:37:45
| 2018-11-14T08:37:45
| 148,463,109
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,218
|
py
|
import tensorflow as tf
import os, sys, pickle
import numpy as np
from scipy import linalg
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_bool('aug_trans', False, "")
tf.app.flags.DEFINE_bool('aug_flip', False, "")
def unpickle(file):
fp = open(file, 'rb')
if sys.version_info.major == 2:
data = pickle.load(fp)
elif sys.version_info.major == 3:
data = pickle.load(fp, encoding='latin-1')
fp.close()
return data
def ZCA(data, reg=1e-6):
mean = np.mean(data, axis=0)
mdata = data - mean
sigma = np.dot(mdata.T, mdata) / mdata.shape[0]
U, S, V = linalg.svd(sigma)
    components = np.dot(np.dot(U, np.diag(1.0 / np.sqrt(S + reg))), U.T) #regularize inside the square root so small eigenvalues stay bounded
whiten = np.dot(data - mean, components.T)
return components, mean, whiten
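# Illustrative usage (shapes assumed): for data of shape (N, D),
#   components, mean, white = ZCA(data)
# yields a (D, D) whitening matrix, the per-feature mean, and whitened data
# whose covariance is approximately the identity.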
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def convert_images_and_labels(images, labels, filepath):
num_examples = labels.shape[0]
if images.shape[0] != num_examples:
raise ValueError("Images size %d does not match label size %d." %
(images.shape[0], num_examples))
#print('Writing', filepath)
writer = tf.python_io.TFRecordWriter(filepath)
for index in range(num_examples):
image = images[index].tolist()
image_feature = tf.train.Feature(float_list=tf.train.FloatList(value=image))
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(32),
'width': _int64_feature(32),
'depth': _int64_feature(3),
'label': _int64_feature(int(labels[index])),
'image': image_feature}))
writer.write(example.SerializeToString())
writer.close()
def read(filename_queue):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since both keys are required.
features={
# dtan2: images are 64x64 rather than 32x32
#'image': tf.FixedLenFeature([3072], tf.float32),
'image': tf.FixedLenFeature([3072*4], tf.float32),
'label': tf.FixedLenFeature([], tf.int64),
})
# Convert label from a scalar uint8 tensor to an int32 scalar.
image = features['image']
# dtan2: images are 64x64
#image = tf.reshape(image, [32, 32, 3])
image = tf.reshape(image, [64, 64, 3])
# dtan2: update the number of labels
label = tf.one_hot(tf.cast(features['label'], tf.int32), FLAGS.cls_num)
#label = tf.one_hot(tf.cast(features['label'], tf.int32), 10)
return image, label
def generate_batch(
example,
min_queue_examples,
batch_size, shuffle):
"""
Arg:
list of tensors.
"""
num_preprocess_threads = 1
if shuffle:
ret = tf.train.shuffle_batch(
example,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples)
else:
ret = tf.train.batch(
example,
batch_size=batch_size,
num_threads=num_preprocess_threads,
allow_smaller_final_batch=True,
capacity=min_queue_examples + 3 * batch_size)
return ret
def transform(image):
#dtan2: changed from 32x32 to 64x64...
w = 64
image = tf.reshape(image, [w, w, 3])
if FLAGS.aug_trans or FLAGS.aug_flip:
print("augmentation")
if FLAGS.aug_trans:
image = tf.pad(image, [[2, 2], [2, 2], [0, 0]])
image = tf.random_crop(image, [w, w, 3])
if FLAGS.aug_flip:
image = tf.image.random_flip_left_right(image)
return image
def generate_filename_queue(filenames, data_dir, num_epochs=None):
print("filenames in queue:", filenames)
for i in range(len(filenames)):
filenames[i] = os.path.join(data_dir, filenames[i])
return tf.train.string_input_producer(filenames, num_epochs=num_epochs)
|
[
"yaqi-wan15@mails.tsinghua.edu.cn"
] |
yaqi-wan15@mails.tsinghua.edu.cn
|
f53f49552b8909f3cef6a07045dbfc113e7e8581
|
4823948cb075cdd51d523a5f72ff4119edebe7ab
|
/opencenteragent/plugins/output/plugin_sleep.py
|
ac148dd5cb54a6beb0b2d2aeeb1a5276815c1ffd
|
[] |
no_license
|
hughsaunders/opencenter-agent
|
738647dd462c38a1da2f078eb1fed9e3e6452b7f
|
e9facb74ceb9f03391dcd5ee3c61ff27406305cd
|
refs/heads/master
| 2021-01-17T23:49:42.465661
| 2013-02-26T21:38:45
| 2013-02-26T21:38:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,531
|
py
|
#!/usr/bin/env python
#
# Copyright 2012, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import time
name = 'sleep'
def setup(config={}):
LOG.debug('doing setup for sleep handler')
register_action('sleep', handle_sleep,
args={"sleep_interval": {"required": True, "type": "int"}})
def handle_sleep(input_data):
action = input_data['action']
payload = input_data['payload']
sleep_time = int(payload.get('sleep_interval', 5))
success_percentage = payload.get('success_percentage', 100)
result = random.randrange(1, 100)
result_code = 1
if result <= success_percentage:
result_code = 0
time.sleep(sleep_time)
result_str = ['success!', 'fail!'][result_code]
return {'result_code': result_code,
'result_str': result_str,
'result_data': {'sleep_interval': sleep_time,
'success_percentage': success_percentage,
'random': result}}
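# Illustrative input_data for handle_sleep (values made up):
#   {'action': 'sleep', 'payload': {'sleep_interval': 3, 'success_percentage': 90}}
# sleeps 3 seconds and reports 'success!' roughly 90% of the time.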
|
[
"jason.cannavale@rackspace.com"
] |
jason.cannavale@rackspace.com
|
efacab27cdad747145ceafc36a9158f2b215c1a8
|
a935f2800e48bb7069fe966972eef56fc8ea2007
|
/events/tasks.py
|
0aa59c22887be428476faba38eb6b1be105339e8
|
[] |
no_license
|
ptrus/findtofun
|
88ae4328b38110167559728e635e5e3b73566410
|
58f0ce6777f7d4cbb5d697a9c4b79dad7d81bca0
|
refs/heads/master
| 2021-01-17T23:39:06.658549
| 2013-05-21T20:32:59
| 2013-05-21T20:32:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,442
|
py
|
from djcelery import celery
from celery.utils.log import get_task_logger
from facepy import GraphAPI
from events.models import FbEvent, FbUser
# from copy import deepcopy
logger = get_task_logger(__name__)
@celery.task
def process_events(access_token):
graph = GraphAPI(access_token)
query_events = """
SELECT
all_members_count,
attending_count,
creator,
declined_count,
description,
eid,
end_time,
name,
not_replied_count,
pic,
pic_big,
pic_cover,
pic_small,
pic_square,
privacy,
start_time,
ticket_uri,
timezone,
unsure_count,
venue
FROM event
WHERE eid IN (SELECT eid FROM event_member WHERE
(uid = me() OR
uid IN (SELECT uid2 FROM friend WHERE uid1 = me())))
AND start_time > now() LIMIT 20
"""
events_results = graph.fql(query_events)
events_objects = []
eids = []
for event_data in events_results["data"]:
eid = event_data["eid"]
if eid in eids:
continue
else:
eids.append(eid)
try:
event = FbEvent.objects.get(pk=eid)
            update_event.delay(event, event_data)  # .s() only builds a signature; .delay() actually queues the task
except FbEvent.DoesNotExist:
log_event("CREATED", event_data)
event = FbEvent.objects.create_event(**event_data)
events_objects.append(event)
FbEvent.objects.bulk_create(events_objects)
for eid in eids:
# process_users_in_events.delay(access_token, eid)
process_users_in_events(access_token, eid)
def get_query_for_querying_users(eid, rsvp_status):
return ("""
SELECT
uid,
name,
age_range,
current_address,
sex
FROM user
WHERE uid IN
(SELECT uid FROM event_member WHERE eid = %s AND rsvp_status = '%s')
""") % (eid, rsvp_status)
@celery.task
def process_users_in_events(access_token, eid):
graph = GraphAPI(access_token)
for rsvp_status in ["attending", "unsure", "declined", "not_replied"]:
process_users_in_events_by_rsvp_status(
graph,
eid,
get_query_for_querying_users(eid, rsvp_status),
rsvp_status)
def process_users_in_events_by_rsvp_status(graph, eid, query_users,
rsvp_status):
users_results = graph.fql(query_users)
ThroughModel = FbEvent.users.through
users_objects = []
through_objects = []
for user_data in users_results["data"]:
uid = user_data['uid']
try:
user = FbUser.objects.get(pk=uid)
            update_user.delay(user, user_data)  # .s() only builds a signature; .delay() actually queues the task
except FbUser.DoesNotExist:
log_user("CREATED", user_data)
user = FbUser.objects.create_user(**user_data)
users_objects.append(user)
through_props = dict(
fbevent_id=eid,
fbuser_id=uid,
rsvp_status=rsvp_status)
if ThroughModel.objects.filter(**through_props).exists() is False:
through_objects.append(ThroughModel(**through_props))
FbUser.objects.bulk_create(users_objects)
ThroughModel.objects.bulk_create(through_objects)
# return dict(
# users_objects=users_objects,
# through_objects=through_objects
# )
@celery.task
def update_event(event, event_data):
has_changed = FbEvent.objects.update_event(event, **event_data)
if has_changed:
log_event("UPDATED", event_data)
@celery.task
def update_user(user, user_data):
has_changed = FbUser.objects.update_user(user, **user_data)
if has_changed:
log_user("UPDATED", user_data)
def log_event(title, event_data):
logger.info("%s event: %s, %s" % (
title,
event_data.get("eid", ""),
event_data.get("name", "")))
def log_user(title, user_data):
logger.info("%s user: %s" % (title, user_data))
|
[
"Tomi@me.com"
] |
Tomi@me.com
|
2429a385627bfa93b1a51418d01fb03830dcb6d9
|
abb3174a489f5a00ff3392b485d43c62395c1929
|
/model_big.py
|
9e1bb2abeb5c5b79b7eb747f8388b501b478058e
|
[] |
no_license
|
sWizad/SingleImgMPI
|
3fab9787fed6613527f6dc11fdd1948c869d7d7b
|
f77cce1cd92aab1ac41273c5dbe92ed6d697ae13
|
refs/heads/master
| 2020-12-09T08:18:29.369114
| 2020-01-28T11:01:51
| 2020-01-28T11:01:51
| 233,247,474
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,825
|
py
|
## Make MPI by direct optimization
import os, sys
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.compat.v1 import ConfigProto
import cv2
from sfm_utils import SfMData
#from view_gen import generateWebGL, generateConfigGL
from utils import *
#from localpath import getLocalPath
slim = tf.contrib.slim
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_boolean("invz", True, "using inverse depth, ignore dmax in this case")
tf.app.flags.DEFINE_boolean("predict", False, "making a video")
tf.app.flags.DEFINE_boolean("restart", False, "making a last video frame")
tf.app.flags.DEFINE_float("scale", 0.75, "scale input image by")
tf.app.flags.DEFINE_integer("offset", 16, "offset size to mpi")
tf.app.flags.DEFINE_integer("layers", 24, "number of planes")
tf.app.flags.DEFINE_integer("sublayers", 1, "number of sub planes")
tf.app.flags.DEFINE_integer("epoch", 1000, "Training steps")
tf.app.flags.DEFINE_integer("batch_size", 1, "Size of mini-batch.")
tf.app.flags.DEFINE_integer("index", 0, "index number")
tf.app.flags.DEFINE_string("dataset", "temple0", "which dataset in the datasets folder")
tf.app.flags.DEFINE_string("input", "tem", "input tfrecord")
tf.app.flags.DEFINE_string("ref_img", "01-cam_06", "reference image")
#tf.app.flags.DEFINE_string("ref_img", "0051.png", "reference image such that MPI is perfectly parallel to")
sub_sam = max(FLAGS.sublayers,1)
num_mpi = FLAGS.layers
offset = FLAGS.offset
dmin, dmax = -1,-1#0.2,15
MODEL_VERSION = 'BIG/'
def touint8(img):
return tf.cast(img * 255.0, tf.uint8)
def crop_sample(sfm,features,cxy=[0,0]):
#center = #tf.concat(cxy,-1)
center = tf.cast(cxy,tf.float32)
center = tf.expand_dims(tf.expand_dims(tf.expand_dims(center,0),0),0)
h2 = sfm.sh//2
coor = [cxy[0]+sfm.offset+h2,cxy[1]+sfm.offset+h2,1]#tf.concat([cxy[0]+sfm.offset+32,cxy[1]+sfm.offset+32,1],-1)
coor = tf.cast(coor,tf.float32)
coor = tf.expand_dims(tf.expand_dims(coor,0),0)
H = computeHomography(sfm, features, sfm.features, 3.8)
newCoords = tf.matmul(coor, tf.linalg.inv(H), transpose_b=True)
ncenter = newCoords[:,:,:2]/newCoords[:,:,2:3]-sfm.offset-h2
ncx = tf.cast(ncenter[0,0,0],tf.int32)
ncy = tf.cast(ncenter[0,0,1],tf.int32)
ncx = tf.clip_by_value(ncx,0,sfm.w - sfm.sw)
ncy = tf.clip_by_value(ncy,0,sfm.h - sfm.sh)
ncenter = tf.expand_dims(ncenter,0)
crop_img = tf.image.crop_to_bounding_box(features['img'],ncy,ncx,sfm.sh,sfm.sw)
img_tile = tf.tile(crop_img,[num_mpi,1,1,1])
psv1 = tf.contrib.resampler.resampler(img_tile,InvzHomoWarp(sfm,features,sfm.features,i=0,center=ncenter-center))
psv1 = tf.reshape(psv1,(1,num_mpi*sfm.nh,sfm.nw,3))
return crop_img , psv1, ncenter-center*0
def train(sfm,cx,cy):
sfm.sh = 120 *3
sfm.sw = 200 *3
sfm.num_mpi = num_mpi
sfm.offset = offset
print(getPlanes(sfm))
sfm.nh = sfm.sh + 2*offset
sfm.nw = sfm.sw + 2*offset
bh = sfm.h + 2*offset
bw = sfm.w + 2*offset
#cx, cy = 600, 400
iter = tf.compat.v1.placeholder(tf.float32, shape=[], name='iter')
features0 = load_data(FLAGS.dataset,FLAGS.input,[sfm.h,sfm.w],1,is_shuff = True)
crop_img, psv1, nc = crop_sample(sfm,features0,[cx,cy])
int_mpi1 = np.random.uniform(-1, 1,[num_mpi, bh, bw, 3]).astype(np.float32)
int_mpi2 = np.random.uniform(-5,-3,[num_mpi, bh, bw, 1]).astype(np.float32)
ref_img = -np.log(np.maximum(1/sfm.ref_img-1,0.001))
int_mpi1[:,offset:sfm.h + offset,offset:sfm.w + offset,:] = np.array([ref_img])
int_mpi2[-1] = -1
tt = True
with tf.compat.v1.variable_scope("Net%d"%(FLAGS.index)):
mpic = tf.compat.v1.get_variable("mpi_c", initializer=int_mpi1, trainable=tt)
mpia = tf.compat.v1.get_variable("mpi_a", initializer=int_mpi2, trainable=tt)
crop_mpic = tf.image.crop_to_bounding_box(mpic,cy,cx,sfm.nh,sfm.nw)
crop_mpia = tf.image.crop_to_bounding_box(mpia,cy,cx,sfm.nh,sfm.nw)
#crop_mpic = mpic[:,cy:cy+sfm.nh,cx:cx+sfm.nw]
#crop_mpia = mpia[:,cy:cy+sfm.nh,cx:cx+sfm.nw]
mpic_sig = tf.sigmoid(crop_mpic)
mpia_sig = tf.sigmoid(crop_mpia)
lr = tf.compat.v1.train.exponential_decay(0.1,iter,1000,0.5)
optimizer = tf.compat.v1.train.AdamOptimizer(lr)
fac = 1.0 #tf.maximum(1 - iter/(1500),0.2)
tva = tf.constant(0.1) * fac
tvc = tf.constant(0.005) * fac
mpi_sig = tf.concat([mpic_sig,mpia_sig],-1)
#mpi_sig = tf.Print(mpi_sig,[nc])
img_out = network( sfm, features0, sfm.features, mpi_sig,center = nc*0)
    #TODO: use normalize before tv
loss = 0
loss += 100000 * tf.reduce_mean(tf.square(img_out[0] - crop_img[-1]))
loss += tvc * tf.reduce_mean(tf.image.total_variation(mpic_sig))
loss += tva * tf.reduce_mean(tf.image.total_variation (mpia_sig))
train_op =optimizer.minimize(loss)
image_out = tf.clip_by_value(img_out,0.0,1.0)
a_long = tf.reshape(mpi_sig[:,:,:,3:4],(1,num_mpi*sfm.nh,sfm.nw,1))
c_long = tf.reshape(mpi_sig[:,:,:,:3],(1,num_mpi*sfm.nh,sfm.nw,3))
summary = tf.compat.v1.summary.merge([
tf.compat.v1.summary.scalar("post0/all_loss", loss),
tf.compat.v1.summary.image("post0/out1",touint8(tf.concat([crop_img[-1:],image_out],1))),
tf.compat.v1.summary.image("post0/out1",touint8(tf.concat([image_out,crop_img[-1:]],1))),
tf.compat.v1.summary.image("post1/o_alpha",touint8(a_long)),
tf.compat.v1.summary.image("post1/o_color",touint8(c_long)),
tf.compat.v1.summary.image("post1/o_acolor",touint8(c_long*a_long)),
tf.compat.v1.summary.image("post2/p1",touint8(psv1)),
])
config = ConfigProto()
config.gpu_options.allow_growth = True
localpp = "TensorB/"+MODEL_VERSION+FLAGS.dataset
if FLAGS.index==0:
if os.path.exists(localpp):
os.system("rm -rf " +localpp )
if not os.path.exists(localpp):
os.makedirs(localpp)
writer = tf.compat.v1.summary.FileWriter(localpp)
#writer.add_graph(sess.graph)
sess = tf.compat.v1.Session(config=config)
if not FLAGS.restart:
sess.run(tf.compat.v1.global_variables_initializer())
localpp = './model/'+MODEL_VERSION + FLAGS.dataset
saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(localpp)
saver.restore(sess, ckpt)
else:
sess.run(tf.compat.v1.global_variables_initializer())
localpp = './model/'+MODEL_VERSION + FLAGS.dataset
if not os.path.exists(localpp):
os.makedirs(localpp)
saver = tf.train.Saver()
#print("Var=",np.sum([np.prod(v.get_shape().as_list()) for v in tf.trainable_variables()]))
los = 0
n = num_mpi//20
for i in range(FLAGS.epoch + 3):
feedlis = {iter:i}
_,los = sess.run([train_op,loss],feed_dict=feedlis)
if i%50==0:
print(i, "loss = ",los )
if i%20 == 0:
summ = sess.run(summary,feed_dict=feedlis)
writer.add_summary(summ,i)
if i%200==199:
saver.save(sess, localpp + '/' + str(000))
saver.save(sess, localpp + '/mpi')
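# Hedged note (added): the schedule built with
# tf.compat.v1.train.exponential_decay(0.1, iter, 1000, 0.5) above uses the
# default staircase=False, i.e. lr = 0.1 * 0.5 ** (iter / 1000): 0.1 at step 0,
# 0.05 at step 1000, 0.025 at step 2000. A plain-Python equivalent:
def _demo_lr_schedule(step, base_lr=0.1, decay_steps=1000, decay_rate=0.5):
return base_lr * decay_rate ** (step / decay_steps)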
def predict(sfm):
def parser(serialized_example):
fs = tf.parse_single_example(
serialized_example,
features={
"r": tf.FixedLenFeature([9], tf.float32),
"t": tf.FixedLenFeature([3], tf.float32),
'pxFocalLength':tf.FixedLenFeature([], tf.float32),
'pyFocalLength':tf.FixedLenFeature([], tf.float32),
'principalPoint0':tf.FixedLenFeature([], tf.float32),
'principalPoint1':tf.FixedLenFeature([], tf.float32)
})
fs["r"] = tf.reshape(fs["r"], [3, 3])
fs["t"] = tf.reshape(fs["t"], [3, 1])
return fs
lod_in = tf.compat.v1.placeholder(tf.float32, shape=[], name='lod_in')
#testset = tf.data.TFRecordDataset(["datasets/" + FLAGS.dataset + "/" + FLAGS.input +".test"])
localpp = "/home2/suttisak/datasets/spaces_dataset/data/resize_2k/" + FLAGS.dataset + "/" + FLAGS.input + ".test"
testset = tf.data.TFRecordDataset([localpp])
testset = testset.map(parser).repeat().batch(1).make_one_shot_iterator()
features = testset.get_next()
rot = features["r"][0]
tra = features["t"][0]
sfm.sh = 120
sfm.sw = 200
sfm.num_mpi = num_mpi
sfm.offset = offset
print(getPlanes(sfm))
sfm.nh = sfm.sh + 2*offset
sfm.nw = sfm.sw + 2*offset
bh = sfm.h + 2*offset
bw = sfm.w + 2*offset
sfm.num_mpi = num_mpi
sfm.offset = offset
int_mpi1 = np.random.uniform(-1, 1,[num_mpi, bh, bw, 3]).astype(np.float32)
int_mpi2 = np.random.uniform(-5,-3,[num_mpi, bh, bw, 1]).astype(np.float32)
int_mpi2[-1] = -1
with tf.compat.v1.variable_scope("Net%d"%(FLAGS.index)):
mpic = tf.compat.v1.get_variable("mpi_c", initializer=int_mpi1, trainable=False)
mpia = tf.compat.v1.get_variable("mpi_a", initializer=int_mpi2, trainable=False)
new_mpi = tf.concat([tf.tile(mpic,[sub_sam,1,1,1]),mpia],-1)
mpi_sig = tf.sigmoid(new_mpi)
img_out = network(sfm,features,sfm.features,mpi_sig)
with tf.compat.v1.variable_scope("post%d"%(FLAGS.index)):
image_out= tf.clip_by_value(img_out[0],0.0,1.0)
config = ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.compat.v1.Session(config=config)
sess.run(tf.compat.v1.global_variables_initializer())
t_vars = slim.get_variables_to_restore()
variables_to_restore = [var for var in t_vars if 'Net' in var.name]
#variables_to_restore = slim.get_variables_to_restore()
print(variables_to_restore)
saver = tf.train.Saver(variables_to_restore)
localpp = './model/'+MODEL_VERSION + FLAGS.dataset #+ '/mpi'
ckpt = tf.train.latest_checkpoint(localpp )
saver.restore(sess, ckpt)
#webpath = "webpath/"
webpath = "/var/www/html/suttisak/data/"
if not os.path.exists(webpath + FLAGS.dataset):
os.system("mkdir " + webpath + FLAGS.dataset)
if False: # make sample picture and video
for i in range(0,300,1):
#feed = sess.run(features)
#out = sess.run(image_out,feed_dict={lod_in:0,rot:feed["r"][0],tra:feed["t"][0]})
out = sess.run(image_out,feed_dict={lod_in:0})
if (i%50==0):
print(i)
plt.imsave("webpath/"+FLAGS.dataset+"/%04d.png"%( i),out)
plt.imsave("result/frame/"+FLAGS.dataset+"_%04d.png"%( i),out)
cmd = 'ffmpeg -y -i ' + 'result/frame/'+FLAGS.dataset+'_%04d.png -c:v libx264 -vf "pad=ceil(iw/2)*2:ceil(ih/2)*2" -pix_fmt yuv420p webpath/'+FLAGS.dataset+'/moving.mp4'
print(cmd)
os.system(cmd)
if True: # make web viewer
ret = sess.run(mpi_sig,feed_dict={lod_in:0})
maxcol = 12
mpis = []
cols = []
for i in range(num_mpi):
cols.append(ret[i,:,:,:4])
if len(cols) == maxcol:
mpis.append(np.concatenate(cols,1))
cols = []
#mpis.append(ret[i,:,:,:4])
if len(cols):
while len(cols)<maxcol:
cols.append(np.zeros_like(cols[-1]))
mpis.append(np.concatenate(cols,1))
plt.imsave(webpath + FLAGS.dataset+ "/sublayer.png", np.ones_like(mpis[0][:, :, :3]))
mpis = np.concatenate(mpis, 0)
plt.imsave(webpath + FLAGS.dataset+ "/mpi%02d.png"%(FLAGS.index), mpis)
plt.imsave("webpath/" + FLAGS.dataset+ "/mpi%02d.png"%(FLAGS.index), mpis)
#plt.imsave(webpath + FLAGS.dataset+ "/mpi.png", mpis)
#plt.imsave(webpath + FLAGS.dataset+ "/mpi_alpha.png", np.tile(mpis[:, :, 3:], (1, 1, 3)))
namelist = "["
for ii in range(FLAGS.index+1):
namelist += "\"%02d\","%(ii)
namelist += "]"
ref_r = sfm.ref_r
ref_t = sfm.ref_t
with open(webpath + FLAGS.dataset+ "/extrinsics%02d.txt"%(FLAGS.index), "w") as fo:
for i in range(3):
for j in range(3):
fo.write(str(ref_r[ i, j]) + " ")
fo.write(" ".join([str(x) for x in np.nditer(ref_t)]) + "\n")
generateConfigGL(webpath + FLAGS.dataset+ "/config.js", sfm.w, sfm.h, getPlanes(sfm),namelist,sub_sam, sfm.ref_fx, sfm.ref_px, sfm.ref_py,FLAGS.scale,FLAGS.offset)
generateConfigGL("webpath/" + FLAGS.dataset+ "/config.js", sfm.w, sfm.h, getPlanes(sfm),namelist,sub_sam, sfm.ref_fx, sfm.ref_px, sfm.ref_py,FLAGS.scale,FLAGS.offset)
def main(argv):
sfm = SfMData(FLAGS.dataset,
FLAGS.ref_img,
"",
FLAGS.scale,
dmin,
dmax)
if FLAGS.predict:
predict(sfm)
else:
#for cx in [200,600]:
#for cy in [400,800]:
train(sfm,0,0)
print("Jub Jub!!")
if __name__ == "__main__":
sys.excepthook = colored_hook(
os.path.dirname(os.path.realpath(__file__)))
tf.compat.v1.app.run()
|
[
"suttisak@v03.vll.ist"
] |
suttisak@v03.vll.ist
|
a7a4c82d7babe5a5178fe675ff518a0da8e7edd7
|
9eedcae2097f724fa8cde0a7ff01f851becdb2e9
|
/datapeace/venv/bin/sqlformat
|
d9920ae115c41a21819f76363a3b71420cadef5a
|
[] |
no_license
|
Chhavnish/datapeace_user_management_system
|
7ea4a16f4e206e05543006b0557756095c8ca1d3
|
7f4256cbbfc0974ba0c047a0971b50b66624eb2c
|
refs/heads/master
| 2020-05-15T00:52:56.088542
| 2019-04-18T06:21:39
| 2019-04-18T06:21:39
| 182,016,738
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 258
|
#!/Users/cmittal/Desktop/Extra/datapeace/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
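# Hedged worked example (added): the re.sub above strips setuptools launcher
# suffixes from argv[0] so sqlparse reports a clean program name:
# re.sub(r'(-script\.pyw?|\.exe)?$', '', 'sqlformat-script.py') -> 'sqlformat'
# re.sub(r'(-script\.pyw?|\.exe)?$', '', 'sqlformat.exe')       -> 'sqlformat'
# a bare 'sqlformat' only matches the empty alternative and is left unchanged.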
|
[
"v-mchhavnish@expediagroup.com"
] |
v-mchhavnish@expediagroup.com
|
|
ba9bd981bd880e5f72294cb26838efd16f6b1a93
|
f4e16d12b5b95fb05ebfca7d2a3338715077cb63
|
/extend/migrations/0001_initial.py
|
69a3c0b71cf6db59efe5ab947990b5696178fc10
|
[] |
no_license
|
z747553743/Lab4
|
6110fbd598114c13914e4b8eb7d7d26b11801524
|
1d674ad857f1db81af79f2afadabca2d97f6ea78
|
refs/heads/master
| 2021-01-20T11:42:01.335368
| 2015-11-12T08:24:29
| 2015-11-12T08:24:29
| 45,901,193
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,255
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Author',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('AuthorID', models.CharField(max_length=30)),
('Name', models.CharField(max_length=30)),
('Age', models.CharField(max_length=10)),
('Country', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('ISBN', models.CharField(max_length=30)),
('Title', models.CharField(max_length=30)),
('AuthorID', models.CharField(max_length=30)),
('Publisher', models.CharField(max_length=30)),
('PublishDate', models.CharField(max_length=30)),
('Price', models.CharField(max_length=20)),
],
),
]
|
[
"747553743qq.com"
] |
747553743qq.com
|
aad9488676d3f5b3bfee0855179e545f1e14d504
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/GaXXfmpM72yCHag9T_4.py
|
22925ca97568e06a1a2019dab94c6ca385feb516
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 69
|
py
|
def unique(lst):
return [l for l in lst if lst.count(l) == 1][0]
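# Hedged usage note (added): unique returns the element occurring exactly once,
# e.g. unique([3, 3, 3, 7, 3]) == 7. Because lst.count runs inside the
# comprehension this is O(n^2); a linear-time variant under the same
# "exactly one unique element" assumption:
from collections import Counter
def unique_fast(lst):
return next(x for x, c in Counter(lst).items() if c == 1)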
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
538183605cea0ff1b5fef40d2c62d45448a7cfcf
|
c3cff2173c371541de2175f90a68b5d91e98b353
|
/admin/tool.py
|
b062697243ccab9c839d8a9098db0a8d91f6fb68
|
[] |
no_license
|
suruleredotdev/suruleredotdev.github.io
|
7490bf61b1af0424f0df845cebc2d6a332c5555e
|
e55896e07d1fd18275c7c7d36561a99b58f6763b
|
refs/heads/master
| 2023-01-25T02:07:51.391593
| 2023-01-20T04:36:26
| 2023-01-20T04:36:26
| 224,273,040
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 475
|
py
|
#!/usr/bin/python3
import flask
import os
from subprocess import Popen, PIPE, STDOUT
import logging
app = flask.Flask(__name__)
app.jinja_env.add_extension('pypugjs.ext.jinja.PyPugJSExtension')
@app.route('/')
def start():
run("./serve.rb")
def run(cmd):
process = Popen(cmd, stdout=STDOUT, stderr=STDOUT)
return process
def log(pipe, prefix=""):
for line in iter(pipe.readline, b''): # b'\n'-separated lines
logging.info('%s %r', prefix, line)
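# Hedged sketch (added; the threading wiring is an assumption, not in the
# original): log() above is defined but never attached. One way to stream the
# child's merged output into the logger without blocking the request:
# import threading
# proc = run(["./serve.rb"])
# threading.Thread(target=log, args=(proc.stdout, "serve"), daemon=True).start()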
|
[
"kaderele@gmail.com"
] |
kaderele@gmail.com
|
e9b8e93e36efe9ee9c406bf969fd120cc56c3151
|
ea44a1681e276b3cc85226b53de217f6096a05d4
|
/fhir/resources/models/bundle.py
|
e8589ec8f2e903e432742ea6bcaa7820df89456e
|
[
"BSD-3-Clause"
] |
permissive
|
stephanie-howson/fhir.resources
|
69d2a5a6b0fe4387b82e984255b24027b37985c4
|
126e9dc6e14541f74e69ef7c1a0b8a74aa981905
|
refs/heads/master
| 2020-05-04T22:24:49.826585
| 2019-06-27T15:51:26
| 2019-06-27T15:51:26
| 179,511,579
| 0
| 0
| null | 2019-04-04T14:14:53
| 2019-04-04T14:14:52
| null |
UTF-8
|
Python
| false
| false
| 11,353
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-9a13c5160d (http://hl7.org/fhir/StructureDefinition/Bundle) on 2019-04-12.
# 2019, SMART Health IT.
from . import resource
class Bundle(resource.Resource):
""" Contains a collection of resources.
A container for a collection of resources.
"""
resource_type = "Bundle"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.entry = None
""" Entry in the bundle - will have a resource or information.
List of `BundleEntry` items (represented as `dict` in JSON). """
self.identifier = None
""" Persistent identifier for the bundle.
Type `Identifier` (represented as `dict` in JSON). """
self.link = None
""" Links related to this Bundle.
List of `BundleLink` items (represented as `dict` in JSON). """
self.signature = None
""" Digital Signature.
Type `Signature` (represented as `dict` in JSON). """
self.timestamp = None
""" When the bundle was assembled.
Type `FHIRDate` (represented as `str` in JSON). """
self.total = None
""" If search, the total number of matches.
Type `int`. """
self.type = None
""" document | message | transaction | transaction-response | batch |
batch-response | history | searchset | collection.
Type `str`. """
super(Bundle, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Bundle, self).elementProperties()
js.extend([
("entry", "entry", BundleEntry, True, None, False),
("identifier", "identifier", identifier.Identifier, False, None, False),
("link", "link", BundleLink, True, None, False),
("signature", "signature", signature.Signature, False, None, False),
("timestamp", "timestamp", fhirdate.FHIRDate, False, None, False),
("total", "total", int, False, None, False),
("type", "type", str, False, None, True),
])
return js
from . import backboneelement
class BundleEntry(backboneelement.BackboneElement):
""" Entry in the bundle - will have a resource or information.
An entry in a bundle resource - will either contain a resource or
information about a resource (transactions and history only).
"""
resource_type = "BundleEntry"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.fullUrl = None
""" URI for resource (Absolute URL server address or URI for UUID/OID).
Type `str`. """
self.link = None
""" Links related to this entry.
List of `BundleLink` items (represented as `dict` in JSON). """
self.request = None
""" Additional execution information (transaction/batch/history).
Type `BundleEntryRequest` (represented as `dict` in JSON). """
self.resource = None
""" A resource in the bundle.
Type `Resource` (represented as `dict` in JSON). """
self.response = None
""" Results of execution (transaction/batch/history).
Type `BundleEntryResponse` (represented as `dict` in JSON). """
self.search = None
""" Search related information.
Type `BundleEntrySearch` (represented as `dict` in JSON). """
super(BundleEntry, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(BundleEntry, self).elementProperties()
js.extend([
("fullUrl", "fullUrl", str, False, None, False),
("link", "link", BundleLink, True, None, False),
("request", "request", BundleEntryRequest, False, None, False),
("resource", "resource", resource.Resource, False, None, False),
("response", "response", BundleEntryResponse, False, None, False),
("search", "search", BundleEntrySearch, False, None, False),
])
return js
class BundleEntryRequest(backboneelement.BackboneElement):
""" Additional execution information (transaction/batch/history).
Additional information about how this entry should be processed as part of
a transaction or batch. For history, it shows how the entry was processed
to create the version contained in the entry.
"""
resource_type = "BundleEntryRequest"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.ifMatch = None
""" For managing update contention.
Type `str`. """
self.ifModifiedSince = None
""" For managing cache currency.
Type `FHIRDate` (represented as `str` in JSON). """
self.ifNoneExist = None
""" For conditional creates.
Type `str`. """
self.ifNoneMatch = None
""" For managing cache currency.
Type `str`. """
self.method = None
""" GET | HEAD | POST | PUT | DELETE | PATCH.
Type `str`. """
self.url = None
""" URL for HTTP equivalent of this entry.
Type `str`. """
super(BundleEntryRequest, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(BundleEntryRequest, self).elementProperties()
js.extend([
("ifMatch", "ifMatch", str, False, None, False),
("ifModifiedSince", "ifModifiedSince", fhirdate.FHIRDate, False, None, False),
("ifNoneExist", "ifNoneExist", str, False, None, False),
("ifNoneMatch", "ifNoneMatch", str, False, None, False),
("method", "method", str, False, None, True),
("url", "url", str, False, None, True),
])
return js
class BundleEntryResponse(backboneelement.BackboneElement):
""" Results of execution (transaction/batch/history).
Indicates the results of processing the corresponding 'request' entry in
the batch or transaction being responded to or what the results of an
operation where when returning history.
"""
resource_type = "BundleEntryResponse"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.etag = None
""" The Etag for the resource (if relevant).
Type `str`. """
self.lastModified = None
""" Server's date time modified.
Type `FHIRDate` (represented as `str` in JSON). """
self.location = None
""" The location (if the operation returns a location).
Type `str`. """
self.outcome = None
""" OperationOutcome with hints and warnings (for batch/transaction).
Type `Resource` (represented as `dict` in JSON). """
self.status = None
""" Status response code (text optional).
Type `str`. """
super(BundleEntryResponse, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(BundleEntryResponse, self).elementProperties()
js.extend([
("etag", "etag", str, False, None, False),
("lastModified", "lastModified", fhirdate.FHIRDate, False, None, False),
("location", "location", str, False, None, False),
("outcome", "outcome", resource.Resource, False, None, False),
("status", "status", str, False, None, True),
])
return js
class BundleEntrySearch(backboneelement.BackboneElement):
""" Search related information.
Information about the search process that lead to the creation of this
entry.
"""
resource_type = "BundleEntrySearch"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.mode = None
""" match | include | outcome - why this is in the result set.
Type `str`. """
self.score = None
""" Search ranking (between 0 and 1).
Type `float`. """
super(BundleEntrySearch, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(BundleEntrySearch, self).elementProperties()
js.extend([
("mode", "mode", str, False, None, False),
("score", "score", float, False, None, False),
])
return js
class BundleLink(backboneelement.BackboneElement):
""" Links related to this Bundle.
A series of links that provide context to this bundle.
"""
resource_type = "BundleLink"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.relation = None
""" See http://www.iana.org/assignments/link-relations/link-
relations.xhtml#link-relations-1.
Type `str`. """
self.url = None
""" Reference details for the link.
Type `str`. """
super(BundleLink, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(BundleLink, self).elementProperties()
js.extend([
("relation", "relation", str, False, None, True),
("url", "url", str, False, None, True),
])
return js
from . import fhirdate
from . import identifier
from . import signature
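# Hedged usage sketch (added, not from the generator): per __init__ above a
# Bundle accepts a FHIR JSON dict and validates it unless strict=False, e.g.
# b = Bundle({"resourceType": "Bundle", "type": "searchset", "total": 1})
# b.type      # -> "searchset"
# b.as_json() # round-trips the dict (as_json is assumed inherited from the base model)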
|
[
"noreply@github.com"
] |
stephanie-howson.noreply@github.com
|
4958c8f8de2376b57b77ccc6d602f2b6daf0f752
|
b070ca9185ba9ef59245f829d01fbefd696e8bfe
|
/aeriscloud/vagrant.py
|
ed987842125f6cfe78d74a4d7dcdf91096062b55
|
[
"MIT"
] |
permissive
|
AerisCloud/AerisCloud
|
73aaa84b6d09f8fa5590f0c736a93cc0d252cebc
|
dad93c6273411cf8275ada8511984f4decf5386a
|
refs/heads/develop
| 2021-01-17T15:40:49.065158
| 2017-08-16T02:52:41
| 2017-08-16T02:52:41
| 54,878,917
| 9
| 4
| null | 2017-08-16T02:52:42
| 2016-03-28T08:41:50
|
Python
|
UTF-8
|
Python
| false
| false
| 8,278
|
py
|
from __future__ import print_function
import json
import os
import platform
import re
import six
from subprocess32 import call, Popen, PIPE
from .ansible import ansible_env
from .config import aeriscloud_path, data_dir, verbosity, default_organization
from .log import get_logger
from .organization import Organization
from .utils import timestamp, cd
logger = get_logger('vagrant')
VAGRANT_DATA_FOLDER = os.path.join(os.getenv('HOME'), '.vagrant.d')
class Machine(object):
def __init__(self, id, json_data):
self.id = id
self.vagrant_path = json_data.get('local_data_path')
self.name = json_data.get('name')
self.provider = json_data.get('provider')
self.state = json_data.get('state')
self.vagrantfile = os.path.join(
json_data.get('vagrantfile_path'),
json_data.get('vagrantfile_name', 'Vagrantfile') or 'Vagrantfile'
)
self.extra_data = json_data.get('extra_data', {})
self.data_path = os.path.join(
self.vagrant_path,
'machines',
self.name,
self.provider
)
self._uuid = None
@property
def uuid(self):
id_file = os.path.join(self.data_path, 'id')
if not os.path.exists(id_file):
return None
if not self._uuid:
with open(id_file) as fd:
self._uuid = fd.read()
return self._uuid
class MachineIndex(object):
machines = {}
machine_index_file = os.path.join(VAGRANT_DATA_FOLDER, 'data',
'machine-index', 'index')
def __init__(self):
if MachineIndex.machines:
return
with open(MachineIndex.machine_index_file) as fd:
machine_index = json.load(fd)
machines = machine_index.get('machines', {})
for mid, json_data in six.iteritems(machines):
MachineIndex.machines[mid] = Machine(mid, json_data)
def get(self, mid):
return MachineIndex.machines.get(mid)
def get_by_name(self, name):
for machine in MachineIndex.machines.values():
if machine.name == name:
return machine
return None
def get_by_uuid(self, uuid):
for machine in MachineIndex.machines.values():
if machine.uuid == uuid:
return machine
return None
class NFS(object):
nfs_exports = '/etc/exports'
re_exports_headers = re.compile(
r'^# VAGRANT-(?P<type>BEGIN|END):(?P<uid> [0-9]+) '
r'(?P<uuid>[a-z0-9-]+)', re.I)
re_exports_path = re.compile(r'^("[^"]+"|\S+)', re.I)
def __init__(self, export_file=nfs_exports):
self.exports = {}
self.export_file = export_file
self.parse_exports()
def parse_exports(self):
if not os.path.exists(self.export_file):
return
current_uuid = None
current_exports = []
with open(self.export_file) as fd:
for line in fd:
match = NFS.re_exports_headers.match(line.strip())
if match:
# store exports
if match.group('type') == 'END':
self.exports[current_uuid] = current_exports
current_uuid = None
current_exports = []
continue
# ignore uids that are not ours
if match.group('uid') and \
int(match.group('uid').strip()) != os.getuid():
continue
current_uuid = match.group('uuid')
elif current_uuid:
path_match = NFS.re_exports_path.match(line.strip())
if not path_match:
continue
export_path = path_match.group(0).strip('"')
current_exports.append(export_path)
def fix_anomalies(self):
to_prune = []
# machine_index = MachineIndex()
for uuid, exports in six.iteritems(self.exports):
# machine = machine_index.get_by_uuid(uuid)
# machine cannot be found in the index
# if not machine:
# to_prune.append(uuid)
# continue
# one of the path does not exists anymore
if [path for path in exports if not os.path.exists(path)]:
to_prune.append(uuid)
continue
# remove all exports that have issues
for uuid in to_prune:
logger.info('pruning NFS entry for %s' % uuid)
# taken from vagrant/plugins/hosts/linux/cap/nfs.rb
extended_re_flag = '-r'
sed_expr = '\\\x01^# VAGRANT-BEGIN:( {user})? {id}\x01,' \
'\\\x01^# VAGRANT-END:( {user})? {id}\x01 d'.format(
id=uuid,
user=os.getuid()
)
if platform.system() == 'Darwin':
extended_re_flag = '-E'
sed_expr = '/^# VAGRANT-BEGIN:( {user})? {id}/,' \
'/^# VAGRANT-END:( {user})? {id}/ d'.format(
id=uuid,
user=os.getuid()
)
cmd = [
'sed',
extended_re_flag,
'-e',
sed_expr,
'-ibak',
self.export_file
]
# if we do not have write access, use sudo
if not os.access(self.export_file, os.W_OK):
cmd = [
'sudo',
'-p'
'Fixing invalid NFS exports. Administrators privileges '
'are required\n[sudo] password for %u',
'--'
] + cmd
if call(cmd) != 0:
raise RuntimeError('could not prune invalid nfs exports '
'"%s" from /etc/exports' % uuid)
def run(pro, *args, **kwargs):
"""
Run vagrant within a project
:param pro: .project.Project
:param args: list[string]
:param kwargs: dict[string,string]
:return:
"""
with cd(pro.folder()):
# fix invalid exports for vagrant
NFS().fix_anomalies()
new_env = ansible_env(os.environ.copy())
new_env['PATH'] = os.pathsep.join([
new_env['PATH'],
os.path.join(aeriscloud_path, 'venv/bin')
])
new_env['VAGRANT_DOTFILE_PATH'] = pro.vagrant_dir()
new_env['VAGRANT_CWD'] = pro.vagrant_working_dir()
new_env['VAGRANT_DISKS_PATH'] = os.path.join(data_dir(), 'disks')
# We might want to remove that or bump the verbosity level even more
if verbosity() >= 4:
new_env['VAGRANT_LOG'] = 'info'
new_env['AERISCLOUD_PATH'] = aeriscloud_path
new_env['AERISCLOUD_ORGANIZATIONS_DIR'] = os.path.join(data_dir(),
'organizations')
org = default_organization()
if org:
new_env['AERISCLOUD_DEFAULT_ORGANIZATION'] = org
organization_name = pro.organization()
if organization_name:
organization = Organization(organization_name)
else:
organization = Organization(org)
basebox_url = organization.basebox_url()
if basebox_url:
new_env['VAGRANT_SERVER_URL'] = basebox_url
args = ['vagrant'] + list(args)
logger.debug('running: %s\nenv: %r', ' '.join(args), new_env)
# support for the vagrant prompt
if args[1] == 'destroy':
return call(args, env=new_env, **kwargs)
else:
process = Popen(args, env=new_env, stdout=PIPE,
bufsize=1, **kwargs)
for line in iter(process.stdout.readline, b''):
timestamp(line[:-1])
# empty output buffers
process.poll()
return process.returncode
def version():
try:
from sh import vagrant
return str(vagrant('--version'))[8:].rstrip()
except ImportError:
return None
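# Hedged illustration (added; uid/uuid values are made up): NFS.parse_exports
# above consumes /etc/exports blocks of the form
#   # VAGRANT-BEGIN: 501 1a2b3c4d-0000-1111-2222-333344445555
#   "/Users/me/project" 192.168.33.10 -alldirs -mapall=501:20
#   # VAGRANT-END: 501 1a2b3c4d-0000-1111-2222-333344445555
# re_exports_headers captures the BEGIN/END marker, uid and uuid, and the
# leading (possibly quoted) path of every line in between is collected.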
|
[
"ekenler@wizcorp.jp"
] |
ekenler@wizcorp.jp
|
7493a8116456207dcd792e75c00189176aa93994
|
af9bf5dbf42e05e78a016c976027988165293f96
|
/list2.py
|
3f956dd8317e201d85c33e5d263bcbd663a42582
|
[] |
no_license
|
basiliskorres/exetasi
|
5c8681073d67072e811b3be2f5f9950b0530621e
|
c7ae87fa9f2e9c56efdfe17892a6be73cf78d6d6
|
refs/heads/master
| 2020-04-06T04:05:51.773039
| 2017-02-24T11:05:07
| 2017-02-24T11:05:07
| 83,032,993
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 74
|
py
|
x = [0,1,2,3,4,5]
import statistics
s = statistics.stdev(x)
print(s)
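# Hedged worked example (added): for x = [0, 1, 2, 3, 4, 5] the sample mean is
# 2.5, the squared deviations sum to 2*(6.25 + 2.25 + 0.25) = 17.5, the sample
# variance is 17.5 / (6 - 1) = 3.5, so statistics.stdev(x) = sqrt(3.5) ~ 1.8708.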
|
[
"noreply@github.com"
] |
basiliskorres.noreply@github.com
|
bc29e6c0f5f901e1a2c45442f76d771066b15de3
|
b65dc4d7e4047b99b5ce8e4c5464a6cc9d105ed9
|
/courses/langtons_ant.py
|
0fd8f0f1503f3089f42ad50d5663fab887fab853
|
[
"MIT"
] |
permissive
|
sonumathur/Python
|
2adfeba0320a324b2ac83fb13bcfc0558989aeba
|
6bc459a3621cd89710bc96f70e894e3929aea1dc
|
refs/heads/master
| 2023-08-28T20:14:32.065229
| 2021-10-18T11:23:05
| 2021-10-18T11:23:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,298
|
py
|
import pygame
class Ant():
def __init__(self, origin):
self.loc = origin
self.heading = 3
def move_ant(self, board):
if board[self.loc]:
self.heading = (self.heading - 1) % 4
board[self.loc] = False
else:
self.heading = (self.heading + 1) % 4
board[self.loc] = True
moves = [
(0, -1), # up
(1, 0), # right
(0, 1), # down
(-1, 0), # left
]
self.loc = (self.loc[0] + moves[self.heading][0], self.loc[1] + moves[self.heading][1])
return board
def draw(self):
offset_x = self.loc[0] * step
offset_y = self.loc[1] * step
pygame.draw.circle(screen, ANT_COLOR1, (offset_x + side // 2, offset_y + side // 2), side // 2)
if self.heading == 0:
hx, hy = (0.5, 0.25)
elif self.heading == 1:
hx, hy = (0.75, 0.5)
elif self.heading == 2:
hx, hy = (0.5, 0.75)
elif self.heading == 3:
hx, hy = (0.25, 0.5)
pygame.draw.circle(screen, ANT_COLOR2, (offset_x + int(side * hx), offset_y + int(side * hy)), side // 4)
N = 50
SIZE = 500
LIGHT = (180, 180, 180)
DARK = (20, 20, 20)
BORDER_COLOR = (0, 50, 0)
ANT_COLOR1 = (0, 0, 250)
ANT_COLOR2 = (255, 100, 100)
FPS = 3
pygame.init()
clock = pygame.time.Clock()
# false is light, true is dark
board = {(x, y) : False for x in range(N) for y in range(N)}
screen = pygame.display.set_mode([SIZE, SIZE])
step = SIZE / N
side = int(round(step))
origin = (N // 2, N // 2)
ant = Ant(origin)
running = True
while running:
screen.fill(LIGHT)
board = ant.move_ant(board)
for key, value in board.items():
if value:
x = int(step * key[0])
y = int(step * key[1])
pygame.draw.rect(screen, DARK, (x, y, side, side))
for n_int in range(0, N + 1):
n = int(round(n_int * step))
pygame.draw.line(screen, BORDER_COLOR, (n, 0), (n, SIZE), width=1)
pygame.draw.line(screen, BORDER_COLOR, (0, n), (SIZE, n), width=1)
ant.draw()
pygame.display.flip()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
clock.tick(FPS)
pygame.quit()
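# Hedged trace (added for exposition): headings are encoded 0=up, 1=right,
# 2=down, 3=left via the moves table in move_ant. From the origin with
# heading 3 on a light cell, the ant turns to (3 + 1) % 4 == 0 (up), darkens
# the cell, and steps by moves[0] == (0, -1).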
|
[
"you@example.com"
] |
you@example.com
|
b513f276b4d8ecd56a16b6df4477924e5903dbec
|
4ba19fd50e0b91860b6ca54d7cb54111509d12b7
|
/core/migrations/0005_auto_20171120_1629.py
|
9f4cc78aa9368171f8c071044c2ace9f26783337
|
[] |
no_license
|
lauragomess/ProjetoIntegradorSite
|
d0f036929923a49ad482b4564544067d161969a7
|
60c06cd371a9b74ec68df6f0dc8a453160d6a6aa
|
refs/heads/master
| 2021-08-23T16:44:58.337249
| 2017-12-05T18:52:00
| 2017-12-05T18:52:00
| 112,805,908
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,189
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-20 18:29
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('core', '0004_aluno'),
]
operations = [
migrations.CreateModel(
name='Avaliador',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.RemoveField(
model_name='projeto',
name='avaliador',
),
migrations.AddField(
model_name='avaliador',
name='projeto',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Projeto'),
),
migrations.AddField(
model_name='avaliador',
name='user',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
[
"31424112+lauragomess@users.noreply.github.com"
] |
31424112+lauragomess@users.noreply.github.com
|
03a2f096090bbf4c177404b3af5aac47dad54ce8
|
e00ecfef9d441340660c3acee7d9622654e403fd
|
/ch14_sort/14-24.py
|
e5a3d3e1f3efc5e5f7fe9e72c1b600c7a60a70d5
|
[] |
no_license
|
wasiwasi/pycote
|
ce8c32adae41b58bb075303ddf106b7b9b7c6fd9
|
7c451cd1ec743a478090e73d6ff8cbfd15d3b990
|
refs/heads/main
| 2023-07-11T19:26:07.862256
| 2021-08-22T13:47:42
| 2021-08-22T13:47:42
| 386,192,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 162
|
py
|
# Antenna
n = int(input())
houses = list(map(int,input().split()))
houses.sort()
# Find the house closest to the median position
print(houses[(n - 1) // 2])
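# Hedged worked example (added): the (lower) median house minimizes the sum of
# absolute distances. For n = 4 and houses "5 1 7 9" -> sorted [1, 5, 7, 9],
# (n - 1) // 2 == 1 and the program prints houses[1] == 5.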
|
[
"kbm4250@naver.com"
] |
kbm4250@naver.com
|
2c6463fb808f00fb69724478575d70a328acf1fe
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03478/s455409776.py
|
a8cd217bb4eecbecbc989ed96b2dec2f8f965310
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
n, a, b = map(int, input().split())
def calc_digit_sum(num):
sums = 0
while num > 0:
sums += num % 10
num //= 10
return sums
answer = 0
for num in range(1, n+1):
sums = calc_digit_sum(num)
if a <= sums <= b:
answer += num
print(answer)
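# Hedged trace (added): calc_digit_sum peels digits with % 10 and // 10, e.g.
# calc_digit_sum(123) == 3 + 2 + 1 == 6. With input "20 2 5" the loop sums
# every num in 1..20 whose digit sum lies in [2, 5]: 2, 3, 4, 5, 11, 12, 13, 14, 20.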
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f0d745d3620cb14966ac123eed3ef062ff3bfb2b
|
69e318f2b60175108bc74ee669bfe16287a71cb6
|
/plugins/modules/fortios_extender_controller_extender.py
|
f6987b6f9b43623c42497888f3cb3bc3e2f4b548
|
[] |
no_license
|
chillancezen/ansible-galaxy-fortios-collection
|
5268a5fd97fb4594772349b8d89cb818ec54b3bd
|
66a331cd4493d1b0f49798d5c2cd6ef5aeba84d3
|
refs/heads/master
| 2022-04-09T19:20:59.073193
| 2020-03-26T07:17:09
| 2020-03-26T07:17:09
| 250,185,374
| 0
| 0
| null | 2020-03-26T07:06:16
| 2020-03-26T07:06:16
| null |
UTF-8
|
Python
| false
| false
| 21,761
|
py
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_extender_controller_extender
short_description: Extender controller configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify extender_controller feature and extender category.
Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
extender_controller_extender:
description:
- Extender controller configuration.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
aaa_shared_secret:
description:
- AAA shared secret.
type: str
access_point_name:
description:
- Access point name(APN).
type: str
admin:
description:
- FortiExtender Administration (enable or disable).
type: str
choices:
- disable
- discovered
- enable
at_dial_script:
description:
- Initialization AT commands specific to the MODEM.
type: str
billing_start_day:
description:
- Billing start day.
type: int
cdma_aaa_spi:
description:
- CDMA AAA SPI.
type: str
cdma_ha_spi:
description:
- CDMA HA SPI.
type: str
cdma_nai:
description:
- NAI for CDMA MODEMS.
type: str
conn_status:
description:
- Connection status.
type: int
description:
description:
- Description.
type: str
dial_mode:
description:
- Dial mode (dial-on-demand or always-connect).
type: str
choices:
- dial-on-demand
- always-connect
dial_status:
description:
- Dial status.
type: int
ext_name:
description:
- FortiExtender name.
type: str
ha_shared_secret:
description:
- HA shared secret.
type: str
id:
description:
- FortiExtender serial number.
required: true
type: str
ifname:
description:
- FortiExtender interface name.
type: str
initiated_update:
description:
- Allow/disallow network initiated updates to the MODEM.
type: str
choices:
- enable
- disable
mode:
description:
- FortiExtender mode.
type: str
choices:
- standalone
- redundant
modem_passwd:
description:
- MODEM password.
type: str
modem_type:
description:
- MODEM type (CDMA, GSM/LTE or WIMAX).
type: str
choices:
- cdma
- gsm/lte
- wimax
multi_mode:
description:
- MODEM mode of operation(3G,LTE,etc).
type: str
choices:
- auto
- auto-3g
- force-lte
- force-3g
- force-2g
ppp_auth_protocol:
description:
- PPP authentication protocol (PAP,CHAP or auto).
type: str
choices:
- auto
- pap
- chap
ppp_echo_request:
description:
- Enable/disable PPP echo request.
type: str
choices:
- enable
- disable
ppp_password:
description:
- PPP password.
type: str
ppp_username:
description:
- PPP username.
type: str
primary_ha:
description:
- Primary HA.
type: str
quota_limit_mb:
description:
- Monthly quota limit (MB).
type: int
redial:
description:
- Number of redials allowed based on failed attempts.
type: str
choices:
- none
- 1
- 2
- 3
- 4
- 5
- 6
- 7
- 8
- 9
- 10
redundant_intf:
description:
- Redundant interface.
type: str
roaming:
description:
- Enable/disable MODEM roaming.
type: str
choices:
- enable
- disable
role:
description:
- FortiExtender work role(Primary, Secondary, None).
type: str
choices:
- none
- primary
- secondary
secondary_ha:
description:
- Secondary HA.
type: str
sim_pin:
description:
- SIM PIN.
type: str
vdom:
description:
- VDOM
type: int
wimax_auth_protocol:
description:
- WiMax authentication protocol(TLS or TTLS).
type: str
choices:
- tls
- ttls
wimax_carrier:
description:
- WiMax carrier.
type: str
wimax_realm:
description:
- WiMax realm.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Extender controller configuration.
fortios_extender_controller_extender:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
extender_controller_extender:
aaa_shared_secret: "<your_own_value>"
access_point_name: "<your_own_value>"
admin: "disable"
at_dial_script: "<your_own_value>"
billing_start_day: "7"
cdma_aaa_spi: "<your_own_value>"
cdma_ha_spi: "<your_own_value>"
cdma_nai: "<your_own_value>"
conn_status: "11"
description: "<your_own_value>"
dial_mode: "dial-on-demand"
dial_status: "14"
ext_name: "<your_own_value>"
ha_shared_secret: "<your_own_value>"
id: "17"
ifname: "<your_own_value>"
initiated_update: "enable"
mode: "standalone"
modem_passwd: "<your_own_value>"
modem_type: "cdma"
multi_mode: "auto"
ppp_auth_protocol: "auto"
ppp_echo_request: "enable"
ppp_password: "<your_own_value>"
ppp_username: "<your_own_value>"
primary_ha: "<your_own_value>"
quota_limit_mb: "29"
redial: "none"
redundant_intf: "<your_own_value>"
roaming: "enable"
role: "none"
secondary_ha: "<your_own_value>"
sim_pin: "<your_own_value>"
vdom: "36"
wimax_auth_protocol: "tls"
wimax_carrier: "<your_own_value>"
wimax_realm: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_extender_controller_extender_data(json):
option_list = ['aaa_shared_secret', 'access_point_name', 'admin',
'at_dial_script', 'billing_start_day', 'cdma_aaa_spi',
'cdma_ha_spi', 'cdma_nai', 'conn_status',
'description', 'dial_mode', 'dial_status',
'ext_name', 'ha_shared_secret', 'id',
'ifname', 'initiated_update', 'mode',
'modem_passwd', 'modem_type', 'multi_mode',
'ppp_auth_protocol', 'ppp_echo_request', 'ppp_password',
'ppp_username', 'primary_ha', 'quota_limit_mb',
'redial', 'redundant_intf', 'roaming',
'role', 'secondary_ha', 'sim_pin',
'vdom', 'wimax_auth_protocol', 'wimax_carrier',
'wimax_realm']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
# rebuild the list: rebinding the loop variable would discard converted elements
data = [underscore_to_hyphen(elem) for elem in data]
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def extender_controller_extender(data, fos):
vdom = data['vdom']
if 'state' in data and data['state']:
state = data['state']
elif 'state' in data['extender_controller_extender'] and data['extender_controller_extender']:
state = data['extender_controller_extender']['state']
else:
state = True
extender_controller_extender_data = data['extender_controller_extender']
filtered_data = underscore_to_hyphen(filter_extender_controller_extender_data(extender_controller_extender_data))
if state == "present":
return fos.set('extender-controller',
'extender',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('extender-controller',
'extender',
mkey=filtered_data['id'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_extender_controller(data, fos):
if data['extender_controller_extender']:
resp = extender_controller_extender(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"extender_controller_extender": {
"required": False, "type": "dict", "default": None,
"options": {
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"aaa_shared_secret": {"required": False, "type": "str"},
"access_point_name": {"required": False, "type": "str"},
"admin": {"required": False, "type": "str",
"choices": ["disable", "discovered", "enable"]},
"at_dial_script": {"required": False, "type": "str"},
"billing_start_day": {"required": False, "type": "int"},
"cdma_aaa_spi": {"required": False, "type": "str"},
"cdma_ha_spi": {"required": False, "type": "str"},
"cdma_nai": {"required": False, "type": "str"},
"conn_status": {"required": False, "type": "int"},
"description": {"required": False, "type": "str"},
"dial_mode": {"required": False, "type": "str",
"choices": ["dial-on-demand", "always-connect"]},
"dial_status": {"required": False, "type": "int"},
"ext_name": {"required": False, "type": "str"},
"ha_shared_secret": {"required": False, "type": "str"},
"id": {"required": True, "type": "str"},
"ifname": {"required": False, "type": "str"},
"initiated_update": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"mode": {"required": False, "type": "str",
"choices": ["standalone", "redundant"]},
"modem_passwd": {"required": False, "type": "str"},
"modem_type": {"required": False, "type": "str",
"choices": ["cdma", "gsm/lte", "wimax"]},
"multi_mode": {"required": False, "type": "str",
"choices": ["auto", "auto-3g", "force-lte",
"force-3g", "force-2g"]},
"ppp_auth_protocol": {"required": False, "type": "str",
"choices": ["auto", "pap", "chap"]},
"ppp_echo_request": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ppp_password": {"required": False, "type": "str"},
"ppp_username": {"required": False, "type": "str"},
"primary_ha": {"required": False, "type": "str"},
"quota_limit_mb": {"required": False, "type": "int"},
"redial": {"required": False, "type": "str",
"choices": ["none", "1", "2",
"3", "4", "5",
"6", "7", "8",
"9", "10"]},
"redundant_intf": {"required": False, "type": "str"},
"roaming": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"role": {"required": False, "type": "str",
"choices": ["none", "primary", "secondary"]},
"secondary_ha": {"required": False, "type": "str"},
"sim_pin": {"required": False, "type": "str"},
"vdom": {"required": False, "type": "int"},
"wimax_auth_protocol": {"required": False, "type": "str",
"choices": ["tls", "ttls"]},
"wimax_carrier": {"required": False, "type": "str"},
"wimax_realm": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_extender_controller(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_extender_controller(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
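# Hedged demo (added; values are made up): the helpers above shape the payload
# before it reaches FortiOS: filter_extender_controller_extender_data drops
# keys whose value is None, then underscore_to_hyphen rewrites the key names:
# {'ppp_password': 'x', 'sim_pin': None} -> {'ppp-password': 'x'}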
|
[
"fshen01@fortinet.com"
] |
fshen01@fortinet.com
|
f705d5d0993896d6941f93c55a6ae28bf152150e
|
301dcdc581aa4219ad09fa2066d12d8b45b84a66
|
/urlshortner/migrations/0002_url_active.py
|
2a4d5d301ff643ab7b512ecb4522c99dd236fc44
|
[] |
no_license
|
AakashiBhansali/urlshortner
|
6db5248841894459f52a872b05905d2a9d77319f
|
3c36bed51f119833faa60848967c612ff00cd3a1
|
refs/heads/master
| 2020-03-18T03:06:55.801069
| 2018-05-21T12:07:35
| 2018-05-21T12:07:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 376
|
py
|
# Generated by Django 2.0.3 on 2018-03-18 10:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('urlshortner', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='url',
name='active',
field=models.BooleanField(default=True),
),
]
|
[
"jinayshah86@gmail.com"
] |
jinayshah86@gmail.com
|
76152b15258f9dc79ca9eb3ad411f542917e9437
|
35d1b21067f0478280a6e44a0513c65a44ebf3d6
|
/autoGen/tests/__init__.py
|
4f4c91e950556306d05fb6b8af420eb391441427
|
[] |
no_license
|
ruperto7/autoGenDir
|
28113cb1f5da8fabf0b7eb60fb19ce484726b310
|
5f1727bebdb2418d15183ec65b5ff157e2a4989d
|
refs/heads/master
| 2023-04-12T05:35:37.530473
| 2021-03-03T15:17:18
| 2021-03-03T15:17:18
| 338,215,719
| 0
| 0
| null | 2021-03-03T15:17:19
| 2021-02-12T03:33:10
|
Python
|
UTF-8
|
Python
| false
| false
| 248
|
py
|
from .notes27_jan import Notes27JanDeleteTestCase
from .notes27_jan import Notes27JanUpdateTestCase
from .notes27_jan import Notes27JanDetailTestCase
from .notes27_jan import Notes27JanCreateTestCase
from .notes27_jan import Notes27JanListTestCase
|
[
"totoarrieta@yahoo.com"
] |
totoarrieta@yahoo.com
|
2b1a48ebdabf626c00ebda91ba063c75a43f5145
|
9b3a433af0548421808f557c08a8cd5c8c7f2d9e
|
/gdb/repos/pwndbg/tests/gdb-tests/tests/test_command_vmmap.py
|
568378b13ffe1eaa4e7c14dee98c7f5d299fb142
|
[
"MIT"
] |
permissive
|
thawk/dotfiles
|
9984338c18c34f25343d8e8682989bf193f0045b
|
36fd9453da56ebcbce66382001c7d6254f603078
|
refs/heads/master
| 2023-08-29T07:00:10.873545
| 2023-08-09T05:42:52
| 2023-08-09T05:42:52
| 31,524,933
| 5
| 3
| null | 2023-07-20T11:58:40
| 2015-03-02T05:33:30
|
Python
|
UTF-8
|
Python
| false
| false
| 6,501
|
py
|
import tempfile
import gdb
import pytest
import pwndbg
import tests
CRASH_SIMPLE_BINARY = tests.binaries.get("crash_simple.out.hardcoded")
def get_proc_maps():
"""
Example info proc mappings:
pwndbg> info proc mappings
process 26781
Mapped address spaces:
Start Addr End Addr Size Offset objfile
0x400000 0x401000 0x1000 0x0 /opt/pwndbg/tests/gdb-tests/tests/binaries/crash_simple.out
0x7ffff7ffa000 0x7ffff7ffd000 0x3000 0x0 [vvar]
0x7ffff7ffd000 0x7ffff7fff000 0x2000 0x0 [vdso]
0x7ffffffde000 0x7ffffffff000 0x21000 0x0 [stack]
0xffffffffff600000 0xffffffffff601000 0x1000 0x0 [vsyscall]
"""
maps = []
# Note: info proc mappings may not have permissions information,
# so we get it here and fill from `perms`
with open("/proc/%d/maps" % pwndbg.gdblib.proc.pid, "r") as f:
for line in f.read().splitlines():
addrs, perms, offset, _dev, _inode, objfile = line.split(maxsplit=5)  # address perms offset dev inode pathname
start, end = map(lambda v: int(v, 16), addrs.split("-"))
offset = offset.lstrip("0") or "0"
size = end - start
maps.append([hex(start), hex(end), perms, hex(size)[2:], offset, objfile])
maps.sort()
return maps
@pytest.mark.parametrize("unload_file", (False, True))
def test_command_vmmap_on_coredump_on_crash_simple_binary(start_binary, unload_file):
"""
Example vmmap when debugging binary:
LEGEND: STACK | HEAP | CODE | DATA | RWX | RODATA
0x400000 0x401000 r-xp 1000 0 /opt/pwndbg/tests/gdb-tests/tests/binaries/crash_simple.out
0x7ffff7ffa000 0x7ffff7ffd000 r--p 3000 0 [vvar]
0x7ffff7ffd000 0x7ffff7fff000 r-xp 2000 0 [vdso]
0x7ffffffde000 0x7ffffffff000 rwxp 21000 0 [stack]
0xffffffffff600000 0xffffffffff601000 r-xp 1000 0 [vsyscall]
The same vmmap when debugging coredump:
LEGEND: STACK | HEAP | CODE | DATA | RWX | RODATA
0x400000 0x401000 r-xp 1000 0 /opt/pwndbg/tests/gdb-tests/tests/binaries/crash_simple.out
0x7ffff7ffd000 0x7ffff7fff000 r-xp 2000 1158 load2
0x7ffffffde000 0x7ffffffff000 rwxp 21000 3158 [stack]
0xffffffffff600000 0xffffffffff601000 r-xp 1000 24158 [vsyscall]
Note that for a core-file, we display the [vdso] page as load2 and we are missing the [vvar] page.
This is... how it is. It just seems that core files (at least those I met) have no info about
the vvar page and also GDB can't access the [vvar] memory with its x/ command during core debugging.
"""
start_binary(CRASH_SIMPLE_BINARY)
# Trigger binary crash
gdb.execute("continue")
expected_maps = get_proc_maps()
vmmaps = gdb.execute("vmmap", to_string=True).splitlines()
# Basic asserts
assert len(vmmaps) == len(expected_maps) + 2 # +2 for header and legend
assert vmmaps[0] == "LEGEND: STACK | HEAP | CODE | DATA | RWX | RODATA"
# Split vmmaps
vmmaps = [i.split() for i in vmmaps[2:]]
# Assert that vmmap output matches expected one
assert vmmaps == expected_maps
# Now, generate core file, so we can then test coredump vmmap
core = tempfile.mktemp()
gdb.execute("generate-core-file %s" % core)
# The test should work fine even if we unload the original binary
if unload_file:
gdb.execute("file")
#### TEST COREDUMP VMMAP
# Now, let's load the generated core file
gdb.execute("core-file %s" % core)
old_len_vmmaps = len(vmmaps)
vmmaps = gdb.execute("vmmap", to_string=True).splitlines()
# Note: we will now see one less vmmap page as [vvar] will be missing
assert vmmaps[0] == "LEGEND: STACK | HEAP | CODE | DATA | RWX | RODATA"
vmmaps = [i.split() for i in vmmaps[2:]]
has_proc_maps = "warning: unable to find mappings in core file" not in gdb.execute(
"info proc mappings", to_string=True
)
if has_proc_maps:
assert len(vmmaps) == old_len_vmmaps - 1
else:
# E.g. on Debian 10 with GDB 8.2.1 the core dump does not contain mappings info
assert len(vmmaps) == old_len_vmmaps - 2
binary_map = next(i for i in expected_maps if CRASH_SIMPLE_BINARY in i[-1])
expected_maps.remove(binary_map)
# Fix up expected maps
next(i for i in expected_maps if i[-1] == "[vdso]")[-1] = "load2"
vvar_map = next(i for i in expected_maps if i[-1] == "[vvar]")
expected_maps.remove(vvar_map)
def assert_maps():
for vmmap, expected_map in zip(vmmaps, expected_maps):
# On different Ubuntu versions, we end up with different results
# Ubuntu 18.04: vmmap.objfile for binary vmmap has binary file path
# Ubuntu 22.04: the same vmmap is named as 'loadX'
# The difference comes from the fact that the `info proc mappings`
# command returns different results on the two.
# It may be a result of different test binary compilation or
# just a difference between GDB versions
#
# Another difference may occur for the vsyscall memory page:
# on Ubuntu 22.04, while vsyscall is ---xp during debugging
# it becomes r-xp and can be readable when we target the coredump
# Likely, this is because on x86/x64 you can't set memory to be
# eXecute only, and maybe generate-core-file was able to dump it?
if vmmap[-1] == expected_map[-1] == "[vsyscall]":
assert vmmap[:2] == expected_map[:2] # start, end
assert vmmap[3] == expected_map[3] or vmmap[3] in ("r-xp", "--xp")
assert vmmap[4:] == expected_map[4:]
continue
assert vmmap[:-1] == expected_map[:-1]
if vmmap[-1].startswith("load"):
continue
assert vmmap[-1] == expected_map[-1]
assert_maps()
# Now also make sure that everything works fine if we remove
# file symbols information from GDB; during writing this test
# a bug with this popped out, so I am double checking it here
gdb.execute("file")
vmmaps = gdb.execute("vmmap", to_string=True).splitlines()
vmmaps = [i.split() for i in vmmaps[2:]]
assert_maps()
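# Hedged worked example (added): a /proc/<pid>/maps line such as
#   "00400000-00401000 r-xp 00000000 08:01 123  /tmp/crash_simple.out"
# splits into address range, perms, offset, dev, inode and pathname;
# get_proc_maps recomputes size as end - start and normalizes the offset,
# yielding ['0x400000', '0x401000', 'r-xp', '1000', '0', '/tmp/crash_simple.out'].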
|
[
"thawk009@gmail.com"
] |
thawk009@gmail.com
|
631dd4555b098d0170d537dd8b76a065c312b23a
|
13c02796a22a41572633378f4c20f0b37d90ba8f
|
/setup.py
|
0f10028adc32767440e05cf2c327dc755e06d489
|
[] |
no_license
|
Cromlech/dolmen.forms.table
|
97df1d647ab45c24ca3259a815ed897881762dc1
|
c17f0b6d96d92249e7549648bc8df745ed94c785
|
refs/heads/master
| 2016-09-06T15:10:21.638189
| 2014-03-13T15:20:09
| 2014-03-13T15:20:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,495
|
py
|
# -*- coding: utf-8 -*-
from os.path import join
from setuptools import setup, find_packages
name = 'dolmen.forms.table'
version = '2.2.1'
readme = open(join('src', 'dolmen', 'forms', 'table', 'README.txt')).read()
history = open(join('docs', 'HISTORY.txt')).read()
install_requires = [
'cromlech.browser >= 0.5',
'cromlech.i18n',
'dolmen.batch >= 0.2',
'dolmen.forms.base >= 2.4',
'dolmen.forms.composed',
'dolmen.template',
'grokcore.component',
'setuptools',
'zope.component',
'zope.interface',
'zope.location',
]
tests_require = [
'cromlech.browser [test]',
'dolmen.location >= 0.2',
'zope.testing',
]
setup(name=name,
version=version,
description="Form as table, to edit more than one content at a time",
long_description=readme + '\n\n' + history,
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='Dolmen form table',
author='The Dolmen Team',
author_email='dolmen@list.dolmen-project.org',
url='http://pypi.python.org/pypi/dolmen.forms.table',
license='BSD',
package_dir={'': 'src'},
packages=find_packages('src'),
namespace_packages=['dolmen', 'dolmen.forms'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
)
|
[
"trollfot@gmail.com"
] |
trollfot@gmail.com
|
b7c35700fe1212856f5a143e9556c752153998f9
|
5a9b75f3a6dab10ebf81596b16142e686045e492
|
/Code/run_exp_new2.py
|
4b9e512209568f8964f0d60b945e53aa64230ca2
|
[] |
no_license
|
135790zs/rsnn
|
91d879be0b219b75a9b828358cdc1b5b8ec0ed8d
|
e514f3ae69ecc232a477af76fc185f9f296a5acc
|
refs/heads/master
| 2023-03-25T03:29:33.120343
| 2021-01-20T18:39:34
| 2021-01-20T18:39:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,874
|
py
|
import sys
#TODO: path
sys.path.append('.')
import torch
import torch.nn as nn
import torch.optim as optim
from torchvision.datasets import MNIST
from torchvision import transforms
import time
from collections import OrderedDict
from torch.utils.data import DataLoader
import json
run_id = sys.argv[1]
#print(sys.argv)
with open('configs/'+run_id+'.json', 'r') as config_file:
config = json.load(config_file)
spec = config['params']
BATCH_SIZE = spec['batch_size']
device = torch.device('cuda')
mnist = MNIST('.', transform=transforms.ToTensor(), download=True) #distortion_transform([0,15], 3)
test = MNIST('.', transform=transforms.ToTensor(), train=False)
data_loader = DataLoader(mnist, batch_size=BATCH_SIZE, drop_last=True, num_workers=0, shuffle=True)
test_loader = DataLoader(test, batch_size=BATCH_SIZE, drop_last=False, num_workers=0)
sample_settings = {
'spkfn' : 'bellec',
'architecture': '1L',
'beta': 0.95,
'control_neuron': 'LIF',
'mem_neuron' : 'Adaptive',
'lr' : 1e-2,
'lr_decay': 0.8,
'1-beta': True,
'decay_out': True,
}
from Code.everything4 import ParallelNetwork2, MeanModule, SequenceWrapper, BaseNeuron, OuterWrapper, DynNetwork,\
CooldownNeuron, OutputNeuron, LIFNeuron, NoResetNeuron, AdaptiveNeuron, FlipFlopNeuron, SeqOnlySpike
built_config = {
'BETA': spec['beta'],
'OFFSET': 7, # was 3 for config24
'SPIKE_FN': spec['spkfn'],
'1-beta': spec['1-beta'],
'ADAPDECAY': 0.9985,
'ADAPSCALE': 180
}
mem_config = {
**built_config,
'BETA': spec['mem_beta']
}
#TODO: revert this
n_control = 220#120
n_mem = 1#100
input_rate = 0.03
n_input = 80+28+30
control_lookup = {
'LIF': LIFNeuron,
'Disc': SeqOnlySpike,
'NoReset': NoResetNeuron
}
mem_lookup = {
'Adaptive': AdaptiveNeuron,
'Cooldown': CooldownNeuron,
'NoReset': NoResetNeuron,
'FlipFlop': FlipFlopNeuron
}
control_neuron = control_lookup[spec['control_neuron']](n_control, built_config)
mem_neuron = mem_lookup[spec['mem_neuron']](n_mem, mem_config)
out_neuron = OutputNeuron(n_control+n_mem, built_config) if spec['decay_out'] else BaseNeuron(n_control+n_mem, built_config)
loop_2L = OrderedDict([
('input', (n_input, input_rate)),
('control', [['input', 'mem'], control_neuron, nn.Linear]),
('mem', [['control'], mem_neuron, nn.Linear]),
('output', [['control', 'mem'], out_neuron, None]),
])
loop_1L = OrderedDict([
('input', (n_input, input_rate)),
('control', [['input', 'control', 'mem'], control_neuron, nn.Linear]),
('mem', [['input', 'control', 'mem'], mem_neuron, nn.Linear]),
('output', [['control', 'mem'], out_neuron, None]),
])
loop = loop_1L if spec['architecture'] == '1L' else loop_2L
outer = OrderedDict([
('input', n_input),
('loop', [['input'], SequenceWrapper(ParallelNetwork2(loop)), None]),
('mean', [['loop'], MeanModule(n_control+n_mem, -56), None]),
('output', [['mean'], BaseNeuron(10, None), nn.Linear]),
])
model = OuterWrapper(DynNetwork(outer), device)
#loop_model = OuterWrapper(make_SequenceWrapper(ParallelNetwork(loop), USE_JIT), device, USE_JIT)
#final_linear = nn.Linear(n_control+n_mem, 10).to(device)
'''
if spec['ported_weights']:
o_weights = pickle.load(open('weight_transplant_enc', 'rb'))
o1 = torch.tensor(o_weights['RecWeights/RecurrentWeight:0']).t()
o2 = torch.tensor(o_weights['InputWeights/InputWeight:0']).t()
o3 = torch.cat((o2, o1), dim=1)
with torch.no_grad():
model.pretrace.layers.loop.model.layers.control_synapse.weight.data[:,:300] = o3[:120] if spec['architecture'] == '1L' else o3[:120, :181]
model.pretrace.layers.loop.model.layers.mem_synapse.weight.data[:,:300] = o3[120:] if spec['architecture'] == '1L' else o3[120:, 180:]
model.pretrace.layers.output_synapse.weight.data = torch.tensor(o_weights['out_weight:0']).t()
'''
params = list(model.parameters())
'''
if spec['NoBias']:
with torch.no_grad():
model.pretrace.layers.loop.model.layers.control_synapse.bias *= 0
model.pretrace.layers.loop.model.layers.mem_synapse.bias *= 0
model.pretrace.layers.output_synapse.bias *= 0
params = [model.pretrace.layers.loop.model.layers.control_synapse.weight,
model.pretrace.layers.loop.model.layers.mem_synapse.weight, model.pretrace.layers.output_synapse.bias,
model.pretrace.layers.output_synapse.weight]
if spec['NoBias'] == 'addBias':
params += [model.pretrace.layers.loop.model.layers.control_synapse.bias,
model.pretrace.layers.loop.model.layers.mem_synapse.bias]
'''
model.to(device)
lr = spec['lr']
optimizer = optim.Adam(params, lr=lr)
ce = nn.CrossEntropyLoss()
ITERATIONS = spec['iterations']#36000
'''
#TODO: check correctness here
with torch.no_grad():
for i in range(100):
loop_model.pretrace.model.layers.mem_synapse.weight[i, i+201] = 0
for i in range(120):
loop_model.pretrace.model.layers.control_synapse.weight[i, i+81] = 0
'''
with torch.no_grad():
rythm = torch.diag(torch.ones([28], device=device))
rythm = rythm.expand(30, 28, 28).reshape(30 * 28, 1, 28)[1:]
rythm2 = torch.diag(torch.ones([30], device=device))
rythm2 = rythm2.view(30, 1, 30).expand(30, 28, 30).reshape(30 * 28, 1, 30)[1:]
#trigger_signal = torch.ones([783+56, 1, 1], device=device)
#trigger_signal[:783] = 0
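# Encoding note: this appears to be a threshold-crossing population code.
# For each of 40 intensity thresholds, channel 0 fires when the pixel value
# falls across that threshold between consecutive pixels (curr < k/40 < last)
# and channel 1 fires when it rises across it. The `rythm` and `rythm2`
# blocks concatenated below are one-hot timing signals over the 783+56 steps.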
def encode_input(curr, last):
out = torch.zeros([783+56, curr.shape[1], 2,40], device=curr.device)
out[:783, :, 0, :] = ((torch.arange(40, device=curr.device) < 40 * last) & (torch.arange(40, device=curr.device) > 40 * curr)).float()
out[:783, :, 1, :] = ((torch.arange(40, device=curr.device) > 40 * last) & (torch.arange(40, device=curr.device) < 40 * curr)).float()
#out = torch.cat((out.view([783+56, curr.shape[1], 80]), trigger_signal.expand([783+56, curr.shape[1], 1])), dim=-1)
out = torch.cat((out.view([783+56, curr.shape[1], 80]), rythm.expand([783+56, curr.shape[1], 28]), rythm2.expand([783+56, curr.shape[1], 30])), dim=-1)
return out
stats = {
'grad_norm': [],
'loss': [],
'acc': [],
'batch_var': [],
'val': []
}
grad_norm_history = []
def record_norm():
norms = []
for p in params:
norms.append(p.grad.norm().item())
stats['grad_norm'].append(torch.tensor(norms).norm().item())
def validate():
with torch.no_grad():
i = 0
acc = 0
for inp, target in test_loader:
x = inp.view(inp.shape[0], -1, 1).transpose(0, 1).to(device)
x = encode_input(x[1:], x[:-1])
target = target.to(device)
outputs, _ = model(x)
choice = torch.argmax(outputs, 1)
acc += (choice == target).float().mean()
i += 1
stats['val'].append((acc/i).item())
#print('Acc: ' + str(acc / i))
start = time.time()
i = 1
sumloss = 0
sumacc = 0
k = 0
while i < ITERATIONS:
print('Epoch: ', k)
k = k + 1
validate()
for inp, target in data_loader:
batchstart = time.time()
x = inp.view(BATCH_SIZE, -1, 1).transpose(0,1).to(device)
x = encode_input(x[1:], x[:-1])
#print(x.shape)
target = target.to(device)
optimizer.zero_grad()
out_final, _ = model(x)
#meaned = outputs[-56:].mean(dim=0) #TODO: what is this value really in bellec?
#out_final = final_linear(meaned)
#test_norm = out_final.norm().item()
loss = ce(out_final, target)
loss.backward()
optimizer.step()
with torch.no_grad():
record_norm()
stats['loss'].append(loss.item())
acc = (torch.argmax(out_final, 1) == target).float().mean().item()
stats['acc'].append(acc)
batch_var = out_final.var(0).mean().item()
stats['batch_var'].append(batch_var)
#print(loss.item(), acc, batch_var, test_norm, loop_model.pretrace.model.layers.control_synapse.weight.grad.norm().item(), target[0].item())
sumloss += loss.item()
sumacc += acc
if i%20 == 0:
print(loss.item(), sumloss/20, sumacc/20, time.time()-batchstart, batch_var) #torch.argmax(outputs[-1], 1).float().var()
sumloss = 0
sumacc = 0
if i%2500 == 0:
lr = lr * spec['lr_decay']
optimizer = optim.Adam(params, lr=lr)
print('Learning Rate: ', lr)
i += 1
#pickle.dump(stats, open('stats', 'wb'))
config['stats'] = stats
config['progress'] = i
#config['mem_req'] = torch.cuda.max_memory_allocated()
with open('configs/' + run_id + '.json', 'w') as config_file:
json.dump(config, config_file, indent=2)
model.save('models/'+run_id)
#post_model.save('../../models/post_big11_'+str(k))
print('Total time: ', time.time()-start)
|
[
"eric.koepke@tum.de"
] |
eric.koepke@tum.de
|
c7077f36875fba1bcb9961a40222ec01ebf28678
|
f60b0c051d8ba5088dc4246679b870f577646bb0
|
/172 Fri, 04 May 2012 12:27:33.py
|
5af608982a8030ed68109044a3103c2c4798d26f
|
[] |
no_license
|
joopeed/lp1
|
bbd11fe7749356828a16fc45703e010db5d35464
|
117bf769a048ec1dff53f779b26c9e7adec052ba
|
refs/heads/master
| 2021-01-02T22:50:08.600553
| 2014-04-03T21:15:40
| 2014-04-03T21:15:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 472
|
py
|
# Date: Fri, 04 May 2012 12:27:33 +0000
# Question 172
# coding: utf-8
# João Pedro Leôncio
# Student ID: 21211940
# Programming 1 - UFCG 2012.1
def inverte3a3(s):
    invertida = list(s[::-1])
    # reverse the string and turn it into a list so we can manipulate it
    for i in range(1, len(s), 3):
        invertida[i-1], invertida[i+1] = invertida[i+1], invertida[i-1]
        # reverse each 3-character substring
    return "".join(invertida)  # return the result as a string
|
[
"joopeeds@gmail.com"
] |
joopeeds@gmail.com
|
c430f57c14f8149b37d29047f1f155e0e62e1136
|
be4d3e2bb1d64f691741e7cc69981b80ba1209ec
|
/runserver.py
|
74d369a51b308203f647715d4faf249741cf667b
|
[
"MIT"
] |
permissive
|
rudrapakav58/angular_flask
|
53216c3b1f81c8607da845cf2b17eac8019a9d70
|
bbc05565f9ea26eeb3c5dacfb9f4380808b7f540
|
refs/heads/master
| 2020-04-09T07:59:23.278647
| 2018-12-03T13:01:45
| 2018-12-03T13:01:45
| 160,178,298
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 184
|
py
|
import os
from angular_flask import app
def runserver():
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
if __name__ == '__main__':
runserver()
|
[
"rudra@sanctusit.com"
] |
rudra@sanctusit.com
|
56718630e14ea0fc4877975d520aa034b7e1b620
|
6d18e674d8c08131247642157bca1240b6473e1e
|
/product/serializers.py
|
2e289d090d90c5b62ce46f3c654c0a6489f8723f
|
[] |
no_license
|
Authurton/e-commerce-Django-backend-
|
feb3d58b464126eaeedbeaccad60f331c8811363
|
305f0cf107050a29662b94aa6639032fbe717c44
|
refs/heads/master
| 2023-06-09T08:21:01.710537
| 2021-06-21T11:22:14
| 2021-06-21T11:22:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 705
|
py
|
from rest_framework import serializers
from .models import Category, Product
class ProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = (
"id",
"name",
"get_absolute_url",
"description",
"price",
"get_image",
"get_thumbnail"
)
class CategorySerializer(serializers.ModelSerializer):
products = ProductSerializer(many=True)
class Meta:
model = Category
fields = (
"id",
"name",
"get_absolute_url",
"products",
)
|
[
"Authurlunga@gmail.com"
] |
Authurlunga@gmail.com
|
a5695cc2dc47790abf2fb0cf6c6c1970070d4a36
|
9135a1c5bcd20f77971085496d9e966d892fb7e9
|
/easyScript/storePassword/python/storePassword.py
|
3b93a969c84bab73e73b5bf68e4359ab4f7dae53
|
[
"Apache-2.0"
] |
permissive
|
bseltz-cohesity/scripts
|
cc54b2b3534175562b0d9cfba85bd63aa5ca2346
|
53c4b057bb4f41ae079fc8236caacf13fd35c10e
|
refs/heads/master
| 2023-08-23T13:13:12.169724
| 2023-08-22T13:21:22
| 2023-08-22T13:21:22
| 142,414,700
| 83
| 44
|
Apache-2.0
| 2023-08-24T11:42:22
| 2018-07-26T08:50:47
|
PowerShell
|
UTF-8
|
Python
| false
| false
| 703
|
py
|
#!/usr/bin/env python
from pyhesity import *
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--vip', type=str, default='helios.cohesity.com')
parser.add_argument('-u', '--username', type=str, default='helios')
parser.add_argument('-d', '--domain', type=str, default='local')
parser.add_argument('-p', '--password', type=str, default=None)
args = parser.parse_args()
vip = args.vip # cluster name/ip
username = args.username # username to connect to cluster
domain = args.domain # domain of username (e.g. local, or AD domain)
password = args.password # password to store
setpwd(v=vip, u=username, d=domain, password=password)
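# Example invocation (values below are illustrative, not from any docs):
#   ./storePassword.py -v mycluster -u admin -d local -p 'Sw0rdF1sh'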
|
[
"bseltzer@cohesity.com"
] |
bseltzer@cohesity.com
|
c06c18d951be044a735f4f88843ab004913b9f5e
|
a779da38efccb47658db1f7fddd961a4fa165901
|
/analysis/week_tweets_test.py
|
3940d4bf548c4cbfdc605ea97d9010b27931fe41
|
[] |
no_license
|
Sofiia2001/Coursework_research
|
0685e3e164694cdac4a392d495ad99dfb309728f
|
d546536d437030734ae654e7684503fc2497c985
|
refs/heads/master
| 2020-04-24T15:28:44.622845
| 2019-05-14T16:54:09
| 2019-05-14T16:54:09
| 172,069,336
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,796
|
py
|
from unittest import TestCase
from week_tweets import WeekTweets
import unittest
class TestWeekTweets(TestCase):
def setUp(self):
self.my_week = WeekTweets('08.04')
self.wrong_week = WeekTweets('31.65')
self.ending_month_week = WeekTweets('30.01')
self.week_1 = WeekTweets('22.04')
self.week_2 = WeekTweets('01.05')
def test_select_week(self):
self.my_week.select_week()
self.assertEqual(self.my_week._week_list, ['2019-04-08', '2019-04-09',
'2019-04-10', '2019-04-11',
'2019-04-12', '2019-04-13', '2019-04-14'])
self.assertEqual(self.wrong_week.select_week(), 'This date does not exist')
def test_select_week_from_end(self):
self.ending_month_week.select_week()
self.assertEqual(self.ending_month_week._week_list, ['2019-01-28', '2019-01-29',
'2019-01-30', '2019-01-31',
'2019-02-01', '2019-02-02',
'2019-02-03'])
def test_maximum_tweets_day(self):
self.assertEqual(self.my_week.maximum_tweets_day(), 'Tuesday')
def test_tweets_for_weekday(self):
self.assertEqual(self.my_week._tweets_for_week_day()['Monday'],
{'general': 2261,
'dates':
{'2019-04-08', '2019-04-29', '2019-04-22', '2019-03-25'},
'posts': {}})
def test_tweets_for_week(self):
self.week_1.tweets_for_week()
self.assertTrue(self.week_1._tweets_week_dict == {'2019-04-22': 671, '2019-04-23': 580,
'2019-04-24': 1002, '2019-04-25': 1133,
'2019-04-26': 960, '2019-04-27': 801,
'2019-04-28': 935})
self.week_2.tweets_for_week()
self.assertTrue(self.week_2._tweets_week_dict == {'2019-04-29': 1163, '2019-04-30': 1153,
'2019-05-01': 1106, '2019-05-02': 909,
'2019-05-03': 1023, '2019-05-04': 117,
'2019-05-05': 0})
def test_find_week_day(self):
self.week_1.tweets_for_week()
self.week_2.tweets_for_week()
self.assertEqual(self.week_1.find_week_day(), 'Thursday')
self.assertEqual(self.week_2.find_week_day(), 'Monday')
if __name__ == '__main__':
unittest.main()
|
[
"723405Ss"
] |
723405Ss
|
791225936567507d75c2dc5130e873969750ced7
|
cde4501b9cdb6675020eaf4e03c6d5e4f237050b
|
/test/series_test.py
|
685164abf6f11192f77a089582bfb07571fc8d55
|
[
"MIT"
] |
permissive
|
vanessagraber/bilby
|
edc955f7abbb87d9749577e1460a8edbd1aea7fe
|
80ee2d123a913d881f2a790b04e2939c46584d27
|
refs/heads/master
| 2020-04-08T21:32:23.908339
| 2018-11-29T00:02:52
| 2018-11-29T00:02:52
| 159,748,097
| 0
| 0
|
MIT
| 2018-11-30T00:58:53
| 2018-11-30T00:58:53
| null |
UTF-8
|
Python
| false
| false
| 4,657
|
py
|
from __future__ import absolute_import
import unittest
import bilby
import numpy as np
class TestCoupledTimeAndFrequencySeries(unittest.TestCase):
def setUp(self):
self.duration = 2
self.sampling_frequency = 4096
self.start_time = -1
self.series = bilby.gw.series.CoupledTimeAndFrequencySeries(duration=self.duration,
sampling_frequency=self.sampling_frequency,
start_time=self.start_time)
def tearDown(self):
del self.duration
del self.sampling_frequency
del self.start_time
del self.series
def test_repr(self):
expected = 'CoupledTimeAndFrequencySeries(duration={}, sampling_frequency={}, start_time={})'\
.format(self.series.duration,
self.series.sampling_frequency,
self.series.start_time)
self.assertEqual(expected, repr(self.series))
def test_duration_from_init(self):
self.assertEqual(self.duration, self.series.duration)
def test_sampling_from_init(self):
self.assertEqual(self.sampling_frequency, self.series.sampling_frequency)
def test_start_time_from_init(self):
self.assertEqual(self.start_time, self.series.start_time)
def test_frequency_array_type(self):
self.assertIsInstance(self.series.frequency_array, np.ndarray)
def test_time_array_type(self):
self.assertIsInstance(self.series.time_array, np.ndarray)
def test_frequency_array_from_init(self):
expected = bilby.core.utils.create_frequency_series(sampling_frequency=self.sampling_frequency,
duration=self.duration)
self.assertTrue(np.array_equal(expected, self.series.frequency_array))
def test_time_array_from_init(self):
expected = bilby.core.utils.create_time_series(sampling_frequency=self.sampling_frequency,
duration=self.duration,
starting_time=self.start_time)
self.assertTrue(np.array_equal(expected, self.series.time_array))
def test_frequency_array_setter(self):
new_sampling_frequency = 100
new_duration = 3
new_frequency_array = bilby.core.utils.create_frequency_series(sampling_frequency=new_sampling_frequency,
duration=new_duration)
self.series.frequency_array = new_frequency_array
self.assertTrue(np.array_equal(new_frequency_array, self.series.frequency_array))
self.assertLessEqual(np.abs(new_sampling_frequency - self.series.sampling_frequency), 1)
self.assertAlmostEqual(new_duration, self.series.duration)
self.assertAlmostEqual(self.start_time, self.series.start_time)
def test_time_array_setter(self):
new_sampling_frequency = 100
new_duration = 3
new_start_time = 4
new_time_array = bilby.core.utils.create_time_series(sampling_frequency=new_sampling_frequency,
duration=new_duration,
starting_time=new_start_time)
self.series.time_array = new_time_array
self.assertTrue(np.array_equal(new_time_array, self.series.time_array))
self.assertAlmostEqual(new_sampling_frequency, self.series.sampling_frequency, places=1)
self.assertAlmostEqual(new_duration, self.series.duration, places=1)
self.assertAlmostEqual(new_start_time, self.series.start_time, places=1)
def test_time_array_without_sampling_frequency(self):
self.series.sampling_frequency = None
self.series.duration = 4
with self.assertRaises(ValueError):
test = self.series.time_array
def test_time_array_without_duration(self):
self.series.sampling_frequency = 4096
self.series.duration = None
with self.assertRaises(ValueError):
test = self.series.time_array
def test_frequency_array_without_sampling_frequency(self):
self.series.sampling_frequency = None
self.series.duration = 4
with self.assertRaises(ValueError):
test = self.series.frequency_array
def test_frequency_array_without_duration(self):
self.series.sampling_frequency = 4096
self.series.duration = None
with self.assertRaises(ValueError):
test = self.series.frequency_array
|
[
"email@moritz-huebner.de"
] |
email@moritz-huebner.de
|
28f5dfeccf6456eaa3b217dc5f9313a1ab5ff7b1
|
b3950a2a6912c9b494d22b9353322c3357df0110
|
/tock/employees/migrations/0025_userdata_billable_expectation.py
|
da8842ab66c628595d88cdd31a26a899ef31febf
|
[
"CC0-1.0",
"LicenseRef-scancode-public-domain",
"MIT"
] |
permissive
|
18F/tock
|
df1fa5e817e690ce0bff315a15799e2f78915882
|
99005d8f6c4605a69fbb620c41f38447ecbee459
|
refs/heads/main
| 2023-08-31T01:34:55.299577
| 2023-08-23T18:49:10
| 2023-08-23T18:49:10
| 30,162,008
| 135
| 50
|
NOASSERTION
| 2023-09-07T18:40:30
| 2015-02-01T22:19:32
|
Python
|
UTF-8
|
Python
| false
| false
| 615
|
py
|
# Generated by Django 2.2.7 on 2019-12-19 14:25
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('employees', '0024_auto_20171229_1156'),
]
operations = [
migrations.AddField(
model_name='userdata',
name='billable_expectation',
field=models.DecimalField(decimal_places=2, default=0.8, max_digits=3, validators=[django.core.validators.MaxValueValidator(limit_value=1)], verbose_name='Percentage of hours which are expected to be billable each week'),
),
]
|
[
"joseph.krzystan@gsa.gov"
] |
joseph.krzystan@gsa.gov
|
707758dcec3f0ee4f884ca88cb1907c569e5b374
|
040515a2fa081bd98d64da726c2700146f15bc61
|
/utils/sentence2windows.py
|
129d1a87c2ce4d0cadd94ff365aceaaf687e4531
|
[] |
no_license
|
daniele-sartiano/DeepWebClassifier
|
c3cf264996eacfd0dbd09da66a833a6a4051d39b
|
16d1168b3bec373956281ec94f7efa5f038605ea
|
refs/heads/master
| 2021-01-11T07:48:31.975861
| 2017-09-28T07:36:59
| 2017-09-28T07:36:59
| 72,100,257
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 497
|
py
|
#!/usr/bin/env python
import sys
import math
def windows(sentence, n=3, end='<END>'):
for token in sentence.split():
s = []
for i in xrange(0, int(math.ceil(len(token)/float(n)))):
s.append(token[i*n:(i+1)*n])
s[-1] = '%s%s' % (s[-1], end)
yield s
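# Example: list(windows('abcdefg')) -> [['abc', 'def', 'g<END>']]
# Each token is chopped into n-character windows and the final window is
# tagged with the end marker.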
def main():
for sentence in sys.stdin:
for el in windows(sentence):
print ' '.join(el),
print
if __name__ == '__main__':
main()
|
[
"daniele.sartiano@gmail.com"
] |
daniele.sartiano@gmail.com
|
8d1d57e3f0a63f0f45c57ffc9828afb89d696d33
|
04d09687e96c92d901ecf70de3328499af57bc4b
|
/30_repeat/repeat.py
|
17d5362bdaeaace84e1a1a01c6a8f35f7b6e3988
|
[] |
no_license
|
hauqxngo/PythonDataStructures
|
3307b6793270bbbc9918f00513aea6b690ec9265
|
37742a5d5df09379f7dc445527a898bccd21781e
|
refs/heads/main
| 2023-06-05T14:42:31.314046
| 2021-06-24T06:38:46
| 2021-06-24T06:38:46
| 379,828,112
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 468
|
py
|
def repeat(phrase, num):
"""Return phrase, repeated num times.
>>> repeat('*', 3)
'***'
>>> repeat('abc', 2)
'abcabc'
>>> repeat('abc', 0)
''
Ignore illegal values of num and return None:
>>> repeat('abc', -1) is None
True
>>> repeat('abc', 'nope') is None
True
"""
if not isinstance(num, int) or num < 0:
return None
else:
return phrase * num
|
[
"hngo1227@gmail.com"
] |
hngo1227@gmail.com
|
e210f5244e6408c5d3ea387244c46ba0dbe0cca9
|
14863d8114f46867eca9eb8b3a4713babdd68976
|
/website-env/Lib/site-packages/cms/tests/test_rendering.py
|
a13f07fb5a3a7440ca0cce7e5f286b942e51dbc1
|
[] |
no_license
|
adrianwizowski/WebSite
|
02c690e424fd4f533fb7d8569ed0cc097bbb6695
|
b8d62d9c27a06200c97caa3698018ed69343c85c
|
refs/heads/master
| 2021-07-01T22:06:03.913197
| 2018-04-28T17:30:11
| 2018-04-28T17:30:11
| 96,202,643
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 26,405
|
py
|
# -*- coding: utf-8 -*-
from django.core.cache import cache
from django.test.utils import override_settings
from sekizai.context import SekizaiContext
from cms import plugin_rendering
from cms.api import create_page, add_plugin
from cms.cache.placeholder import get_placeholder_cache
from cms.models import Page, Placeholder, CMSPlugin
from cms.plugin_rendering import PluginContext
from cms.test_utils.project.placeholderapp.models import Example1
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.toolbar.toolbar import CMSToolbar
from cms.views import details
TEMPLATE_NAME = 'tests/rendering/base.html'
INHERIT_TEMPLATE_NAME = 'tests/rendering/inherit.html'
def sample_plugin_processor(instance, placeholder, rendered_content, original_context):
original_context_var = original_context['original_context_var']
return '%s|test_plugin_processor_ok|%s|%s|%s' % (
rendered_content,
instance.body,
placeholder.slot,
original_context_var
)
def sample_plugin_context_processor(instance, placeholder, original_context):
content = 'test_plugin_context_processor_ok|' + instance.body + '|' + \
placeholder.slot + '|' + original_context['original_context_var']
return {
'test_plugin_context_processor': content,
}
@override_settings(
CMS_TEMPLATES=[
(TEMPLATE_NAME, TEMPLATE_NAME),
(INHERIT_TEMPLATE_NAME, INHERIT_TEMPLATE_NAME),
('extra_context.html', 'extra_context.html')
],
)
class RenderingTestCase(CMSTestCase):
def setUp(self):
super(RenderingTestCase, self).setUp()
self.test_user = self._create_user("test", True, True)
with self.login_user_context(self.test_user):
self.test_data = {
'title': u'RenderingTestCase-title',
'slug': u'renderingtestcase-slug',
'reverse_id': u'renderingtestcase-reverse-id',
'text_main': u'RenderingTestCase-main',
'text_sub': u'RenderingTestCase-sub',
}
self.test_data2 = {
'title': u'RenderingTestCase-title2',
'slug': u'RenderingTestCase-slug2',
'reverse_id': u'renderingtestcase-reverse-id2',
}
self.test_data3 = {
'title': u'RenderingTestCase-title3',
'slug': u'RenderingTestCase-slug3',
'reverse_id': u'renderingtestcase-reverse-id3',
'text_sub': u'RenderingTestCase-sub3',
}
self.test_data4 = {
'title': u'RenderingTestCase-title3',
'no_extra': u'no extra var!',
'placeholderconf': {'extra_context': {'extra_context': {'extra_var': 'found extra var'}}},
'extra': u'found extra var',
}
self.test_data5 = {
'title': u'RenderingTestCase-title5',
'slug': u'RenderingTestCase-slug5',
'reverse_id': u'renderingtestcase-reverse-id5',
'text_main': u'RenderingTestCase-main-page5',
'text_sub': u'RenderingTestCase-sub5',
}
self.test_data6 = {
'title': u'RenderingTestCase-title6',
'slug': u'RenderingTestCase-slug6',
'reverse_id': u'renderingtestcase-reverse-id6',
'text_sub': u'RenderingTestCase-sub6',
}
self.insert_test_content()
def insert_test_content(self):
# Insert a page
p = create_page(self.test_data['title'], TEMPLATE_NAME, 'en',
slug=self.test_data['slug'], created_by=self.test_user,
reverse_id=self.test_data['reverse_id'], published=True)
# Placeholders have been inserted on post_save signal:
self.test_placeholders = {}
for placeholder in p.placeholders.all():
self.test_placeholders[placeholder.slot] = placeholder
# Insert some test Text plugins
add_plugin(self.test_placeholders['main'], 'TextPlugin', 'en',
body=self.test_data['text_main'])
add_plugin(self.test_placeholders['sub'], 'TextPlugin', 'en',
body=self.test_data['text_sub'])
p.publish('en')
# Insert another page that is not the home page
p2 = create_page(self.test_data2['title'], INHERIT_TEMPLATE_NAME, 'en',
parent=p, slug=self.test_data2['slug'], published=True,
reverse_id=self.test_data2['reverse_id'])
p2.publish('en')
# Insert another page that is not the home page
p3 = create_page(self.test_data3['title'], INHERIT_TEMPLATE_NAME, 'en',
slug=self.test_data3['slug'], parent=p2,
reverse_id=self.test_data3['reverse_id'], published=True)
# Placeholders have been inserted on post_save signal:
self.test_placeholders3 = {}
for placeholder in p3.placeholders.all():
self.test_placeholders3[placeholder.slot] = placeholder
# # Insert some test Text plugins
add_plugin(self.test_placeholders3['sub'], 'TextPlugin', 'en',
body=self.test_data3['text_sub'])
p3.publish('en')
# Insert another page that is not the home
p4 = create_page(self.test_data4['title'], 'extra_context.html', 'en', parent=p)
# Placeholders have been inserted on post_save signal:
self.test_placeholders4 = {}
for placeholder in p4.placeholders.all():
self.test_placeholders4[placeholder.slot] = placeholder
# Insert some test plugins
add_plugin(self.test_placeholders4['extra_context'], 'ExtraContextPlugin', 'en')
p4.publish('en')
# Insert another page that is not the home page
p5 = create_page(self.test_data5['title'], INHERIT_TEMPLATE_NAME, 'en',
parent=p, slug=self.test_data5['slug'], published=True,
reverse_id=self.test_data5['reverse_id'])
# Placeholders have been inserted on post_save signal:
self.test_placeholders5 = {}
for placeholder in p5.placeholders.all():
self.test_placeholders5[placeholder.slot] = placeholder
# # Insert some test Text plugins
add_plugin(self.test_placeholders5['sub'], 'TextPlugin', 'en',
body=self.test_data5['text_sub'])
add_plugin(self.test_placeholders5['main'], 'TextPlugin', 'en',
body=self.test_data5['text_main'])
p5.publish('en')
# Insert another page that is not the home page
p6 = create_page(self.test_data6['title'], INHERIT_TEMPLATE_NAME, 'en',
slug=self.test_data6['slug'], parent=p5,
reverse_id=self.test_data6['reverse_id'], published=True)
# Placeholders have been inserted on post_save signal:
self.test_placeholders6 = {}
for placeholder in p6.placeholders.all():
self.test_placeholders6[placeholder.slot] = placeholder
# # Insert some test Text plugins
add_plugin(self.test_placeholders6['sub'], 'TextPlugin', 'en',
body=self.test_data6['text_sub'])
p6.publish('en')
# Reload test pages
self.test_page = self.reload(p.publisher_public)
self.test_page2 = self.reload(p2.publisher_public)
self.test_page3 = self.reload(p3.publisher_public)
self.test_page4 = self.reload(p4.publisher_public)
self.test_page5 = self.reload(p5.publisher_public)
self.test_page6 = self.reload(p6.publisher_public)
def strip_rendered(self, content):
return content.strip().replace(u"\n", u"")
@override_settings(CMS_TEMPLATES=[(TEMPLATE_NAME, '')])
def render(self, page, template=None, context_vars=None):
request = self.get_request(page=page)
if context_vars is None:
context_vars = {}
if template is None:
template = page.get_template()
template_obj = self.get_template(template)
output = template_obj.render(context_vars, request)
else:
output = self.render_template_obj(template, context_vars, request)
return self.strip_rendered(output)
@override_settings(CMS_TEMPLATES=[(TEMPLATE_NAME, '')])
def test_details_view(self):
"""
Tests that the `detail` view is working.
"""
response = details(self.get_request(page=self.test_page), '')
response.render()
r = self.strip_rendered(response.content.decode('utf8'))
self.assertEqual(r, u'|' + self.test_data['text_main'] + u'|' + self.test_data['text_sub'] + u'|')
@override_settings(
CMS_PLUGIN_PROCESSORS=('cms.tests.test_rendering.sample_plugin_processor',),
CMS_PLUGIN_CONTEXT_PROCESSORS=('cms.tests.test_rendering.sample_plugin_context_processor',),
)
def test_processors(self):
"""
Tests that default plugin context processors are working, that plugin processors and plugin context processors
can be defined in settings and are working and that extra plugin context processors can be passed to PluginContext.
"""
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from cms.plugin_pool import plugin_pool
instance = CMSPlugin.objects.all()[0].get_plugin_instance()[0]
load_from_string = self.load_template_from_string
@plugin_pool.register_plugin
class ProcessorTestPlugin(TextPlugin):
name = "Test Plugin"
def get_render_template(self, context, instance, placeholder):
t = u'{% load cms_tags %}' + \
u'{{ plugin.counter }}|{{ plugin.instance.body }}|{{ test_passed_plugin_context_processor }}|' \
u'{{ test_plugin_context_processor }}'
return load_from_string(t)
def test_passed_plugin_context_processor(instance, placeholder, context):
return {'test_passed_plugin_context_processor': 'test_passed_plugin_context_processor_ok'}
instance.plugin_type = 'ProcessorTestPlugin'
instance._inst = instance
context = PluginContext({'original_context_var': 'original_context_var_ok'}, instance,
self.test_placeholders['main'], processors=(test_passed_plugin_context_processor,))
plugin_rendering._standard_processors = {}
content_renderer = self.get_content_renderer()
c = content_renderer.render_plugins([instance], context, self.test_placeholders['main'])
r = "".join(c)
expected = (
self.test_data['text_main'] + '|test_passed_plugin_context_processor_ok|test_plugin_context_processor_ok|' +
self.test_data['text_main'] + '|main|original_context_var_ok|test_plugin_processor_ok|' +
self.test_data['text_main'] + '|main|original_context_var_ok'
)
expected = u'1|' + expected
self.assertEqual(r, expected)
plugin_rendering._standard_processors = {}
def test_placeholder(self):
"""
Tests the {% placeholder %} templatetags.
"""
r = self.render(self.test_page)
self.assertEqual(r, u'|' + self.test_data['text_main'] + u'|' + self.test_data['text_sub'] + u'|')
def test_placeholder_extra_context(self):
t = u'{% load cms_tags %}{% placeholder "extra_context" %}'
r = self.render(self.test_page4, template=t)
self.assertEqual(r, self.test_data4['no_extra'])
cache.clear()
with self.settings(CMS_PLACEHOLDER_CONF=self.test_data4['placeholderconf']):
r = self.render(self.test_page4, template=t)
self.assertEqual(r, self.test_data4['extra'])
def test_placeholder_or(self):
"""
Tests the {% placeholder %} templatetags.
"""
t = u'{% load cms_tags %}' + \
u'|{% placeholder "empty" or %}No content{% endplaceholder %}'
r = self.render(self.test_page, template=t)
self.assertEqual(r, u'|No content')
def test_render_placeholder_tag(self):
"""
Tests the {% render_placeholder %} templatetags.
"""
render_placeholder_body = "I'm the render placeholder body"
ex1 = Example1(char_1="char_1", char_2="char_2", char_3="char_3",
char_4="char_4")
ex1.save()
add_plugin(ex1.placeholder, u"TextPlugin", u"en", body=render_placeholder_body)
t = '''{% extends "base.html" %}
{% load cms_tags %}
{% block content %}
<h1>{% render_placeholder ex1.placeholder %}</h1>
<h2>{% render_placeholder ex1.placeholder as tempvar %}</h2>
<h3>{{ tempvar }}</h3>
{% endblock content %}
'''
r = self.render(self.test_page, template=t, context_vars={'ex1': ex1})
self.assertIn(
'<h1>%s</h1>' % render_placeholder_body,
r
)
self.assertIn(
'<h2></h2>',
r
)
self.assertIn(
'<h3>%s</h3>' % render_placeholder_body,
r
)
def test_render_uncached_placeholder_tag(self):
"""
Tests the {% render_uncached_placeholder %} templatetags.
"""
render_uncached_placeholder_body = "I'm the render uncached placeholder body"
ex1 = Example1(char_1="char_1", char_2="char_2", char_3="char_3",
char_4="char_4")
ex1.save()
add_plugin(ex1.placeholder, u"TextPlugin", u"en", body=render_uncached_placeholder_body)
t = '''{% extends "base.html" %}
{% load cms_tags %}
{% block content %}
<h1>{% render_uncached_placeholder ex1.placeholder %}</h1>
<h2>{% render_uncached_placeholder ex1.placeholder as tempvar %}</h2>
<h3>{{ tempvar }}</h3>
{% endblock content %}
'''
r = self.render(self.test_page, template=t, context_vars={'ex1': ex1})
self.assertIn(
'<h1>%s</h1>' % render_uncached_placeholder_body,
r
)
self.assertIn(
'<h2></h2>',
r
)
self.assertIn(
'<h3>%s</h3>' % render_uncached_placeholder_body,
r
)
def test_render_uncached_placeholder_tag_no_use_cache(self):
"""
Tests that {% render_uncached_placeholder %} does not populate cache.
"""
render_uncached_placeholder_body = "I'm the render uncached placeholder body"
ex1 = Example1(char_1="char_1", char_2="char_2", char_3="char_3",
char_4="char_4")
ex1.save()
request = self.get_request('/')
add_plugin(ex1.placeholder, u"TextPlugin", u"en", body=render_uncached_placeholder_body)
template = '{% load cms_tags %}<h1>{% render_uncached_placeholder ex1.placeholder %}</h1>'
cache_value_before = get_placeholder_cache(ex1.placeholder, 'en', 1, request)
self.render(self.test_page, template, {'ex1': ex1})
cache_value_after = get_placeholder_cache(ex1.placeholder, 'en', 1, request)
self.assertEqual(cache_value_before, cache_value_after)
self.assertIsNone(cache_value_after)
def test_render_placeholder_tag_use_cache(self):
"""
Tests that {% render_placeholder %} populates cache.
"""
render_placeholder_body = "I'm the render placeholder body"
ex1 = Example1(char_1="char_1", char_2="char_2", char_3="char_3",
char_4="char_4")
ex1.save()
request = self.get_request('/')
add_plugin(ex1.placeholder, u"TextPlugin", u"en", body=render_placeholder_body)
template = '{% load cms_tags %}<h1>{% render_placeholder ex1.placeholder %}</h1>'
cache_value_before = get_placeholder_cache(ex1.placeholder, 'en', 1, request)
self.render(self.test_page, template, {'ex1': ex1})
cache_value_after = get_placeholder_cache(ex1.placeholder, 'en', 1, request)
self.assertNotEqual(cache_value_before, cache_value_after)
self.assertIsNone(cache_value_before)
self.assertIsNotNone(cache_value_after)
def test_show_placeholder(self):
"""
Tests the {% show_placeholder %} templatetags, using lookup by pk/dict/reverse_id and passing a Page object.
"""
t = u'{% load cms_tags %}' + \
u'|{% show_placeholder "main" ' + str(self.test_page.pk) + ' %}' + \
u'|{% show_placeholder "main" test_dict %}' + \
u'|{% show_placeholder "sub" "' + str(self.test_page.reverse_id) + '" %}' + \
u'|{% show_placeholder "sub" test_page %}'
r = self.render(
self.test_page,
template=t,
context_vars={'test_page': self.test_page, 'test_dict': {'pk': self.test_page.pk}}
)
self.assertEqual(r, (u'|' + self.test_data['text_main']) * 2 + (u'|' + self.test_data['text_sub']) * 2)
def test_show_placeholder_extra_context(self):
t = u'{% load cms_tags %}{% show_uncached_placeholder "extra_context" ' + str(self.test_page4.pk) + ' %}'
r = self.render(self.test_page4, template=t)
self.assertEqual(r, self.test_data4['no_extra'])
cache.clear()
with self.settings(CMS_PLACEHOLDER_CONF=self.test_data4['placeholderconf']):
r = self.render(self.test_page4, template=t)
self.assertEqual(r, self.test_data4['extra'])
def test_show_uncached_placeholder_by_pk(self):
"""
Tests the {% show_uncached_placeholder %} templatetags, using lookup by pk.
"""
template = u'{%% load cms_tags %%}{%% show_uncached_placeholder "main" %s %%}' % self.test_page.pk
output = self.render(self.test_page, template)
self.assertEqual(output, self.test_data['text_main'])
def test_show_uncached_placeholder_by_lookup_dict(self):
template = u'{% load cms_tags %}{% show_uncached_placeholder "main" test_dict %}'
output = self.render(self.test_page, template, {'test_dict': {'pk': self.test_page.pk}})
self.assertEqual(output, self.test_data['text_main'])
def test_show_uncached_placeholder_by_reverse_id(self):
template = u'{%% load cms_tags %%}{%% show_uncached_placeholder "sub" "%s" %%}' % self.test_page.reverse_id
output = self.render(self.test_page, template)
self.assertEqual(output, self.test_data['text_sub'])
def test_show_uncached_placeholder_by_page(self):
template = u'{% load cms_tags %}{% show_uncached_placeholder "sub" test_page %}'
output = self.render(self.test_page, template, {'test_page': self.test_page})
self.assertEqual(output, self.test_data['text_sub'])
def test_show_uncached_placeholder_tag_no_use_cache(self):
"""
Tests that {% show_uncached_placeholder %} does not populate cache.
"""
template = '{% load cms_tags %}<h1>{% show_uncached_placeholder "sub" test_page %}</h1>'
placeholder = self.test_page.placeholders.get(slot='sub')
request = self.get_request(page=self.test_page)
cache_value_before = get_placeholder_cache(placeholder, 'en', 1, request)
output = self.render(self.test_page, template, {'test_page': self.test_page})
cache_value_after = get_placeholder_cache(placeholder, 'en', 1, request)
self.assertEqual(output, '<h1>%s</h1>' % self.test_data['text_sub'])
self.assertEqual(cache_value_before, cache_value_after)
self.assertIsNone(cache_value_after)
def test_page_url_by_pk(self):
template = u'{%% load cms_tags %%}{%% page_url %s %%}' % self.test_page2.pk
output = self.render(self.test_page, template)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_dictionary(self):
template = u'{% load cms_tags %}{% page_url test_dict %}'
output = self.render(self.test_page, template, {'test_dict': {'pk': self.test_page2.pk}})
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_reverse_id(self):
template = u'{%% load cms_tags %%}{%% page_url "%s" %%}' % self.test_page2.reverse_id
output = self.render(self.test_page, template)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_reverse_id_not_on_a_page(self):
template = u'{%% load cms_tags %%}{%% page_url "%s" %%}' % self.test_page2.reverse_id
output = self.render(None, template)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_page(self):
template = u'{% load cms_tags %}{% page_url test_page %}'
output = self.render(self.test_page, template, {'test_page': self.test_page2})
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_page_as(self):
template = u'{% load cms_tags %}{% page_url test_page as test_url %}{{ test_url }}'
output = self.render(self.test_page, template, {'test_page': self.test_page2})
self.assertEqual(output, self.test_page2.get_absolute_url())
#
# To ensure compatible behaviour, test that page_url swallows any
# Page.DoesNotExist exceptions when NOT in DEBUG mode.
#
@override_settings(DEBUG=False)
def test_page_url_on_bogus_page(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" %}'
output = self.render(self.test_page, template, {'test_page': self.test_page2})
self.assertEqual(output, '')
#
# To ensure compatible behaviour, test that page_url will raise a
# Page.DoesNotExist exception when the page argument does not eval to a
# valid page
#
@override_settings(DEBUG=True)
def test_page_url_on_bogus_page_in_debug(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" %}'
self.assertRaises(
Page.DoesNotExist,
self.render,
self.test_page,
template,
{'test_page': self.test_page2}
)
#
# In the 'as varname' form, ensure that the tag will always swallow
# Page.DoesNotExist exceptions both when DEBUG is False and...
#
@override_settings(DEBUG=False)
def test_page_url_as_on_bogus_page(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" as test_url %}{{ test_url }}'
output = self.render(self.test_page, template, {'test_page': self.test_page2})
self.assertEqual(output, '')
#
# ...when it is True.
#
@override_settings(DEBUG=True)
def test_page_url_as_on_bogus_page_in_debug(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" as test_url %}{{ test_url }}'
output = self.render(self.test_page, template, {'test_page': self.test_page2})
self.assertEqual(output, '')
def test_page_attribute(self):
"""
Tests the {% page_attribute %} templatetags, using current page, lookup by pk/dict/reverse_id and passing a Page object.
"""
t = u'{% load cms_tags %}' + \
u'|{% page_attribute title %}' + \
u'{% page_attribute title as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute title ' + str(self.test_page2.pk) + ' %}' + \
u'{% page_attribute title ' + str(self.test_page2.pk) + ' as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute title test_dict %}' + \
u'{% page_attribute title test_dict as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute slug "' + str(self.test_page2.reverse_id) + '" %}' + \
u'{% page_attribute slug "' + str(self.test_page2.reverse_id) + '" as slug %}' + \
u'|{{ slug }}' + \
u'|{% page_attribute slug test_page %}' + \
u'{% page_attribute slug test_page as slug %}' + \
u'|{{ slug }}'
r = self.render(
self.test_page,
template=t,
context_vars={'test_page': self.test_page2, 'test_dict': {'pk': self.test_page2.pk}}
)
self.assertEqual(r, (u'|' + self.test_data['title']) * 2 + (u'|' + self.test_data2['title']) * 4 + (
u'|' + self.test_data2['slug']) * 4)
def test_inherit_placeholder(self):
# a page whose parent has no 'main' placeholder inherits from ancestors
r = self.render(self.test_page3)
self.assertEqual(r, u'|' + self.test_data['text_main'] + '|' + self.test_data3['text_sub'])
# a page whose parent has 'main' placeholder inherits from the parent, not ancestors
r = self.render(self.test_page6)
self.assertEqual(r, u'|' + self.test_data5['text_main'] + '|' + self.test_data6['text_sub'])
def test_inherit_placeholder_override(self):
# Tests that the user can override the inherited content
# in a placeholder by adding plugins to the inherited placeholder.
# a page whose parent has 'main' placeholder inherits from the parent, not ancestors
r = self.render(self.test_page5)
self.assertEqual(r, u'|' + self.test_data5['text_main'] + '|' + self.test_data5['text_sub'])
@override_settings(CMS_PLACEHOLDER_CONF={None: {'language_fallback': False}})
def test_inherit_placeholder_queries(self):
with self.assertNumQueries(FuzzyInt(6,7)):
r = self.render(self.test_page2)
self.assertEqual(r, u'|' + self.test_data['text_main'] + u'|')
def test_render_placeholder_toolbar(self):
placeholder = Placeholder()
placeholder.slot = 'test'
placeholder.pk = placeholder.id = 99
request = self.get_request(page=None)
request.toolbar = CMSToolbar(request)
content_renderer = self.get_content_renderer(request)
context = SekizaiContext()
context['request'] = request
context['cms_content_renderer'] = content_renderer
classes = [
"cms-placeholder-%s" % placeholder.pk,
'cms-placeholder',
]
output = content_renderer.render_editable_placeholder(placeholder, context, 'en')
for cls in classes:
self.assertTrue(cls in output, '%r is not in %r' % (cls, output))
|
[
"visa90@gmail.com"
] |
visa90@gmail.com
|
7b11ef0b23654a3fae68b6e95a59e4684ef14de8
|
91135a7054305bc7ad91f339fbc18134d6f9404d
|
/ips/ip/spi_master_reduced/__init__.py
|
112bc64cdcb7f1e902e9b9a48cca54fe2d509988
|
[
"MIT"
] |
permissive
|
hakehuang/pycpld
|
86c7644b8e253ee513bbb4fabb3d22672acb68a6
|
2c9e11695b6c8905e76434e8c2744a6f57dae253
|
refs/heads/master
| 2021-01-09T20:47:36.108917
| 2020-04-24T10:19:52
| 2020-04-24T10:19:52
| 60,778,468
| 7
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 111
|
py
|
from spi_master_reduced_partial import get_ip_name
from spi_master_reduced_partial import SPI_MASTER_reduced
|
[
"hake.huang@oss.nxp.com"
] |
hake.huang@oss.nxp.com
|
12fd294c29f55e5d60045eb9da1747d7a9b78417
|
bc0caa79fa93f14e0410b88bb7756c36a75f4bd3
|
/verification.py
|
fd8f4df73856ae13361a7e43d305216591fc5578
|
[] |
no_license
|
BlackShad0w95/Blockchain
|
b676a5903381410ca31c8a18a0f409d67f351975
|
a931134075e151df717540d61ee78d92851ef989
|
refs/heads/master
| 2021-02-27T22:23:08.355459
| 2020-07-19T13:27:37
| 2020-07-19T13:27:37
| 245,640,051
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,370
|
py
|
from hash_util import hash_string_256, hash_block
class Verification:
@staticmethod
def valid_proof(transactions, last_hash, proof):
"""Validate a proof of work number and see if it solves the puzzle algorithm (two leading 0s)
Arguments:
:transactions: The transactions of the block for which the proof is created.
:last_hash: The previous block's hash which will be stored in the current block.
:proof: The proof number we're testing.
"""
# Create a string with all the hash inputs
guess = (str([tx.to_ordered_dict() for tx in transactions]) + str(last_hash) + str(proof)).encode()
# Hash the string
# IMPORTANT: This is NOT the same hash as will be stored in the previous_hash. It's a not a block's hash. It's only used for the proof-of-work algorithm.
guess_hash = hash_string_256(guess)
# Only a hash (which is based on the above inputs) which starts with two 0s is treated as valid
# This condition is of course defined by you. You could also require 10 leading 0s - this would take significantly longer (and this allows you to control the speed at which new blocks can be added)
return guess_hash[0:2] == '00'
@classmethod
def verify_chain(cls, blockchain):
""" Verify the current blockchain and return True if it's valid, False otherwise."""
for (index, block) in enumerate(blockchain):
if index == 0:
continue
if block.previous_hash != hash_block(blockchain[index - 1]):
return False
if not cls.valid_proof(block.transactions[:-1], block.previous_hash, block.proof):
print('Proof of work is invalid')
return False
return True
@staticmethod
def verify_transaction(transaction, get_balance):
"""Verify a transaction by checking whether the sender has sufficient coins.
Arguments:
:transaction: The transaction that should be verified.
"""
sender_balance = get_balance()
return sender_balance >= transaction.amount
@classmethod
def verify_transactions(cls, open_transactions, get_balance):
"""Verifies all open transactions."""
return all([cls.verify_transaction(tx, get_balance) for tx in open_transactions])
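# Illustration (not part of the original module): a minimal, self-contained
# sketch of the mining loop implied by valid_proof above. It assumes
# hash_string_256 is a plain SHA-256 hex digest, as its name suggests, and
# searches for a proof whose hash starts with two leading zeros.
if __name__ == '__main__':
    import hashlib
    def _demo_valid_proof(transactions, last_hash, proof):
        guess = (str(transactions) + str(last_hash) + str(proof)).encode()
        return hashlib.sha256(guess).hexdigest()[0:2] == '00'
    proof = 0
    while not _demo_valid_proof([], 'genesis', proof):
        proof += 1  # on average ~256 attempts for two leading hex zeros
    print('Found proof:', proof)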
|
[
"DCiesielczuk@gmail.com"
] |
DCiesielczuk@gmail.com
|
767c94b7c668c92fdfaaf60b48adfe31d6aeb3a8
|
5d493bb2f26cb9ba1879c60b060b2975490ea0cd
|
/venv/bin/django-admin
|
abf2a350c6a69f45300e8a1c28ddcd317ecf040b
|
[] |
no_license
|
amjedsaleel/heroku-sample-hosting
|
cfe96fb58c0a4565b37e617147584dafe247abda
|
aa935a1e7231c47e11b41d62927961c0ceb49f6e
|
refs/heads/master
| 2023-08-11T02:31:43.519211
| 2020-08-26T06:50:55
| 2020-08-26T06:50:55
| 277,723,810
| 0
| 0
| null | 2021-09-22T19:23:58
| 2020-07-07T05:19:21
|
Python
|
UTF-8
|
Python
| false
| false
| 312
|
#!/home/amjed/PycharmProjects/heroku-sample-hosting/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
|
[
"amjedsaleel@gmail.com"
] |
amjedsaleel@gmail.com
|
|
37c1118dc5489084dfb4e8071683eab33cd27eb3
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/Configuration/Eras/python/Era_Run2_25ns_HIPM_cff.py
|
3e9ad3977dcf0ecdb0d5ed98d51af534d211ebce
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 262
|
py
|
import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Run2_25ns_cff import Run2_25ns
from Configuration.Eras.Modifier_tracker_apv_vfp30_2016_cff import tracker_apv_vfp30_2016
Run2_25ns_HIPM = cms.ModifierChain(Run2_25ns, tracker_apv_vfp30_2016)
|
[
"matti.kortelainen@cern.ch"
] |
matti.kortelainen@cern.ch
|
6c5a0cf192d5952d2f4df24f25b483a0b06012a2
|
defea90d2930a12c8279437833e28261e19f797b
|
/clickertg/main.py
|
8990be0c4dd0cf882cd4134d198544ea4f7052dd
|
[] |
no_license
|
Ilya069/ppppp
|
f7caa1bc3bb80d797fb7eed0b71211c4228be84e
|
607562dcfeb82b8539e3e9676f2c4b258792281a
|
refs/heads/main
| 2023-07-17T11:58:39.072009
| 2021-09-03T12:42:43
| 2021-09-03T12:42:43
| 402,766,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,520
|
py
|
import logging
from aiogram import Bot, Dispatcher, executor, types
from config import API_TOKEN, number, QIWI_SEC_TOKEN, sum, admin
import keyboard as k
import functions as fc
import text as tx
import sqlite3
from qiwipyapi import Wallet
# Log output to the console
logging.basicConfig(level=logging.INFO)
# Initialization
bot = Bot(token=API_TOKEN)
dp = Dispatcher(bot)
connection = sqlite3.connect('data.db')
q = connection.cursor()
wallet_p2p = Wallet(number, p2p_sec_key=QIWI_SEC_TOKEN)
@dp.message_handler(commands=['start', 'help']) # Entry point
async def start(message: types.Message):
q.execute(f"SELECT * FROM users WHERE user_id = {message.chat.id}")
result = q.fetchall()
if len(result) == 0:
q.execute(f"INSERT INTO users (user_id, balance)"
f"VALUES ('{message.chat.id}', '0')")
connection.commit()
await message.answer(tx.sogl, parse_mode='Markdown', reply_markup=k.accept)
else:
await message.answer(
f'Привет, {message.from_user.mention}, кликай и зарабатывай! За каждый клик вам начисляется 10 копеек на баланс.',
reply_markup=k.menu)
@dp.message_handler(content_types=["text"]) # Text message handler
async def reaction(message: types.Message):
chat_id = message.chat.id
fc.first(chat_id=chat_id)
if message.text == '👤 Баланс':
bal = q.execute(f'SELECT balance FROM users WHERE user_id = "{message.chat.id}"').fetchone()
connection.commit()
await message.answer(f'Ваш баланс: {fc.toFixed(bal[0], 1)}₽')
elif message.text == '💸 Клик':
q.execute(f'UPDATE users SET balance = balance + 0.1 WHERE user_id IS "{message.chat.id}"')
connection.commit()
await message.answer('Вам начислено +0.1₽')
elif message.text == '🎰 Вывод':
payed = q.execute(f'SELECT payd FROM users WHERE user_id = "{message.chat.id}"').fetchone()
connection.commit()
if payed[0] == 0:
await message.answer(tx.ver ,reply_markup=k.pay)
else:
bal = q.execute(f'SELECT balance FROM users WHERE user_id = "{message.chat.id}"').fetchone()
connection.commit()
await message.answer(f'Заявка на вывод успешно отправлена администраторам, с вами скоро свяжутся.', reply_markup=k.menu)
await bot.send_message(admin, f'Новая заявка на вывод:\nID - {message.chat.id}\nЮзернейм - {message.from_user.mention}\n[Ссылка на пользователя]({message.from_user.url})\nБаланс - {fc.toFixed(bal[0], 1)}\nСвяжитесь с пользователем, чтобы отправить ему деньги.', parse_mode='Markdown')
q.execute(f"UPDATE USERS SET payd = 0 WHERE user_id = {chat_id}")
connection.commit()
elif message.text == 'Оплатить':
link = fc.pay(chat_id=chat_id)
await message.answer(f'Ваш ID - {message.chat.id}\nК оплате - {sum}₽\n[Ссылка для оплаты]({link})', reply_markup=k.buy1, parse_mode='Markdown')
elif message.text == '/admin':
if str(chat_id) == str(admin):
await message.answer('Добро пожаловать в админ панель:', reply_markup=k.apanel)
else:
await message.answer('Черт! Ты меня взломал🙃')
@dp.callback_query_handler(lambda call: True) # Inline (callback) handlers
async def cal(call):
chat_id = call.message.chat.id
if call.data == 'check':
try:
re = q.execute(f"SELECT bd FROM users WHERE user_id = {chat_id}").fetchone()
status = wallet_p2p.invoice_status(bill_id=re[0])
a = status['status']['value']
if a == 'WAITING':
await call.message.answer('Ошибка! Платёж не найден.', reply_markup=k.buy1)
elif a == 'PAID':
await call.message.answer('Оплата успешно найдена.\nТеперь вы можете вывести баланс.', reply_markup=k.menu)
q.execute(f'UPDATE users SET payd = 1 WHERE user_id IS "{chat_id}"')
q.execute(f'UPDATE users SET bd = 0 WHERE user_id IS "{chat_id}"')
connection.commit()
elif a == 'EXPIRED':
await call.message.answer('Время жизни счета истекло. Счет не оплачен', reply_markup=k.buy1)
elif a == 'REJECTED':
await call.message.answer('Счет отклонен', reply_markup=k.buy1)
elif a == 'UNPAID':
await call.message.answer('Ошибка при проведении оплаты. Счет не оплачен', reply_markup=k.buy1)
except Exception as err:
await call.message.answer('Ошибка!')
elif call.data == 'stats':
re = q.execute(f'SELECT * FROM users').fetchall()
kol = len(re)
bal = q.execute(f"SELECT sum(balance) FROM users").fetchone()
connection.commit()
await call.message.answer(f'Всего пользователей: {kol}\nОбщий баланс всех пользователей: {fc.toFixed(bal[0], 1)}₽')
elif call.data == 'back':
await call.message.answer('Назад..', reply_markup=k.menu)
elif call.data == 'accept':
await call.message.answer(
f'Привет, {call.from_user.mention}, кликай и зарабатывай! За каждый клик вам начисляется 10 копеек на баланс.',
reply_markup=k.menu)
if __name__ == '__main__':
    executor.start_polling(dp, skip_updates=True) # Start polling
|
[
"noreply@github.com"
] |
Ilya069.noreply@github.com
|
0a52e9968f56c6b6508688a6df3635b95b32fffa
|
b0776dc9032cdda5dc25f4e0b9c5c292ba5c2957
|
/source_code/237_DeleteNodeInALinkedList.py
|
968ddd543c0ad6442a58e2e49a62b9384f349cf1
|
[] |
no_license
|
CircleZ3791117/CodingPractice
|
af3c0369927abc52b7c52cd35781af83e9ee0923
|
ecbb8fb7f96f644c16dbb0cf7ffb69bc959a5647
|
refs/heads/master
| 2021-06-05T11:41:56.266121
| 2020-01-21T11:24:20
| 2020-01-21T11:24:20
| 113,287,559
| 14
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,264
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'circlezhou'
'''
Description:
Write a function to delete a node (except the tail) in a singly linked list, given only access to that node.
Given linked list -- head = [4,5,1,9], which looks like following:
4 -> 5 -> 1 -> 9
Example 1:
Input: head = [4,5,1,9], node = 5
Output: [4,1,9]
Explanation: You are given the second node with value 5, the linked list
should become 4 -> 1 -> 9 after calling your function.
Example 2:
Input: head = [4,5,1,9], node = 1
Output: [4,5,9]
Explanation: You are given the third node with value 1, the linked list
should become 4 -> 5 -> 9 after calling your function.
Note:
The linked list will have at least two elements.
All of the nodes' values will be unique.
The given node will not be the tail and it will always be a valid node of the linked list.
Do not return anything from your function.
'''
# definition for singly linked list
class ListNode(object):
    def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def deleteNode(self, node):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead
"""
node.val = node.next.val
node.next = node.next.next
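# Usage sketch (illustrative, not part of the original solution): build the
# example list 4 -> 5 -> 1 -> 9, delete the node holding 5 in-place, and
# print the remaining values; expected output: 4 1 9.
if __name__ == '__main__':
    head = ListNode(4)
    head.next = ListNode(5)
    head.next.next = ListNode(1)
    head.next.next.next = ListNode(9)
    Solution().deleteNode(head.next)  # delete the node with value 5
    node = head
    while node:
        print(node.val)
        node = node.next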
|
[
"zhouhuan@onescorpion.com"
] |
zhouhuan@onescorpion.com
|
a3fbae8a293967922f56e1fae50c9f4ebc126454
|
5f10b66c511909a85283aef554f0b0902ccca669
|
/parsernews/projectnews/migrations/0001_initial.py
|
ff1ee4e90d6c1d9db4b90fe4be2439c4f55ae6f2
|
[] |
no_license
|
q3tourney4/parsernews
|
bd82562ef0db97df86cc5f52be29b49a7c3c390a
|
4bde12985f313d8a740afb99749718151a1c4306
|
refs/heads/master
| 2023-02-09T14:30:02.621271
| 2020-01-22T18:14:28
| 2020-01-22T18:14:28
| 235,368,481
| 0
| 0
| null | 2023-02-02T05:59:07
| 2020-01-21T14:59:01
|
Python
|
UTF-8
|
Python
| false
| false
| 1,209
|
py
|
# Generated by Django 3.0.2 on 2020-01-19 08:47
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="NewsAggregator",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"title",
models.CharField(
default="", max_length=1024, verbose_name="Заголовок новости"
),
),
(
"url",
models.CharField(
default="", max_length=2048, verbose_name="url новости"
),
),
(
"created",
models.DateTimeField(null=True, verbose_name="Дата новости"),
),
],
),
]
|
[
"a.davydov@reg.ru"
] |
a.davydov@reg.ru
|
e5e6099b01faac1626e8c046a9c1a52db712c2a2
|
cfdc7b7ce996230f76bc880b4bd30d7ba57ea479
|
/news/views.py
|
cd07215ccceb64413797a64117dd98b829d145f8
|
[] |
no_license
|
Pylearn2017/city
|
6d23435f71a5f8b9f4f139b5bd4e573f2b5108a6
|
19091103f6229345863c5e33dc47ea30180134c7
|
refs/heads/main
| 2023-05-02T19:26:48.048951
| 2021-05-15T07:26:29
| 2021-05-15T07:26:29
| 361,100,295
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 331
|
py
|
from django.shortcuts import render
from django.utils import timezone
from .models import Post
# Create your views here.
def post_list(request):
# posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('published_date')
posts = Post.objects.all()
return render(request, 'news/post_list.html', {'posts':posts})
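# --- Editor's note: a sketch (not part of the original file) showing the commented-out
# --- filtered query above as its own view, returning only already-published posts.
def post_list_published(request):
    posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('published_date')
    return render(request, 'news/post_list.html', {'posts': posts})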
|
[
"alexlik73@gmail.com"
] |
alexlik73@gmail.com
|
c448def6a36651331720a8ac8259a34e02676be8
|
9ca0fe2e70cdc475dad74628fd62c16ef49bbf6b
|
/flaskr/db.py
|
56eaf41ec3a7e91f670029c0b610f3e702a4a479
|
[] |
no_license
|
journey-bit/jackpot
|
9144dce80d1c8d5750eabe1d58a2392d87c1d7ef
|
9d8aefec1eac30e54bd6a91ed8ee687cb412ebec
|
refs/heads/master
| 2022-04-02T21:38:44.092785
| 2020-02-03T16:21:18
| 2020-02-03T16:21:18
| 233,868,364
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
from flask import current_app, g
def get_db():
# if 'db' not in g:
# g.db = sqlite3.connect(
# current_app.config['DATABASE'],
# detect_types=sqlite3.PARSE_DECLTYPES
# )
# g.db.row_factory = sqlite3.Row
return g.db
def close_db(e=None):
db = g.pop('db', None)
if db is not None:
db.close()
def init_app(app):
app.teardown_appcontext(close_db)
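# --- Editor's note: a minimal sketch (not part of the original file) showing the
# --- commented-out block above re-enabled; it assumes app.config['DATABASE'] names
# --- a SQLite file path.
import sqlite3

def get_db_sqlite():
    # Open one connection per application context and cache it on g
    if 'db' not in g:
        g.db = sqlite3.connect(
            current_app.config['DATABASE'],
            detect_types=sqlite3.PARSE_DECLTYPES
        )
        # Rows behave like dicts: columns are accessible by name
        g.db.row_factory = sqlite3.Row
    return g.db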
|
[
"lucky@luckydeMBP.lan"
] |
lucky@luckydeMBP.lan
|
7dc5e816aebb07e25919394fecbd9694deaeec5d
|
8e21663b4b1f2c5fe7da27db5c510d98832c9ebd
|
/test_pet_friends.py
|
55c7b53478c8198c806080a1e9014c709a22e356
|
[] |
no_license
|
JokeRrr1992/19-modul
|
d1257d03534be3d3d5a6e3fe67cc411e98913aff
|
6c693c8265eeb6bc7f7283d584dd471132668721
|
refs/heads/main
| 2023-01-30T04:13:55.261749
| 2020-12-13T15:52:39
| 2020-12-13T15:52:39
| 321,099,833
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,231
|
py
|
import os
from api import PetFriends
from settings import valid_email, valid_password, not_valid_email, not_valid_password

pf = PetFriends()

def test_get_api_key_for_valid_user(email=valid_email, password=valid_password):
    """Check that an api key request returns status 200 and the result contains the word key"""
    # Send the request; store the response status code in status and the response text in result
    status, result = pf.get_api_key(email, password)
    # Compare the received data with the expectations
    assert status == 200
    assert 'key' in result

def test_get_api_key_for_not_valid_email_and_password(email=not_valid_email,
                                                      password=not_valid_password):
    """Check that an api key request with an invalid user email returns status 403
    and the result does not contain the word key"""
    # Send the request; store the response status code in status and the response text in result
    status, result = pf.get_api_key(email, password)
    # Compare the received data with the expectations
    assert status == 403
    assert 'key' not in result

def test_get_all_pets_with_valid_key(filter=''):
    """Check that a request for the list of all pets returns a non-empty list.
    First we obtain an api key and save it in auth_key. Then, using this key,
    we request the list of all pets and check that the list is not empty.
    Allowed values of the filter parameter are 'my_pets' or '' (empty)"""
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    status, result = pf.get_list_of_pets(auth_key, filter)
    # Compare the received response with the expected result
    assert status == 200
    assert len(result['pets']) > 0
def test_add_new_pet_with_valid_data(name='Палкан', animal_type='Кот Котейский',
                                     age='3', pet_photo='images/cat1.jpg'):
    """Check that a request to add a new pet with the given parameters completes
    successfully."""
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet(auth_key, name, animal_type, age, pet_photo)
    # Compare the received response with the expected result
    assert status == 200
    assert result['name'] == name
    assert result['animal_type'] == animal_type

def test_add_new_pet_with_empty_age(name='Кошкин Кот', animal_type='Кот Котейский',
                                    age='', pet_photo='images/cat1.jpg'):
    """Check that a request to add a new pet with an empty age field completes successfully"""
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet(auth_key, name, animal_type, age, pet_photo)
    # Compare the received response with the expected result
    assert status == 200
    assert 'name' in result

def test_add_new_pet_with_negative_age(name='КОТ', animal_type='Кот Котейский',
                                       age='-7', pet_photo='images/cat1.jpg'):
    """Check that a request to add a new pet with a negative age completes successfully"""
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet(auth_key, name, animal_type, age, pet_photo)
    # Compare the received response with the expected result
    assert status == 200
    assert 'name' in result

def test_add_new_pet_with_space_in_age(name='КОТ', animal_type='Кот Котейский',
                                       age=' ', pet_photo='images/cat1.jpg'):
    """Check that a request to add a new pet with a space in the age field completes successfully"""
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet(auth_key, name, animal_type, age, pet_photo)
    # Compare the received response with the expected result
    assert status == 200
    assert 'name' in result

def test_add_new_pet_with_incorrect_age(name='Йошкин Кот', animal_type='Кот Котейский',
                                        age='333333333333333333333', pet_photo='images/cat1.jpg'):
    """Check that a request to add a new pet with an incorrect parameter
    (pet age = 333333333333333333333) completes successfully."""
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet(auth_key, name, animal_type, age, pet_photo)
    # Compare the received response with the expected result
    assert status == 200
    assert result['name'] == name
    assert result['age'] == age
def test_successful_delete_self_pet():
    """Check that a pet can be deleted"""
    # Get the auth_key and request the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, 'my_pets')
    # If the list of our pets is empty, add a new pet and request
    # the list of our pets again
    if len(my_pets['pets']) == 0:
        pf.add_new_pet(auth_key, "Баська", "Котяра", "2", "images/cat1.jpg")
        _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # Take the id of the first pet in the list and send a delete request
    pet_id = my_pets['pets'][0]['id']
    status, _ = pf.delete_pet(auth_key, pet_id)
    # Request the list of our pets once more
    _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # Check that the response status is 200 and the deleted pet's id is no longer in the list
    assert status == 200
    assert pet_id not in [pet['id'] for pet in my_pets['pets']]

def test_successful_update_self_pet_info(name='Жанглер', animal_type='Котторт', age=2):
    """Check that a pet's information can be updated"""
    # Get the auth_key and the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # If the list is not empty, try to update the pet's name, type and age
    if len(my_pets['pets']) > 0:
        status, result = pf.update_pet_info(auth_key, my_pets['pets'][0]['id'], name, animal_type, age)
        # Check that the response status is 200 and the pet's name matches the given one
        assert status == 200
        assert result['name'] == name
    else:
        # if the list of pets is empty, raise an exception saying there are no pets of our own
        raise Exception("There is no my pets")
def test_rejection_update_self_pet_info_without_name(name='', animal_type='преампуль', age=2):
    """Check that a pet's name cannot be erased by passing an empty name field -
    the information is not deleted"""
    # Get the auth_key and the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # If the list is not empty, try to update the pet's name (empty field), type and age
    if len(my_pets['pets']) > 0:
        status, result = pf.update_pet_info(auth_key, my_pets['pets'][0]['id'], name, animal_type, age)
        # Check that the response status is 200 and the pet still has a name
        assert status == 200
        assert result['name']
    else:
        # if the list of pets is empty, raise an exception saying there are no pets of our own
        raise Exception("The list of my pets is empty")

def test_rejection_update_self_pet_info_without_animal_type(name='Васька Кот', animal_type='', age=1):
    """Check that a pet's type cannot be erased by passing an empty animal_type field -
    the information is not deleted"""
    # Get the auth_key and the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # If the list is not empty, try to update the pet's name, type (empty field) and age
    if len(my_pets['pets']) > 0:
        status, result = pf.update_pet_info(auth_key, my_pets['pets'][0]['id'], name, animal_type, age)
        # Check that the response status is 200, the name matches and the pet still has a type
        assert status == 200
        assert result['name'] == name
        assert result['animal_type']
    else:
        # if the list of pets is empty, raise an exception saying there are no pets of our own
        raise Exception("The list of my pets is empty")

def test_succsessful_update_self_pet_info_with_spase_name(name=' ', animal_type='прекурсор собакена',
                                                          age=1):
    """Check that a pet's name can be wiped by passing a space
    in the name field - the information is overwritten successfully."""
    # Get the auth_key and the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, "my_pets")
    # If the list is not empty, try to update the pet's name (a space), type and age
    if len(my_pets['pets']) > 0:
        status, result = pf.update_pet_info(auth_key, my_pets['pets'][0]['id'], name, animal_type, age)
        # Check that the response status is 200 and the pet's name matches the given one
        assert status == 200
        assert result['name'] == ' '
    else:
        # if the list of pets is empty, raise an exception saying there are no pets of our own
        raise Exception("The list of my pets is empty")
def test_add_new_pet_with_valid_data_without_foto(name='БАська Раскалбаська',
                                                  animal_type='Котетский', age='1'):
    """Check that a request to add a new pet without a photo, with the given parameters,
    completes successfully."""
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet_without_photo(auth_key, name, animal_type, age)
    # Compare the received response with the expected result
    assert status == 200
    assert result['name'] == name

def test_add_new_pet_with_incorrect_data_without_foto(name='@#$%^&!*',
                                                      animal_type='', age=''):
    """Check that a request to add a new pet without a photo, with incorrectly specified
    parameters (name is special characters, animal_type and age are empty), completes successfully."""
    # Request the api key and save it in auth_key
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    # Add a new pet
    status, result = pf.add_new_pet_without_photo(auth_key, name, animal_type, age)
    # Compare the received response with the expected result
    assert status == 200
    assert result['name'] == name

def test_successful_add_foto_of_pet(pet_id='', pet_photo='images/cat1.jpg'):
    """Check that a request to add a photo to a pet by its id succeeds"""
    # Get the auth_key and request the list of our own pets
    _, auth_key = pf.get_api_key(valid_email, valid_password)
    _, my_pets = pf.get_list_of_pets(auth_key, 'my_pets')
    # Get the full path to the pet's photo and save it in pet_photo
    pet_photo = os.path.join(os.path.dirname(__file__), pet_photo)
    # If the list is not empty, try to add the pet's photo
    if len(my_pets['pets']) > 0:
        pet_id = my_pets['pets'][0]['id']
        status, result = pf.add_foto_of_pet(auth_key, pet_id, pet_photo)
        # Check that the response status is 200 and the pet now has a photo
        assert status == 200
        assert result['pet_photo']
    else:
        # if the list of pets is empty, raise an exception saying there are no pets of our own
        raise Exception("The list of 'My pets' is empty")
|
[
"noreply@github.com"
] |
JokeRrr1992.noreply@github.com
|
6bb121fe304e7a75d445a0bdc12c1aa8c54b8db4
|
f27fe88de38ed799d900a8cfd4d3a92b7cade8db
|
/test/test_bootstrap.py
|
5f1ac5de7d5bb87ba4a5e73dee15f9dc50bad3ba
|
[
"Apache-2.0",
"curl",
"MIT"
] |
permissive
|
eoinsha/aws-lambda-python-runtime-interface-client
|
43b599177ed5cae2699b38e4caa94e344512c00e
|
3b6a82f0f35c19b04cdd3f20fab27ee35c290982
|
refs/heads/main
| 2023-01-18T19:10:52.836690
| 2020-09-30T13:09:22
| 2020-12-01T10:01:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 39,808
|
py
|
"""
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
"""
import json
import os
import re
import tempfile
import traceback
import unittest
from imp import C_BUILTIN
from io import StringIO
from tempfile import NamedTemporaryFile
from unittest.mock import patch, Mock, MagicMock
import awslambdaric.bootstrap as bootstrap
from awslambdaric.lambda_runtime_exception import FaultException
from awslambdaric.lambda_runtime_marshaller import LambdaMarshaller
class TestUpdateXrayEnv(unittest.TestCase):
def setUp(self):
self.org_os_environ = os.environ
def tearDown(self):
os.environ = self.org_os_environ
def test_update_xray_env_variable_empty(self):
os.environ = {}
bootstrap.update_xray_env_variable(None)
self.assertEqual(os.environ.get("_X_AMZN_TRACE_ID"), None)
def test_update_xray_env_variable_remove_old_value(self):
os.environ = {"_X_AMZN_TRACE_ID": "old-id"}
bootstrap.update_xray_env_variable(None)
self.assertEqual(os.environ.get("_X_AMZN_TRACE_ID"), None)
def test_update_xray_env_variable_new_value(self):
os.environ = {}
bootstrap.update_xray_env_variable("new-id")
self.assertEqual(os.environ.get("_X_AMZN_TRACE_ID"), "new-id")
def test_update_xray_env_variable_overwrite(self):
os.environ = {"_X_AMZN_TRACE_ID": "old-id"}
bootstrap.update_xray_env_variable("new-id")
self.assertEqual(os.environ.get("_X_AMZN_TRACE_ID"), "new-id")
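# --- Editor's note: a plausible implementation consistent with the four tests above;
# --- an illustrative sketch only, not the actual awslambdaric.bootstrap source.
def _reference_update_xray_env_variable(xray_trace_id):
    if xray_trace_id is None:
        # Clearing the trace id also removes any stale value left in the environment
        os.environ.pop("_X_AMZN_TRACE_ID", None)
    else:
        os.environ["_X_AMZN_TRACE_ID"] = xray_trace_id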
class TestHandleEventRequest(unittest.TestCase):
def setUp(self):
self.lambda_runtime = Mock()
self.lambda_runtime.marshaller = LambdaMarshaller()
self.event_body = '"event_body"'
self.working_directory = os.getcwd()
@staticmethod
def dummy_handler(json_input, lambda_context):
return {"input": json_input, "aws_request_id": lambda_context.aws_request_id}
def test_handle_event_request_happy_case(self):
bootstrap.handle_event_request(
self.lambda_runtime,
self.dummy_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
self.lambda_runtime.post_invocation_result.assert_called_once_with(
"invoke_id",
'{"input": "event_body", "aws_request_id": "invoke_id"}',
"application/json",
)
def test_handle_event_request_invalid_client_context(self):
expected_response = {
"errorType": "Runtime.LambdaContextUnmarshalError",
"errorMessage": "Unable to parse Client Context JSON: Expecting value: line 1 column 1 (char 0)",
}
bootstrap.handle_event_request(
self.lambda_runtime,
self.dummy_handler,
"invoke_id",
self.event_body,
"application/json",
"invalid_client_context_not_json",
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Response doesn't contain all the necessary fields\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
self.assertEqual(
json.loads(args[2]),
{
"working_directory": self.working_directory,
"exceptions": [
{
"message": expected_response["errorMessage"],
"type": "LambdaValidationError",
"stack": [],
}
],
"paths": [],
},
)
def test_handle_event_request_invalid_cognito_idenity(self):
expected_response = {
"errorType": "Runtime.LambdaContextUnmarshalError",
"errorMessage": "Unable to parse Cognito Identity JSON: Expecting value: line 1 column 1 (char 0)",
}
bootstrap.handle_event_request(
self.lambda_runtime,
self.dummy_handler,
"invoke_id",
self.event_body,
"application/json",
{},
"invalid_cognito_identity",
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Response doesn't contain all the necessary fields\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
self.assertEqual(
json.loads(args[2]),
{
"working_directory": self.working_directory,
"exceptions": [
{
"message": expected_response["errorMessage"],
"type": "LambdaValidationError",
"stack": [],
}
],
"paths": [],
},
)
def test_handle_event_request_invalid_event_body(self):
expected_response = {
"errorType": "Runtime.UnmarshalError",
"errorMessage": "Unable to unmarshal input: Expecting value: line 1 column 1 (char 0)",
}
invalid_event_body = "not_valid_json"
bootstrap.handle_event_request(
self.lambda_runtime,
self.dummy_handler,
"invoke_id",
invalid_event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Response doesn't contain all the necessary fields\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
self.assertEqual(
json.loads(args[2]),
{
"working_directory": self.working_directory,
"exceptions": [
{
"message": expected_response["errorMessage"],
"type": "LambdaValidationError",
"stack": [],
}
],
"paths": [],
},
)
def test_handle_event_request_invalid_response(self):
def invalid_json_response(json_input, lambda_context):
return type("obj", (object,), {"propertyName": "propertyValue"})
expected_response = {
"errorType": "Runtime.MarshalError",
"errorMessage": "Unable to marshal response: Object of type type is not JSON serializable",
}
bootstrap.handle_event_request(
self.lambda_runtime,
invalid_json_response,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Expected response is not a subset of the actual response\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
self.assertEqual(
json.loads(args[2]),
{
"working_directory": self.working_directory,
"exceptions": [
{
"message": expected_response["errorMessage"],
"type": "LambdaValidationError",
"stack": [],
}
],
"paths": [],
},
)
def test_handle_event_request_custom_exception(self):
def raise_exception_handler(json_input, lambda_context):
class MyError(Exception):
def __init__(self, message):
self.message = message
raise MyError("My error")
expected_response = {"errorType": "MyError", "errorMessage": "My error"}
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Expected response is not a subset of the actual response\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
xray_fault = json.loads(args[2])
self.assertEqual(xray_fault["working_directory"], self.working_directory)
self.assertEqual(len(xray_fault["exceptions"]), 1)
self.assertEqual(
xray_fault["exceptions"][0]["message"], expected_response["errorMessage"]
)
self.assertEqual(
xray_fault["exceptions"][0]["type"], expected_response["errorType"]
)
self.assertEqual(len(xray_fault["exceptions"][0]["stack"]), 1)
self.assertEqual(
xray_fault["exceptions"][0]["stack"][0]["label"], "raise_exception_handler"
)
self.assertIsInstance(xray_fault["exceptions"][0]["stack"][0]["line"], int)
self.assertEqual(
xray_fault["exceptions"][0]["stack"][0]["path"], os.path.realpath(__file__)
)
self.assertEqual(len(xray_fault["paths"]), 1)
self.assertEqual(xray_fault["paths"][0], os.path.realpath(__file__))
def test_handle_event_request_no_module(self):
def unable_to_import_module(json_input, lambda_context):
import invalid_module
expected_response = {
"errorType": "ModuleNotFoundError",
"errorMessage": "No module named 'invalid_module'",
}
bootstrap.handle_event_request(
self.lambda_runtime,
unable_to_import_module,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertTrue(
expected_response.items() <= error_response.items(),
"Expected response is not a subset of the actual response\nExpected: {}\nActual: {}".format(
expected_response, error_response
),
)
def test_handle_event_request_fault_exception(self):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise FaultException(
"FaultExceptionType",
"Fault exception msg",
["trace_line1\ntrace_line2", "trace_line3\ntrace_line4"],
)
expected_response = {
"errorType": "FaultExceptionType",
"errorMessage": "Fault exception msg",
"stackTrace": ["trace_line1\ntrace_line2", "trace_line3\ntrace_line4"],
}
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
args, _ = self.lambda_runtime.post_invocation_error.call_args
error_response = json.loads(args[1])
self.assertEqual(args[0], "invoke_id")
self.assertEqual(error_response.items(), expected_response.items())
self.assertEqual(
json.loads(args[2]),
{
"working_directory": self.working_directory,
"exceptions": [
{
"message": expected_response["errorMessage"],
"type": "LambdaValidationError",
"stack": [],
}
],
"paths": [],
},
)
@patch("sys.stdout", new_callable=StringIO)
def test_handle_event_request_fault_exception_logging(self, mock_stdout):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise bootstrap.FaultException(
"FaultExceptionType",
"Fault exception msg",
traceback.format_list(
[
("spam.py", 3, "<module>", "spam.eggs()"),
("eggs.py", 42, "eggs", 'return "bacon"'),
]
),
)
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
# NOTE: Indentation characters are NO-BREAK SPACE (U+00A0) not SPACE (U+0020)
error_logs = "[ERROR] FaultExceptionType: Fault exception msg\r"
error_logs += "Traceback (most recent call last):\r"
error_logs += ' File "spam.py", line 3, in <module>\r'
error_logs += " spam.eggs()\r"
error_logs += ' File "eggs.py", line 42, in eggs\r'
error_logs += ' return "bacon"\n'
self.assertEqual(mock_stdout.getvalue(), error_logs)
@patch("sys.stdout", new_callable=StringIO)
def test_handle_event_request_fault_exception_logging_notrace(self, mock_stdout):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise bootstrap.FaultException(
"FaultExceptionType", "Fault exception msg", None
)
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
error_logs = "[ERROR] FaultExceptionType: Fault exception msg\n"
self.assertEqual(mock_stdout.getvalue(), error_logs)
@patch("sys.stdout", new_callable=StringIO)
def test_handle_event_request_fault_exception_logging_nomessage_notrace(
self, mock_stdout
):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise bootstrap.FaultException("FaultExceptionType", None, None)
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
error_logs = "[ERROR] FaultExceptionType\n"
self.assertEqual(mock_stdout.getvalue(), error_logs)
@patch("sys.stdout", new_callable=StringIO)
def test_handle_event_request_fault_exception_logging_notype_notrace(
self, mock_stdout
):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise bootstrap.FaultException(None, "Fault exception msg", None)
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
error_logs = "[ERROR] Fault exception msg\n"
self.assertEqual(mock_stdout.getvalue(), error_logs)
@patch("sys.stdout", new_callable=StringIO)
def test_handle_event_request_fault_exception_logging_notype_nomessage(
self, mock_stdout
):
def raise_exception_handler(json_input, lambda_context):
try:
import invalid_module
except ImportError as e:
raise bootstrap.FaultException(
None,
None,
traceback.format_list(
[
("spam.py", 3, "<module>", "spam.eggs()"),
("eggs.py", 42, "eggs", 'return "bacon"'),
]
),
)
bootstrap.handle_event_request(
self.lambda_runtime,
raise_exception_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
error_logs = "[ERROR]\r"
error_logs += "Traceback (most recent call last):\r"
error_logs += ' File "spam.py", line 3, in <module>\r'
error_logs += " spam.eggs()\r"
error_logs += ' File "eggs.py", line 42, in eggs\r'
error_logs += ' return "bacon"\n'
self.assertEqual(mock_stdout.getvalue(), error_logs)
@patch("sys.stdout", new_callable=StringIO)
@patch("imp.find_module")
@patch("imp.load_module")
def test_handle_event_request_fault_exception_logging_syntax_error(
self, mock_load_module, mock_find_module, mock_stdout
):
try:
eval("-")
except SyntaxError as e:
syntax_error = e
mock_find_module.return_value = (None, None, ("", "", None))
mock_load_module.side_effect = syntax_error
response_handler = bootstrap._get_handler("a.b")
bootstrap.handle_event_request(
self.lambda_runtime,
response_handler,
"invoke_id",
self.event_body,
"application/json",
{},
{},
"invoked_function_arn",
0,
bootstrap.StandardLogSink(),
)
import sys
sys.stderr.write(mock_stdout.getvalue())
error_logs = "[ERROR] Runtime.UserCodeSyntaxError: Syntax error in module 'a': unexpected EOF while parsing (<string>, line 1)\r"
error_logs += "Traceback (most recent call last):\r"
error_logs += ' File "<string>" Line 1\r'
error_logs += " -\n"
self.assertEqual(mock_stdout.getvalue(), error_logs)
class TestXrayFault(unittest.TestCase):
def test_make_xray(self):
class CustomException(Exception):
def __init__(self):
pass
actual = bootstrap.make_xray_fault(
CustomException.__name__,
"test_message",
"working/dir",
[["test.py", 28, "test_method", "does_not_matter"]],
)
self.assertEqual(actual["working_directory"], "working/dir")
self.assertEqual(actual["paths"], ["test.py"])
self.assertEqual(len(actual["exceptions"]), 1)
self.assertEqual(actual["exceptions"][0]["message"], "test_message")
self.assertEqual(actual["exceptions"][0]["type"], "CustomException")
self.assertEqual(len(actual["exceptions"][0]["stack"]), 1)
self.assertEqual(actual["exceptions"][0]["stack"][0]["label"], "test_method")
self.assertEqual(actual["exceptions"][0]["stack"][0]["path"], "test.py")
self.assertEqual(actual["exceptions"][0]["stack"][0]["line"], 28)
def test_make_xray_with_multiple_tb(self):
class CustomException(Exception):
def __init__(self):
pass
actual = bootstrap.make_xray_fault(
CustomException.__name__,
"test_message",
"working/dir",
[
["test.py", 28, "test_method", ""],
["another_test.py", 2718, "another_test_method", ""],
],
)
self.assertEqual(len(actual["exceptions"]), 1)
self.assertEqual(len(actual["exceptions"][0]["stack"]), 2)
self.assertEqual(actual["exceptions"][0]["stack"][0]["label"], "test_method")
self.assertEqual(actual["exceptions"][0]["stack"][0]["path"], "test.py")
self.assertEqual(actual["exceptions"][0]["stack"][0]["line"], 28)
self.assertEqual(
actual["exceptions"][0]["stack"][1]["label"], "another_test_method"
)
self.assertEqual(actual["exceptions"][0]["stack"][1]["path"], "another_test.py")
self.assertEqual(actual["exceptions"][0]["stack"][1]["line"], 2718)
class TestGetEventHandler(unittest.TestCase):
class FaultExceptionMatcher(BaseException):
def __init__(self, msg, exception_type=None, trace_pattern=None):
self.msg = msg
self.exception_type = exception_type
self.trace = (
trace_pattern if trace_pattern is None else re.compile(trace_pattern)
)
def __eq__(self, other):
trace_matches = True
if self.trace is not None:
# Validate that trace is an array
if not isinstance(other.trace, list):
trace_matches = False
elif not self.trace.match("".join(other.trace)):
trace_matches = False
return (
self.msg in other.msg
and self.exception_type == other.exception_type
and trace_matches
)
def test_get_event_handler_bad_handler(self):
handler_name = "bad_handler"
response_handler = bootstrap._get_handler(handler_name)
with self.assertRaises(FaultException) as cm:
response_handler()
returned_exception = cm.exception
self.assertEqual(
self.FaultExceptionMatcher(
"Bad handler 'bad_handler': not enough values to unpack (expected 2, got 1)",
"Runtime.MalformedHandlerName",
),
returned_exception,
)
def test_get_event_handler_import_error(self):
handler_name = "no_module.handler"
response_handler = bootstrap._get_handler(handler_name)
with self.assertRaises(FaultException) as cm:
response_handler()
returned_exception = cm.exception
self.assertEqual(
self.FaultExceptionMatcher(
"Unable to import module 'no_module': No module named 'no_module'",
"Runtime.ImportModuleError",
),
returned_exception,
)
def test_get_event_handler_syntax_error(self):
tmp_file = tempfile.NamedTemporaryFile(suffix=".py", dir=".", delete=False)
tmp_file.write(
b"def syntax_error()\n\tprint('syntax error, no colon after function')"
)
tmp_file.close()
filename_w_ext = os.path.basename(tmp_file.name)
filename, _ = os.path.splitext(filename_w_ext)
handler_name = "{}.syntax_error".format(filename)
response_handler = bootstrap._get_handler(handler_name)
with self.assertRaises(FaultException) as cm:
response_handler()
returned_exception = cm.exception
self.assertEqual(
self.FaultExceptionMatcher(
"Syntax error in",
"Runtime.UserCodeSyntaxError",
".*File.*\\.py.*Line 1.*",
),
returned_exception,
)
if os.path.exists(tmp_file.name):
os.remove(tmp_file.name)
def test_get_event_handler_missing_error(self):
tmp_file = tempfile.NamedTemporaryFile(suffix=".py", dir=".", delete=False)
tmp_file.write(b"def wrong_handler_name():\n\tprint('hello')")
tmp_file.close()
filename_w_ext = os.path.basename(tmp_file.name)
filename, _ = os.path.splitext(filename_w_ext)
handler_name = "{}.my_handler".format(filename)
response_handler = bootstrap._get_handler(handler_name)
with self.assertRaises(FaultException) as cm:
response_handler()
returned_exception = cm.exception
self.assertEqual(
self.FaultExceptionMatcher(
"Handler 'my_handler' missing on module '{}'".format(filename),
"Runtime.HandlerNotFound",
),
returned_exception,
)
if os.path.exists(tmp_file.name):
os.remove(tmp_file.name)
@patch("imp.find_module")
def test_get_event_handler_build_in_conflict(self, mock_find_module):
handler_name = "sys.hello"
mock_find_module.return_value = (None, None, ("", "", C_BUILTIN))
response_handler = bootstrap._get_handler(handler_name)
with self.assertRaises(FaultException) as cm:
response_handler()
returned_exception = cm.exception
self.assertEqual(
self.FaultExceptionMatcher(
"Cannot use built-in module sys as a handler module",
"Runtime.BuiltInModuleConflict",
),
returned_exception,
)
class TestContentType(unittest.TestCase):
def setUp(self):
self.lambda_runtime = Mock()
self.lambda_runtime.marshaller = LambdaMarshaller()
def test_application_json(self):
bootstrap.handle_event_request(
lambda_runtime_client=self.lambda_runtime,
request_handler=lambda event, ctx: {"response": event["msg"]},
invoke_id="invoke-id",
event_body=b'{"msg":"foo"}',
content_type="application/json",
client_context_json=None,
cognito_identity_json=None,
invoked_function_arn="invocation-arn",
epoch_deadline_time_in_ms=1415836801003,
log_sink=bootstrap.StandardLogSink(),
)
self.lambda_runtime.post_invocation_result.assert_called_once_with(
"invoke-id", '{"response": "foo"}', "application/json"
)
def test_binary_request_binary_response(self):
event_body = b"\x89PNG\r\n\x1a\n\x00\x00\x00"
bootstrap.handle_event_request(
lambda_runtime_client=self.lambda_runtime,
request_handler=lambda event, ctx: event,
invoke_id="invoke-id",
event_body=event_body,
content_type="image/png",
client_context_json=None,
cognito_identity_json=None,
invoked_function_arn="invocation-arn",
epoch_deadline_time_in_ms=1415836801003,
log_sink=bootstrap.StandardLogSink(),
)
self.lambda_runtime.post_invocation_result.assert_called_once_with(
"invoke-id", event_body, "application/unknown"
)
def test_json_request_binary_response(self):
binary_data = b"\x89PNG\r\n\x1a\n\x00\x00\x00"
bootstrap.handle_event_request(
lambda_runtime_client=self.lambda_runtime,
request_handler=lambda event, ctx: binary_data,
invoke_id="invoke-id",
event_body=b'{"msg":"ignored"}',
content_type="application/json",
client_context_json=None,
cognito_identity_json=None,
invoked_function_arn="invocation-arn",
epoch_deadline_time_in_ms=1415836801003,
log_sink=bootstrap.StandardLogSink(),
)
self.lambda_runtime.post_invocation_result.assert_called_once_with(
"invoke-id", binary_data, "application/unknown"
)
def test_binary_with_application_json(self):
bootstrap.handle_event_request(
lambda_runtime_client=self.lambda_runtime,
request_handler=lambda event, ctx: event,
invoke_id="invoke-id",
event_body=b"\x89PNG\r\n\x1a\n\x00\x00\x00",
content_type="application/json",
client_context_json=None,
cognito_identity_json=None,
invoked_function_arn="invocation-arn",
epoch_deadline_time_in_ms=1415836801003,
log_sink=bootstrap.StandardLogSink(),
)
self.lambda_runtime.post_invocation_result.assert_not_called()
self.lambda_runtime.post_invocation_error.assert_called_once()
(
invoke_id,
error_result,
xray_fault,
), _ = self.lambda_runtime.post_invocation_error.call_args
error_dict = json.loads(error_result)
self.assertEqual("invoke-id", invoke_id)
self.assertEqual("Runtime.UnmarshalError", error_dict["errorType"])
class TestLogError(unittest.TestCase):
@patch("sys.stdout", new_callable=StringIO)
def test_log_error_standard_log_sink(self, mock_stdout):
err_to_log = bootstrap.make_error("Error message", "ErrorType", None)
bootstrap.log_error(err_to_log, bootstrap.StandardLogSink())
expected_logged_error = "[ERROR] ErrorType: Error message\n"
self.assertEqual(mock_stdout.getvalue(), expected_logged_error)
def test_log_error_framed_log_sink(self):
with NamedTemporaryFile() as temp_file:
with bootstrap.FramedTelemetryLogSink(temp_file.name) as log_sink:
err_to_log = bootstrap.make_error("Error message", "ErrorType", None)
bootstrap.log_error(err_to_log, log_sink)
expected_logged_error = "[ERROR] ErrorType: Error message"
with open(temp_file.name, "rb") as f:
content = f.read()
frame_type = int.from_bytes(content[:4], "big")
self.assertEqual(frame_type, 0xA55A0001)
length = int.from_bytes(content[4:8], "big")
self.assertEqual(length, len(expected_logged_error.encode("utf8")))
actual_message = content[8:].decode()
self.assertEqual(actual_message, expected_logged_error)
@patch("sys.stdout", new_callable=StringIO)
def test_log_error_indentation_standard_log_sink(self, mock_stdout):
err_to_log = bootstrap.make_error(
"Error message", "ErrorType", [" line1 ", " line2 ", " "]
)
bootstrap.log_error(err_to_log, bootstrap.StandardLogSink())
expected_logged_error = "[ERROR] ErrorType: Error message\rTraceback (most recent call last):\r\xa0\xa0line1 \r\xa0\xa0line2 \r\xa0\xa0\n"
self.assertEqual(mock_stdout.getvalue(), expected_logged_error)
def test_log_error_indentation_framed_log_sink(self):
with NamedTemporaryFile() as temp_file:
with bootstrap.FramedTelemetryLogSink(temp_file.name) as log_sink:
err_to_log = bootstrap.make_error(
"Error message", "ErrorType", [" line1 ", " line2 ", " "]
)
bootstrap.log_error(err_to_log, log_sink)
expected_logged_error = "[ERROR] ErrorType: Error message\nTraceback (most recent call last):\n\xa0\xa0line1 \n\xa0\xa0line2 \n\xa0\xa0"
with open(temp_file.name, "rb") as f:
content = f.read()
frame_type = int.from_bytes(content[:4], "big")
self.assertEqual(frame_type, 0xA55A0001)
length = int.from_bytes(content[4:8], "big")
self.assertEqual(length, len(expected_logged_error.encode("utf8")))
actual_message = content[8:].decode()
self.assertEqual(actual_message, expected_logged_error)
@patch("sys.stdout", new_callable=StringIO)
def test_log_error_empty_stacktrace_line_standard_log_sink(self, mock_stdout):
err_to_log = bootstrap.make_error(
"Error message", "ErrorType", ["line1", "", "line2"]
)
bootstrap.log_error(err_to_log, bootstrap.StandardLogSink())
expected_logged_error = "[ERROR] ErrorType: Error message\rTraceback (most recent call last):\rline1\r\rline2\n"
self.assertEqual(mock_stdout.getvalue(), expected_logged_error)
def test_log_error_empty_stacktrace_line_framed_log_sink(self):
with NamedTemporaryFile() as temp_file:
with bootstrap.FramedTelemetryLogSink(temp_file.name) as log_sink:
err_to_log = bootstrap.make_error(
"Error message", "ErrorType", ["line1", "", "line2"]
)
bootstrap.log_error(err_to_log, log_sink)
expected_logged_error = "[ERROR] ErrorType: Error message\nTraceback (most recent call last):\nline1\n\nline2"
with open(temp_file.name, "rb") as f:
content = f.read()
frame_type = int.from_bytes(content[:4], "big")
self.assertEqual(frame_type, 0xA55A0001)
length = int.from_bytes(content[4:8], "big")
self.assertEqual(length, len(expected_logged_error))
actual_message = content[8:].decode()
self.assertEqual(actual_message, expected_logged_error)
class TestUnbuffered(unittest.TestCase):
def test_write(self):
mock_stream = MagicMock()
unbuffered = bootstrap.Unbuffered(mock_stream)
unbuffered.write("YOLO!")
mock_stream.write.assert_called_once_with("YOLO!")
mock_stream.flush.assert_called_once()
def test_writelines(self):
mock_stream = MagicMock()
unbuffered = bootstrap.Unbuffered(mock_stream)
unbuffered.writelines(["YOLO!"])
mock_stream.writelines.assert_called_once_with(["YOLO!"])
mock_stream.flush.assert_called_once()
class TestLogSink(unittest.TestCase):
@patch("sys.stdout", new_callable=StringIO)
def test_create_unbuffered_log_sinks(self, mock_stdout):
if "_LAMBDA_TELEMETRY_LOG_FD" in os.environ:
del os.environ["_LAMBDA_TELEMETRY_LOG_FD"]
actual = bootstrap.create_log_sink()
self.assertIsInstance(actual, bootstrap.StandardLogSink)
actual.log("log")
self.assertEqual(mock_stdout.getvalue(), "log")
def test_create_framed_telemetry_log_sinks(self):
fd = "test_fd"
os.environ["_LAMBDA_TELEMETRY_LOG_FD"] = fd
actual = bootstrap.create_log_sink()
self.assertIsInstance(actual, bootstrap.FramedTelemetryLogSink)
self.assertEqual(actual.filename, "/proc/self/fd/" + fd)
self.assertFalse("_LAMBDA_TELEMETRY_LOG_FD" in os.environ)
def test_single_frame(self):
with NamedTemporaryFile() as temp_file:
message = "hello world\nsomething on a new line!\n"
with bootstrap.FramedTelemetryLogSink(temp_file.name) as ls:
ls.log(message)
with open(temp_file.name, "rb") as f:
content = f.read()
frame_type = int.from_bytes(content[:4], "big")
self.assertEqual(frame_type, 0xA55A0001)
length = int.from_bytes(content[4:8], "big")
self.assertEqual(length, len(message))
actual_message = content[8:].decode()
self.assertEqual(actual_message, message)
def test_multiple_frame(self):
with NamedTemporaryFile() as temp_file:
first_message = "hello world\nsomething on a new line!"
second_message = "hello again\nhere's another message\n"
with bootstrap.FramedTelemetryLogSink(temp_file.name) as ls:
ls.log(first_message)
ls.log(second_message)
with open(temp_file.name, "rb") as f:
content = f.read()
pos = 0
for message in [first_message, second_message]:
frame_type = int.from_bytes(content[pos : pos + 4], "big")
self.assertEqual(frame_type, 0xA55A0001)
pos += 4
length = int.from_bytes(content[pos : pos + 4], "big")
self.assertEqual(length, len(message))
pos += 4
actual_message = content[pos : pos + len(message)].decode()
self.assertEqual(actual_message, message)
pos += len(message)
self.assertEqual(content[pos:], b"")
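# --- Editor's note: a helper sketch (not part of the original file) that encodes one
# --- telemetry frame in the format the two tests above decode: a 4-byte big-endian
# --- frame-type marker 0xA55A0001, a 4-byte big-endian payload length, then the
# --- UTF-8 payload bytes.
def _encode_telemetry_frame(message: str) -> bytes:
    payload = message.encode("utf8")
    return (0xA55A0001).to_bytes(4, "big") + len(payload).to_bytes(4, "big") + payload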
class TestBootstrapModule(unittest.TestCase):
@patch("awslambdaric.bootstrap.handle_event_request")
@patch("awslambdaric.bootstrap.LambdaRuntimeClient")
def test_run(self, mock_runtime_client, mock_handle_event_request):
expected_app_root = "/tmp/test/app_root"
expected_handler = "app.my_test_handler"
expected_lambda_runtime_api_addr = "test_addr"
mock_event_request = MagicMock()
mock_event_request.x_amzn_trace_id = "123"
mock_runtime_client.return_value.wait_next_invocation.side_effect = [
mock_event_request,
MagicMock(),
]
with self.assertRaises(TypeError) as cm:
bootstrap.run(
expected_app_root, expected_handler, expected_lambda_runtime_api_addr
)
returned_exception = cm.exception
mock_handle_event_request.assert_called_once()
@patch(
"awslambdaric.bootstrap.LambdaLoggerHandler",
Mock(side_effect=Exception("Boom!")),
)
@patch("awslambdaric.bootstrap.build_fault_result", MagicMock())
@patch("awslambdaric.bootstrap.log_error", MagicMock())
@patch("awslambdaric.bootstrap.LambdaRuntimeClient", MagicMock())
@patch("awslambdaric.bootstrap.sys")
def test_run_exception(self, mock_sys):
class TestException(Exception):
pass
expected_app_root = "/tmp/test/app_root"
expected_handler = "app.my_test_handler"
expected_lambda_runtime_api_addr = "test_addr"
mock_sys.exit.side_effect = TestException("Boom!")
with self.assertRaises(TestException) as cm:
bootstrap.run(
expected_app_root, expected_handler, expected_lambda_runtime_api_addr
)
returned_exception = cm.exception
mock_sys.exit.assert_called_once_with(1)
if __name__ == "__main__":
unittest.main()
|
[
"11421173+carlzogh@users.noreply.github.com"
] |
11421173+carlzogh@users.noreply.github.com
|
00cc5253654225237f259127c0b421064c796d04
|
e4ff1b059345de4ecde159c19a7b58cb75dd0df5
|
/skipper.py
|
1c06defdbf337fcadf8f8576cfa646f44d2cb24b
|
[] |
no_license
|
holychicken99/skip_ad
|
4f7f4972ba5e14345091aea3125e29730df48f37
|
1ccdc39458eeba6a8cce4a19d42aa89aefe244e9
|
refs/heads/master
| 2023-08-16T00:02:03.862869
| 2021-10-03T02:44:14
| 2021-10-03T02:44:14
| 412,956,216
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 533
|
py
|
import random
import time
import keyboard
import pyautogui
import win32api
import win32con
from pyautogui import *
while 1:
    # Search for the skip button once per iteration and reuse the result,
    # instead of taking a second screenshot that may no longer match
    box = pyautogui.locateOnScreen('Capture.png', confidence=0.6, grayscale=True)
    if box is not None:
        a, b, c, d = box
        a += 15
        print(a)
        b += 20
        print(b)
        pyautogui.click(x=a, y=b)
    # print(type(pyautogui.locateOnScreen('Capture.png',confidence=0.5,grayscale=True)))
    time.sleep(0.5)
|
[
"akshit.singh20@gmail.com"
] |
akshit.singh20@gmail.com
|
f0c2ca134afc4cc14b53ae4652e5f051a58d2cb3
|
e5eebab946cd83851d6f788c51600ecc36b42a64
|
/cochincorperation.py
|
25b7ecc1d0addbde8dfa5902932c1deb2da2ec36
|
[] |
no_license
|
mayant-solutions/tender-progarams
|
70ea8e261f0ca3214de0f39540dadf367e08f68a
|
8392e969e20710331438a17575a3d9a6b4ef3b61
|
refs/heads/master
| 2020-04-01T09:59:42.917947
| 2018-10-15T11:01:30
| 2018-10-15T11:01:30
| 153,098,083
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,419
|
py
|
import requests, bs4, re, os, datetime
output = []
def extract(i):
soups = str(i)
td = bs4.BeautifulSoup(soups,"lxml")
tdList = td.findAll('td')
url = td.select('a')
sample_urls = url[1].get('href')
urls = "http://tender.lsgkerala.gov.in"+sample_urls[2:]
    LB_name = tdList[0].getText()
    tender_No = tdList[1].getText()
Tender_description = tdList[3].getText()
Closing_Date = tdList[4].getText()
return LB_name, tender_No, Tender_description, Closing_Date, urls
def main(key):
    s = 1
    l = 0
    count = 1
    search = str(key)
    while s < 50:
        res = requests.get('http://tender.lsgkerala.gov.in/pages/displayTender.php?Start=' + str(s) + '&Index=' + str(l))
        if s % 10 == 0:
            l += 1
        s += 1
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, "lxml")
        elements = soup.findAll(True, {"class": ["clsLBContentLT", "clsLBContent"]})
        for i in elements:
            LB_name, tender_no, tender_des, close_date, urlForDownload = extract(i)
            # print(close_date)
            if search in tender_des:
                output.append(urlForDownload)
    # Return after scanning all pages, instead of returning on the first match
    return output
'''download = requests.get(urlForDownload)
download.raise_for_status()
with open ('tender'+str(count)+'.pdf','wb') as f:
for c in download.iter_content(100000):
f.write(c)
count+=1'''
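# --- Editor's note: a minimal usage sketch (not part of the original file); the
# --- search keyword 'Road' is a hypothetical example value.
if __name__ == '__main__':
    for url in main('Road'):
        print(url)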
|
[
"noreply@github.com"
] |
mayant-solutions.noreply@github.com
|
da6113fd27319ee8325f6c7cc61632067aa28883
|
e5cfe0f84f38280839d9eef67baca718c04f4dea
|
/python_code/ndvi_evi_cigreen_apar.py
|
7133d24177ac27f154efc3e8bb734fbbae886ead
|
[] |
no_license
|
songlaoshi/SIF_GPP_reviewer
|
a02a58c11af666064cce02d519d2cb19e07e8233
|
ac00782f9166af31a062ea91af8e33fc08613bce
|
refs/heads/master
| 2020-04-15T15:03:49.485906
| 2019-05-18T01:56:01
| 2019-05-18T01:56:01
| 164,778,107
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,343
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@author: lizhaohui
@contact: lizhaoh2015@gmail.com
@file: ndvi_evi_cigreen_apar.py
@time: 2019/3/2 16:37
'''
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from scipy.optimize import curve_fit
from scipy.stats.stats import pearsonr
from sklearn import metrics
from matplotlib import ticker
# Use scientific notation for the axis tick labels
fromatter = ticker.ScalarFormatter(useMathText=True)
fromatter.set_scientific(True)
fromatter.set_powerlimits((-1, 1))
def funcp(p):
    # Bucket a p-value into conventional significance thresholds
    if p < 0.001:
        return 0.001
    elif p < 0.05:
        return 0.05
    elif p < 0.1:
        return 0.1
    return None

def funcstar(p):
    # Map a p-value to a significance marker for plot annotations
    if p < 0.001:
        return '**'
    elif p < 0.05:
        return '*'
    return ''
filepath = r'D:\Shared_Folder\Lzh py\SifAnalysis\SIF_GPP_reviewer\data'
axis_font = {'fontname': 'Arial', 'size': 18}
font1 = {'family': 'Times New Roman',
'weight': 'normal',
'size': 18, }
legend_font = {'fontname': 'Arial', 'size': 14}
ticklabelsize = 18
markersize = 8
linewidth = 3
######################################################
fig, axs = plt.subplots(2, 2, figsize=(9.5, 9))
plt.subplots_adjust(wspace=0.29, hspace=0.29, top=0.86)
linreg = LinearRegression()
# ------------------------------------------
# data = pd.ExcelFile(
# filepath + '\\' + r'GPP_VPD_Ta_Tleaf_PAR_APAR_SIF_VI_CI_SIFyield_LUE_daymean_withnan-SIFyield delete d192-vi delete.xlsx')
data = pd.ExcelFile(
filepath + '\\' + r'SIF_GPP_VI_ref_daymean_sq2017corn.xlsx')
daymean = data.parse('Sheet1')
daymean1 = pd.concat([daymean['SFMSIFyield'], daymean['LUE']], axis=1)
daymean1 = daymean1.dropna()
daymean2 = pd.concat(
[daymean['PAR'], daymean['SFM'], daymean['GPP'], daymean['NDVI'], daymean['EVI'], daymean['CVI']], axis=1)
daymean2 = daymean2.dropna()
PAR = daymean2['PAR']
print(PAR.shape)
# APAR = daymean['APAR']
SIF = daymean2['SFM']
GPP = daymean2['GPP']
CIgreen = daymean2['CVI']
NDVI = daymean2['NDVI']
EVI = daymean2['EVI']
# irrigated
fAPARchl_ndvi = 1.1 * NDVI - 0.27
fAPARchl_evi = 1.29 * EVI - 0.16
# fAPARchl_CIgreen = 0.12 * CIgreen - 0.1
fAPARchl_CIgreen = CIgreen
APARchl_ndvi = fAPARchl_ndvi * PAR
APARchl_evi = fAPARchl_evi * PAR
APARchl_CIgreen = fAPARchl_CIgreen * PAR
## -------------------SIF and GPP------------------------------
SIFyield = daymean1['SFMSIFyield']
LUE = daymean1['LUE']
p1, = axs[0, 0].plot(LUE, SIFyield,
color='k', marker='o', linestyle='', label='', markersize=markersize)
reg = linreg.fit(LUE.values.reshape(-1, 1), SIFyield)
a, b = linreg.coef_, linreg.intercept_
pred = reg.predict(LUE.values.reshape(-1, 1))
text = 'y=' + '%.4f' % a + 'x' + '+' + '%.4f' % b
r, p = pearsonr(LUE, SIFyield)
r2 = '%.2f' % np.square(r)
text1 = r'$R^2$= ' + r2 + funcstar(p)
axs[0, 0].plot(LUE.values.reshape(-1, 1), pred,
color='k', linewidth=2, label=text1)
axs[0, 0].set_xlabel(r"$GPP/APAR_{canopy}$", **axis_font)
axs[0, 0].set_ylabel(r"$SIF/APAR_{canopy}$", **axis_font)
# axs[0,1].set_xlabel(r"LUE",**axis_font)
# axs[0,1].set_ylabel(r"SIF$_{yield}$",**axis_font)
axs[0, 0].yaxis.set_major_formatter(fromatter)
# axs[0,1].set_ylim(0,1.1)
# axs[0,1].set_xlim(0,1.1)
axs[0, 0].tick_params(labelsize=ticklabelsize)
axs[0, 0].text((np.max(LUE) + 0.05) * 0.76, (np.max(SIFyield) + 0.0005) * 0.82, '(a)', **axis_font)
axs[0, 0].text(0, (np.max(SIFyield) + 0.0005) * 0.82, text1, color='k', **legend_font)
axs[0, 0].set_yticks([0, 0.001, 0.002, 0.003])
# --------------------NDVI---------------------------
## -------------------SIF and GPP------------------------------
SIFyield = SIF / APARchl_ndvi
LUE = GPP / APARchl_ndvi
p1, = axs[1, 0].plot(LUE, SIFyield,
color='k', marker='o', linestyle='', label='', markersize=markersize)
reg = linreg.fit(LUE.values.reshape(-1, 1), SIFyield)
a, b = linreg.coef_, linreg.intercept_
pred = reg.predict(LUE.values.reshape(-1, 1))
text = 'y=' + '%.4f' % a + 'x' + '+' + '%.4f' % b
r, p = pearsonr(LUE, SIFyield)
r2 = '%.2f' % np.square(r)
text1 = r'$R^2$= ' + r2 + funcstar(p)
axs[1, 0].plot(LUE.values.reshape(-1, 1), pred,
color='k', linewidth=2, label=text1)
axs[1, 0].set_xlabel(r"$GPP/APAR_{NDVI}$", **axis_font)
axs[1, 0].set_ylabel(r"$SIF/APAR_{NDVI}$", **axis_font)
axs[1, 0].yaxis.set_major_formatter(fromatter)
# axs[1,0].set_ylim(0,np.max(SIFyield)+0.0005)
# axs[1,0].set_xlim(0,np.max(LUE)+0.05)
axs[1, 0].tick_params(labelsize=ticklabelsize)
axs[1, 0].text((np.max(LUE) + 0.05) * 0.64, (np.max(SIFyield) + 0.0005) * 0.83, '(c)', **axis_font)
axs[1, 0].text((np.max(LUE) + 0.05) * 0.03, (np.max(SIFyield) + 0.0005) * 0.83, text1, color='k', **legend_font)
# axs[1,0].set_yticks([0,0.003,0.006,0.009])
# -----------------------------------------------
## -------------------SIF and GPP------------------------------
SIFyield = SIF / APARchl_evi
LUE = GPP / APARchl_evi
p1, = axs[1, 1].plot(LUE, SIFyield,
color='k', marker='o', linestyle='', label='', markersize=markersize)
reg = linreg.fit(LUE.values.reshape(-1, 1), SIFyield)
a, b = linreg.coef_, linreg.intercept_
pred = reg.predict(LUE.values.reshape(-1, 1))
text = 'y=' + '%.4f' % a + 'x' + '+' + '%.4f' % b
r, p = pearsonr(LUE, SIFyield)
r2 = '%.2f' % np.square(r)
text1 = r'$R^2$= ' + r2 + funcstar(p)
axs[1, 1].plot(LUE.values.reshape(-1, 1), pred,
color='k', linewidth=2, label=text1)
axs[1, 1].set_xlabel(r"$GPP/APAR_{EVI}$", **axis_font)
axs[1, 1].set_ylabel(r"$SIF/APAR_{EVI}$", **axis_font)
axs[1, 1].yaxis.set_major_formatter(fromatter)
# axs[1,1].set_ylim(0,np.max(SIFyield)+0.0005)
# axs[1,1].set_xlim(0,np.max(LUE)+0.05)
axs[1, 1].tick_params(labelsize=ticklabelsize)
axs[1, 1].text((np.max(LUE) + 0.05) * 0.64, (np.max(SIFyield) + 0.0005) * 0.84, '(d)', **axis_font)
axs[1, 1].text((np.max(LUE) + 0.05) * 0.04, (np.max(SIFyield) + 0.0005) * 0.84, text1, color='k', **legend_font)
axs[1, 1].set_xticks([0, 0.05, 0.1])
# -----------------------------------------------
## -------------------SIF and GPP------------------------------
SIFyield = SIF / APARchl_CIgreen
LUE = GPP / APARchl_CIgreen
p1, = axs[0, 1].plot(LUE, SIFyield,
color='k', marker='o', linestyle='', label='', markersize=markersize)
reg = linreg.fit(LUE.values.reshape(-1, 1), SIFyield)
a, b = linreg.coef_, linreg.intercept_
pred = reg.predict(LUE.values.reshape(-1, 1))
text = 'y=' + '%.4f' % a + 'x' + '+' + '%.4f' % b
r, p = pearsonr(LUE, SIFyield)
r2 = '%.2f' % np.square(r)
text1 = r'$R^2$= ' + r2 + funcstar(p)
axs[0, 1].plot(LUE.values.reshape(-1, 1), pred,
color='k', linewidth=2, label=text1)
axs[0, 1].set_xlabel(r"$GPP/APAR_{green}$", **axis_font)
axs[0, 1].set_ylabel(r"$SIF/APAR_{green}$", **axis_font)
axs[0, 1].yaxis.set_major_formatter(fromatter)
# axs[1,2].set_ylim(0,np.max(SIFyield)+0.0005)
# axs[1,2].set_xlim(0,np.max(LUE)+0.05)
axs[0, 1].tick_params(labelsize=ticklabelsize)
axs[0, 1].text((np.max(LUE) + 0.05) * 0.85, (np.max(SIFyield) + 0.0005) * 0.89, '(b)', **axis_font)
axs[0, 1].text((np.max(LUE) + 0.05) * 0.03, (np.max(SIFyield) + 0.0005) * 0.89, text1, color='k', **legend_font)
# axs[1,2].set_yticks([0,0.003,0.006,0.009])
# axs[0,0].remove()
# axs[0,2].remove()
plt.show()
|
[
"lizhaoh2015@gmail.com"
] |
lizhaoh2015@gmail.com
|
0eb742bea954542d8ca1e1f83256561089c96248
|
a0cae0c3e510a81fd9db6d945bf7518a85ecca30
|
/editline/tests/test_dict_completion.py
|
8323bdceb754b96fc19b4ac300c2015e8deee338
|
[] |
no_license
|
mark-nicholson/python-editline
|
c5deb7789caab43eee08337a4e92bb2336646b1d
|
c23f1071c4b832a92f66e2f49142e5c5f00e500d
|
refs/heads/master
| 2021-01-18T14:11:48.376557
| 2020-02-19T00:30:42
| 2020-02-19T00:30:42
| 14,903,378
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,631
|
py
|
"""
Verifying the completer will properly complete various forms of dictionaries.
"""
import sys
import re
import unittest
from test.support import import_module
# just grab what we need from the other...
from editline.tests.test_lineeditor import CompletionsBase, CompletionsCommon
#
# Check Dictionary support
#
class Completions_Dictionary(CompletionsCommon):
prep_script = [
'a = { "tomatoes": 10, "peaches": 5, "pears": 8, "pecans": 100 }'
]
cmd = "a['tomatoes']"
cmd_tab_index = 7
result = '10'
tidy_cmd = ''
tidy_len = 1
comp = None
comp_len = 0
class Completions_Dictionary_NotArray(Completions_Dictionary):
cmd = "a['pecans']"
cmd_tab_index = 2
tidy_cmd = "pecans']"
result = '100'
comp_len = 2
comp = re.compile(r'peaches\s+pears\s+pecans\s+tomatoes')
class Completions_Dictionary_MultiUnique(Completions_Dictionary):
cmd = "a['pecans']"
cmd_tab_index = 6
result = '100'
class Completions_Dictionary_Multi(Completions_Dictionary):
cmd = "a['pecans']"
cmd_tab_index = 5
result = '100'
tidy_cmd = None
tidy_len = None
comp_idx = 0
comp_len = 2
comp = re.compile(r'peaches\s+pears\s+pecans')
class Completions_Dictionary_Multi2(Completions_Dictionary_Multi):
cmd = "a['pears']"
cmd_tab_index = 6
result = '8'
comp = re.compile(r'peaches\s+pears')
#
# Multi-Level Dictionaries
#
class Completions_Dict3D_L1(CompletionsBase):
prep_script = [
'from editline.tests.support.data_structures import three_d_dict'
]
cmd = "three_d_dict['zero']['zero_one']['zero_one_two']"
cmd_tab_index = 14
result = '{:d}'.format(0x012)
comp = re.compile(r'one\s+three\s+two\s+zero')
comp_idx = 0
comp_len = 2
class Completions_Dict3D_L2(Completions_Dict3D_L1):
cmd_tab_index = 22
tidy_cmd = "one']['zero_one_two']"
comp = re.compile(r'zero_one\s+zero_two\s+zero_zero')
class Completions_Dict3D_L3(Completions_Dict3D_L1):
cmd_tab_index = 34
tidy_cmd = "two']"
comp = re.compile(r'zero_one_one\s+zero_one_three\s+zero_one_two\s+zero_one_zero')
class Completions_Dict3D_L1_MultiKey(Completions_Dict3D_L1):
cmd = "three_d_dict['three']['three_two']['three_two_two']"
cmd_tab_index = 15
result = '{:d}'.format(0x322)
comp = re.compile(r'three\s+two')
# L2 has no conflicting keys
class Completions_Dict3D_L3_MultiKey(Completions_Dict3D_L1_MultiKey):
cmd_tab_index = 47
tidy_cmd = "wo']"
comp = re.compile(r'three_two_three\s+three_two_two')
# kick off
if __name__ == "__main__":
unittest.main()
|
[
"nicholson.mark@gmail.com"
] |
nicholson.mark@gmail.com
|
6dc1d885d1fb02394835aa756909605c8de5de1c
|
23a21d79dfe1a986939225b95fa4f574dc9989bf
|
/day2/test_mem_alloc_in_function.py
|
9af31f4a00fbc2e14a99588b9f701d5daaa9ff43
|
[] |
no_license
|
Tourmaline/computational_python_course
|
591b0092d1c4f4d0d89ecfb079ac16f16dff47a7
|
3a8e62d3d454d8473ae3dd04d9209868edb8b2fa
|
refs/heads/master
| 2021-07-13T13:19:38.719628
| 2017-10-18T12:00:22
| 2017-10-18T12:00:22
| 107,129,818
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 413
|
py
|
def f(l):
print(id(l))
    l = [1,2,3] # rebinds the local name only, so the caller's list is untouched; to modify the outer list in place we could use l[:] = [1,2,3], though that is discouraged because such side effects make functions harder to reason about
print(id(l))
print("inside function ", l)
val = 4
return l, val
if __name__ == '__main__':
i = [5,6]
print(id(i))
(l, val) = f(i)
print(id(i))
print(l)
print(i)
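# A minimal follow-up sketch (added for illustration; `g` and `m` are
# hypothetical names, not part of the original lesson): slice assignment
# mutates the caller's list in place, so the id stays the same and the
# change is visible outside the function.
def g(l):
    l[:] = [1, 2, 3]  # same object, contents replaced
m = [5, 6]
print(id(m))
g(m)
print(id(m))  # same id as before
print(m)      # [1, 2, 3]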
|
[
"nastja.kruchinina@gmail.com"
] |
nastja.kruchinina@gmail.com
|
d17fe38776c513a5cb06ef0fa54950c272ffa0c4
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_212/171.py
|
5ce8038ecc3ce657db872f7688e9b63545982c5a
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,254
|
py
|
t = int(raw_input().strip())
for xyz in range(1, t+1):
n, p = list(map(int, raw_input().strip().split()))
ans = 0
if p == 2:
x = list(map(int, raw_input().strip().split()))
o, e = [0, 0]
for i in range(n):
if x[i] % 2 == 0:
e += 1
else:
o += 1
ans = e+((o+1)/2)
elif p == 3:
x = list(map(int, raw_input().strip().split()))
o, e, j = [0, 0, 0]
for i in range(n):
if x[i] % 3 == 0:
e += 1
elif x[i] % 3 == 1:
o += 1
else:
j += 1
ans = e+min(o, j)+((max(o, j)-min(o, j)+2)/3)
else:
x = list(map(int, raw_input().strip().split()))
o, e, j, z = [0, 0, 0, 0]
for i in range(n):
if x[i] % 4 == 0:
o += 1
elif x[i] % 4 == 1:
e += 1
elif x[i] % 4 == 2:
j += 1
else:
z += 1
ans = o+min(e, z)+(j/2)+(max(e, z)-min(e, z)+3)/4
if j % 2 == 1:
if (max(e, z)-min(e, z)) % 4 == 0 or (max(e, z)-min(e, z)) % 4 == 3:
ans += 1
print("Case #"+str(xyz)+": "+str(ans))
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
714212d2cd4b88de22de78b73d4b79a9b9e35572
|
072a9b5465681b42b5520209d31cc739e74ebbb6
|
/apps/reviews/urls.py
|
ce631420d1849a0ad47be9c1bbaa32ef5dbd0fd1
|
[] |
no_license
|
rpmmitchell/book_reviews
|
a1786e569e5a39c3c27be07c680c02e796a2e63b
|
956aad3d7335313cb924665ef0180eafb7d87d0e
|
refs/heads/master
| 2021-04-26T23:42:38.280176
| 2018-03-05T00:41:39
| 2018-03-05T00:41:39
| 123,843,185
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 553
|
py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^success/(?P<num>\d+)$', views.success),
url(r'^login$', views.login),
url(r'^register$', views.register),
url(r'^logout$', views.logout),
url(r'^add/$', views.add),
url(r'^create$', views.create),
url(r'^review_page/(?P<num>\d+)/(?P<num2>\d+)$', views.review_page),
url(r'^add_review/(?P<num>\d+)/(?P<num2>\d+)$', views.add_review),
url(r'^profile/(?P<num>\d+)$', views.profile),
url(r'^delete/(?P<num>\d+)/(?P<num2>\d+)$', views.delete),
]
|
[
"rpmmitchell@outllok.com"
] |
rpmmitchell@outllok.com
|
de5926d2d1a9ddb96b44a178957c07d2b6bb98e4
|
ca875898694ad83624fba6529c19e45620380f17
|
/python/test/py_tools/cp_tools/release1.py
|
d410b580dd82814f9a8765cf04a7e46fdfed167a
|
[] |
no_license
|
yyzhou94/my-codes
|
0759c2db3095f52f19f5b667057444033958a8f2
|
cf75e581fc0a016fc9ba3064cc4aab5d3a0c3869
|
refs/heads/master
| 2021-02-15T19:40:49.432217
| 2017-04-07T07:09:54
| 2017-04-07T07:09:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,249
|
py
|
#! /usr/bin/env python
"""
Created on 2015-06-12
@author: lisapi
@name: release.py
Copies images from the daily build directory to the release directory and creates md5 checksum files.
"""
#import some lib
import pexpect
import sys
import re
import os
import pxssh
from time import sleep
import api
# Build platform model dir name mapping
dir_model_name = {'3290': '3290',
'3295': '3295',
'3297': '3297',
'3922': '3922',
'3920': '3920',
'3924': 'as5600_52x',
'3930': '3930',
'3780': '3780',
'5401': '5401',
'5101': '5101',
'es4654bf': 'as4600_54t',
'as6701_32x': 'as6701_32x',
'niagara2632xl': 'niagara2632xl',
'as5712_54x': 'as5712_54x',
'arctica4804i': 'arctica4804i'}
# Build image name mapping
image_model_name = {'3290': 'P3290',
'3295': 'P3295',
'3297': 'P3297',
'3922': 'P3922',
'3920': 'P3920',
'3924': 'as5600_52x',
'3930': 'P3930',
'3780': 'P3780',
'5401': 'P5401',
'5101': 'P5101',
'es4654bf': 'as4600_54t',
'as6701_32x': 'as6701_32x',
'niagara2632xl': 'niagara2632xl',
'as5712_54x': 'as5712_54x',
'arctica4804i': 'arctica4804i'}
# Onie image name mapping
onie_name_map = {'3290': 'quanta_lb9a',
'3295': 'quanta_lb9',
'3297': 'celestica_d1012',
'3922': 'accton_as5610_52x',
'3920': 'quanta_ly2',
'3924': 'accton_as5600_52x',
'3930': 'celestica_d2030',
'3780': 'quanta_lb8',
'5101': 'foxconn_cabrera',
'5401': 'foxconn_urus',
'es4654bf': 'accton_as4600_54t',
'as6701_32x': 'accton_as6701_32x',
'niagara2632xl': 'accton_niagara2632xl',
'as5712_54x': 'accton_as5712_54x',
'arctica4804i': 'penguin_arctica4804i'}
#User/server info
user_name = "build"
user_password = 'build'
server_ip = '10.10.50.16'
#Copy image to release dir and create md5 file
def copy_image(model=None,branch_name=None,sRevision=None,sDr=None):
#login server
child = api.ssh_login(ip=server_ip,user=user_name,password=user_password)
print "*********************"
#create dir
cmd = [ 'cd /tftpboot/build/release/',
'mkdir %s' %(sDr)]
for icmd in cmd:
api.sendExpect(child=child, command=icmd)
#copy image
for model_name in model:
print 'model:', model
print 'type(model):', type(model)
if model_name in ['as5712_54x', 'niagara2632xl']:
platform = 'x86'
else:
platform = 'powerpc'
print "platform is %s" %platform
commands = [
'cd /tftpboot/build/release/%s' %(sDr),
'mkdir %s' %(image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/picos-%s-%s-%s.tar.gz /tftpboot/build/release/%s/%s' % (dir_model_name[model_name],branch_name,image_model_name[model_name],sRevision,sDr,image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/pica-switching-%s-%s-%s.deb /tftpboot/build/release/%s/%s' %(dir_model_name[model_name],branch_name,image_model_name[model_name],sRevision,sDr,image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/pica-ovs-%s-%s-%s.deb /tftpboot/build/release/%s/%s' %(dir_model_name[model_name],branch_name,image_model_name[model_name],sRevision,sDr,image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/pica-linux-%s-%s-%s.deb /tftpboot/build/release/%s/%s' %(dir_model_name[model_name],branch_name,image_model_name[model_name],sRevision,sDr,image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/pica-tools-%s-%s-%s.deb /tftpboot/build/release/%s/%s' %(dir_model_name[model_name],branch_name,image_model_name[model_name],sRevision,sDr,image_model_name[model_name]),
'cp /tftpboot/build/daily/%s/onie-installer-%s-%s-picos-%s-%s.bin /tftpboot/build/release/%s/%s' %(dir_model_name[model_name],platform,onie_name_map[model_name],branch_name,sRevision,sDr,image_model_name[model_name]),
'cd /tftpboot/build/release/%s/%s' %(sDr,image_model_name[model_name]),
'md5sum picos-%s-%s-%s.tar.gz >> picos-%s-%s-%s.tar.gz.md5' %(branch_name,image_model_name[model_name],sRevision,branch_name,image_model_name[model_name],sRevision)]
for command in commands:
api.sendExpect(child=child, command=command)
sleep(0.5)
# execute copy_image
print 'the values are ', str(sys.argv)
if len(sys.argv) != 5:
    print "+++++++++++++++"
    sys.exit(1)
else:
model,branch_name,sRevision,sDr = [w for w in sys.argv[1:]]
model = [s for s in model.split(" ")]
print '000model:', model
print '000type(model):', type(model)
copy_image(model=model,branch_name=branch_name,sRevision=sRevision,sDr=sDr)
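# Example invocation (hypothetical values, shown only to illustrate the argv
# contract: a space-separated model list, branch name, revision, release dir):
#   ./release1.py "3290 3922" picos 45678 2.6.1-release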
|
[
"lpi@dev-18.local.pica8.com"
] |
lpi@dev-18.local.pica8.com
|
efc46ec92cc13a67bcd81507e3c3300c35ca8125
|
f8fcbb45e46fe3b6454e808c141c9a1f3185c49c
|
/vedadep/converters/__init__.py
|
860f13b61b164a5c078a6d377f8e42e0a07b8e9d
|
[
"Apache-2.0"
] |
permissive
|
liuguoyou/volksdep
|
b00ad2b4635b2e217bf387c9bab671b2df2f0278
|
8739dcfa1d2218e287265e74e2e71a77c5d3247f
|
refs/heads/master
| 2022-08-24T01:44:16.306057
| 2020-05-22T10:48:52
| 2020-05-22T10:48:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 83
|
py
|
from .tensorrt import TRTEngine, Calibrator
__all__ = ['TRTEngine', 'Calibrator']
|
[
"hongxiang.cai@media-smart.cn"
] |
hongxiang.cai@media-smart.cn
|
0a69648a119a1ae14dd61e9e09c441cc8aa3ddfe
|
3e7e8384836599db9d788281a8cc924b6d77bbb6
|
/dota_item_scrapper.py
|
356d1fbeadd341d3f8c1b8edce20ea9ea756fd9d
|
[] |
no_license
|
kjayashankar/ctf
|
db4941661bf49f6c7d6f7b1967cdde99c0221d34
|
6ab7668384aaf4b44766b7c77ba9b41e84176412
|
refs/heads/master
| 2021-01-21T10:05:00.104736
| 2017-02-28T01:22:22
| 2017-02-28T01:22:22
| 83,374,329
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,713
|
py
|
import time
import re
import socket
import dota2api
import json
api = dota2api.Initialise('829F60173C0683A4DCC3C665FF8ED79F')
match = api.get_match_details(match_id='1000193456')
items = api.get_game_items()
sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.connect(('139.59.61.220',6666))
#FLag ALEXCTF{1_4M_l33t_b0t}
count = 0
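# Overview (descriptive comment; logic below unchanged): the remote service
# quizzes us about Dota 2 item costs and internal names. Each prompt line is
# parsed, answered from the Web API's item list, and the loop ends when the
# terminator string "xiomara" arrives.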
while True:
recipe=sock.recv(1024)
if "xiomara" in recipe:
break
print recipe
da = recipe.split('\n')
print "--------------"
if da==['']:
break
print da
for row in da:
if 'Can haz cost of Recipe' in row:
print "inside recipe"
rec = row.split(':')[1].strip()[:-1].strip()
weapon = filter(lambda i: i['localized_name'] == 'Recipe: '+rec, items['items'])[0]
price = str(weapon['cost']).replace(' ','')
print sock.send(price+'\r\n')
elif 'Can haz cost of ' in row:
data3 = row.split('Can haz cost of ')[1].strip()[:-3].strip()
weapon = filter(lambda i: i['localized_name'] == data3, items['items'])[0]
tw = str(weapon['cost']).replace(' ','')
print sock.send(tw+'\r\n')
elif 'Can haz internal name of Recipe' in row:
name = row.split(':')[1].strip()[:-1].strip()
print name
            weapon1 = filter(lambda i: i['localized_name'] == 'Recipe: '+name, items['items'])[0]  # use the freshly parsed name, not the stale rec from an earlier prompt
name2 = str(weapon1['name'].encode('utf-8')).strip()+'\r'
print name2
print sock.send(name2.encode()+'\n')
elif 'Can haz internal name of ' in row:
dname = row.split('Can haz internal name of ')[1].strip()[:-3].strip()
print dname
weapon2 = filter(lambda i: i['localized_name'] == dname, items['items'])[0]
twname = str(weapon2['name'].encode('utf-8')).strip()+'\r'
print twname
print sock.send(twname.encode()+'\n')
|
[
"kjayashankar@yahoo.com"
] |
kjayashankar@yahoo.com
|
014a46f05f01c5b4e4e36b7b40e78eee8f08aa00
|
f7d3593150c8f5001926bf025d41c6424b91cafd
|
/app/api/v1/gift.py
|
8f428794e6aabaa98f161814ee1521126f70fcac
|
[] |
no_license
|
lemonlxn/Restful-API
|
7fa99f97be60392547f837c279021799757ae84f
|
56339337451174976e96ec897f3cd7bfdb768f9f
|
refs/heads/master
| 2020-03-23T21:22:08.643676
| 2018-07-24T04:03:41
| 2018-07-24T04:03:41
| 142,100,048
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 853
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2018/6/23 15:23
# @Author : lemon
from flask import g
from app import db
from app.libs.exception import DuplicateGift, Success
from app.libs.redprint import Redprint
from app.libs.token_auth import auth
from app.models.book import Book
from app.models.gift import Gift
api = Redprint('gift')
@api.route('/<isbn>',methods = ['POST'])
@auth.login_required
def create(isbn):
uid = g.user.uid
with db.auto_commit():
        Book.query.filter_by(isbn=isbn).first_or_404()  # first make sure this book exists in the database
        gift = Gift.query.filter_by(isbn=isbn,uid=uid).first()  # then check that the gift is not a duplicate
if gift:
raise DuplicateGift()
gift = Gift()
gift.isbn = isbn
gift.uid = uid
db.session.add(gift)
return Success()
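# Assumed usage (illustrative only; the URL prefix this Redprint is mounted
# under is defined elsewhere in the app factory):
#   POST /v1/gift/<isbn> with a valid login token
# 404s if the book is unknown, raises DuplicateGift if this user already
# gifted that isbn, and otherwise records the gift and returns Success.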
|
[
"38677657+lemonlxn@users.noreply.github.com"
] |
38677657+lemonlxn@users.noreply.github.com
|
90b965c8e3a8cc2af5edd2a522eaa2b3efb2b225
|
4d1b8b145c32bdb7a63d14927878b83396d0a63f
|
/data mining/kernel_trick.py
|
d213197f210dc528eb60afd9060f10772163f74f
|
[] |
no_license
|
a5587527/Datamining1
|
70242052166a16330962aa2dc8b333bb98952b0e
|
0ec0bb7d646bb61e41625fe7bb88957a1ed5fd4b
|
refs/heads/master
| 2020-03-17T09:48:16.917172
| 2018-05-15T09:01:23
| 2018-05-15T09:01:23
| 133,489,427
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,988
|
py
|
import numpy as np
import pandas as pd
# remember to change the path
data=pd.read_csv('iris.csv')
data=np.array(data)
data=np.mat(data[:,0:4])
# print np.dot(data[0],data[1].T)
# length refers to the number of data
length=len(data)
# calculate the kernel (k) using the kernel function
k=np.mat(np.zeros((length,length)))
for i in range(0,length):
for j in range(i,length):
k[i,j]=(np.dot(data[i],data[j].T))**2
k[j,i]=k[i,j]
# print k
# save the kernel
name=range(length)
test=pd.DataFrame(columns=name,data=k)
test.to_csv('iris_k.csv')
len_k=len(k)
# centered kernel matrix
I = np.eye(len_k)
one=np.ones((len_k,len_k))
A=I-1.0/len_k*one
# print A
centered_k=np.dot(np.dot(A,k),A)
print (centered_k)
# save centered kernel matrix
test=pd.DataFrame(columns=name,data=centered_k)
test.to_csv('iris_ck.csv')
# normalized kernel matrix
W_2=np.zeros((len_k,len_k))
for i in range(0,len_k):
W_2[i,i]=k[i,i]**(-0.5)
# print W_2
normalized_k=np.dot(np.dot(W_2,k),W_2)
# print normalized_k
# save normalized kernel matrix
test=pd.DataFrame(columns=name,data=normalized_k)
test.to_csv('iris_nk.csv')
# calculate fai (the explicit feature map)
fai = np.mat(np.zeros((length, 10)))
for i in range(0, length):
for j in range(0, 4):
fai[i, j] = data[i, j]**2
for m in range(0, 3):
for n in range(m+1, 4):
j = j+1
fai[i, j] = 2**0.5*data[i, m]*data[i, n]
# print fai
# save fai
name_f = range(10)
test = pd.DataFrame(columns=name_f, data=fai)
test.to_csv('iris_fai.csv')
# calculate kernel through fai
k_f = np.mat(np.zeros((length, length)))
for i in range(0, length):
for j in range(i, length):
k_f[i, j] = (np.dot(fai[i], fai[j].T))
k_f[j, i] = k_f[i, j]
test=pd.DataFrame(columns=name,data=k_f)
test.to_csv('iris_kf.csv')
# centered fai
rows = fai.shape[0]
cols = fai.shape[1]
centered_fai = np.mat(np.zeros((rows, cols)))
for i in range(0, cols):
centered_fai[:, i] = fai[:, i]-np.mean(fai[:, i])
print (centered_fai)
test=pd.DataFrame(columns=name_f, data=centered_fai)
test.to_csv('iris_cf.csv')
# calculate centered kernel through centered fai
k_cf=np.mat(np.zeros((length, length)))
for i in range(0, length):
for j in range(i, length):
k_cf[i, j] = (np.dot(centered_fai[i], centered_fai[j].T))
k_cf[j, i] = k_cf[i, j]
test=pd.DataFrame(columns=name, data=k_cf)
test.to_csv('iris_kcf.csv')
# normalized fai
normalized_fai=np.mat(np.zeros((rows, cols)))
for i in range(0,len(fai)):
    normalized_fai[i] = fai[i]/np.linalg.norm(fai[i])
print (normalized_fai)
test=pd.DataFrame(columns=name_f, data=normalized_fai)
test.to_csv('iris_nf.csv')
# calculate normalized kernel through normalized fai
k_nf = np.mat(np.zeros((length, length)))
for i in range(0,length):
for j in range(i, length):
k_nf[i, j] = (np.dot(normalized_fai[i], normalized_fai[j].T))
k_nf[j, i] = k_nf[i, j]
test=pd.DataFrame(columns=name, data=k_nf)
test.to_csv('iris_knf.csv')
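# Optional sanity check (a minimal sketch added for illustration; it relies
# only on the matrices computed above): the kernels built from the explicit
# feature map fai should agree with the ones computed directly.
print (np.allclose(k, k_f))
print (np.allclose(centered_k, k_cf))
print (np.allclose(normalized_k, k_nf))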
|
[
"491169734@qq.com"
] |
491169734@qq.com
|
c3976314a62b60431040c3cfb906ee25f4bb2eaa
|
2dbb8edc6167cc00778a7e863a63a56c384f75cf
|
/examples/ex10_rhombus_tesselation.py
|
caadac4f3bcd50bda6f95b324042f5fe7a3c4559
|
[] |
no_license
|
simvisage/oricrete
|
129e130433eb438a271f8cd24d77fd78c552ef8c
|
ef7d20e66806037cc0c07541b45abc4f516ae187
|
refs/heads/master
| 2020-12-25T16:54:18.988848
| 2016-08-05T14:06:09
| 2016-08-05T14:06:09
| 3,638,675
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,038
|
py
|
#-------------------------------------------------------------------------------
#
# Copyright (c) 2012, IMB, RWTH Aachen.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in simvisage/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.simvisage.com/licenses/BSD.txt
#
# Thanks for using Simvisage open source!
#
# Created on Sep 8, 2011 by: matthias
from traits.api import HasTraits, Float
import numpy as np
import sympy as sm
a_, b_, c_, d_ = sm.symbols('a,b,c,d')
# own Modules
from oricrete.folding import \
YoshimuraCreasePattern, CreasePatternView, x_
from oricrete.folding.cnstr_target_face import \
CnstrTargetFace, r_, s_, t_
class GT(HasTraits):
Lx = Float(1.0)
Ly = Float(1.0)
y_left = Float(0.0)
y_middle = Float(0.1)
y_right = Float(1.0)
def __call__(self, nodes):
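        # Descriptive note: fit a parabola fn = a*y**2 + b*y + c through the
        # control points (0, y_left), (Ly/2, y_middle), (Ly, y_right) with
        # sympy, then displace each node's y by that parabola, scaled
        # linearly with the node's x offset from the sheet centre.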
x, y, z = nodes.T
Lx = np.max(x) - np.min(x)
Ly = np.max(y) - np.min(y)
fn = a_ * x_ ** 2 + b_ * x_ + c_ - d_
eqns = [fn.subs({x_:0, d_:self.y_left}),
fn.subs({x_:Ly, d_:self.y_right}),
fn.subs({x_:Ly / 2, d_:self.y_middle})]
abc_subs = sm.solve(eqns, [a_, b_, c_])
abc_subs[d_] = 0
fn_x = sm.lambdify([x_], fn.subs(abc_subs))
dy = ((x - Lx / 2) / Lx / 2) * fn_x(y)
return np.c_[x, y + dy, z]
if __name__ == '__main__':
L_x = 8
L_y = 8
cp = YoshimuraCreasePattern(n_steps=8,
L_x=L_x,
L_y=L_y,
n_x=2,
n_y=24,
# geo_transform = GT(L_x = L_x, L_y = L_y),
show_iter=False,
z0_ratio=0.1,
MAX_ITER=100)
n_h = cp.N_h
n_v = cp.N_v
n_i = cp.N_i
A = 0.2
B = 0.5
s_term = 4 * B * t_ * s_ * (1 - s_ / L_y) # * r_ / L_x
face_z_t = CnstrTargetFace(F=[r_, s_, 4 * A * t_ * r_ * (1 - r_ / L_x) - s_term])
n_arr = np.hstack([n_h[:, :].flatten(),
n_v[:, :].flatten(),
n_i[:, :].flatten()
])
cp.tf_lst = [(face_z_t, n_arr)]
cp.cnstr_lhs = [ # [(n_h[1, 0], 0, 1.0)], # 0
# [(n_h[0, -1], 0, 1.0)], # 1
[(n_h[1, -1], 1, 1.0), (n_h[1, 0], 1, 1.0)],
]
cp.cnstr_rhs = np.zeros((len(cp.cnstr_lhs),), dtype=float)
# @todo - renaming of methods
# @todo - projection on the caf - to get the initial vector
# @todo - geometry transformer
# @todo - derivatives of caf for the current position.
# @todo - rhombus generator with cut-away elements
# @todo - time step counting - save the initial step separately from the time history
X0 = cp.generate_X0()
X_fc = cp.solve(X0 + 1e-6)
#
# print 'nodes'
# new_nodes = cp.get_new_nodes(X_fc)
# cp2 = CreasePattern(nodes = new_nodes,
# crease_lines = cp.crease_lines,
# facets = cp.facets,
# n_steps = 1,
# show_iter = True,
# z0_ratio = 0.1,
# MAX_ITER = 200)
#
# face_z_t = CnstrTargetFace(F = [r_, s_, 0])
#
# cp2.tf_lst = [(face_z_t, n_arr)]
#
# cp2.cnstr_lhs = [[(n_h[1, 0], 0, 1.0)], # 0
# # [(n_h[1, -1], 0, 1.0)], # 1
# # [(n_h[1, -1], 1, 1.0), (n_h[1, 0], 1, 1.0)],
# ]
# cp2.cnstr_rhs = np.zeros((len(cp2.cnstr_lhs),), dtype = float)
#
# X0 = -1e-3 * np.linalg.norm(X_fc) * X_fc
#
# X_fc = cp2.solve(X0, constant_length = True)
#
my_model = CreasePatternView(data=cp,
ff_resolution=30,
show_cnstr=True)
my_model.configure_traits()
|
[
"rostislav.chudoba@rwth-aachen.de"
] |
rostislav.chudoba@rwth-aachen.de
|
73db1eaf700cf7ea4ae45ea490cfa1ba6d779e8d
|
0b05dcb48e41a1ad3a2648e312fdec3fc9ba6977
|
/contactapp/admin.py
|
0a560c3a6201e105481fea1c96e4afa66c40c393
|
[] |
no_license
|
murengera/eportal-api
|
fe90962d5a1a039e0d782c3a360d11dec9cce89a
|
0faf8c5dd072d26ed010cbc4d24b27f232037e77
|
refs/heads/master
| 2023-03-15T11:38:49.751643
| 2021-03-12T14:39:43
| 2021-03-12T14:39:43
| 286,403,145
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 151
|
py
|
from django.contrib import admin
from contactapp.models import ContactForm,Subscribe
admin.site.register(ContactForm)
admin.site.register(Subscribe)
|
[
"daltonbigirimana5@gmail.com"
] |
daltonbigirimana5@gmail.com
|