| column | dtype | range |
|---|---|---|
| blob_id | stringlengths | 40..40 |
| directory_id | stringlengths | 40..40 |
| path | stringlengths | 2..616 |
| content_id | stringlengths | 40..40 |
| detected_licenses | listlengths | 0..69 |
| license_type | stringclasses | 2 values |
| repo_name | stringlengths | 5..118 |
| snapshot_id | stringlengths | 40..40 |
| revision_id | stringlengths | 40..40 |
| branch_name | stringlengths | 4..63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k..686M (nullable ⌀) |
| star_events_count | int64 | 0..209k |
| fork_events_count | int64 | 0..110k |
| gha_license_id | stringclasses | 23 values |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | stringclasses | 213 values |
| src_encoding | stringclasses | 30 values |
| language | stringclasses | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2..10.3M |
| extension | stringclasses | 246 values |
| content | stringlengths | 2..10.3M |
| authors | listlengths | 1..1 |
| author_id | stringlengths | 0..212 |
blob_id: cc0f8b6f81250521d58ab355afa7c69856634c8c
directory_id: 84beae5897780aa2fd70016bc2916475252680cb
path: /migrations/versions/454cbbd0f266_user_table.py
content_id: e510b762d9e45f5cd1fbc60fd79a84453127be27
detected_licenses: []
license_type: no_license
repo_name: perica22/GameColor
snapshot_id: 39a79cec0b77b4a1edf9a168b92ef5fa85d4a30f
revision_id: ff62c164b17484f2d0dce22ff34aafe7d6b29f83
branch_name: refs/heads/master
visit_date: 2020-04-16T07:22:29.163305
revision_date: 2019-01-12T12:34:46
committer_date: 2019-01-12T12:34:46
github_id: 165,383,998
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,130
extension: py
content:
"""user table
Revision ID: 454cbbd0f266
Revises:
Create Date: 2018-12-31 13:44:36.346623
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '454cbbd0f266'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=64), nullable=True),
sa.Column('email', sa.String(length=120), nullable=True),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_username'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
# ### end Alembic commands ###
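# Usage sketch (standard Alembic CLI; not part of the original migration):
#   alembic upgrade head      # apply this migration
#   alembic downgrade base    # revert it (or: alembic downgrade -1)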
authors: ["pprokic22@gmail.com"]
author_id: pprokic22@gmail.com

---
blob_id: ba3e56151cdaa39a758eec07e458620ed7dd3939
directory_id: 5301f6a0c1a84690fe43e8a7506e5a75b93f68e5
path: /sec6_reinforcement_learning/_3_upper_confidence_bound.py
content_id: 0d5aae25f6f461724c4ee1031994ed9e70581b1e
detected_licenses: []
license_type: no_license
repo_name: gungoren/ml_lecture_notes
snapshot_id: 7b08c81fa113680649ddb02b2aedffc6422bfbac
revision_id: 24944fb8e5ccbbb741df9cf4512fea1a0a203c98
branch_name: refs/heads/master
visit_date: 2020-06-28T21:51:13.668536
revision_date: 2020-02-12T16:01:01
committer_date: 2020-02-12T16:01:01
github_id: 200,351,395
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 833
extension: py
content:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math

# Upper Confidence Bound (UCB) ad selection.
# Identifiers kept from the original Turkish code: veriler = data,
# oduller = rewards, tiklamalar = selection counts, secilenler = selections,
# toplam = total.
veriler = pd.read_csv('Ads_CTR_Optimisation.csv')

# UCB
N, d = veriler.shape  # N rounds (rows), d ads (columns)
# Ri(n): cumulative reward of ad i
oduller = [0] * d
# Ni(n): number of times ad i has been selected
tiklamalar = [0] * d
toplam = 0
secilenler = []
for n in range(1, N):  # row 0 is skipped, so N-1 rounds are played
    ad = 0
    max_ucb = 0
    for i in range(0, d):
        if tiklamalar[i] > 0:
            ortalama = oduller[i] / tiklamalar[i]  # empirical mean reward
            delta = math.sqrt(3 / 2 * math.log(n) / tiklamalar[i])  # confidence radius
            ucb = ortalama + delta
        else:
            ucb = N * 10  # huge bound so every ad gets tried at least once
        if max_ucb < ucb:
            max_ucb = ucb
            ad = i
    secilenler.append(ad)
    tiklamalar[ad] = tiklamalar[ad] + 1
    odul = veriler.values[n, ad]
    oduller[ad] = oduller[ad] + odul
    toplam = toplam + odul
print('Total reward:')
print(toplam)
plt.hist(secilenler)
plt.show()
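# For reference, the selection rule implemented above (a sketch in the
# code's own notation, with R_i(n) = oduller[i] and N_i(n) = tiklamalar[i]):
#   UCB_i(n) = R_i(n) / N_i(n) + sqrt(3/2 * ln(n) / N_i(n))
# and the ad with the largest UCB_i(n) is shown in round n.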
authors: ["mehmetgungoren@lyrebirdstudio.net"]
author_id: mehmetgungoren@lyrebirdstudio.net

---
blob_id: 7eed2d5611b691b7f7d6e3bef55d4869a2792295
directory_id: 04bdc6684be0c05bc9e54e3ef79a4390e31386c7
path: /todoproject/todoapp/migrations/0001_initial.py
content_id: af965b0a5a98c34d717862bf7aefa2ee1e34e4ba
detected_licenses: []
license_type: no_license
repo_name: anandhureji/todoapp
snapshot_id: aff4153a7533b98aa7123cca1a9979aa22b2f102
revision_id: a5f91eba2770bb85a0f5b36f710c61811fcd9dfd
branch_name: refs/heads/master
visit_date: 2023-04-28T04:35:01.666392
revision_date: 2021-05-18T14:17:15
committer_date: 2021-05-18T14:17:15
github_id: 368,552,465
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 548
extension: py
content:
# Generated by Django 3.2 on 2021-04-23 16:50
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=250)),
                ('priority', models.IntegerField()),  # a task's priority is numeric, not an ImageField
],
),
]
authors: ["anandhureji33@gmail.com"]
author_id: anandhureji33@gmail.com

---
blob_id: 8e4c19e91b0ee905017c4d65646fb1eadea55e38
directory_id: 684490173da009901863ffe53f1a97b788528134
path: /api/src/application/user/enums.py
content_id: 57fddda545daf7575599a11c5ca8e71c2b4bc82d
detected_licenses: ["MIT"]
license_type: permissive
repo_name: iliaskaras/VCFHandler
snapshot_id: 88f439b5e7ae9ac0d674e645c76f93832ac94364
revision_id: 5372659e4472207be964e0d233994a0ffff536fe
branch_name: refs/heads/main
visit_date: 2023-04-26T13:28:35.541591
revision_date: 2021-05-19T01:04:52
committer_date: 2021-05-19T01:04:52
github_id: 367,017,292
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2021-05-19T00:57:13
gha_created_at: 2021-05-13T10:44:33
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 235
extension: py
content:
from typing import List
from enum import Enum
class Permission(Enum):
read = 'read'
write = 'write'
execute = 'execute'
@classmethod
def values(cls) -> List[str]:
return [member.value for member in cls]
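# Example (follows directly from the definitions above):
#   Permission.values() == ['read', 'write', 'execute']   # definition order
#   Permission('write') is Permission.write               # lookup by value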
|
[
"hlias.karas@yahoo.gr"
] |
hlias.karas@yahoo.gr
|
blob_id: 5a4c72d4e208f1c0a24bf6e5cb4584e59696e6b4
directory_id: 51d48106e416075f74985c258fd0a4fc802d46cd
path: /web/pgadmin/browser/server_groups/servers/databases/schemas/tables/utils.py
content_id: 445cafb2d57cbe5719de82cb19fdaae08fd33e50
detected_licenses: ["PostgreSQL"]
license_type: permissive
repo_name: KaiBarrett/pgadmin4
snapshot_id: 2651d80f7d1784c42309c7867115db950dc1e58c
revision_id: 954fee7b8a6e04d40c83d2882a4453c15588897f
branch_name: refs/heads/master
visit_date: 2022-11-28T16:28:38.196803
revision_date: 2020-08-12T14:34:38
committer_date: 2020-08-12T14:34:38
github_id: 286,761,581
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 62,439
extension: py
content:
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
""" Implements Utility class for Table and Partitioned Table. """
import re
import copy
from functools import wraps
import simplejson as json
from flask import render_template, jsonify, request
from flask_babelex import gettext
from pgadmin.browser.server_groups.servers.databases.schemas\
.tables.base_partition_table import BasePartitionTable
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
gone, make_response as ajax_response
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
import DataTypeReader, parse_rule_definition
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
parse_priv_to_db
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
columns import utils as column_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.foreign_key import utils as fkey_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.check_constraint import utils as check_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.exclusion_constraint import utils as exclusion_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.index_constraint import utils as idxcons_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
triggers import utils as trigger_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
compound_triggers import utils as compound_trigger_utils
from pgadmin.browser.server_groups.servers.databases.schemas. \
tables.row_security_policies import \
utils as row_security_policies_utils
class BaseTableView(PGChildNodeView, BasePartitionTable):
"""
This class is base class for tables and partitioned tables.
Methods:
-------
* check_precondition()
- This function will behave as a decorator which will checks
database connection before running view, it will also attaches
manager,conn & template_path properties to self
* _formatter(data, tid)
- It will return formatted output of query result
as per client model format
* get_table_dependents(self, tid):
- This function get the dependents and return ajax response
for the table node.
* get_table_dependencies(self, tid):
- This function get the dependencies and return ajax response
for the table node.
* get_table_statistics(self, tid):
- Returns the statistics for a particular table if tid is specified,
otherwise it will return statistics for all the tables in that
schema.
* get_reverse_engineered_sql(self, did, scid, tid, main_sql, data):
- This function will creates reverse engineered sql for
the table object.
* reset_statistics(self, scid, tid):
- This function will reset statistics of table.
"""
@staticmethod
def check_precondition(f):
"""
This function will behave as a decorator which will checks
database connection before running view, it will also attaches
manager,conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
driver = get_driver(PG_DEFAULT_DRIVER)
did = kwargs['did']
self.manager = driver.connection_manager(kwargs['sid'])
self.conn = self.manager.connection(did=kwargs['did'])
self.qtIdent = driver.qtIdent
self.qtTypeIdent = driver.qtTypeIdent
# We need datlastsysoid to check if current table is system table
self.datlastsysoid = self.manager.db_info[
did
]['datlastsysoid'] if self.manager.db_info is not None and \
did in self.manager.db_info else 0
ver = self.manager.version
server_type = self.manager.server_type
# Set the template path for the SQL scripts
self.table_template_path = compile_template_path('tables/sql',
server_type, ver)
self.data_type_template_path = compile_template_path(
'datatype/sql',
server_type, ver)
self.partition_template_path = \
'partitions/sql/{0}/#{0}#{1}#'.format(server_type, ver)
            # Template for column, check constraint and exclusion
            # constraint node
self.column_template_path = 'columns/sql/#{0}#'.format(ver)
# Template for index node
self.index_template_path = compile_template_path(
'indexes/sql', server_type, ver)
            # Template for row security policy node
self.row_security_policies_template_path = \
'row_security_policies/sql/#{0}#'.format(ver)
# Template for trigger node
self.trigger_template_path = \
'triggers/sql/{0}/#{1}#'.format(server_type, ver)
# Template for compound trigger node
self.compound_trigger_template_path = \
'compound_triggers/sql/{0}/#{1}#'.format(server_type, ver)
# Template for rules node
self.rules_template_path = 'rules/sql'
# Supported ACL for table
self.acl = ['a', 'r', 'w', 'd', 'D', 'x', 't']
# Supported ACL for columns
self.column_acl = ['a', 'r', 'w', 'x']
return f(*args, **kwargs)
return wrap
def _formatter(self, did, scid, tid, data):
"""
Args:
data: dict of query result
scid: schema oid
tid: table oid
Returns:
It will return formatted output of query result
as per client model format
"""
# Need to format security labels according to client js collection
if 'seclabels' in data and data['seclabels'] is not None:
seclabels = []
for seclbls in data['seclabels']:
k, v = seclbls.split('=')
seclabels.append({'provider': k, 'label': v})
data['seclabels'] = seclabels
# We need to parse & convert ACL coming from database to json format
SQL = render_template("/".join([self.table_template_path, 'acl.sql']),
tid=tid, scid=scid)
status, acl = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=acl)
        # We get the privileges from the acl sql, so we don't need
        # them from the properties sql
for row in acl['rows']:
priv = parse_priv_from_db(row)
if row['deftype'] in data:
data[row['deftype']].append(priv)
else:
data[row['deftype']] = [priv]
        # We add the auto-vacuum defaults with our result for the grid
data['vacuum_table'] = copy.deepcopy(
self.parse_vacuum_data(self.conn, data, 'table'))
data['vacuum_toast'] = copy.deepcopy(
self.parse_vacuum_data(self.conn, data, 'toast'))
# Fetch columns for the table logic
#
# 1) Check if of_type and inherited tables are present?
# 2) If yes then Fetch all the columns for of_type and inherited tables
# 3) Add columns in columns collection
# 4) Find all the columns for tables and filter out columns which are
# not inherited from any table & format them one by one
other_columns = []
table_or_type = ''
# Get of_type table columns and add it into columns dict
if data['typoid']:
SQL = render_template("/".join([self.table_template_path,
'get_columns_for_table.sql']),
tid=data['typoid'])
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
other_columns = res['rows']
table_or_type = 'type'
# Get inherited table(s) columns and add it into columns dict
elif data['coll_inherits'] and len(data['coll_inherits']) > 0:
# Return all tables which can be inherited & do not show
# system columns
SQL = render_template("/".join([self.table_template_path,
'get_inherits.sql']),
show_system_objects=False,
scid=scid
)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
if row['inherits'] in data['coll_inherits']:
# Fetch columns using inherited table OID
SQL = render_template("/".join(
[self.table_template_path,
'get_columns_for_table.sql']),
tid=row['oid']
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
other_columns.extend(res['rows'][:])
table_or_type = 'table'
# We will fetch all the columns for the table using
# columns properties.sql, so we need to set template path
data = column_utils.get_formatted_columns(self.conn, tid,
data, other_columns,
table_or_type)
# Here we will add constraint in our output
index_constraints = {
'p': 'primary_key', 'u': 'unique_constraint'
}
for ctype in index_constraints.keys():
data[index_constraints[ctype]] = []
status, constraints = \
idxcons_utils.get_index_constraints(self.conn, did, tid, ctype)
if status:
for cons in constraints:
data.setdefault(
index_constraints[ctype], []).append(cons)
# Add Foreign Keys
status, foreign_keys = fkey_utils.get_foreign_keys(self.conn, tid)
if status:
for fk in foreign_keys:
data.setdefault('foreign_key', []).append(fk)
# Add Check Constraints
status, check_constraints = \
check_utils.get_check_constraints(self.conn, tid)
if status:
data['check_constraint'] = check_constraints
# Add Exclusion Constraint
status, exclusion_constraints = \
exclusion_utils.get_exclusion_constraints(self.conn, did, tid)
if status:
for ex in exclusion_constraints:
data.setdefault('exclude_constraint', []).append(ex)
return data
def get_table_dependents(self, tid):
"""
        This function gets the dependents and returns an ajax response
        for the table node.
Args:
tid: Table ID
"""
# Specific condition for column which we need to append
where = "WHERE dep.refobjid={0}::OID".format(tid)
dependents_result = self.get_dependents(
self.conn, tid
)
        # Specific sql to run against the column to fetch dependents
SQL = render_template("/".join([self.table_template_path,
'depend.sql']), where=where)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in res['rows']:
ref_name = row['refname']
if ref_name is None:
continue
dep_type = ''
dep_str = row['deptype']
if dep_str == 'a':
dep_type = 'auto'
elif dep_str == 'n':
dep_type = 'normal'
elif dep_str == 'i':
dep_type = 'internal'
dependents_result.append({'type': 'sequence', 'name': ref_name,
'field': dep_type})
return ajax_response(
response=dependents_result,
status=200
)
def get_table_dependencies(self, tid):
"""
        This function gets the dependencies and returns an ajax response
        for the table node.
Args:
tid: Table ID
"""
dependencies_result = self.get_dependencies(
self.conn, tid
)
return ajax_response(
response=dependencies_result,
status=200
)
def get_table_statistics(self, scid, tid):
"""
Statistics
Args:
scid: Schema Id
tid: Table Id
Returns the statistics for a particular table if tid is specified,
otherwise it will return statistics for all the tables in that
schema.
"""
# Fetch schema name
status, schema_name = self.conn.execute_scalar(
render_template(
"/".join([self.table_template_path, 'get_schema.sql']),
conn=self.conn, scid=scid
)
)
if not status:
return internal_server_error(errormsg=schema_name)
if tid is None:
status, res = self.conn.execute_dict(
render_template(
"/".join([self.table_template_path,
'coll_table_stats.sql']), conn=self.conn,
schema_name=schema_name
)
)
else:
            # For individual table stats:
            # check if the pgstattuple extension is already created;
            # only add the extended stats if it is
status, is_pgstattuple = self.conn.execute_scalar("""
SELECT (count(extname) > 0) AS is_pgstattuple
FROM pg_extension
WHERE extname='pgstattuple'
""")
if not status:
return internal_server_error(errormsg=is_pgstattuple)
# Fetch Table name
status, table_name = self.conn.execute_scalar(
render_template(
"/".join([self.table_template_path, 'get_table.sql']),
conn=self.conn, scid=scid, tid=tid
)
)
if not status:
return internal_server_error(errormsg=table_name)
status, res = self.conn.execute_dict(
render_template(
"/".join([self.table_template_path, 'stats.sql']),
conn=self.conn, schema_name=schema_name,
table_name=table_name,
is_pgstattuple=is_pgstattuple, tid=tid
)
)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
data=res,
status=200
)
def get_reverse_engineered_sql(self, did, scid, tid, main_sql, data,
json_resp=True, diff_partition_sql=False):
"""
        This function creates the reverse engineered sql for
        the table object.
Args:
did: Database ID
scid: Schema ID
tid: Table ID
main_sql: List contains all the reversed engineered sql
data: Table's Data
json_resp: Json response or plain SQL
            diff_partition_sql: In Schema diff, the partition sql should be
                returned separately to perform further tasks
"""
"""
#####################################
# 1) Reverse engineered sql for TABLE
#####################################
"""
# Table & Schema declaration so that we can use them in child nodes
schema = data['schema']
table = data['name']
is_partitioned = 'is_partitioned' in data and data['is_partitioned']
sql_header = ''
data = self._formatter(did, scid, tid, data)
        # Now we have the full list of columns which we need
        # to include in our create definition; let's format them
if 'columns' in data:
for c in data['columns']:
if 'attacl' in c:
c['attacl'] = parse_priv_to_db(
c['attacl'], self.column_acl
)
# check type for '[]' in it
if 'cltype' in c:
c['cltype'], c['hasSqrBracket'] = \
column_utils.type_formatter(c['cltype'])
if json_resp:
sql_header = u"-- Table: {0}.{1}\n\n-- ".format(
data['schema'], data['name'])
sql_header += render_template("/".join([self.table_template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header = sql_header.strip('\n')
sql_header += '\n'
# Add into main sql
main_sql.append(sql_header)
partition_main_sql = ""
# Parse privilege data
if 'relacl' in data:
data['relacl'] = parse_priv_to_db(data['relacl'], self.acl)
# if table is partitions then
if 'relispartition' in data and data['relispartition']:
table_sql = render_template("/".join([self.partition_template_path,
'create.sql']),
data=data, conn=self.conn)
else:
table_sql = render_template("/".join([self.table_template_path,
'create.sql']),
data=data, conn=self.conn, is_sql=True)
# Add into main sql
table_sql = re.sub('\n{2,}', '\n\n', table_sql)
main_sql.append(table_sql.strip('\n'))
"""
######################################
# 2) Reverse engineered sql for INDEX
######################################
"""
SQL = render_template("/".join([self.index_template_path,
'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
# Dynamically load index utils to avoid circular dependency.
from pgadmin.browser.server_groups.servers.databases.schemas. \
tables.indexes import utils as index_utils
for row in rset['rows']:
index_sql = index_utils.get_reverse_engineered_sql(
self.conn, schema, table, did, tid, row['oid'],
self.datlastsysoid,
template_path=None, with_header=json_resp)
index_sql = u"\n" + index_sql
# Add into main sql
index_sql = re.sub('\n{2,}', '\n\n', index_sql)
main_sql.append(index_sql.strip('\n'))
"""
########################################################
        # 3) Reverse engineered sql for ROW SECURITY POLICY
########################################################
"""
if self.manager.version >= 90500:
SQL = \
render_template(
"/".join([self.row_security_policies_template_path,
'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
policy_sql = row_security_policies_utils. \
get_reverse_engineered_sql(
self.conn, schema, table, did, tid, row['oid'],
self.datlastsysoid,
template_path=None, with_header=json_resp)
policy_sql = u"\n" + policy_sql
# Add into main sql
policy_sql = re.sub('\n{2,}', '\n\n', policy_sql)
main_sql.append(policy_sql.strip('\n'))
"""
########################################
        # 4) Reverse engineered sql for TRIGGERS
########################################
"""
SQL = render_template("/".join([self.trigger_template_path,
'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
trigger_sql = trigger_utils.get_reverse_engineered_sql(
self.conn, schema, table, tid, row['oid'],
self.datlastsysoid, self.blueprint.show_system_objects,
template_path=None, with_header=json_resp)
trigger_sql = u"\n" + trigger_sql
# Add into main sql
trigger_sql = re.sub('\n{2,}', '\n\n', trigger_sql)
main_sql.append(trigger_sql)
"""
#################################################
        # 5) Reverse engineered sql for COMPOUND TRIGGERS
#################################################
"""
if self.manager.server_type == 'ppas' \
and self.manager.version >= 120000:
SQL = render_template("/".join(
[self.compound_trigger_template_path, 'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
compound_trigger_sql = \
compound_trigger_utils.get_reverse_engineered_sql(
self.conn, schema, table, tid, row['oid'],
self.datlastsysoid)
compound_trigger_sql = u"\n" + compound_trigger_sql
# Add into main sql
compound_trigger_sql = \
re.sub('\n{2,}', '\n\n', compound_trigger_sql)
main_sql.append(compound_trigger_sql)
"""
#####################################
        # 6) Reverse engineered sql for RULES
#####################################
"""
SQL = render_template("/".join(
[self.rules_template_path, 'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
rules_sql = '\n'
SQL = render_template("/".join(
[self.rules_template_path, 'properties.sql']
), rid=row['oid'], datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
display_comments = True
if not json_resp:
display_comments = False
res_data = parse_rule_definition(res)
# Update the correct table name for rules
if 'view' in res_data:
res_data['view'] = table
rules_sql += render_template("/".join(
[self.rules_template_path, 'create.sql']),
data=res_data, display_comments=display_comments)
# Add into main sql
rules_sql = re.sub('\n{2,}', '\n\n', rules_sql)
main_sql.append(rules_sql)
"""
##########################################
        # 7) Reverse engineered sql for PARTITIONS
##########################################
"""
if is_partitioned:
SQL = render_template("/".join([self.partition_template_path,
'nodes.sql']),
scid=scid, tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
if len(rset['rows']):
if json_resp:
sql_header = u"\n-- Partitions SQL"
partition_sql = ''
for row in rset['rows']:
part_data = dict()
part_data['partitioned_table_name'] = data['name']
part_data['parent_schema'] = data['schema']
if not json_resp:
part_data['schema'] = data['schema']
else:
part_data['schema'] = row['schema_name']
part_data['relispartition'] = True
part_data['name'] = row['name']
part_data['partition_value'] = row['partition_value']
part_data['is_partitioned'] = row['is_partitioned']
part_data['partition_scheme'] = row['partition_scheme']
partition_sql += render_template("/".join(
[self.partition_template_path, 'create.sql']),
data=part_data, conn=self.conn)
# Add into main sql
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql
).strip('\n')
partition_main_sql = partition_sql.strip('\n')
if not diff_partition_sql:
main_sql.append(
sql_header + '\n\n' + partition_main_sql
)
sql = '\n'.join(main_sql)
if not json_resp:
return sql, partition_main_sql
return ajax_response(response=sql.strip('\n'))
def reset_statistics(self, scid, tid):
"""
This function will reset statistics of table
Args:
scid: Schema ID
tid: Table ID
"""
# checking the table existence using the function of the same class
schema_name, table_name = self.get_schema_and_table_name(tid)
if table_name is None:
return gone(gettext("The specified table could not be found."))
# table exist
try:
SQL = render_template("/".join([self.table_template_path,
'reset_stats.sql']),
tid=tid)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Table statistics have been reset"),
data={
'id': tid,
'scid': scid
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_partition_scheme(self, data):
partition_scheme = None
part_type = 'sub_partition_type' if 'sub_partition_type' in data \
else 'partition_type'
part_keys = 'sub_partition_keys' if 'sub_partition_keys' in data \
else 'partition_keys'
if part_type in data and data[part_type] == 'range':
partition_scheme = 'RANGE ('
elif part_type in data and data[part_type] == 'list':
partition_scheme = 'LIST ('
elif part_type in data and data[part_type] == 'hash':
partition_scheme = 'HASH ('
for row in data[part_keys]:
if row['key_type'] == 'column':
partition_scheme += self.qtIdent(
self.conn, row['pt_column']) + ', '
elif row['key_type'] == 'expression':
partition_scheme += row['expression'] + ', '
# Remove extra space and comma
if len(data[part_keys]) > 0:
partition_scheme = partition_scheme[:-2]
partition_scheme += ')'
return partition_scheme
@staticmethod
def validate_constrains(key, data):
if key == 'primary_key' or key == 'unique_constraint':
if 'columns' in data and len(data['columns']) > 0:
return True
else:
return False
elif key == 'foreign_key':
if 'oid' not in data:
for arg in ['columns']:
if arg not in data or \
(isinstance(data[arg], list) and
len(data[arg]) < 1):
return False
if 'autoindex' in data and \
data['autoindex'] and \
('coveringindex' not in data or
data['coveringindex'] == ''):
return False
return True
elif key == 'check_constraint':
for arg in ['consrc']:
if arg not in data or data[arg] == '':
return False
return True
elif key == 'exclude_constraint':
pass
return True
@staticmethod
def check_and_convert_name_to_string(data):
"""
        This function checks and converts the table name to a string
        in case it is numeric
Args:
data: data dict
Returns:
Updated data dict
"""
if isinstance(data['name'], (int, float)):
data['name'] = str(data['name'])
return data
def get_sql(self, did, scid, tid, data, res):
"""
This function will generate create/update sql from model data
coming from client
"""
if tid is not None:
old_data = res['rows'][0]
old_data = self._formatter(did, scid, tid, old_data)
            # We will convert privileges coming from the client to the
            # required server-side format
if 'relacl' in data:
for mode in ['added', 'changed', 'deleted']:
if mode in data['relacl']:
data['relacl'][mode] = parse_priv_to_db(
data['relacl'][mode], self.acl
)
# If name is not present in request data
if 'name' not in data:
data['name'] = old_data['name']
data = BaseTableView.check_and_convert_name_to_string(data)
            # If schema is not present in request data
if 'schema' not in data:
data['schema'] = old_data['schema']
# Filter out new tables from list, we will send complete list
# and not newly added tables in the list from client
# so we will filter new tables here
if 'coll_inherits' in data:
p_len = len(old_data['coll_inherits'])
c_len = len(data['coll_inherits'])
# If table(s) added
if c_len > p_len:
data['coll_inherits_added'] = list(
set(data['coll_inherits']) -
set(old_data['coll_inherits'])
)
# If table(s)removed
elif c_len < p_len:
data['coll_inherits_removed'] = list(
set(old_data['coll_inherits']) -
set(data['coll_inherits'])
)
                # Safe-side verification, in case it happens:
                # if the user removes and adds the same number of tables,
                # e.g. removed one table and added one new table
elif c_len == p_len:
data['coll_inherits_added'] = list(
set(data['coll_inherits']) -
set(old_data['coll_inherits'])
)
data['coll_inherits_removed'] = list(
set(old_data['coll_inherits']) -
set(data['coll_inherits'])
)
# Update the vacuum table settings.
self.update_vacuum_settings('vacuum_table', old_data, data)
# Update the vacuum toast table settings.
self.update_vacuum_settings('vacuum_toast', old_data, data)
SQL = render_template(
"/".join([self.table_template_path, 'update.sql']),
o_data=old_data, data=data, conn=self.conn
)
            # Remove trailing new lines
SQL = SQL.strip('\n') + '\n\n'
# Parse/Format columns & create sql
if 'columns' in data:
# Parse the data coming from client
data = column_utils.parse_format_columns(data, mode='edit')
columns = data['columns']
column_sql = '\n'
# If column(s) is/are deleted
if 'deleted' in columns:
for c in columns['deleted']:
c['schema'] = data['schema']
c['table'] = data['name']
# Sql for drop column
if 'inheritedfrom' not in c:
column_sql += render_template("/".join(
[self.column_template_path, 'delete.sql']),
data=c, conn=self.conn).strip('\n') + '\n\n'
# If column(s) is/are changed
# Here we will be needing previous properties of column
# so that we can compare & update it
if 'changed' in columns:
for c in columns['changed']:
c['schema'] = data['schema']
c['table'] = data['name']
properties_sql = render_template(
"/".join([self.column_template_path,
'properties.sql']),
tid=tid,
clid=c['attnum'],
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(properties_sql)
if not status:
return internal_server_error(errormsg=res)
old_col_data = res['rows'][0]
old_col_data['cltype'], \
old_col_data['hasSqrBracket'] = \
column_utils.type_formatter(old_col_data['cltype'])
old_col_data = \
column_utils.convert_length_precision_to_string(
old_col_data)
old_col_data = column_utils.fetch_length_precision(
old_col_data)
old_col_data['cltype'] = \
DataTypeReader.parse_type_name(
old_col_data['cltype'])
# Sql for alter column
if 'inheritedfrom' not in c and\
'inheritedfromtable' not in c:
column_sql += render_template("/".join(
[self.column_template_path, 'update.sql']),
data=c, o_data=old_col_data, conn=self.conn
).strip('\n') + '\n\n'
# If column(s) is/are added
if 'added' in columns:
for c in columns['added']:
c['schema'] = data['schema']
c['table'] = data['name']
c = column_utils.convert_length_precision_to_string(c)
if 'inheritedfrom' not in c and\
'inheritedfromtable' not in c:
column_sql += render_template("/".join(
[self.column_template_path, 'create.sql']),
data=c, conn=self.conn).strip('\n') + '\n\n'
# Combine all the SQL together
SQL += column_sql.strip('\n')
# Check for partitions
if 'partitions' in data:
partitions = data['partitions']
partitions_sql = '\n'
# If partition(s) is/are deleted
if 'deleted' in partitions:
for row in partitions['deleted']:
temp_data = dict()
schema_name, table_name = \
self.get_schema_and_table_name(row['oid'])
temp_data['parent_schema'] = data['schema']
temp_data['partitioned_table_name'] = data['name']
temp_data['schema'] = schema_name
temp_data['name'] = table_name
# Sql for detach partition
partitions_sql += render_template(
"/".join(
[
self.partition_template_path,
'detach.sql'
]
),
data=temp_data,
conn=self.conn).strip('\n') + '\n\n'
# If partition(s) is/are added
if 'added' in partitions and 'partition_scheme' in old_data\
and old_data['partition_scheme'] != '':
temp_data = dict()
temp_data['schema'] = data['schema']
temp_data['name'] = data['name']
# get the partition type
temp_data['partition_type'] = \
old_data['partition_scheme'].split()[0].lower()
temp_data['partitions'] = partitions['added']
partitions_sql += \
self.get_partitions_sql(temp_data).strip('\n') + '\n\n'
# Combine all the SQL together
SQL += '\n' + partitions_sql.strip('\n')
data['columns_to_be_dropped'] = []
if 'columns' in data and 'deleted' in data['columns']:
data['columns_to_be_dropped'] = list(map(
lambda d: d['name'], data['columns']['deleted']))
# Check if index constraints are added/changed/deleted
index_constraint_sql = \
idxcons_utils.get_index_constraint_sql(
self.conn, did, tid, data)
            # If we have index constraint sql then add it to the main sql
if index_constraint_sql is not None:
SQL += '\n' + index_constraint_sql
# Check if foreign key(s) is/are added/changed/deleted
foreign_key_sql = fkey_utils.get_foreign_key_sql(
self.conn, tid, data)
            # If we have foreign key sql then add it to the main sql
if foreign_key_sql is not None:
SQL += '\n' + foreign_key_sql
# Check if check constraint(s) is/are added/changed/deleted
check_constraint_sql = check_utils.get_check_constraint_sql(
self.conn, tid, data)
            # If we have check constraint sql then add it to the main sql
if check_constraint_sql is not None:
SQL += '\n' + check_constraint_sql
# Check if exclusion constraint(s) is/are added/changed/deleted
exclusion_constraint_sql = \
exclusion_utils.get_exclusion_constraint_sql(
self.conn, did, tid, data)
            # If we have exclusion constraint sql then add it to the main sql
if exclusion_constraint_sql is not None:
SQL += '\n' + exclusion_constraint_sql
else:
res = None
required_args = [
'name'
]
for arg in required_args:
if arg not in data:
return gettext('-- definition incomplete')
# validate constraint data.
for key in ['primary_key', 'unique_constraint',
'foreign_key', 'check_constraint',
'exclude_constraint']:
if key in data and len(data[key]) > 0:
for constraint in data[key]:
if not self.validate_constrains(key, constraint):
return gettext(
'-- definition incomplete for {0}'.format(key)
)
# We will convert privileges coming from client required
# in server side format
if 'relacl' in data:
data['relacl'] = parse_priv_to_db(data['relacl'], self.acl)
# Parse & format columns
data = column_utils.parse_format_columns(data)
data = BaseTableView.check_and_convert_name_to_string(data)
if 'foreign_key' in data:
for c in data['foreign_key']:
schema, table = fkey_utils.get_parent(
self.conn, c['columns'][0]['references'])
c['remote_schema'] = schema
c['remote_table'] = table
partitions_sql = ''
if 'is_partitioned' in data and data['is_partitioned']:
data['relkind'] = 'p'
# create partition scheme
data['partition_scheme'] = self.get_partition_scheme(data)
partitions_sql = self.get_partitions_sql(data)
# Update the vacuum table settings.
self.update_vacuum_settings('vacuum_table', data)
# Update the vacuum toast table settings.
self.update_vacuum_settings('vacuum_toast', data)
SQL = render_template("/".join([self.table_template_path,
'create.sql']),
data=data, conn=self.conn)
# Append SQL for partitions
SQL += '\n' + partitions_sql
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
return SQL, data['name'] if 'name' in data else old_data['name']
def update(self, gid, sid, did, scid, tid, data, res, parent_id=None):
"""
This function will update an existing table object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
data: Data to update
res: Table properties
parent_id: parent table id if current table is partition of parent
table else none
"""
# checking the table existence using the function of the same class
schema_name, table_name = self.get_schema_and_table_name(tid)
if table_name is None:
return gone(gettext("The specified table could not be found."))
# table exists
try:
SQL, name = self.get_sql(did, scid, tid, data, res)
SQL = SQL.strip('\n').strip(' ')
status, rest = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=rest)
SQL = render_template("/".join([self.table_template_path,
'get_schema_oid.sql']), tid=tid)
status, rest = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rest)
if not parent_id:
parent_id = scid
# Check for partitions
partitions_oid = dict()
if 'partitions' in data:
# Fetch oid of schema for all detached partitions
if 'deleted' in data['partitions']:
detached = []
for row in data['partitions']['deleted']:
status, pscid = self.conn.execute_scalar(
render_template(
"/".join([
self.table_template_path,
'get_schema_oid.sql'
]),
tid=row['oid']
)
)
if not status:
return internal_server_error(errormsg=pscid)
detached.append(
{'oid': row['oid'], 'schema_id': pscid}
)
partitions_oid['detached'] = detached
# Fetch oid and schema oid for all created/attached partitions
if 'added' in data['partitions']:
created = []
attached = []
for row in data['partitions']['added']:
if row['is_attach']:
status, pscid = self.conn.execute_scalar(
render_template(
"/".join([
self.table_template_path,
'get_schema_oid.sql'
]),
tid=row['partition_name']
)
)
if not status:
return internal_server_error(errormsg=pscid)
attached.append({
'oid': row['partition_name'],
'schema_id': pscid
})
else:
tmp_data = dict()
tmp_data['name'] = row['partition_name']
SQL = render_template(
"/".join([
self.table_template_path, 'get_oid.sql'
]),
scid=scid, data=tmp_data
)
status, ptid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=ptid)
created.append({
'oid': ptid,
'schema_id': scid
})
partitions_oid['created'] = created
partitions_oid['attached'] = attached
if 'is_partitioned' in res['rows'][0]:
is_partitioned = res['rows'][0]['is_partitioned']
else:
is_partitioned = False
# If partitioned_table_name in result set then get partition
# icon css class else table icon.
if 'partitioned_table_name' in res['rows'][0]:
res['rows'][0]['is_sub_partitioned'] = is_partitioned
icon = self.get_partition_icon_css_class(res['rows'][0])
else:
icon = self.get_icon_css_class(res['rows'][0])
return jsonify(
node=self.blueprint.generate_browser_node(
tid,
parent_id,
name,
icon=icon,
is_partitioned=is_partitioned,
parent_schema_id=scid,
schema_id=rest['rows'][0]['scid'],
schema_name=rest['rows'][0]['nspname'],
affected_partitions=partitions_oid
)
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def properties(self, gid, sid, did, scid, tid, res,
return_ajax_response=True):
"""
This function will show the properties of the selected table node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
            scid: Schema ID
tid: Table ID
res: Table/Partition table properties
return_ajax_response: If True then return the ajax response
Returns:
JSON of selected table node
"""
data = res['rows'][0]
data['vacuum_settings_str'] = ''
if data['reloptions'] is not None:
data['vacuum_settings_str'] += '\n'.join(data['reloptions'])
if data['toast_reloptions'] is not None:
data['vacuum_settings_str'] += '\n' \
if data['vacuum_settings_str'] != '' else ''
data['vacuum_settings_str'] += '\n'.\
join(map(lambda o: 'toast.' + o, data['toast_reloptions']))
data['vacuum_settings_str'] = data[
'vacuum_settings_str'
].replace('=', ' = ')
data = self._formatter(did, scid, tid, data)
# Fetch partition of this table if it is partitioned table.
if 'is_partitioned' in data and data['is_partitioned']:
# get the partition type
data['partition_type'] = \
data['partition_scheme'].split()[0].lower()
partitions = []
SQL = render_template("/".join([self.partition_template_path,
'nodes.sql']),
scid=scid, tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
partition_name = row['name']
# if schema name is different then display schema
# qualified name on UI.
if data['schema'] != row['schema_name']:
partition_name = row['schema_name'] + '.' + row['name']
if data['partition_type'] == 'range':
if row['partition_value'] == 'DEFAULT':
is_default = True
range_from = None
range_to = None
else:
range_part = row['partition_value'].split(
'FOR VALUES FROM (')[1].split(') TO')
range_from = range_part[0]
range_to = range_part[1][2:-1]
is_default = False
partitions.append({
'oid': row['oid'],
'partition_name': partition_name,
'values_from': range_from,
'values_to': range_to,
'is_default': is_default,
'is_sub_partitioned': row['is_sub_partitioned'],
'sub_partition_scheme': row['sub_partition_scheme']
})
elif data['partition_type'] == 'list':
if row['partition_value'] == 'DEFAULT':
is_default = True
range_in = None
else:
range_part = row['partition_value'].split(
'FOR VALUES IN (')[1]
range_in = range_part[:-1]
is_default = False
partitions.append({
'oid': row['oid'],
'partition_name': partition_name,
'values_in': range_in,
'is_default': is_default,
'is_sub_partitioned': row['is_sub_partitioned'],
'sub_partition_scheme': row['sub_partition_scheme']
})
else:
range_part = row['partition_value'].split(
'FOR VALUES WITH (')[1].split(",")
range_modulus = range_part[0].strip().strip(
"modulus").strip()
range_remainder = range_part[1].strip().\
strip(" remainder").strip(")").strip()
partitions.append({
'oid': row['oid'],
'partition_name': partition_name,
'values_modulus': range_modulus,
'values_remainder': range_remainder,
'is_sub_partitioned': row['is_sub_partitioned'],
'sub_partition_scheme': row['sub_partition_scheme']
})
data['partitions'] = partitions
if not return_ajax_response:
return data
return ajax_response(
response=data,
status=200
)
def get_partitions_sql(self, partitions, schema_diff=False):
"""
This function will iterate all the partitions and create SQL.
:param partitions: List of partitions
:param schema_diff: If true then create sql accordingly.
"""
sql = ''
for row in partitions['partitions']:
part_data = dict()
part_data['partitioned_table_name'] = partitions['name']
part_data['parent_schema'] = partitions['schema']
if 'is_attach' in row and row['is_attach']:
schema_name, table_name = \
self.get_schema_and_table_name(row['partition_name'])
part_data['schema'] = schema_name
part_data['name'] = table_name
else:
part_data['schema'] = partitions['schema']
part_data['relispartition'] = True
part_data['name'] = row['partition_name']
if 'is_default' in row and row['is_default'] and (
partitions['partition_type'] == 'range' or
partitions['partition_type'] == 'list'):
part_data['partition_value'] = 'DEFAULT'
elif partitions['partition_type'] == 'range':
range_from = row['values_from'].split(',')
range_to = row['values_to'].split(',')
from_str = ', '.join("{0}".format(item) for
item in range_from)
to_str = ', '.join("{0}".format(item) for
item in range_to)
part_data['partition_value'] = 'FOR VALUES FROM (' +\
from_str + ') TO (' +\
to_str + ')'
elif partitions['partition_type'] == 'list':
range_in = row['values_in'].split(',')
in_str = ', '.join("{0}".format(item) for item in range_in)
part_data['partition_value'] = 'FOR VALUES IN (' + in_str\
+ ')'
else:
range_modulus = row['values_modulus'].split(',')
range_remainder = row['values_remainder'].split(',')
modulus_str = ', '.join("{0}".format(item) for item in
range_modulus)
remainder_str = ', '.join("{0}".format(item) for item in
range_remainder)
part_data['partition_value'] = 'FOR VALUES WITH (MODULUS '\
+ modulus_str \
+ ', REMAINDER ' +\
remainder_str + ')'
            # Check if the partition is itself declared as a partitioned table.
if 'is_sub_partitioned' in row and row['is_sub_partitioned']:
part_data['partition_scheme'] = row['sub_partition_scheme'] \
if 'sub_partition_scheme' in row else \
self.get_partition_scheme(row)
part_data['is_partitioned'] = True
if 'is_attach' in row and row['is_attach']:
partition_sql = render_template(
"/".join([self.partition_template_path, 'attach.sql']),
data=part_data, conn=self.conn
)
else:
# For schema diff we create temporary partitions to copy the
# data from original table to temporary table.
if schema_diff:
part_data['name'] = row['temp_partition_name']
partition_sql = render_template(
"/".join([self.partition_template_path, 'create.sql']),
data=part_data, conn=self.conn
)
sql += partition_sql
return sql
def truncate(self, gid, sid, did, scid, tid, res):
"""
This function will truncate the table object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
"""
        # Below will decide if it's a simple truncate or a truncate with
        # cascade call
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
# Convert str 'true' to boolean type
is_cascade = json.loads(data['cascade'])
data = res['rows'][0]
SQL = render_template("/".join([self.table_template_path,
'truncate.sql']),
data=data, cascade=is_cascade)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Table truncated"),
data={
'id': tid,
'scid': scid
}
)
def get_delete_sql(self, res):
# Below will decide if it's simple drop or drop with cascade call
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
data = res['rows'][0]
return render_template(
"/".join([self.table_template_path, 'delete.sql']),
data=data, cascade=cascade,
conn=self.conn
)
def delete(self, gid, sid, did, scid, tid, res):
"""
This function will delete the table object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
"""
SQL = self.get_delete_sql(res)
status, res = self.conn.execute_scalar(SQL)
if not status:
return status, res
return True, {
'id': tid,
'scid': scid
}
def get_schema_and_table_name(self, tid):
"""
This function will fetch the schema qualified name of the
given table id.
:param tid: Table Id.
"""
# Get schema oid
status, scid = self.conn.execute_scalar(
render_template("/".join([self.table_template_path,
'get_schema_oid.sql']), tid=tid))
if not status:
return internal_server_error(errormsg=scid)
if scid is None:
return None, None
# Fetch schema name
status, schema_name = self.conn.execute_scalar(
render_template("/".join([self.table_template_path,
'get_schema.sql']), conn=self.conn,
scid=scid)
)
if not status:
return internal_server_error(errormsg=schema_name)
# Fetch Table name
status, table_name = self.conn.execute_scalar(
render_template(
"/".join([self.table_template_path, 'get_table.sql']),
conn=self.conn, scid=scid, tid=tid
)
)
if not status:
return internal_server_error(errormsg=table_name)
return schema_name, table_name
def update_vacuum_settings(self, vacuum_key, old_data, data=None):
"""
        This function iterates over the vacuum and vacuum toast table
        settings and creates two new dictionaries, one for the set
        parameters and another for the reset ones.
:param vacuum_key: Key to be checked.
:param old_data: Old data
:param data: New data
:return:
"""
# When creating a table old_data is the actual data
if data is None:
if vacuum_key in old_data:
for opt in old_data[vacuum_key]:
if 'value' in opt and opt['value'] is None:
opt.pop('value')
# Iterate vacuum table
elif vacuum_key in data and 'changed' in data[vacuum_key] \
and vacuum_key in old_data:
set_values = []
reset_values = []
for data_row in data[vacuum_key]['changed']:
for old_data_row in old_data[vacuum_key]:
if data_row['name'] == old_data_row['name'] and \
'value' in data_row:
if data_row['value'] is not None:
set_values.append(data_row)
elif data_row['value'] is None and \
'value' in old_data_row:
reset_values.append(data_row)
if len(set_values) > 0:
data[vacuum_key]['set_values'] = set_values
if len(reset_values) > 0:
data[vacuum_key]['reset_values'] = reset_values
authors: ["morderkai@hotmail.com"]
author_id: morderkai@hotmail.com

---
blob_id: b37335f9e0440c5cb31486bf57494e3aa0f993a9
directory_id: 45f6a4dfc837998565d4e4e4cde258a27fdbd424
path: /learn_hb_acc/acc/Source/DataLock/Lock2.py
content_id: 1f369f5272c0e61228b6a273a5cd7e036b4c6fce
detected_licenses: []
license_type: no_license
repo_name: isoundy000/learn_python
snapshot_id: c220966c42187335c5342269cafc6811ac04bab3
revision_id: fa1591863985a418fd361eb6dac36d1301bc1231
branch_name: refs/heads/master
visit_date: 2022-12-29T10:27:37.857107
revision_date: 2020-10-16T03:52:44
committer_date: 2020-10-16T03:52:44
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 305
extension: py
content:
#!/usr/bin/env python
#-*- coding: UTF-8 -*-
__author__ = 'ghou'

from Source.DataLock.Lock1 import Lock1


class Lock2:
    """RAII-style guard: acquires the given lock on construction and
    releases it when the guard object is destroyed."""
    def __init__(self, lock):
        # print "lock"
        self._lock = lock
        self._lock.Lock()

    def __del__(self):
        # print "release"
        self._lock.Release()
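# Usage sketch (hypothetical; assumes Lock1 exposes Lock()/Release()):
#   guard = Lock2(Lock1())
#   ... critical section ...
#   del guard  # Release() runs when the guard is destroyed
# Note: relying on __del__ is fragile in Python; an __enter__/__exit__
# context manager would release the lock deterministically.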
authors: ["1737785826@qq.com"]
author_id: 1737785826@qq.com

---
blob_id: 3a06e9e9cb617ccb4aaab63ae309614690860766
directory_id: 91c71d5b6c2200f97037a4d53ca7bc23beebf98e
path: /CyberSecurityRumble CTF/Crypto/ezdsa/signer.py
content_id: cec927fe006be092c29143d0d2bbf7e89af7cef9
detected_licenses: []
license_type: no_license
repo_name: ROFLailXGOD/CTFs
snapshot_id: caf4394057987d2dfd0cb048375a9f7a271defc8
revision_id: 8f9a0d2957298014e34e43b5ea84c51cf8a787cc
branch_name: refs/heads/master
visit_date: 2023-03-02T18:38:56.181437
revision_date: 2021-02-06T10:43:03
committer_date: 2021-02-06T10:43:03
github_id: 292,396,776
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 855
extension: py
content:
import socketserver
import random
import ecdsa
key = open("secp256k1-key.pem").read()
sk = ecdsa.SigningKey.from_pem(key)
def sony_rand(n):
return random.getrandbits(8*n).to_bytes(n, "big")
def sign(data):
if data == b"admin":
raise ValueError("Not Permitted!")
signature = sk.sign(data, entropy=sony_rand)
return signature
class TCPHandler(socketserver.StreamRequestHandler):
def handle(self):
data = self.rfile.readline().strip()
try:
signature = sign(data).hex()
self.wfile.write(b"Your token: " + data + b"," + signature.encode())
except ValueError as ex:
self.wfile.write(b"Invalid string submitted: " + str(ex).encode())
if __name__ == '__main__':
server = socketserver.ForkingTCPServer(("0.0.0.0", 10101), TCPHandler)
server.serve_forever()
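# Interaction sketch (hypothetical host; the port matches the server above):
#   $ echo "test" | nc localhost 10101
#   Your token: test,<hex signature>
# Note: random.getrandbits() is not cryptographically secure, so the ECDSA
# nonces produced by sony_rand are predictable; that appears to be the
# intended weakness of this challenge.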
authors: ["vovavovava@mail.ru"]
author_id: vovavovava@mail.ru

---
blob_id: f23dd6761564ddb1c16f96850078e43e8e6b7d06
directory_id: 6e5cbf71ba43e83e1e1e381a3da645a1f9575703
path: /ece681PatternRecognition/CourseProject/senti_analysis_final/Python_Code/last_sprint_0.py
content_id: b8bc2a82cd113966de519e9e4835565f0407a1fc
detected_licenses: []
license_type: no_license
repo_name: AmberWangjie/qsx_duke_course
snapshot_id: ab5a2a4eea28bf719b03f1750f593c5cf46af1be
revision_id: b1d780bf1b6da1590d6f85cedd2f2f70c47bce71
branch_name: refs/heads/master
visit_date: 2021-05-03T23:58:17.291403
revision_date: 2017-05-10T01:31:41
committer_date: 2017-05-10T01:31:41
github_id: 120,403,601
star_events_count: 0
fork_events_count: 1
gha_license_id: null
gha_event_created_at: 2018-02-06T04:54:40
gha_created_at: 2018-02-06T04:54:40
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,243
extension: py
content:
# -*- coding: utf-8 -*-
import re
import os
import glob
import copy
import numpy as np

# Count word frequencies in the test file (alphabetic words only)
wordcount = {}
with open(os.path.join(os.getcwd(), "test.txt")) as f:
    for word in f.read().split():
        if len(re.findall("[^a-zA-Z]", word)) != 0:
            continue
        if word not in wordcount:
            wordcount[word] = 1
        else:
            wordcount[word] += 1
# Sort by (count, word); dict.iteritems() and tuple-unpacking lambdas are
# Python 2 only, so use items() with an index-based key instead
wordcount = sorted(wordcount.items(), key=lambda kv: (kv[1], kv[0]))

# Keep only the counted words that appear in the opinion lexicons
feature = []
pos = []
neg = []
with open(os.path.join(os.getcwd(), "positive-words.txt")) as p, \
        open(os.path.join(os.getcwd(), "negative-words.txt")) as n:
    for w in p.read().split():
        pos.append(w)
    for w in n.read().split():
        neg.append(w)
for wo in wordcount:
    if (wo[0] in pos) | (wo[0] in neg):
        feature.append(wo[0])
# '?' and '!' are used as regex patterns below, so they must be escaped
feature.append(r'\?')
feature.append(r'\!')
#feature=['love','wonderful','best','great','superb','stil','beautiful','bad','worst','stupid','waste','boring','\?','\!']

def analyzer(f1):
    """Count occurrences of each feature pattern in the open file f1."""
    global feature
    f_words = copy.deepcopy(feature)
    index = range(len(f_words))
    # materialize the zip: in Python 3 a zip object is a one-shot iterator
    # and would be exhausted after the first line of the loop below
    f_words = list(zip(f_words, index))
    f_count = [0] * len(f_words)
    for line in f1:
        for f_word in f_words:
            ma = re.findall(f_word[0], line)
            if len(ma) != 0:
                f_count[f_word[1]] = f_count[f_word[1]] + len(ma)
    return f_count

# Build the feature matrix and labels from the review corpus
data = []
target = []
for root, dirs, files in os.walk("./review0"):
    it = glob.glob(os.path.join(root, "*.txt"))
    flag = 0
    if re.search("neg", root):
        flag = 1
    if len(it) == 0:
        continue
    for path in it:
        with open(path) as f_input:
            data.append(analyzer(f_input))
            target.append(flag)
# Binarized copy: 1 if the feature occurred at all, else 0
data_pre = copy.deepcopy(data)
for i in range(len(data_pre)):
    for j in range(len(data_pre[i])):
        if data_pre[i][j] > 0:
            data_pre[i][j] = 1
data = np.array(data)
data_pre = np.array(data_pre)
target = np.array(target)
np.savetxt('../Dataset/data.txt', data, fmt="%1d", delimiter=' ')
np.savetxt('../Dataset/data_pre.txt', data_pre, fmt="%1d", delimiter=' ')
np.savetxt('../Dataset/target.txt', target, fmt="%1d", delimiter=' ')
authors: ["qsxyh123@gmail.com"]
author_id: qsxyh123@gmail.com

---
blob_id: fa01f2c4235022464b36c98cf0478c200258f51c
directory_id: 54f352a242a8ad6ff5516703e91da61e08d9a9e6
path: /Source Codes/AtCoder/arc013/B/4419292.py
content_id: e610ae7336b7f2edb3c78c31116d79ff4b9ad53d
detected_licenses: []
license_type: no_license
repo_name: Kawser-nerd/CLCDSA
snapshot_id: 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb
revision_id: aee32551795763b54acb26856ab239370cac4e75
branch_name: refs/heads/master
visit_date: 2022-02-09T11:08:56.588303
revision_date: 2022-01-26T18:53:40
committer_date: 2022-01-26T18:53:40
github_id: 211,783,197
star_events_count: 23
fork_events_count: 9
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 359
extension: py
content:
c=int(input())
info=[]
for i in range(c):
info.append(list(map(int,input().split())))
n_max=0
m_max=0
l_max=0
for j in range(c):
info[j].sort()
if n_max<info[j][0]:
n_max=info[j][0]
if m_max<info[j][1]:
m_max=info[j][1]
if l_max<info[j][2]:
l_max=info[j][2]
print(n_max*m_max*l_max)
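# What this computes: each of the c boxes has its three dimensions sorted,
# so the per-position maxima n_max, m_max, l_max describe the smallest cuboid
# that can contain every box (rotations allowed); the printed value
# n_max*m_max*l_max is that cuboid's volume.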
authors: ["kwnafi@yahoo.com"]
author_id: kwnafi@yahoo.com

---
blob_id: 108e662bdbbe0750ef625548e598ffa8f551dbb3
directory_id: e2952eadc5d28354251139d6b184c8b096e57e11
path: /pythonStudy/example/判断闰年.py
content_id: 2549501fa4c9ef5fcf6516247584650c1aa3be3c
detected_licenses: []
license_type: no_license
repo_name: haohualiangshi/studyPath
snapshot_id: 5067c2029cd34f9c21af3e54bfe916a1fa70eb37
revision_id: 396266254797eeb73feb6d2cf9321e2cca2a869b
branch_name: refs/heads/master
visit_date: 2022-12-22T00:45:53.935467
revision_date: 2021-04-13T12:27:04
committer_date: 2021-04-13T12:27:04
github_id: 85,938,728
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2022-12-11T17:37:21
gha_created_at: 2017-03-23T10:48:54
gha_language: JavaScript
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 138
extension: py
content:
"""
判断闰年
"""
year = int(input("请输入年份:"))
is_leap = (year % 4 == 0 and year % 100 != 0 or year%400==0)
print(is_leap)
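# Examples, following the expression above:
#   2000 -> True   (divisible by 400)
#   1900 -> False  (divisible by 100 but not by 400)
#   2024 -> True   (divisible by 4 but not by 100)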
authors: ["13525006097@163.com"]
author_id: 13525006097@163.com

---
blob_id: 03d14277c5344e701582ab6202425e25a138540c
directory_id: 62af1d87b47ecd6b7e9e1613f0772f7bc5bd14e2
path: /triangle.py
content_id: d7b030e5699b0587a694db1e2aa3f22d76074f6d
detected_licenses: []
license_type: no_license
repo_name: hernandez26/Projects
snapshot_id: 2db8d353e75b576b0ca5e20a4685faadbbb39020
revision_id: 4ae34e83664d1dd06e06ed19969746cd90a7e0a2
branch_name: refs/heads/master
visit_date: 2020-08-05T20:14:53.399223
revision_date: 2020-05-11T18:26:43
committer_date: 2020-05-11T18:26:43
github_id: 212,692,760
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2020-05-11T18:23:21
gha_created_at: 2019-10-03T22:29:46
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,114
extension: py
content:
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 3 09:25:30 2019
@author: Princess
"""
from collections import Counter

# WHILE LOOP
x = 3
while x < 10:
    print(x)
    x = x + 1

# Classify a single triangle
x = 3
y = 7
z = 4
if x == y == z:
    print("Equilateral triangle")
elif x == y or y == z or z == x:
    print("Isosceles triangle")
else:
    print("Scalene triangle")

# Count triangle types over all side lengths 1..10; a valid triangle must
# satisfy all three triangle inequalities
isos = []
equal = []
scale = []
for a in range(1, 11):
    for b in range(1, 11):
        for c in range(1, 11):
            if not (a + b > c and b + c > a and c + a > b):
                continue
            if a == b == c:  # equilateral
                equal.append(1)
            elif a == b or b == c or c == a:  # isosceles
                isos.append(1)
            else:  # scalene
                scale.append(1)
trianglecount = scale + isos + equal
print(len(trianglecount))
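# Equivalent tally using the Counter imported above (a sketch; classify() is
# a helper added here, not part of the original script):
def classify(a, b, c):
    if a == b == c:
        return "equilateral"
    if a == b or b == c or c == a:
        return "isosceles"
    return "scalene"

kinds = Counter(
    classify(a, b, c)
    for a in range(1, 11) for b in range(1, 11) for c in range(1, 11)
    if a + b > c and b + c > a and c + a > b
)
print(kinds)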
authors: ["noreply@github.com"]
author_id: hernandez26.noreply@github.com

---
blob_id: ddfb118d4ac2c9352c6c4a430804c39aedb00e10
directory_id: 93c2e40b60e02c114f71227badf70175b0367321
path: /app/models/checkin.py
content_id: 7f8e1a4b5d5a0f140f18a67dbc63adb6ce929917
detected_licenses: []
license_type: no_license
repo_name: italovieira/prontuario-backend
snapshot_id: b2d63806c27f4110e97508f9f5a2b8fbfb99477e
revision_id: 5c698a2d4ae9eae71c376db436c0b6d09e70e4b2
branch_name: refs/heads/master
visit_date: 2023-05-10T11:48:44.078222
revision_date: 2019-06-18T01:41:48
committer_date: 2019-06-18T01:43:39
github_id: 190,486,717
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2023-05-01T20:57:41
gha_created_at: 2019-06-06T00:18:59
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 582
extension: py
content:
from .model import Model
from ..util import format_date
class Checkin(Model):
def __init__(self, cpf_paciente, cpf_secretario, data):
self.cpf_paciente = cpf_paciente
self.cpf_secretario = cpf_secretario
self.data = format_date(data)
def serialize(self):
from ..dao.usuario import UsuarioDAO
get_nome = UsuarioDAO().get_nome_usuario_from_cpf
data = super().serialize()
data['nome_paciente'] = get_nome(data['cpf_paciente'])
data['nome_secretario'] = get_nome(data['cpf_secretario'])
return data
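
# Hedged usage sketch (commented out; Model, format_date and UsuarioDAO are this
# project's own modules, whose behavior is assumed rather than verified here,
# and the CPF strings below are placeholders):
# checkin = Checkin('11111111111', '22222222222', '2019-06-17')
# data = checkin.serialize()  # adds nome_paciente / nome_secretario via UsuarioDAO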
|
[
"italogv@hotmail.com"
] |
italogv@hotmail.com
|
396806327b28e7f9e7ffb2fb37a19b82b5c85ef1
|
98842f31692bb5ae785f0ef10086bca8c46dec97
|
/tests/test_models.py
|
e8cfc236853e908c3e94ed3b301c251105f935ff
|
[
"MIT"
] |
permissive
|
akashsengupta1997/segmentation_models
|
b90266056e95de3cd5bcb3e534d674ee8afd9b7c
|
ac2e5f6dd9fb5833c14c4265faf985f817b27885
|
refs/heads/master
| 2020-04-13T10:19:49.107250
| 2019-05-16T10:50:11
| 2019-05-16T10:50:11
| 163,136,690
| 0
| 0
|
MIT
| 2018-12-26T04:38:49
| 2018-12-26T04:38:49
| null |
UTF-8
|
Python
| false
| false
| 2,745
|
py
|
import sys
import pytest
import random as rn
import six
import numpy as np
import keras.backend as K
sys.path.insert(0, '..')
from segmentation_models import Unet
from segmentation_models import Linknet
from segmentation_models import PSPNet
from segmentation_models import FPN
from segmentation_models.backbones import backbones as bkb
BACKBONES = list(bkb.backbones.keys())
def keras_test(func):
"""Function wrapper to clean up after TensorFlow tests.
# Arguments
func: test function to clean up after.
# Returns
A function wrapping the input function.
"""
@six.wraps(func)
def wrapper(*args, **kwargs):
output = func(*args, **kwargs)
K.clear_session()
return output
return wrapper
@keras_test
def _test_none_shape(model_fn, backbone, *args, **kwargs):
# define number of channels
input_shape = kwargs.get('input_shape', None)
n_channels = 3 if input_shape is None else input_shape[-1]
# create test sample
x = np.ones((1, 32, 32, n_channels))
# define model and process sample
model = model_fn(backbone, *args, **kwargs)
y = model.predict(x)
# check output dimensions
assert x.shape[:-1] == y.shape[:-1]
@keras_test
def _test_shape(model_fn, backbone, input_shape, *args, **kwargs):
# create test sample
x = np.ones((1, *input_shape))
# define model and process sample
model = model_fn(backbone, input_shape=input_shape, *args, **kwargs)
y = model.predict(x)
# check output dimensions
assert x.shape[:-1] == y.shape[:-1]
def test_unet():
_test_none_shape(
Unet, rn.choice(BACKBONES), encoder_weights=None)
_test_none_shape(
Unet, rn.choice(BACKBONES), encoder_weights='imagenet')
_test_shape(
Unet, rn.choice(BACKBONES), input_shape=(256, 256, 4), encoder_weights=None)
def test_linknet():
_test_none_shape(
Linknet, rn.choice(BACKBONES), encoder_weights=None)
_test_none_shape(
Linknet, rn.choice(BACKBONES), encoder_weights='imagenet')
_test_shape(
Linknet, rn.choice(BACKBONES), input_shape=(256, 256, 4), encoder_weights=None)
def test_pspnet():
_test_shape(
PSPNet, rn.choice(BACKBONES), input_shape=(384, 384, 4), encoder_weights=None)
_test_shape(
PSPNet, rn.choice(BACKBONES), input_shape=(384, 384, 3), encoder_weights='imagenet')
def test_fpn():
_test_none_shape(
FPN, rn.choice(BACKBONES), encoder_weights=None)
_test_none_shape(
FPN, rn.choice(BACKBONES), encoder_weights='imagenet')
_test_shape(
FPN, rn.choice(BACKBONES), input_shape=(256, 256, 4), encoder_weights=None)
if __name__ == '__main__':
pytest.main([__file__])
|
[
"qubvel@gmail.com"
] |
qubvel@gmail.com
|
d33f220321e336e138837e39ee23debce2c0c27e
|
e64e11891cc0dc3f55168841026d260a0fdb45d5
|
/webapp/forms.py
|
87879743747f84a96a0dde2b58dba1288d500fbb
|
[] |
no_license
|
yikkin/my_webapp
|
79f5bdbfec650d004ab3be76698994ecaaf4930f
|
2bd1fd4aeac5d606085313402b39f04edace0de2
|
refs/heads/master
| 2023-01-13T05:13:05.613563
| 2020-11-15T21:45:55
| 2020-11-15T21:45:55
| 313,128,448
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
from django import forms
from .models import Technologies
class TechnologiesForm(forms.ModelForm):
class Meta:
model = Technologies
fields = ["techno"]
|
[
"yongwe52@gmail.com"
] |
yongwe52@gmail.com
|
e8cba1e488bb187b94ce98f304dd63715debf0e0
|
1e457efb459352143ed307cae6ffbd862e36b878
|
/20200603-2集合常见操作.py
|
e73d789409fa29031bb790f76b067cfca056dcb7
|
[] |
no_license
|
xuzhanhao06/PythonBase
|
950653874e695a7802f2326bb6f8a99a98518eed
|
9bf46bdf24819aa7aa829aa1ccfa88cf3181da8b
|
refs/heads/master
| 2022-10-11T04:23:04.138757
| 2020-06-11T14:25:05
| 2020-06-11T14:25:05
| 271,566,122
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,247
|
py
|
s1 = {10, 20}
print('---------add(): sets are mutable and unordered')
s1.add(100)
print(s1)  # {100, 10, 20}
print('Sets automatically remove duplicates!')
s1.add(100)
print(s1)  # {100, 10, 20}
# s1.add([10, 20, 30])
# print(s1)  # raises TypeError: a list is unhashable
print('---------update(): the data added must be a *sequence*')
print(f's1:{s1}')
s1.update([10, 20, 30, 50])
print(s1)  # {100, 10, 50, 20, 30}
# s1.update(100)
# print(s1)  # raises TypeError: an int is not iterable
print('------------------------removal-----------------------------------')
s1 = {10, 20, 30, 40, 50}
print('remove()')
s1.remove(10)
print(s1)  # {40, 50, 20, 30}
print('discard(): removes the given element; no error if it is absent----------------------------------')
s1 = {10, 20, 30, 40, 50}
print(s1)
s1.discard(10)
print(s1)  # {40, 50, 20, 30}
s1.discard(10)
print(s1)  # {40, 50, 20, 30} -- no error raised
print('----pop() removes an arbitrary element and returns it----')
s1 = {10, 20, 30, 40, 50}
print(s1)
del_num = s1.pop()
print(del_num)  # e.g. 40
print(s1)  # e.g. {10, 50, 20, 30}
print('------------------------membership-----------------------------------')
s1 = {10, 20, 30, 40, 50}
print(10 in s1)  # 10 is in the set -> True
print(10 not in s1)  # False
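
# Hedged extension (added for illustration): set algebra operators, which the
# walkthrough above stops short of.
a = {1, 2, 3}
b = {2, 3, 4}
print(a | b)  # union -> {1, 2, 3, 4}
print(a & b)  # intersection -> {2, 3}
print(a - b)  # difference -> {1}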
|
[
"noreply@github.com"
] |
xuzhanhao06.noreply@github.com
|
7cf0ae9730b5c29b23c9020d99b8a0ada544610a
|
6b619df7af97326589444ad89dc775045958a81e
|
/mnc/venv/Lib/site-packages/validator_collection/checkers.py
|
29f8fbc2a976f31450817075c039ce692e43e6e6
|
[] |
no_license
|
muhammadali07/coding_interview
|
f06216e00c3c4395d80437bef42e1e35328e24db
|
2f2d4f5f4db24ddc9a6084b47a48c503c3499340
|
refs/heads/main
| 2023-06-20T11:32:47.271925
| 2021-07-15T09:56:29
| 2021-07-15T09:56:29
| 378,059,719
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 54,533
|
py
|
# -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
# pylint: disable=W0703
import io
import sys
import validator_collection.validators as validators
from validator_collection._compat import integer_types, basestring
from validator_collection._decorators import disable_checker_on_env
# pylint: disable=W0613
## CORE
@disable_checker_on_env
def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
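# Hedged illustration of the string-name mechanism described in the hint above
# (commented out; the values are arbitrary examples):
#     is_type(123, int)             # True -- direct type check
#     is_type(123, 'int')           # True -- matched by class name, no import needed
#     is_type(123, ('str', 'int'))  # True -- any match in the iterable suffices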
def _check_base_classes(base_classes, check_for_type):
"""Indicate whether ``check_for_type`` exists in ``base_classes``.
"""
return_value = False
for base in base_classes:
if base.__name__ == check_for_type:
return_value = True
break
else:
return_value = _check_base_classes(base.__bases__, check_for_type)
if return_value is True:
break
return return_value
@disable_checker_on_env
def are_equivalent(*args, **kwargs):
"""Indicate if arguments passed to this function are equivalent.
.. hint::
This checker operates recursively on the members contained within iterables
and :class:`dict <python:dict>` objects.
.. caution::
If you only pass one argument to this checker - even if it is an iterable -
the checker will *always* return ``True``.
To evaluate members of an iterable for equivalence, you should instead
unpack the iterable into the function like so:
.. code-block:: python
obj = [1, 1, 1, 2]
result = are_equivalent(*obj)
# Will return ``False`` by unpacking and evaluating the iterable's members
result = are_equivalent(obj)
# Will always return True
:param args: One or more values, passed as positional arguments.
:param strict_typing: If ``True``, will only identify items as equivalent if they have
identical sub-typing. If ``False``, related sub-types will be returned as equivalent.
Defaults to ``True``.
:type strict_typing: :class:`bool <python:bool>`
:returns: ``True`` if ``args`` are equivalent, and ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
strict_typing = kwargs.get('strict_typing', True)
if len(args) == 1:
return True
first_item = args[0]
for item in args[1:]:
if strict_typing and type(item) != type(first_item): # pylint: disable=C0123
return False
elif type(item) != type(first_item) and \
is_type(item, first_item.__class__) and \
is_type(first_item, item.__class__): # pylint: disable=C0123
return False
if isinstance(item, dict):
if not are_dicts_equivalent(item, first_item, **kwargs):
return False
elif hasattr(item, '__iter__') and not isinstance(item, (str, bytes, dict)):
if len(item) != len(first_item):
return False
for value in item:
if value not in first_item:
return False
for value in first_item:
if value not in item:
return False
else:
if item != first_item:
return False
return True
@disable_checker_on_env
def are_dicts_equivalent(*args, **kwargs):
"""Indicate if :ref:`dicts <python:dict>` passed to this function have identical
keys and values.
:param args: One or more values, passed as positional arguments.
:param strict_typing: If ``True``, will only identify items as equivalent if they have
identical sub-typing. If ``False``, related sub-types will be returned as equivalent.
Defaults to ``True``.
:type strict_typing: :class:`bool <python:bool>`
:param missing_as_none: If ``True``, will treat missing keys in one value and
:obj:`None <python:None>` keys in the other as equivalent. If ``False``, missing and
:obj:`None <python:None>` keys will fail. Defaults to ``False``.
:type missing_as_none: :class:`bool <python:bool>`
:returns: ``True`` if ``args`` have identical keys/values, and ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
# pylint: disable=too-many-return-statements
missing_as_none = kwargs.get('missing_as_none', False)
if not args:
return False
if len(args) == 1:
return True
if not all(is_dict(x) for x in args):
return False
first_item = args[0]
for item in args[1:]:
if missing_as_none and len(item) != len(first_item):
for key in item:
if key not in first_item:
first_item[key] = None
for key in first_item:
if key not in item:
item[key] = None
if len(item) != len(first_item):
return False
for key in item:
if key not in first_item:
return False
if not are_equivalent(item[key], first_item[key], **kwargs):
return False
for key in first_item:
if key not in item:
return False
if not are_equivalent(first_item[key], item[key], **kwargs):
return False
return True
@disable_checker_on_env
def is_between(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` is greater than or equal to a supplied ``minimum``
and/or less than or equal to ``maximum``.
.. note::
This function works on any ``value`` that support comparison operators,
whether they are numbers or not. Technically, this means that ``value``,
``minimum``, or ``maximum`` need to implement the Python magic methods
:func:`__le__ <python:object.__le__>` and :func:`__ge__ <python:object.__ge__>`.
If ``value``, ``minimum``, or ``maximum`` do not support comparison
operators, a :class:`TypeError <python:TypeError>` will be raised.
:param value: The ``value`` to check.
:type value: anything that supports comparison operators
:param minimum: If supplied, will return ``True`` if ``value`` is greater than or
equal to this value.
:type minimum: anything that supports comparison operators /
:obj:`None <python:None>`
:param maximum: If supplied, will return ``True`` if ``value`` is less than or
equal to this value.
:type maximum: anything that supports comparison operators /
:obj:`None <python:None>`
:returns: ``True`` if ``value`` is greater than or equal to a supplied ``minimum``
and less than or equal to a supplied ``maximum``. Otherwise, returns ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
:raises TypeError: if ``value``, ``minimum``, or ``maximum`` do not
support comparison operators
:raises ValueError: if both ``minimum`` and ``maximum`` are
:obj:`None <python:None>`
"""
if minimum is None and maximum is None:
raise ValueError('minimum and maximum cannot both be None')
if value is None:
return False
if minimum is not None and maximum is None:
return value >= minimum
elif minimum is None and maximum is not None:
return value <= maximum
elif minimum is not None and maximum is not None:
return value >= minimum and value <= maximum
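# Hedged illustration (commented out): is_between() works on any comparable
# values, not just numbers, per the note above.
#     is_between(5, minimum=1, maximum=10)       # True
#     is_between('m', minimum='a', maximum='z')  # True -- lexicographic comparison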
@disable_checker_on_env
def has_length(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` has a length greater than or equal to a
supplied ``minimum`` and/or less than or equal to ``maximum``.
.. note::
This function works on any ``value`` that supports the
:func:`len() <python:len>` operation. This means that ``value`` must implement
the :func:`__len__ <python:__len__>` magic method.
If ``value`` does not support length evaluation, the checker will raise
:class:`TypeError <python:TypeError>`.
:param value: The ``value`` to check.
:type value: anything that supports length evaluation
:param minimum: If supplied, will return ``True`` if ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will return ``True`` if ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` has length greater than or equal to a
supplied ``minimum`` and less than or equal to a supplied ``maximum``.
Otherwise, returns ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
:raises TypeError: if ``value`` does not support length evaluation
:raises ValueError: if both ``minimum`` and ``maximum`` are
:obj:`None <python:None>`
"""
if minimum is None and maximum is None:
raise ValueError('minimum and maximum cannot both be None')
length = len(value)
minimum = validators.numeric(minimum,
allow_empty = True)
maximum = validators.numeric(maximum,
allow_empty = True)
return is_between(length,
minimum = minimum,
maximum = maximum)
@disable_checker_on_env
def is_dict(value, **kwargs):
"""Indicate whether ``value`` is a valid :class:`dict <python:dict>`
.. note::
This will return ``True`` even if ``value`` is an empty
:class:`dict <python:dict>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if isinstance(value, dict):
return True
try:
value = validators.dict(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_json(value,
schema = None,
json_serializer = None,
**kwargs):
"""Indicate whether ``value`` is a valid JSON object.
.. note::
``schema`` supports JSON Schema Drafts 3 - 7. Unless the JSON Schema indicates the
meta-schema using a ``$schema`` property, the schema will be assumed to conform to
Draft 7.
:param value: The value to evaluate.
:param schema: An optional JSON schema against which ``value`` will be validated.
:type schema: :class:`dict <python:dict>` / :class:`str <python:str>` /
:obj:`None <python:None>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.json(value,
schema = schema,
json_serializer = json_serializer,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_string(value,
coerce_value = False,
minimum_length = None,
maximum_length = None,
whitespace_padding = False,
**kwargs):
"""Indicate whether ``value`` is a string.
:param value: The value to evaluate.
:param coerce_value: If ``True``, will check whether ``value`` can be coerced
to a string if it is not already. Defaults to ``False``.
:type coerce_value: :class:`bool <python:bool>`
:param minimum_length: If supplied, indicates the minimum number of characters
needed to be valid.
:type minimum_length: :class:`int <python:int>`
:param maximum_length: If supplied, indicates the maximum number of characters
allowed to be valid.
:type maximum_length: :class:`int <python:int>`
:param whitespace_padding: If ``True`` and the value is below the
``minimum_length``, pad the value with spaces. Defaults to ``False``.
:type whitespace_padding: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if value is None:
return False
minimum_length = validators.integer(minimum_length, allow_empty = True, **kwargs)
maximum_length = validators.integer(maximum_length, allow_empty = True, **kwargs)
if isinstance(value, basestring) and not value:
if minimum_length and minimum_length > 0 and not whitespace_padding:
return False
return True
try:
value = validators.string(value,
coerce_value = coerce_value,
minimum_length = minimum_length,
maximum_length = maximum_length,
whitespace_padding = whitespace_padding,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_iterable(obj,
forbid_literals = (str, bytes),
minimum_length = None,
maximum_length = None,
**kwargs):
"""Indicate whether ``obj`` is iterable.
:param forbid_literals: A collection of literals that will be considered invalid
even if they are (actually) iterable. Defaults to a :class:`tuple <python:tuple>`
containing :class:`str <python:str>` and :class:`bytes <python:bytes>`.
:type forbid_literals: iterable
:param minimum_length: If supplied, indicates the minimum number of members
needed to be valid.
:type minimum_length: :class:`int <python:int>`
:param maximum_length: If supplied, indicates the maximum number of members
allowed to be valid.
:type maximum_length: :class:`int <python:int>`
:returns: ``True`` if ``obj`` is a valid iterable, ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if obj is None:
return False
if obj in forbid_literals:
return False
try:
obj = validators.iterable(obj,
allow_empty = True,
forbid_literals = forbid_literals,
minimum_length = minimum_length,
maximum_length = maximum_length,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_not_empty(value, **kwargs):
"""Indicate whether ``value`` is empty.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is not empty, ``False`` if it is.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.not_empty(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_none(value, allow_empty = False, **kwargs):
"""Indicate whether ``value`` is :obj:`None <python:None>`.
:param value: The value to evaluate.
:param allow_empty: If ``True``, accepts falsey values as equivalent to
:obj:`None <python:None>`. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is :obj:`None <python:None>`, ``False``
if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
validators.none(value, allow_empty = allow_empty, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_variable_name(value, **kwargs):
"""Indicate whether ``value`` is a valid Python variable name.
.. caution::
This function does **NOT** check whether the variable exists. It only
checks that the ``value`` would work as a Python variable (or class, or
function, etc.) name.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
validators.variable_name(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_callable(value, **kwargs):
"""Indicate whether ``value`` is callable (like a function, method, or class).
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
return hasattr(value, '__call__')
@disable_checker_on_env
def is_uuid(value, **kwargs):
"""Indicate whether ``value`` contains a :class:`UUID <python:uuid.UUID>`
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
validators.uuid(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
## DATE / TIME
@disable_checker_on_env
def is_date(value,
minimum = None,
maximum = None,
coerce_value = False,
**kwargs):
"""Indicate whether ``value`` is a :class:`date <python:datetime.date>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`date <python:datetime.date>`. If ``False``,
will only return ``True`` if ``value`` is a date value only. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.date(value,
minimum = minimum,
maximum = maximum,
coerce_value = coerce_value,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_datetime(value,
minimum = None,
maximum = None,
coerce_value = False,
**kwargs):
"""Indicate whether ``value`` is a :class:`datetime <python:datetime.datetime>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`datetime <python:datetime.datetime>`. If ``False``,
will only return ``True`` if ``value`` is a complete timestamp. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.datetime(value,
minimum = minimum,
maximum = maximum,
coerce_value = coerce_value,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_time(value,
minimum = None,
maximum = None,
coerce_value = False,
**kwargs):
"""Indicate whether ``value`` is a :class:`time <python:datetime.time>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after this value.
:type minimum: :func:`datetime <validator_collection.validators.datetime>` or
:func:`time <validator_collection.validators.time>`-compliant
:class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` /
:class:`time <python:datetime.time>` / numeric / :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :func:`datetime <validator_collection.validators.datetime>` or
:func:`time <validator_collection.validators.time>`-compliant
:class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` /
:class:`time <python:datetime.time>` / numeric / :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`time <python:datetime.time>`. If ``False``,
will only return ``True`` if ``value`` is a valid time. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.time(value,
minimum = minimum,
maximum = maximum,
coerce_value = coerce_value,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_timezone(value,
positive = True,
**kwargs):
"""Indicate whether ``value`` is a :class:`tzinfo <python:datetime.tzinfo>`.
.. caution::
This does **not** validate whether the value is a timezone that actually
exists, nor can it resolve timezone names (e.g. ``'Eastern'`` or ``'CET'``).
For that kind of functionality, we recommend you utilize:
`pytz <https://pypi.python.org/pypi/pytz>`_
:param value: The value to evaluate.
:param positive: Indicates whether the ``value`` is positive or negative
(only has meaning if ``value`` is a string). Defaults to ``True``.
:type positive: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.timezone(value,
positive = positive,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_timedelta(value,
resolution = None,
**kwargs):
"""Indicate whether ``value`` is a :class:`timedelta <python:datetime.timedelta>`.
.. note::
Coerceable string formats are:
* HH:MM:SS
* X day, HH:MM:SS
* X days, HH:MM:SS
* HH:MM:SS.us
* X day, HH:MM:SS.us
* X days, HH:MM:SS.us
where "us" refer to microseconds. Shout out to Alex Pitchford for sharing the
`string-parsing regex <http://kbyanc.blogspot.com/2007/08/python-reconstructing-timedeltas-from.html?showComment=1452111163905#c3907051065256615667>`_.
:param value: The value to evaluate.
:param resolution: Indicates the time period resolution represented by ``value``.
Accepts ``'years'``, ``'weeks'``, ``'days'``, ``'hours'``, ``'minutes'``,
``'seconds'``, ``'milliseconds'``, or ``'microseconds'``. Defaults to
``'seconds'``.
:type resolution: :class:`str <python:str>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.timedelta(value,
resolution = resolution,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
## NUMBERS
@disable_checker_on_env
def is_numeric(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` is a numeric value.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.numeric(value,
minimum = minimum,
maximum = maximum,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_integer(value,
coerce_value = False,
minimum = None,
maximum = None,
base = 10,
**kwargs):
"""Indicate whether ``value`` contains a whole number.
:param value: The value to evaluate.
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be coerced
to whole number. If ``False``, will only return ``True`` if ``value`` is already
a whole number (regardless of type). Defaults to ``False``.
:type coerce_value: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:param base: Indicates the base that is used to determine the integer value.
The allowed values are 0 and 2–36. Base-2, -8, and -16 literals can be
optionally prefixed with ``0b/0B``, ``0o/0O/0``, or ``0x/0X``, as with
integer literals in code. Base 0 means to interpret the string exactly as
an integer literal, so that the actual base is 2, 8, 10, or 16. Defaults to
``10``.
:type base: :class:`int <python:int>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.integer(value,
coerce_value = coerce_value,
minimum = minimum,
maximum = maximum,
base = base,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_float(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` is a :class:`float <python:float>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.float(value,
minimum = minimum,
maximum = maximum,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_fraction(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` is a :class:`Fraction <python:fractions.Fraction>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.fraction(value,
minimum = minimum,
maximum = maximum,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_decimal(value,
minimum = None,
maximum = None,
**kwargs):
"""Indicate whether ``value`` contains a :class:`Decimal <python:decimal.Decimal>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.decimal(value,
minimum = minimum,
maximum = maximum,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
## FILE-RELATED
@disable_checker_on_env
def is_bytesIO(value, **kwargs):
"""Indicate whether ``value`` is a :class:`BytesIO <python:io.BytesIO>` object.
.. note::
This checker will return ``True`` even if ``value`` is empty, so long as
its type is a :class:`BytesIO <python:io.BytesIO>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
return isinstance(value, io.BytesIO)
@disable_checker_on_env
def is_stringIO(value, **kwargs):
"""Indicate whether ``value`` is a :class:`StringIO <python:io.StringIO>` object.
.. note::
This checker will return ``True`` even if ``value`` is empty, so long as
its type is a :class:`StringIO <python:io.StringIO>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
return isinstance(value, io.StringIO)
@disable_checker_on_env
def is_pathlike(value, **kwargs):
"""Indicate whether ``value`` is a path-like object.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.path(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_on_filesystem(value, **kwargs):
"""Indicate whether ``value`` is a file or directory that exists on the local
filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.path_exists(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_file(value, **kwargs):
"""Indicate whether ``value`` is a file that exists on the local filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.file_exists(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_directory(value, **kwargs):
"""Indicate whether ``value`` is a directory that exists on the local filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.directory_exists(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_readable(value, **kwargs):
"""Indicate whether ``value`` is a readable file.
.. caution::
**Use of this validator is an anti-pattern; it should be applied with caution.**
Validating the readability of a file *before* attempting to read it
exposes your code to a bug called
`TOCTOU <https://en.wikipedia.org/wiki/Time_of_check_to_time_of_use>`_.
This particular class of bug can expose your code to **security vulnerabilities**
and so this validator should only be used if you are an advanced user.
A better pattern to use when reading from a file is to apply the principle of
EAFP ("easier to ask forgiveness than permission"), and simply attempt to
read from the file using a ``try ... except`` block:
.. code-block:: python
try:
with open('path/to/filename.txt', mode = 'r') as file_object:
# read from file here
except (OSError, IOError) as error:
# Handle an error if unable to read.
:param value: The value to evaluate.
:type value: Path-like object
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
validators.readable(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_writeable(value,
**kwargs):
"""Indicate whether ``value`` is a writeable file.
.. caution::
This validator does **NOT** work correctly on a Windows file system. This
is due to the vagaries of how Windows manages its file system and the
various ways in which it can manage file permission.
If called on a Windows file system, this validator will raise
:class:`NotImplementedError() <python:NotImplementedError>`.
.. caution::
**Use of this validator is an anti-pattern; it should be applied with caution.**
Validating the writability of a file *before* attempting to write to it
exposes your code to a bug called
`TOCTOU <https://en.wikipedia.org/wiki/Time_of_check_to_time_of_use>`_.
This particular class of bug can expose your code to **security vulnerabilities**
and so this validator should only be used if you are an advanced user.
A better pattern to use when writing to a file is to apply the principle of
EAFP ("easier to ask forgiveness than permission"), and simply attempt to
write to the file using a ``try ... except`` block:
.. code-block:: python
try:
with open('path/to/filename.txt', mode = 'a') as file_object:
# write to file here
except (OSError, IOError) as error:
# Handle an error if unable to write.
.. note::
This validator relies on :func:`os.access() <python:os.access>` to check
whether ``value`` is writeable. This function has certain limitations,
most especially that:
* It will **ignore** file-locking (yielding a false-positive) if the file
is locked.
* It focuses on *local operating system permissions*, which means if trying
to access a path over a network you might get a false positive or false
negative (because network paths may have more complicated authentication
methods).
:param value: The value to evaluate.
:type value: Path-like object
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises NotImplementedError: if called on a Windows system
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if sys.platform in ['win32', 'cygwin']:
raise NotImplementedError('not supported on Windows')
try:
validators.writeable(value,
allow_empty = False,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_executable(value,
**kwargs):
"""Indicate whether ``value`` is an executable file.
.. caution::
This validator does **NOT** work correctly on a Windows file system. This
is due to the vagaries of how Windows manages its file system and the
various ways in which it can manage file permission.
If called on a Windows file system, this validator will raise
:class:`NotImplementedError() <python:NotImplementedError>`.
.. caution::
**Use of this validator is an anti-pattern; it should be applied with caution.**
Validating the executability of a file *before* attempting to execute it
exposes your code to a bug called
`TOCTOU <https://en.wikipedia.org/wiki/Time_of_check_to_time_of_use>`_.
This particular class of bug can expose your code to **security vulnerabilities**
and so this validator should only be used if you are an advanced user.
A better pattern to use when executing a file is to apply the principle of
EAFP ("easier to ask forgiveness than permission"), and simply attempt to
execute the file using a ``try ... except`` block.
.. note::
This validator relies on :func:`os.access() <python:os.access>` to check
whether ``value`` is executable. This function has certain limitations,
most especially that:
* It will **ignore** file-locking (yielding a false-positive) if the file
is locked.
* It focuses on *local operating system permissions*, which means if trying
to access a path over a network you might get a false positive or false
negative (because network paths may have more complicated authentication
methods).
:param value: The value to evaluate.
:type value: Path-like object
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises NotImplementedError: if called on a Windows system
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if sys.platform in ['win32', 'cygwin']:
raise NotImplementedError('not supported on Windows')
try:
validators.executable(value,
**kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
## INTERNET-RELATED
@disable_checker_on_env
def is_email(value, **kwargs):
"""Indicate whether ``value`` is an email address.
.. note::
Email address validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 5322 <https://tools.ietf.org/html/rfc5322>`_ and uses a combination of
string parsing and regular expressions.
String parsing in particular is used to validate certain *highly unusual*
but still valid email patterns, including the use of escaped text and
comments within an email address' local address (the user name part).
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.email(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_url(value, **kwargs):
"""Indicate whether ``value`` is a URL.
.. note::
URL validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 1738 <https://tools.ietf.org/html/rfc1738>`_,
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_,
`RFC 2181 <https://tools.ietf.org/html/rfc2181>`_ and uses a combination of
string parsing and regular expressions.
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:param allow_special_ips: If ``True``, will succeed when validating special IP
addresses, such as loopback IPs like ``127.0.0.1`` or ``0.0.0.0``. If ``False``,
will fail if ``value`` is a special IP address. Defaults to ``False``.
:type allow_special_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.url(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_domain(value, **kwargs):
"""Indicate whether ``value`` is a valid domain.
.. caution::
This validator does not verify that ``value`` **exists** as a domain. It
merely verifies that its contents *might* exist as a domain.
.. note::
This validator checks that ``value`` resembles a valid
domain name. It is - generally - compliant with
`RFC 1035 <https://tools.ietf.org/html/rfc1035>`_ and
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_, however it diverges
in a number of key ways:
* Including authentication (e.g. ``username:password@domain.dev``) will
fail validation.
* Including a path (e.g. ``domain.dev/path/to/file``) will fail validation.
* Including a port (e.g. ``domain.dev:8080``) will fail validation.
If you are hoping to validate a more complete URL, we recommend that you
see :func:`url <validator_collection.validators.url>`.
:param value: The value to evaluate.
:param allow_ips: If ``True``, will succeed when validating IP addresses,
If ``False``, will fail if ``value`` is an IP address. Defaults to ``False``.
:type allow_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.domain(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_ip_address(value, **kwargs):
"""Indicate whether ``value`` is a valid IP address (version 4 or version 6).
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.ip_address(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_ipv4(value, **kwargs):
"""Indicate whether ``value`` is a valid IP version 4 address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.ipv4(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_ipv6(value, **kwargs):
"""Indicate whether ``value`` is a valid IP version 6 address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.ipv6(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_mac_address(value, **kwargs):
"""Indicate whether ``value`` is a valid MAC address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.mac_address(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
@disable_checker_on_env
def is_mimetype(value, **kwargs):
"""Indicate whether ``value`` is a valid MIME type.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.mimetype(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True
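# Hedged usage sketch (not part of the library): a few of the checkers above
# exercised together; the values are arbitrary examples.
if __name__ == '__main__':
    assert is_type(123, 'int')
    assert is_between(5, minimum = 1, maximum = 10)
    assert has_length('abc', minimum = 1, maximum = 5)
    # are_equivalent() needs unpacked members, per its caution note:
    assert are_equivalent(*[1, 1, 1]) and not are_equivalent(*[1, 1, 2])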
|
[
"36842581+muhammadali07@users.noreply.github.com"
] |
36842581+muhammadali07@users.noreply.github.com
|
981c3a051141f5a0d5af4da62da922d3ffae287f
|
b509431d2ca7d2ceac85e9765d442e6a04153e47
|
/hackathon/python-api/flask/bin/sdbadmin
|
9e982496a78d29f2ee35ae64b506a5787082377d
|
[] |
no_license
|
Mashiat31/know_your_rights_workplace
|
c57ab8e101f25053b8932722d89d4042f08786a0
|
54063624bdea1543ae4ed2d2e08d58d9eebedd46
|
refs/heads/master
| 2022-04-07T08:47:15.340501
| 2020-03-08T19:59:47
| 2020-03-08T19:59:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,053
|
#!/home/dama/know_your_rights_workplace/hackathon/python-api/flask/bin/python2
# Copyright (c) 2009 Chris Moyer http://kopertop.blogspot.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Tools to dump and recover an SDB domain
#
VERSION = "%prog version 1.0"
import sys
import time
import boto
from boto import sdb
from boto.compat import json
def choice_input(options, default=None, title=None):
"""
Choice input
"""
if title is None:
title = "Please choose"
print title
objects = []
for n, obj in enumerate(options):
print "%s: %s" % (n, obj)
objects.append(obj)
choice = int(raw_input(">>> "))
try:
choice = objects[choice]
except:
choice = default
return choice
def confirm(message="Are you sure?"):
choice = raw_input("%s [yN] " % message)
return choice and len(choice) > 0 and choice[0].lower() == "y"
def dump_db(domain, file_name, use_json=False, sort_attributes=False):
"""
Dump SDB domain to file
"""
f = open(file_name, "w")
if use_json:
for item in domain:
data = {"name": item.name, "attributes": item}
print >> f, json.dumps(data, sort_keys=sort_attributes)
else:
doc = domain.to_xml(f)
def empty_db(domain):
"""
Remove all entries from domain
"""
for item in domain:
item.delete()
def load_db(domain, file, use_json=False):
"""
Load a domain from a file, this doesn't overwrite any existing
data in the file so if you want to do a full recovery and restore
you need to call empty_db before calling this
:param domain: The SDB Domain object to load to
:param file: The File to load the DB from
"""
if use_json:
for line in file.readlines():
if line:
data = json.loads(line)
item = domain.new_item(data['name'])
item.update(data['attributes'])
item.save()
else:
domain.from_xml(file)
def check_valid_region(conn, region):
if conn is None:
print 'Invalid region (%s)' % region
sys.exit(1)
def create_db(domain_name, region_name):
"""Create a new DB
:param domain: Name of the domain to create
:type domain: str
"""
sdb = boto.sdb.connect_to_region(region_name)
check_valid_region(sdb, region_name)
return sdb.create_domain(domain_name)
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser(version=VERSION, usage="Usage: %prog [--dump|--load|--empty|--list|-l] [options]")
# Commands
parser.add_option("--dump", help="Dump domain to file", dest="dump", default=False, action="store_true")
parser.add_option("--load", help="Load domain contents from file", dest="load", default=False, action="store_true")
parser.add_option("--empty", help="Empty all contents of domain", dest="empty", default=False, action="store_true")
parser.add_option("-l", "--list", help="List All domains", dest="list", default=False, action="store_true")
parser.add_option("-c", "--create", help="Create domain", dest="create", default=False, action="store_true")
parser.add_option("-a", "--all-domains", help="Operate on all domains", action="store_true", default=False, dest="all_domains")
if json:
parser.add_option("-j", "--use-json", help="Load/Store as JSON instead of XML", action="store_true", default=False, dest="json")
parser.add_option("-s", "--sort-attibutes", help="Sort the element attributes", action="store_true", default=False, dest="sort_attributes")
parser.add_option("-d", "--domain", help="Do functions on domain (may be more then one)", action="append", dest="domains")
parser.add_option("-f", "--file", help="Input/Output file we're operating on", dest="file_name")
parser.add_option("-r", "--region", help="Region (e.g. us-east-1[default] or eu-west-1)", default="us-east-1", dest="region_name")
(options, args) = parser.parse_args()
if options.create:
for domain_name in options.domains:
create_db(domain_name, options.region_name)
exit()
sdb = boto.sdb.connect_to_region(options.region_name)
check_valid_region(sdb, options.region_name)
if options.list:
for db in sdb.get_all_domains():
print db
exit()
if not options.dump and not options.load and not options.empty:
parser.print_help()
exit()
#
# Setup
#
if options.domains:
domains = []
for domain_name in options.domains:
domains.append(sdb.get_domain(domain_name))
elif options.all_domains:
domains = sdb.get_all_domains()
else:
domains = [choice_input(options=sdb.get_all_domains(), title="No domain specified, please choose one")]
#
# Execute the commands
#
stime = time.time()
if options.empty:
if confirm("WARNING!!! Are you sure you want to empty the following domains?: %s" % domains):
stime = time.time()
for domain in domains:
print "--------> Emptying %s <--------" % domain.name
empty_db(domain)
else:
print "Canceling operations"
exit()
if options.dump:
for domain in domains:
print "--------> Dumping %s <---------" % domain.name
if options.file_name:
file_name = options.file_name
else:
file_name = "%s.db" % domain.name
dump_db(domain, file_name, options.json, options.sort_attributes)
if options.load:
for domain in domains:
print "---------> Loading %s <----------" % domain.name
if options.file_name:
file_name = options.file_name
else:
file_name = "%s.db" % domain.name
load_db(domain, open(file_name, "rb"), options.json)
total_time = round(time.time() - stime, 2)
print "--------> Finished in %s <--------" % total_time
|
[
"damacorrech@gmail.com"
] |
damacorrech@gmail.com
|
|
05302549e808192e9666ab082e798253d02d5e19
|
d52aadf33f41edd66997f04d78c9f11e6aa9c22e
|
/tests/imfusion/test_ctg.py
|
844a8b741a91c3b4ce813844c2fc7c90d1597796
|
[
"MIT"
] |
permissive
|
sofiaff/imfusion
|
3ec3404715e6c9fbca749a551a384f80217d8068
|
796c546802b40ae4aa34e60c6dd308ef4a3c80b1
|
refs/heads/master
| 2021-07-11T21:56:16.484192
| 2017-05-11T20:50:36
| 2017-05-11T20:50:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,138
|
py
|
# -*- coding: utf-8 -*-
"""Tests for imfusion.ctg module."""
# pylint: disable=wildcard-import,redefined-builtin,unused-wildcard-import
from __future__ import absolute_import, division, print_function
from builtins import *
# pylint: enable=wildcard-import,redefined-builtin,unused-wildcard-import
from collections import namedtuple
import pyfaidx
import pytest
from imfusion import ctg
from imfusion.build import Reference
from imfusion.model import Insertion
from imfusion.util.frozendict import frozendict
Sequence = namedtuple('Sequence', ['seq'])
Gene = namedtuple('Gene', ['contig', 'start', 'end', 'strand'])
# pylint: disable=redefined-outer-name,no-self-use,too-few-public-methods
# pylint: disable=protected-access,C0103
def _insertion(id,
seqname,
position,
strand,
support_junction=1,
support_spanning=1,
metadata=None):
"""Helper function for building an Insertion instance."""
return Insertion(
id=id,
seqname=seqname,
position=position,
strand=strand,
support_junction=support_junction,
support_spanning=support_spanning,
support=support_junction + support_spanning,
metadata=frozendict(metadata or {}))
class TestMergeIntervals(object):
"""Tests for the merge_intervals function."""
def test_merge_intervals(self):
"""Test using example intervals."""
intervals = [(0, 10), (14, 20), (18, 30), (30, 32), (33, 35)]
merged = list(ctg.merge_intervals(intervals))
assert merged == [(0, 10), (14, 32), (33, 35)]
def test_merge_intervals_empty(self):
"""Test empty interval case."""
merged = list(ctg.merge_intervals([]))
assert merged == []
class TestMergeGenomicIntervals(object):
"""Tests for the merge_genomic_intervals function."""
def test_merge_genomic_intervals(self):
"""Test using example intervals."""
intervals = [('1', 10, 20), ('1', 22, 30), ('1', 25, 32), ('2', 8, 15),
('3', 25, 40), ('3', 35, 40)]
merged = list(ctg.merge_genomic_intervals(intervals))
assert merged == [('1', 10, 20), ('1', 22, 32), ('2', 8, 15),
('3', 25, 40)]
def test_merge_genomic_intervals_empty(self):
"""Test empty interval case."""
merged = list(ctg.merge_genomic_intervals([]))
assert merged == []
@pytest.fixture
def reference_path():
"""Returns path to example reference sequence."""
return pytest.helpers.data_path('reference.fa', relative_to=__file__)
@pytest.fixture
def reference(reference_path):
"""Returns example reference sequence."""
return pyfaidx.Fasta(str(reference_path))
class TestCountRegion(object):
"""Tests for count_region function."""
def test_count_region(self, reference):
"""Test counting without pattern."""
assert ctg.count_region(reference, region=('1', 10, 30)) == 20
def test_count_region_pattern(self, reference):
"""Test counting with pattern."""
assert ctg.count_region(
reference, region=('1', 10, 30), pattern='AG|GA') == 3
class TestCountTotal(object):
"""Tests for count_total function."""
def test_count_total(self, reference):
"""Test without intervals, without pattern."""
assert ctg.count_total(reference) == 280
def test_count_total_pattern(self, reference):
"""Test without intervals, with pattern."""
assert ctg.count_total(reference, pattern='TA') == 34
def test_count_total_intervals(self, reference):
"""Test with intervals, without pattern."""
intervals = [('1', 10, 20), ('1', 15, 25), ('2', 0, 10)]
assert ctg.count_total(reference, intervals=intervals) == 25
def test_count_total_intervals_pattern(self, reference):
"""Test with intervals, with pattern."""
intervals = [('1', 10, 20), ('1', 15, 25), ('2', 0, 10)]
assert ctg.count_total(
reference, pattern='TA', intervals=intervals) == 3
@pytest.fixture
def insertions():
"""Example insertion set."""
return [
_insertion(id='1', seqname='1', position=9, strand=1,
metadata=frozendict({'gene_id': 'gene_a', 'sample': 'S1'})),
_insertion(id='2', seqname='1', position=15, strand=-1,
metadata=frozendict({'gene_id': 'gene_b', 'sample': 'S2'}))
] # yapf: disable
class TestTestRegion(object):
"""Tests for test_region function."""
def test_test_region(self, insertions, reference):
"""Test enriched region."""
p_val = ctg.test_region(insertions, reference, region=('1', 5, 20))
assert p_val < 0.01
def test_test_region_negative(self, insertions, reference):
"""Test non-enriched region."""
p_val = ctg.test_region(insertions, reference, region=('1', 10, 30))
assert p_val > 0.01
class TestApplyWindow(object):
"""Tests for apply_window function."""
def test_apply_window(self):
"""Tests example on forward strand."""
new_window = ctg._apply_gene_window(
Gene('1', 100, 120, '+'), window=(80, 50))
assert new_window == ('1', 20, 170)
def test_apply_window_rev(self):
"""Tests example on reverse strand."""
new_window = ctg._apply_gene_window(
Gene('1', 100, 120, '-'), window=(80, 50))
assert new_window == ('1', 50, 200)
def test_apply_window_none(self):
"""Tests example without window."""
new_window = ctg._apply_gene_window(
Gene('1', 100, 120, '-'), window=None)
assert new_window == ('1', 100, 120)
def test_apply_window_wrong_strand(self):
"""Tests example without proper strand."""
with pytest.raises(ValueError):
ctg._apply_gene_window(Gene('1', 100, 120, None), window=(80, 50))
class TestSubsetToWindows(object):
"""Tests subset_to_windows function."""
def test_subset_insertions(self, insertions):
"""Test example."""
windows = {'gene_a': ('1', 8, 12), 'gene_b': ('2', 10, 20)}
subset = ctg._subset_to_windows(insertions, windows)
assert len(subset) == 1
assert subset[0].seqname == '1'
def test_subset_insertions_no_overlap(self, insertions):
"""Test example with no insertions within windows."""
windows = {'gene_a': ('1', 100, 120), 'gene_b': ('2', 10, 20)}
assert len(ctg._subset_to_windows(insertions, windows)) == 0
def test_subset_insertions_no_seqname(self, insertions):
"""Test example with overlapping position on different sequence."""
windows = {'gene_a': ('2', 100, 120), 'gene_b': ('2', 10, 20)}
assert len(ctg._subset_to_windows(insertions, windows)) == 0
def test_subset_insertions_wrong_gene(self, insertions):
"""Test example."""
windows = {'gene_a': ('1', 8, 12), 'gene_c': ('1', 10, 20)}
subset = ctg._subset_to_windows(insertions, windows)
assert len(subset) == 1
assert subset[0].seqname == '1'
class TestCollapsePerSample(object):
"""Tests for collapse_per_sample function."""
def test_example(self, insertions):
"""Tests example with collapsing."""
insertions[1] = insertions[1]._replace(
metadata={'gene_id': 'gene_a',
'sample': 'S1'})
merged = list(ctg._collapse_per_sample(insertions))
assert len(merged) == 1
assert merged[0].position == 12
def test_negative_example(self, insertions):
"""Tests example without collapsing."""
merged = list(ctg._collapse_per_sample(insertions))
assert merged == insertions
@pytest.fixture
def ctg_insertions():
"""Insertions for test_ctg test case."""
return [
_insertion(id='1', seqname='1', position=9, strand=1,
metadata={'gene_id': 'gene_a', 'sample': 'S1'}),
_insertion(id='2', seqname='1', position=9, strand=1,
metadata={'gene_id': 'gene_a', 'sample': 'S1'}),
_insertion(id='3', seqname='1', position=8, strand=-1,
metadata={'gene_id': 'gene_a', 'sample': 'S1'}),
_insertion(id='4', seqname='1', position=8, strand=-1,
metadata={'gene_id': 'gene_b', 'sample': 'S2'}),
_insertion(id='5', seqname='2', position=12, strand=-1,
metadata={'gene_id': 'gene_c', 'sample': 'S2'}),
_insertion(id='6', seqname='1', position=6, strand=-1,
metadata={'gene_id': 'gene_a', 'sample': 'S3'})
] # yapf: disable
@pytest.fixture
def ctg_reference():
"""Reference for test_ctg test case."""
return Reference(
pytest.helpers.data_path(
'ctg_reference', relative_to=__file__))
class TestTestCtgs(object):
"""Tests for the test_ctgs_function."""
def test_example(self, ctg_insertions, ctg_reference):
"""Test example with three genes."""
# TODO: P-values seem higher than in previous tests. Check why.
# Do CTG test.
result = ctg.test_ctgs(ctg_insertions, ctg_reference, per_sample=False)
result = result.set_index('gene_id')
# Check results.
assert len(result) == 3
assert result.loc['gene_a', 'p_value'] < 0.05
assert result.loc['gene_b', 'p_value'] > 0.05
assert result.loc['gene_c', 'p_value'] > 0.05
def test_example_with_collapse(self, ctg_insertions, ctg_reference):
"""Tests if gene_a is no longer significant after collapsing."""
# Do CTG test.
result = ctg.test_ctgs(ctg_insertions, ctg_reference, per_sample=True)
result = result.set_index('gene_id')
# Check results.
assert len(result) == 3
assert result.loc['gene_a', 'p_value'] > 0.05
assert result.loc['gene_b', 'p_value'] > 0.05
assert result.loc['gene_c', 'p_value'] > 0.05
def test_example_with_chromosomes(self, ctg_insertions, ctg_reference):
"""Tests subsetting for specific chromosomes."""
# Do CTG test.
result = ctg.test_ctgs(
ctg_insertions, ctg_reference, chromosomes=['1'], per_sample=True)
assert len(result) == 2
assert set(result['gene_id']) == {'gene_a', 'gene_b'}
def test_example_with_window(self, ctg_insertions, ctg_reference):
"""Tests applying a gene window."""
# TODO: check the generated windows.
# Do CTG test.
result = ctg.test_ctgs(
ctg_insertions, ctg_reference, window=(4, 0), per_sample=False)
result = result.set_index('gene_id')
# Check result.
assert result.loc['gene_a', 'p_value'] < 0.05
def test_empty(self, ctg_reference):
"""Test example without insertions."""
result = ctg.test_ctgs(
[], ctg_reference, window=(4, 0), per_sample=False)
assert len(result) == 0
assert list(result.columns) == [
'gene_id', 'p_value', 'q_value', 'gene_name', 'n_samples'
]
|
[
"julianderuiter@gmail.com"
] |
julianderuiter@gmail.com
|
77cbac08c036ca1e98da48e154c14c9d66922d1b
|
005f02cb534bbf91fe634fcf401441e1179365c8
|
/15-Blog Project/ruxhino/blog/forms.py
|
7e963ad3556465665a256eb2e063fe2ed71ea748
|
[] |
no_license
|
Ruxhino-B/django-deployment-example
|
220a39a456871a1bf42a64fd5b945731056fc7b9
|
e19713ac1e11af202152ad20d7c3c94891a77e83
|
refs/heads/master
| 2020-04-18T02:21:10.505691
| 2020-01-06T14:18:18
| 2020-01-06T14:25:25
| 167,159,223
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 697
|
py
|
from django import forms
from blog.models import Post,Comment
class PostForm(forms.ModelForm):
class Meta():
model = Post
fields = ('author','title','text')
widgets = {
'title':forms.TextInput(attrs={'class':'textinputclass'}),
'text':forms.Textarea(attrs={'class':'editable medium-editor-textarea postcontent'})
}
class CommentForm(forms.ModelForm):
class Meta():
model = Comment
fields = ('author','text')
widgets = {
'author':forms.TextInput(attrs={'class':'textinputclass'}),
'text':forms.Textarea(attrs={'class':'editable medium-editor-textarea'})
}
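
# Illustrative note (editorial addition, not from the original project): with
# these widgets, {{ form.text }} renders as a <textarea> carrying the
# "editable medium-editor-textarea" classes that medium-editor style JS/CSS in
# the templates can hook onto; the underlying models are unchanged.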
|
[
"ruxhino@gmail.com"
] |
ruxhino@gmail.com
|
9cc7f8b8f3d0bb3c00162281ecb39f2f4f78cfb6
|
57ee98c57c566260f4d134e8160ce2245810c7ae
|
/scripts/bioconductor/bioconductor_skeleton.py
|
2b95a277635abe0217c9388192b303805c4ee483
|
[
"MIT"
] |
permissive
|
inodb/bioconda-recipes
|
c2b3e9af0940fd90bf8335b987e3a5d6d4749b74
|
5d28e0dbaf62f3c3a87d3c2a125f60a23874f3d0
|
refs/heads/master
| 2021-01-18T02:32:13.931120
| 2016-05-11T20:04:39
| 2016-05-11T20:04:39
| 58,580,064
| 1
| 0
| null | 2016-05-11T20:47:58
| 2016-05-11T20:47:58
| null |
UTF-8
|
Python
| false
| false
| 15,426
|
py
|
#!/usr/bin/env python
import shutil
import tempfile
import configparser
from textwrap import dedent
import tarfile
import pyaml
import hashlib
import os
import re
import bs4
import urllib
from urllib import request
from urllib import parse
from urllib import error
from collections import OrderedDict
import logging
import requests
logging.basicConfig(level=logging.INFO, format='[bioconductor_skeleton.py %(asctime)s]: %(message)s')
logger = logging.getLogger()
logging.getLogger("requests").setLevel(logging.WARNING)
base_url = 'http://bioconductor.org/packages/'
# Packages that might be specified in the DESCRIPTION of a package as
# dependencies, but since they're built-in we don't need to specify them in
# the meta.yaml.
#
# Note: this list is from:
#
# conda create -n rtest -c r r
# R -e "rownames(installed.packages())"
BASE_R_PACKAGES = ["base", "boot", "class", "cluster", "codetools", "compiler",
"datasets", "foreign", "graphics", "grDevices", "grid",
"KernSmooth", "lattice", "MASS", "Matrix", "methods",
"mgcv", "nlme", "nnet", "parallel", "rpart", "spatial",
"splines", "stats", "stats4", "survival", "tcltk", "tools",
"utils"]
HERE = os.path.abspath(os.path.dirname(__file__))
class PageNotFoundError(Exception): pass
class BioCProjectPage(object):
def __init__(self, package):
"""
Represents a single Bioconductor package page and provides access to
scraped data.
>>> x = BioCProjectPage('DESeq2')
>>> x.tarball_url
'http://bioconductor.org/packages/release/bioc/src/contrib/DESeq2_1.8.2.tar.gz'
"""
self.base_url = base_url
self.package = package
self._md5 = None
self._cached_tarball = None
self._dependencies = None
self.build_number = 0
self.request = requests.get(os.path.join(base_url, package))
if not self.request:
raise PageNotFoundError('Error {0.status_code} ({0.reason})'.format(self.request))
# Since we provide the "short link" we will get redirected. Using
# requests allows us to keep track of the final destination URL, which
# we need for reconstructing the tarball URL.
self.url = self.request.url
# The table at the bottom of the page has the info we want. An earlier
# draft of this script parsed the dependencies from the details table.
# That's still an option if we need a double-check on the DESCRIPTION
# fields.
self.soup = bs4.BeautifulSoup(
self.request.content,
'html.parser')
self.details_table = self.soup.find_all(attrs={'class': 'details'})[0]
# However, it is helpful to get the version info from this table. That
# way we can try getting the bioaRchive tarball and cache that.
for td in self.details_table.findAll('td'):
if td.getText() == 'Version':
version = td.findNext().getText()
break
self.version = version
@property
def bioaRchive_url(self):
"""
Returns the bioaRchive URL if one exists for this version of this
package, otherwise returns None.
        Note that the package version used here is the one scraped from the
        details table on the Bioconductor project page in __init__.
"""
url = 'https://bioarchive.galaxyproject.org/{0.package}_{0.version}.tar.gz'.format(self)
response = requests.get(url)
if response:
return url
elif response.status_code == 404:
return
else:
raise PageNotFoundError("Unexpected error: {0.status_code} ({0.reason})".format(response))
@property
def bioconductor_tarball_url(self):
"""
Return the url to the tarball from the bioconductor site.
"""
        r = re.compile(r'{0}.*\.tar.gz'.format(self.package))
def f(href):
return href and r.search(href)
results = self.soup.find_all(href=f)
assert len(results) == 1, (
"Found {0} tags with '.tar.gz' in href".format(len(results)))
s = list(results[0].stripped_strings)
assert len(s) == 1
# build the actual URL based on the identified package name and the
# relative URL from the source. Here we're just hard-coding
# '../src/contrib' based on the structure of the bioconductor site.
return os.path.join(parse.urljoin(self.url, '../src/contrib'), s[0])
@property
def tarball_url(self):
url = self.bioaRchive_url
if url:
return url
return self.bioconductor_tarball_url
@property
def tarball_basename(self):
return os.path.basename(self.tarball_url)
@property
def cached_tarball(self):
"""
Downloads the tarball to the `cached_bioconductor_tarballs` dir if one
hasn't already been downloaded for this package.
This is because we need the whole tarball to get the DESCRIPTION file
and to generate an md5 hash, so we might as well save it somewhere.
"""
if self._cached_tarball:
return self._cached_tarball
cache_dir = os.path.join(HERE, 'cached_bioconductor_tarballs')
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
fn = os.path.join(cache_dir, self.tarball_basename)
if os.path.exists(fn):
self._cached_tarball = fn
return fn
tmp = tempfile.NamedTemporaryFile(delete=False).name
with open(tmp, 'wb') as fout:
logger.info('Downloading {0} to {1}'.format(self.tarball_url, fn))
response = requests.get(self.tarball_url)
if response:
fout.write(response.content)
else:
raise PageNotFoundError('Unexpected error {0.status_code} ({0.reason})'.format(response))
shutil.move(tmp, fn)
self._cached_tarball = fn
return fn
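    # Design note (editorial addition): downloading to a NamedTemporaryFile
    # and shutil.move-ing it into place keeps half-downloaded tarballs out of
    # cached_bioconductor_tarballs if the request fails midway.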
@property
def description(self):
"""
Extract the DESCRIPTION file from the tarball and parse it.
"""
t = tarfile.open(self.cached_tarball)
d = t.extractfile(os.path.join(self.package, 'DESCRIPTION')).read()
self._contents = d
c = configparser.ConfigParser()
# On-spec config files need a "section", but the DESCRIPTION file
# doesn't have one. So we just add a fake section, and let the
# configparser take care of the details of parsing.
c.read_string('[top]\n' + d.decode('UTF-8'))
e = c['top']
# Glue together newlines
for k in e.keys():
e[k] = e[k].replace('\n', ' ')
return dict(e)
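    # Illustrative note (editorial addition): with the fake-section trick
    # above, a DESCRIPTION fragment such as
    #
    #     Package: DESeq2
    #     Imports: Rcpp (>= 0.10.1)
    #
    # parses to {'package': 'DESeq2', 'imports': 'Rcpp (>= 0.10.1)'};
    # configparser lower-cases option names by default, which is why the
    # properties below look up 'imports', 'depends' and 'license' in lower case.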
#@property
#def version(self):
# return self.description['version']
@property
def license(self):
return self.description['license']
@property
def imports(self):
try:
return self.description['imports'].split(', ')
except KeyError:
return []
@property
def depends(self):
try:
return self.description['depends'].split(', ')
except KeyError:
return []
def _parse_dependencies(self, items):
"""
The goal is to go from
['package1', 'package2', 'package3 (>= 0.1)', 'package4']
to::
        [
            ('package1', ""),
            ('package2', ""),
            ('package3', ">=0.1"),
            ('package4', ""),
        ]
"""
results = []
for item in items:
toks = [i.strip() for i in item.split('(')]
if len(toks) == 1:
results.append((toks[0], ""))
elif len(toks) == 2:
assert ')' in toks[1]
toks[1] = toks[1].replace(')', '').replace(' ', '')
results.append(tuple(toks))
else:
raise ValueError("Found {0} toks: {1}".format(len(toks), toks))
return results
@property
def dependencies(self):
if self._dependencies:
return self._dependencies
results = []
# Some packages specify a minimum R version, which we'll need to keep
# track of
specific_r_version = False
# Sometimes a version is specified only in the `depends` and not in the
# `imports`. We keep the most specific version of each.
version_specs = list(
set(
self._parse_dependencies(self.imports) +
self._parse_dependencies(self.depends)
)
)
versions = {}
for name, version in version_specs:
if name in versions:
if not versions[name] and version:
versions[name] = version
else:
versions[name] = version
for name, version in sorted(versions.items()):
# DESCRIPTION notes base R packages, but we don't need to specify
# them in the dependencies.
if name in BASE_R_PACKAGES:
continue
# Try finding the dependency on the bioconductor site; if it can't
# be found then we assume it's in CRAN.
try:
BioCProjectPage(name)
prefix = 'bioconductor-'
except PageNotFoundError:
prefix = 'r-'
logger.info('{0:>12} dependency: name="{1}" version="{2}"'.format(
{'r-': 'R', 'bioconductor-': 'BioConductor'}[prefix],
name, version))
# add padding to version string
if version:
version = " " + version
if name.lower() == 'r':
# "r >=2.5" rather than "r-r >=2.5"
specific_r_version = True
results.append(name.lower() + version)
else:
results.append(prefix + name.lower() + version)
# Add R itself if no specific version was specified
if not specific_r_version:
results.append('r')
self._dependencies = results
return self._dependencies
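    # Design note (editorial addition): the try/except above decides the conda
    # package prefix by probing the Bioconductor site for each dependency; a
    # PageNotFoundError is taken to mean the package lives on CRAN and gets
    # the 'r-' prefix instead of 'bioconductor-'.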
@property
def md5(self):
"""
Calculate the md5 hash of the tarball so it can be filled into the
meta.yaml.
"""
if self._md5 is None:
self._md5 = hashlib.md5(
open(self.cached_tarball, 'rb').read()).hexdigest()
return self._md5
@property
def meta_yaml(self):
"""
Build the meta.yaml string based on discovered values.
Here we use a nested OrderedDict so that all meta.yaml files created by
this script have the same consistent format. Otherwise we're the whims
of Python dict sorting.
We use pyaml (rather than yaml) because it has better handling of
OrderedDicts.
"""
url = self.bioaRchive_url
if not url:
url = self.tarball_url
DEPENDENCIES = sorted(self.dependencies)
d = OrderedDict((
(
'package', OrderedDict((
('name', 'bioconductor-' + self.package.lower()),
('version', self.version),
)),
),
(
'source', OrderedDict((
('fn', self.tarball_basename),
('url', url),
('md5', self.md5),
)),
),
(
'build', OrderedDict((
('number', self.build_number),
('rpaths', ['lib/R/lib/', 'lib/']),
)),
),
(
'requirements', OrderedDict((
# If you don't make copies, pyaml sees these as the same
# object and tries to make a shortcut, causing an error in
# decoding unicode. Possible pyaml bug? Anyway, this fixes
# it.
('build', DEPENDENCIES[:]),
('run', DEPENDENCIES[:]),
)),
),
(
'test', OrderedDict((
('commands',
['''$R -e "library('{package}')"'''.format(
package=self.package)]),
)),
),
(
'about', OrderedDict((
('home', self.url),
('license', self.license),
('summary', self.description['description']),
)),
),
))
return pyaml.dumps(d).decode('utf-8')
def write_recipe(package, recipe_dir, force=False):
"""
Write the meta.yaml and build.sh files.
"""
proj = BioCProjectPage(package)
recipe_dir = os.path.join(recipe_dir, 'bioconductor-' + proj.package.lower())
if os.path.exists(recipe_dir) and not force:
raise ValueError("{0} already exists, aborting".format(recipe_dir))
else:
if not os.path.exists(recipe_dir):
print('creating %s' % recipe_dir)
os.makedirs(recipe_dir)
    # If the version number has not changed but something else in the recipe
    # *has* changed, then bump the build number.
meta_file = os.path.join(recipe_dir, 'meta.yaml')
if os.path.exists(meta_file):
updated_meta = pyaml.yaml.load(proj.meta_yaml)
current_meta = pyaml.yaml.load(open(meta_file))
# pop off the version and build numbers so we can compare the rest of
# the dicts
updated_version = updated_meta['package'].pop('version')
current_version = current_meta['package'].pop('version')
updated_build_number = updated_meta['build'].pop('number')
current_build_number = current_meta['build'].pop('number')
if (
(updated_version == current_version)
and
(updated_meta != current_meta)
):
proj.build_number = int(current_build_number) + 1
with open(os.path.join(recipe_dir, 'meta.yaml'), 'w') as fout:
fout.write(proj.meta_yaml)
with open(os.path.join(recipe_dir, 'build.sh'), 'w') as fout:
fout.write(dedent(
"""
#!/bin/bash
# R refuses to build packages that mark themselves as
# "Priority: Recommended"
mv DESCRIPTION DESCRIPTION.old
grep -v '^Priority: ' DESCRIPTION.old > DESCRIPTION
#
$R CMD INSTALL --build .
#
# # Add more build steps here, if they are necessary.
#
# See
# http://docs.continuum.io/conda/build.html
# for a list of environment variables that are set during the build
# process.
# """
)
)
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('package', help='Bioconductor package name')
ap.add_argument('--recipes', default='recipes',
help='Recipe will be created in <recipe-dir>/<package>')
ap.add_argument('--force', action='store_true',
help='Overwrite the contents of an existing recipe')
args = ap.parse_args()
write_recipe(args.package, args.recipes, args.force)
|
[
"dalerr@niddk.nih.gov"
] |
dalerr@niddk.nih.gov
|
6520da5bb0edf10bbc7c9d36a7777515cfa145a9
|
8a54e11b6ed11d2974e13bb86c554cbbb4bb35ec
|
/iot/models.py
|
04d723d9a8de929987755509468a51e19d0ecc4b
|
[] |
no_license
|
DesigningShit/DSCore
|
222f853e46dce181b81fd4e188038bbd09fc1496
|
2e0ea24be171bd8cbdf7be3e3db8562a2cc94bf0
|
refs/heads/master
| 2020-03-25T09:56:47.936561
| 2018-08-09T15:37:24
| 2018-08-09T15:37:24
| 143,680,134
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,296
|
py
|
from django.db import models as m
from api.keygen import getRandomID
from api.models import Profile
class IOTTenant(m.Model):
owner = m.ForeignKey('api.Profile', related_name='TenantOwner', to_field='userkey', on_delete=m.CASCADE)
name = m.CharField(max_length=150, default='.')
tenantid = m.CharField(max_length=30, default=getRandomID, unique=True)
created = m.DateTimeField(auto_now_add=True)
modified = m.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class IOTCustomer(m.Model):
owner = m.ForeignKey('IOTTenant', related_name='TenantOwner', to_field='tenantid', on_delete=m.CASCADE)
name = m.CharField(max_length=150, default='.')
customerid = m.CharField(max_length=30, default=getRandomID, unique=True)
created = m.DateTimeField(auto_now_add=True)
modified = m.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class IOTChannelModel(m.Model):
channelowner = m.ForeignKey('IOTCustomer', related_name='channelowner', to_field='customerid', on_delete=m.CASCADE)
name = m.CharField(max_length=250, default='.')
channelid = m.CharField(max_length=30, default=getRandomID, unique=True)
created = m.DateTimeField(auto_now_add=True)
modified = m.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class IOTSensorModel(m.Model):
channel = m.ForeignKey('IOTChannelModel', related_name='channel', to_field='channelid', on_delete=m.CASCADE)
sensorid = m.CharField(max_length=30, default=getRandomID, unique=True)
name = m.CharField(max_length=250, default='Not Supplied')
created = m.DateTimeField(auto_now_add=True)
modified = m.DateTimeField(auto_now=True)
context = m.CharField(max_length=250, default='Undefined')
def __str__(self):
return self.name
class IOTSensorReadingModel(m.Model):
sensor = m.ForeignKey('IOTSensorModel', related_name='sensor', to_field='sensorid', on_delete=m.CASCADE)
readingid = m.CharField(max_length=30, default=getRandomID, unique=True)
created = m.DateTimeField(auto_now_add=True)
modified = m.DateTimeField(auto_now=True)
data = m.CharField(max_length=250, default='Not Supplied')
def __str__(self):
return self.sensor.sensorid+':'+self.data
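
# Editorial note (not from the original project): every ForeignKey above joins
# on the random string identifier (to_field='...id', populated by getRandomID)
# rather than the integer primary key, so related rows reference the public
# key that API clients see.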
|
[
"mpuckett@designingshit.com"
] |
mpuckett@designingshit.com
|
74dddc3fa04e29a5d4f6cc7672029d37a1350ca3
|
aeab159551473506f1f187388df10e010ec3e16d
|
/Practice Problems 3.1-3.13/3.12.py
|
77e05946e530837863c54018bafee236f2376085
|
[] |
no_license
|
Osama710/Assignment-3
|
5b27f52f1bd4cf4f3c19e335396e1d3c7e6a9008
|
5238f570cf253135b7807a8096b5ab736d5eea3e
|
refs/heads/master
| 2020-04-07T21:29:50.868286
| 2018-11-22T17:23:23
| 2018-11-22T17:23:23
| 158,729,413
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 164
|
py
|
print("Muhammad Osama - 18B-003-CS - Sec'A'")
print("Practice Problem: 3.12 ")
def negatives(lst):
for i in lst:
if i < 0:
print(i)
|
[
"noreply@github.com"
] |
Osama710.noreply@github.com
|
8286d2fa7b3c0d2bdd92e58a5fbd42d12ea04b67
|
7d21e180f943d902833162dd22697ebff2f7c83f
|
/references/Collect_v04.py
|
27c83e35b1e00bd5e232ab4a443f3733f6b1bc53
|
[] |
no_license
|
joinmm/c4d-export-to-render-farm
|
811556da359e17ecf0e352f869814b67e026da46
|
b3a657703528a81c50dc00510f53dcd9ab63929d
|
refs/heads/master
| 2022-12-21T03:31:32.033015
| 2020-09-27T12:59:10
| 2020-09-27T12:59:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,101
|
py
|
import c4d
from c4d import gui
def get_targetPath():
targetPath = c4d.storage.SaveDialog()
    if not targetPath[-4:] == ".c4d":
        targetPath = targetPath + ".c4d"
    return targetPath
def main():
doc=c4d.documents.GetActiveDocument()
docpath=doc.GetDocumentPath()
targetPath = "/Users/Carlos/Desktop/Test 03"
#You will need c4d R15.057 for this function
assets=c4d.documents.GetAllAssets(doc, True, docpath)
missingAssets = None
saveProject_flags = c4d.SAVEPROJECT_ASSETS | c4d.SAVEPROJECT_SCENEFILE | c4d.SAVEPROJECT_PROGRESSALLOWED | c4d.SAVEPROJECT_ADDTORECENTLIST
collect_file = c4d.documents.SaveProject(doc, saveProject_flags, targetPath, assets, missingAssets)
c4d.EventAdd()
if collect_file == False:
        gui.MessageDialog('The scene has not been collected,\ncheck the console for more details.')
        print "the scene has not been collected."
return
else:
print "the scene has been collected correctly."
if __name__=='__main__':
main()
|
[
"carlos@dynetv.com"
] |
carlos@dynetv.com
|
96a9e58ece7758108a958f45ccfeaf172c17d3c3
|
7c8857320bc9ef04244dc799a1e7f9348ee01de9
|
/transaction.py
|
498bb7f44f0a6411e93aa67df8c51e8c19cda8d5
|
[] |
no_license
|
pagliuca523/financial_automation
|
2415cce6040192bbf9991a30540aa2de073c0255
|
54a11977ffc25a23ffc79989cbf069f27e2885ae
|
refs/heads/main
| 2023-03-14T01:24:37.963857
| 2021-03-07T19:44:02
| 2021-03-07T19:44:02
| 336,790,956
| 0
| 0
| null | 2021-03-07T19:44:02
| 2021-02-07T13:15:58
|
Python
|
UTF-8
|
Python
| false
| false
| 74
|
py
|
class Transaction:
    def __init__(self, date, name, balance):
        # Store the raw transaction fields (stub implementation).
        self.date = date
        self.name = name
        self.balance = balance
|
[
"pagliuca523@hotmail.com"
] |
pagliuca523@hotmail.com
|
fcced74d0a0788e38378e8e59aab7c132a1a13e4
|
941a611199b8ee963dc904693e54e8fa8aa18ddb
|
/python/surf/devices/silabs/_Si5326.py
|
ea9453a734164017a2dabf8e3d0a8bab9a9fc3ea
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
cjchin/surf
|
84dab5716a6a5d3c6f2b9dc8226b7bc55de999e3
|
c52c3520c10a4459ca88751a57fb51c20f6a431a
|
refs/heads/master
| 2023-05-12T06:42:18.312381
| 2023-04-24T19:32:53
| 2023-04-24T19:32:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 40,856
|
py
|
#-----------------------------------------------------------------------------
# This file is part of 'SLAC Firmware Standard Library'.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of 'SLAC Firmware Standard Library', including this file,
# may be copied, modified, propagated, or distributed except according to
# the terms contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
import pyrogue as pr
import rogue
import click
import fnmatch
class Si5326(pr.Device):
def __init__(self,**kwargs):
self._useVars = rogue.Version.greaterThanEqual('5.4.0')
if self._useVars:
size = 0
else:
            size = (0x100 << 2) # 256 x 32-bit registers = 1KB address space
super().__init__(size=size, **kwargs)
if self._useVars:
self.add(pr.RemoteVariable(
name = "DataBlock",
description = "",
offset = 0,
bitSize = 32 * 0x100,
bitOffset = 0,
numValues = 0x100,
valueBits = 32,
valueStride = 32,
updateNotify = True,
bulkOpEn = True,
overlapEn = True,
verify = True,
hidden = True,
base = pr.UInt,
mode = "RW",
))
self.add(pr.LocalVariable(
name = "TxtFilePath",
description = "Used if command's argument is empty",
mode = "RW",
value = "",
))
##############################
# Commands
##############################
@self.command(value='',description="Load the .txt from DSPLLsim",)
def LoadTxtFile(arg):
# Check if non-empty argument
if (arg != ""):
path = arg
else:
# Use the variable path instead
path = self.TxtFilePath.get()
# Check for .txt file
if fnmatch.fnmatch(path, '*.txt'):
click.secho( f'{self.path}.LoadTxtFile(): {path}', fg='green')
else:
click.secho( f'{self.path}.LoadTxtFile(): {path} is not .txt', fg='red')
return
# Open the .txt file
fh = open(path,'r')
all_lines = fh.readlines()
fh.close()
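            # Editorial note: the parsing below assumes DSPLLsim register-map
            # lines of the form "<addr>, <data>h" (decimal address, hex data
            # with an 'h' suffix, e.g. "3, 15h"); lines containing '#' are
            # treated as comments and skipped.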
# Process the read file
for line in all_lines:
if line.find('#') < 0:
addr,data = line.replace('h','').replace(',','').split()
self._setValue(
offset = int(addr)<<2,
data = int(data,16),
)
            # Update local RemoteVariables and verify configuration
self.readBlocks(recurse=True)
self.checkBlocks(recurse=True)
###########################
# Register[0]
###########################
self.add(pr.RemoteVariable(
name = 'FREE_RUN',
description = 'Internal to the device, route XA/XB to CKIN2.',
offset = (0 << 2),
bitSize = 1,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CKOUT_ALWAYS_ON',
description = 'This will bypass the SQ_ICAL function',
offset = (0 << 2),
bitSize = 1,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'BYPASS_REG',
description = 'This bit enables or disables the PLL bypass mode',
offset = (0 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[1]
###########################
self.add(pr.RemoteVariable(
name = 'CK_PRIOR2',
description = 'Selects which of the input clocks will be 2nd priority in the autoselection state machine.',
offset = (1 << 2),
bitSize = 2,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CK_PRIOR1',
description = 'Selects which of the input clocks will be 1st priority in the autoselection state machine.',
offset = (1 << 2),
bitSize = 2,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[2]
###########################
self.add(pr.RemoteVariable(
name = 'BWSEL_REG',
description = 'Selects nominal f3dB bandwidth for PLL.',
offset = (2 << 2),
bitSize = 4,
bitOffset = 4,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[3]
###########################
self.add(pr.RemoteVariable(
name = 'CKSEL_REG',
description = 'If the device is operating in register-based manual clock selection mode',
offset = (3 << 2),
bitSize = 2,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'DHOLD',
description = 'Forces the part into digital hold',
offset = (3 << 2),
bitSize = 1,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'SQ_ICAL',
description = 'This bit determines if the output clocks will remain enabled or be squelched during an internal calibration',
offset = (3 << 2),
bitSize = 1,
bitOffset = 4,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[4]
###########################
self.add(pr.RemoteVariable(
name = 'AUTOSEL_REG',
description = 'Selects method of input clock selection to be used.',
offset = (4 << 2),
bitSize = 2,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'HIST_DEL',
description = 'Selects amount of delay to be used in generating the history information used for Digital Hold',
offset = (4 << 2),
bitSize = 5,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[5]
###########################
self.add(pr.RemoteVariable(
name = 'ICMOS',
description = 'When the output buffer is set to CMOS mode, these bits determine the output buffer drive strength',
offset = (5 << 2),
bitSize = 2,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[6]
###########################
self.add(pr.RemoteVariable(
name = 'SLEEP',
description = 'In sleep mode, all clock outputs are disabled and the maximum amount of internal circuitry is powered down',
offset = (6 << 2),
bitSize = 1,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'SFOUT2_REG',
description = 'Controls output signal format and disable for CKOUT2 output buffer',
offset = (6 << 2),
bitSize = 3,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'SFOUT1_REG',
description = 'Controls output signal format and disable for CKOUT1 output buffer',
offset = (6 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[7]
###########################
self.add(pr.RemoteVariable(
name = 'FOSREFSEL',
description = 'Selects which input clock is used as the reference frequency',
offset = (7 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[8]
###########################
self.add(pr.RemoteVariable(
name = 'HLOG_2',
offset = (8 << 2),
bitSize = 2,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'HLOG_1',
offset = (8 << 2),
bitSize = 2,
bitOffset = 4,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[9]
###########################
self.add(pr.RemoteVariable(
name = 'HIST_AVG',
description = 'Selects amount of averaging time to be used in generating the history information used for Digital Hold',
offset = (9 << 2),
bitSize = 5,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[10]
###########################
self.add(pr.RemoteVariable(
name = 'DSBL2_REG',
description = 'This bit controls the powerdown of the CKOUT2 output buffer',
offset = (10 << 2),
bitSize = 1,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'DSBL1_REG',
description = 'This bit controls the powerdown of the CKOUT1 output buffer',
offset = (10 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[11]
###########################
self.add(pr.RemoteVariable(
name = 'PD_CK2',
description = 'This bit controls the powerdown of the CKIN2 input buffer',
offset = (11 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'PD_CK1',
description = 'This bit controls the powerdown of the CKIN1 input buffer',
offset = (11 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[16]
###########################
self.add(pr.RemoteVariable(
name = 'CLAT',
description = 'With INCDEC_PIN=0, this register sets the phase delay for CKOUTn in units of 1/Fosc',
offset = (16 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[17]
###########################
self.add(pr.RemoteVariable(
name = 'FLAT_VALID',
description = 'Before writing a new FLAT[14:0] value, this bit must be set to zero',
offset = (17 << 2),
bitSize = 1,
bitOffset = 7,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FLAT_14_8',
description = 'Fine resolution control for overall device skew from input clocks to output clocks',
offset = (17 << 2),
bitSize = 7,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[18]
###########################
self.add(pr.RemoteVariable(
name = 'FLAT_7_0',
description = 'Fine resolution control for overall device skew from input clocks to output clocks',
offset = (18 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[19]
###########################
self.add(pr.RemoteVariable(
name = 'FOS_EN',
description = 'Frequency Offset Enable globally disables FOS',
offset = (19 << 2),
bitSize = 1,
bitOffset = 7,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS_THR',
description = 'Frequency Offset at which FOS is declared',
offset = (19 << 2),
bitSize = 2,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'VALTIME',
description = 'Sets amount of time for input clock to be valid before the associated alarm is removed',
offset = (19 << 2),
bitSize = 2,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOCKT',
description = 'Sets retrigger interval for one shot monitoring phase detector output',
offset = (19 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[20]
###########################
self.add(pr.RemoteVariable(
name = 'CK2_BAD_PIN',
description = 'The CK2_BAD status can be reflected on the C2B output pin',
offset = (20 << 2),
bitSize = 1,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CK1_BAD_PIN',
description = 'The CK1_BAD status can be reflected on the C1B output pin',
offset = (20 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOL_PIN',
description = 'The LOL_INT status bit can be reflected on the LOL output pin',
offset = (20 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'INT_PIN',
description = 'Reflects the interrupt status on the INT_C1B output pin',
offset = (20 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[21]
###########################
self.add(pr.RemoteVariable(
name = 'INCDEC_PIN',
description = 'Determines how coarse skew adjustments can be made',
offset = (21 << 2),
bitSize = 1,
bitOffset = 7,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CK1_ACTV_PIN',
offset = (21 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CKSEL_PIN',
offset = (21 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[22]
###########################
self.add(pr.RemoteVariable(
name = 'CK_ACTV_POL',
description = 'Sets the active polarity for the CS_CA signals when reflected on an output pin',
offset = (22 << 2),
bitSize = 1,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CK_BAD_POL',
description = 'Sets the active polarity for the INT_C1B and C2B signals when reflected on output pin',
offset = (22 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOL_POL',
description = 'Sets the active polarity for the LOL status when reflected on an output pin',
offset = (22 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'INT_POL',
description = 'Sets the active polarity for the interrupt status when reflected on the INT_C1B output pin',
offset = (22 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[23]
###########################
self.add(pr.RemoteVariable(
name = 'LOS2_MSK',
description = 'Determines if a LOS on CKIN2 (LOS2_FLG) is used in the generation of an interrupt',
offset = (23 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOS1_MSK',
description = 'Determines if a LOS on CKIN1 (LOS1_FLG) is used in the generation of an interrupt',
offset = (23 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOSX_MSK',
description = 'Determines if a LOS on XA/XB(LOSX_FLG) is used in the generation of an interrupt',
offset = (23 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[24]
###########################
self.add(pr.RemoteVariable(
name = 'FOS2_MSK',
description = 'Determines if the FOS2_FLG is used to in the generation of an interrupt',
offset = (24 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS1_MSK',
description = 'Determines if the FOS1_FLG is used in the generation of an interrupt',
offset = (24 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOL_MSK',
description = 'Determines if the LOL_FLG is used in the generation of an interrupt',
offset = (24 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[25]
###########################
self.add(pr.RemoteVariable(
name = 'N1_HS',
description = 'Sets value for N1 high speed divider which drives NCn_LS (n = 1 to 2) low-speed divider',
offset = (25 << 2),
bitSize = 3,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[31]
###########################
self.add(pr.RemoteVariable(
name = 'NC1_LS_19_16',
description = 'Sets value for NC1 low-speed divider, which drives CKOUT1 output',
offset = (31 << 2),
bitSize = 4,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[32]
###########################
self.add(pr.RemoteVariable(
name = 'NC1_LS_15_8',
description = 'Sets value for NC1 low-speed divider, which drives CKOUT1 output.',
offset = (32 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[33]
###########################
self.add(pr.RemoteVariable(
name = 'NC1_LS_7_0',
description = 'Sets value for NC1 low-speed divider, which drives CKOUT1 output.',
offset = (33 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[34]
###########################
self.add(pr.RemoteVariable(
name = 'NC2_LS_19_16',
description = 'Sets value for NC2 low-speed divider, which drives CKOUT2 output',
offset = (34 << 2),
bitSize = 4,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[35]
###########################
self.add(pr.RemoteVariable(
name = 'NC2_LS_15_8',
description = 'Sets value for NC2 low-speed divider, which drives CKOUT2 output',
offset = (35 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[36]
###########################
self.add(pr.RemoteVariable(
name = 'NC2_LS_7_0',
description = 'Sets value for NC2 low-speed divider, which drives CKOUT2 output',
offset = (36 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[40]
###########################
self.add(pr.RemoteVariable(
name = 'N2_HS',
description = 'Sets value for N2 high speed divider which drives N2LS low-speed divider.',
offset = (40 << 2),
bitSize = 3,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'N2_LS_19_16',
description = 'Sets value for N2 low-speed divider, which drives phase detector.',
offset = (40 << 2),
bitSize = 4,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[41]
###########################
self.add(pr.RemoteVariable(
name = 'N2_LS_15_8',
description = 'Sets value for N2 low-speed divider, which drives phase detector.',
offset = (41 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[42]
###########################
self.add(pr.RemoteVariable(
name = 'N2_LS_7_0',
description = 'Sets value for N2 low-speed divider, which drives phase detector.',
offset = (42 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[43]
###########################
self.add(pr.RemoteVariable(
name = 'N31_18_16',
description = 'Sets value for input divider for CKIN1',
offset = (43 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[44]
###########################
self.add(pr.RemoteVariable(
name = 'N31_15_8',
description = 'Sets value for input divider for CKIN1',
offset = (44 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[45]
###########################
self.add(pr.RemoteVariable(
name = 'N31_7_0',
description = 'Sets value for input divider for CKIN1',
offset = (45 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[46]
###########################
self.add(pr.RemoteVariable(
name = 'N32_18_16',
description = 'Sets value for input divider for CKIN2',
offset = (46 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[47]
###########################
self.add(pr.RemoteVariable(
name = 'N32_15_8',
description = 'Sets value for input divider for CKIN2',
offset = (47 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[48]
###########################
self.add(pr.RemoteVariable(
name = 'N32_7_0',
description = 'Sets value for input divider for CKIN2',
offset = (48 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[55]
###########################
self.add(pr.RemoteVariable(
name = 'CLKIN2RATE',
description = 'CKINn frequency selection for FOS alarm monitoring',
offset = (55 << 2),
bitSize = 3,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CLKIN1RATE',
description = 'CKINn frequency selection for FOS alarm monitoring',
offset = (55 << 2),
bitSize = 3,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[128]
###########################
self.add(pr.RemoteVariable(
name = 'CK2_ACTV_REG',
description = 'Indicates if CKIN2 is currently the active clock for the PLL input',
offset = (128 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'CK1_ACTV_REG',
description = 'Indicates if CKIN1 is currently the active clock for the PLL input',
offset = (128 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
###########################
# Register[129]
###########################
self.add(pr.RemoteVariable(
name = 'LOS2_INT',
description = 'Indicates the LOS status on CKIN2',
offset = (129 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOS1_INT',
description = 'Indicates the LOS status on CKIN1',
offset = (129 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOSX_INT',
description = 'Indicates the LOS status of the external reference on the XA/XB pins',
offset = (129 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
###########################
# Register[130]
###########################
self.add(pr.RemoteVariable(
name = 'CLATPROGRESS',
description = 'Indicates if the last change in the CLAT register has been processed.',
offset = (130 << 2),
bitSize = 1,
bitOffset = 7,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'DIGHOLDVALID',
description = 'Indicates if the digital hold circuit has enough samples of a valid clock to meet digital hold specifications',
offset = (130 << 2),
bitSize = 1,
bitOffset = 6,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS2_INT',
description = 'CKIN2 Frequency Offset Status',
offset = (130 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS1_INT',
description = 'CKIN1 Frequency Offset Status',
offset = (130 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOL_INT',
description = 'PLL Loss of Lock Status',
offset = (130 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RO',
pollInterval = 1,
overlapEn = True,
))
###########################
# Register[131]
###########################
self.add(pr.RemoteVariable(
name = 'LOS2_FLG',
description = 'CKIN2 Loss-of-Signal Flag',
offset = (131 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOS1_FLG',
description = 'CKIN1 Loss-of-Signal Flag',
offset = (131 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOSX_FLG',
description = 'External Reference (signal on pins XA/XB) Loss-of-Signal Flag',
offset = (131 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[132]
###########################
self.add(pr.RemoteVariable(
name = 'FOS2_FLG',
description = 'CLKIN_2 Frequency Offset Flag',
offset = (132 << 2),
bitSize = 1,
bitOffset = 3,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS1_FLG',
description = 'CLKIN_1 Frequency Offset Flag',
offset = (132 << 2),
bitSize = 1,
bitOffset = 2,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOL_FLG',
description = 'PLL Loss of Lock Flag',
offset = (132 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[134]
###########################
self.add(pr.RemoteVariable(
name = 'PARTNUM_RO_11_4',
description = 'Device ID',
offset = (134 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RO',
overlapEn = True,
))
###########################
# Register[135]
###########################
self.add(pr.RemoteVariable(
name = 'PARTNUM_RO_3_0',
description = 'Device ID',
offset = (135 << 2),
bitSize = 4,
bitOffset = 4,
mode = 'RO',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'REVID_RO',
description = 'Indicates Revision Number of Device.',
offset = (135 << 2),
bitSize = 4,
bitOffset = 0,
mode = 'RO',
overlapEn = True,
))
###########################
# Register[136]
###########################
self.add(pr.RemoteVariable(
name = 'RST_REG',
description = 'Internal Reset (Same as Pin Reset)',
offset = (136 << 2),
bitSize = 1,
bitOffset = 7,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'ICAL',
description = 'Start an Internal Calibration Sequence',
offset = (136 << 2),
bitSize = 1,
bitOffset = 6,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[138]
###########################
self.add(pr.RemoteVariable(
name = 'LOS2_EN_1',
description = 'Enable CKIN2 LOS Monitoring on the Specified Input',
offset = (138 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOS1_EN_1',
description = 'Enable CKIN1 LOS Monitoring on the Specified Input',
offset = (138 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[139]
###########################
self.add(pr.RemoteVariable(
name = 'LOS2_EN_0',
description = 'Enable CKIN2 LOS Monitoring on the Specified Input',
offset = (139 << 2),
bitSize = 1,
bitOffset = 5,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'LOS1_EN_0',
description = 'Enable CKIN1 LOS Monitoring on the Specified Input',
offset = (139 << 2),
bitSize = 1,
bitOffset = 4,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS2_EN',
description = 'Enables FOS on a Per Channel Basis',
offset = (139 << 2),
bitSize = 1,
bitOffset = 1,
mode = 'RW',
overlapEn = True,
))
self.add(pr.RemoteVariable(
name = 'FOS1_EN',
description = 'Enables FOS on a Per Channel Basis',
offset = (139 << 2),
bitSize = 1,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[142]
###########################
self.add(pr.RemoteVariable(
name = 'INDEPENDENTSKEW1',
description = '8 bit field that represents a twos complement of the phase offset in terms of clocks from the high speed output divider',
offset = (142 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
###########################
# Register[143]
###########################
self.add(pr.RemoteVariable(
name = 'INDEPENDENTSKEW2',
description = '8 bit field that represents a twos complement of the phase offset in terms of clocks from the high speed output divider',
offset = (143 << 2),
bitSize = 8,
bitOffset = 0,
mode = 'RW',
overlapEn = True,
))
self.add(pr.LinkVariable(
name = 'Locked',
description = 'Inverse of LOL',
mode = 'RO',
dependencies = [self.LOL_INT],
linkedGet = lambda: (False if self.LOL_INT.value() else True)
))
def _setValue(self,offset,data):
if self._useVars:
            # Note: offset is a byte offset; DataBlock.set() takes a 32-bit
            # word index, hence the modulo and the >>2 below
self.DataBlock.set(value=data,index=(offset%0x400)>>2)
else:
self._rawWrite(offset,data) # Deprecated
|
[
"ruckman@slac.stanford.edu"
] |
ruckman@slac.stanford.edu
|
35625f181c0366611d6ea6e2f30127b29f67add9
|
00af94d633b29adb849409a264caa49d4702822e
|
/examples/depthai-python/lib/python3.6/site-packages/ipykernel/tests/test_message_spec.py
|
a142165f164c95aaa04015dedcc3a6b672439191
|
[
"MIT"
] |
permissive
|
gromovnik1337/depthai-python
|
bcc0fe5aff3651a698ee86daf07a5a860f3675d4
|
2b17444aba2f94a236222934e1572c4dd06062dc
|
refs/heads/main
| 2023-03-28T00:34:03.525543
| 2021-03-27T15:28:09
| 2021-03-27T15:28:09
| 348,476,293
| 0
| 0
|
MIT
| 2021-03-20T08:20:56
| 2021-03-16T20:01:17
| null |
UTF-8
|
Python
| false
| false
| 15,929
|
py
|
"""Test suite for our zeromq-based message specification."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import re
import sys
from distutils.version import LooseVersion as V
from queue import Empty
import nose.tools as nt
from nose.plugins.skip import SkipTest
from traitlets import (
HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum
)
from .utils import (TIMEOUT, start_global_kernel, flush_channels, execute,
get_reply, )
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
KC = None
def setup():
global KC
KC = start_global_kernel()
#-----------------------------------------------------------------------------
# Message Spec References
#-----------------------------------------------------------------------------
class Reference(HasTraits):
"""
    Base class for message spec testing.
    This class is the core of the message specification test. The
    idea is that child classes implement trait attributes for each
    message key, so that message keys can be tested against these
    traits using the :meth:`check` method.
"""
def check(self, d):
"""validate a dict against our traits"""
for key in self.trait_names():
assert key in d
# FIXME: always allow None, probably not a good idea
if d[key] is None:
continue
try:
setattr(self, key, d[key])
except TraitError as e:
assert False, str(e)
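# Illustrative sketch (not part of the original suite): a minimal Reference
# subclass declares one trait per required key, and check() asserts that each
# declared key is present and coercible to its trait type:
#   class PingReply(Reference):
#       pong = Bool()
#   PingReply().check({'pong': True})   # passes; a missing key would assert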
class Version(Unicode):
def __init__(self, *args, **kwargs):
self.min = kwargs.pop('min', None)
self.max = kwargs.pop('max', None)
kwargs['default_value'] = self.min
super(Version, self).__init__(*args, **kwargs)
def validate(self, obj, value):
if self.min and V(value) < V(self.min):
raise TraitError("bad version: %s < %s" % (value, self.min))
if self.max and (V(value) > V(self.max)):
raise TraitError("bad version: %s > %s" % (value, self.max))
class RMessage(Reference):
msg_id = Unicode()
msg_type = Unicode()
header = Dict()
parent_header = Dict()
content = Dict()
def check(self, d):
super(RMessage, self).check(d)
RHeader().check(self.header)
if self.parent_header:
RHeader().check(self.parent_header)
class RHeader(Reference):
msg_id = Unicode()
msg_type = Unicode()
session = Unicode()
username = Unicode()
version = Version(min='5.0')
mime_pat = re.compile(r'^[\w\-\+\.]+/[\w\-\+\.]+$')
class MimeBundle(Reference):
metadata = Dict()
data = Dict()
def _data_changed(self, name, old, new):
for k,v in new.items():
assert mime_pat.match(k)
assert isinstance(v, str)
# shell replies
class Reply(Reference):
status = Enum(('ok', 'error'), default_value='ok')
class ExecuteReply(Reply):
execution_count = Integer()
def check(self, d):
Reference.check(self, d)
if d['status'] == 'ok':
ExecuteReplyOkay().check(d)
elif d['status'] == 'error':
ExecuteReplyError().check(d)
elif d['status'] == 'aborted':
ExecuteReplyAborted().check(d)
class ExecuteReplyOkay(Reply):
status = Enum(('ok',))
user_expressions = Dict()
class ExecuteReplyError(Reply):
status = Enum(('error',))
ename = Unicode()
evalue = Unicode()
traceback = List(Unicode())
class ExecuteReplyAborted(Reply):
status = Enum(('aborted',))
class InspectReply(Reply, MimeBundle):
found = Bool()
class ArgSpec(Reference):
args = List(Unicode())
varargs = Unicode()
varkw = Unicode()
defaults = List()
class Status(Reference):
execution_state = Enum(('busy', 'idle', 'starting'), default_value='busy')
class CompleteReply(Reply):
matches = List(Unicode())
cursor_start = Integer()
cursor_end = Integer()
status = Unicode()
class LanguageInfo(Reference):
name = Unicode('python')
version = Unicode(sys.version.split()[0])
class KernelInfoReply(Reply):
protocol_version = Version(min='5.0')
implementation = Unicode('ipython')
implementation_version = Version(min='2.1')
language_info = Dict()
banner = Unicode()
def check(self, d):
Reference.check(self, d)
LanguageInfo().check(d['language_info'])
class ConnectReply(Reference):
shell_port = Integer()
control_port = Integer()
stdin_port = Integer()
iopub_port = Integer()
hb_port = Integer()
class CommInfoReply(Reply):
comms = Dict()
class IsCompleteReply(Reference):
status = Enum(('complete', 'incomplete', 'invalid', 'unknown'), default_value='complete')
def check(self, d):
Reference.check(self, d)
if d['status'] == 'incomplete':
IsCompleteReplyIncomplete().check(d)
class IsCompleteReplyIncomplete(Reference):
indent = Unicode()
# IOPub messages
class ExecuteInput(Reference):
code = Unicode()
execution_count = Integer()
class Error(ExecuteReplyError):
"""Errors are the same as ExecuteReply, but without status"""
status = None # no status field
class Stream(Reference):
name = Enum(('stdout', 'stderr'), default_value='stdout')
text = Unicode()
class DisplayData(MimeBundle):
pass
class ExecuteResult(MimeBundle):
execution_count = Integer()
class HistoryReply(Reply):
history = List(List())
references = {
'execute_reply' : ExecuteReply(),
'inspect_reply' : InspectReply(),
'status' : Status(),
'complete_reply' : CompleteReply(),
'kernel_info_reply': KernelInfoReply(),
'connect_reply': ConnectReply(),
'comm_info_reply': CommInfoReply(),
'is_complete_reply': IsCompleteReply(),
'execute_input' : ExecuteInput(),
'execute_result' : ExecuteResult(),
'history_reply' : HistoryReply(),
'error' : Error(),
'stream' : Stream(),
'display_data' : DisplayData(),
'header' : RHeader(),
}
"""
Specifications of `content` part of the reply messages.
"""
def validate_message(msg, msg_type=None, parent=None):
"""validate a message
    Checks the message's structure against the Reference specs above.
If msg_type and/or parent are given, the msg_type and/or parent msg_id
are compared with the given values.
"""
RMessage().check(msg)
if msg_type:
assert msg['msg_type'] == msg_type
if parent:
assert msg['parent_header']['msg_id'] == parent
content = msg['content']
ref = references[msg['msg_type']]
ref.check(content)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
# Shell channel
def test_execute():
flush_channels()
msg_id = KC.execute(code='x=1')
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'execute_reply', msg_id)
def test_execute_silent():
flush_channels()
msg_id, reply = execute(code='x=1', silent=True)
# flush status=idle
status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(status, 'status', msg_id)
assert status['content']['execution_state'] == 'idle'
nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
count = reply['execution_count']
msg_id, reply = execute(code='x=2', silent=True)
# flush status=idle
status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(status, 'status', msg_id)
assert status['content']['execution_state'] == 'idle'
nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
count_2 = reply['execution_count']
assert count_2 == count
def test_execute_error():
flush_channels()
msg_id, reply = execute(code='1/0')
assert reply['status'] == 'error'
assert reply['ename'] == 'ZeroDivisionError'
error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(error, 'error', msg_id)
def test_execute_inc():
"""execute request should increment execution_count"""
flush_channels()
msg_id, reply = execute(code='x=1')
count = reply['execution_count']
flush_channels()
msg_id, reply = execute(code='x=2')
count_2 = reply['execution_count']
assert count_2 == count+1
def test_execute_stop_on_error():
"""execute request should not abort execution queue with stop_on_error False"""
flush_channels()
fail = '\n'.join([
# sleep to ensure subsequent message is waiting in the queue to be aborted
'import time',
'time.sleep(0.5)',
'raise ValueError',
])
KC.execute(code=fail)
msg_id = KC.execute(code='print("Hello")')
KC.get_shell_msg(timeout=TIMEOUT)
reply = KC.get_shell_msg(timeout=TIMEOUT)
assert reply['content']['status'] == 'aborted'
flush_channels()
KC.execute(code=fail, stop_on_error=False)
msg_id = KC.execute(code='print("Hello")')
KC.get_shell_msg(timeout=TIMEOUT)
reply = KC.get_shell_msg(timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
def test_non_execute_stop_on_error():
"""test that non-execute_request's are not aborted after an error"""
flush_channels()
fail = '\n'.join([
# sleep to ensure subsequent message is waiting in the queue to be aborted
'import time',
'time.sleep(0.5)',
'raise ValueError',
])
KC.execute(code=fail)
KC.kernel_info()
KC.comm_info()
KC.inspect(code="print")
reply = KC.get_shell_msg(timeout=TIMEOUT) # execute
assert reply['content']['status'] == 'error'
reply = KC.get_shell_msg(timeout=TIMEOUT) # kernel_info
assert reply['content']['status'] == 'ok'
reply = KC.get_shell_msg(timeout=TIMEOUT) # comm_info
assert reply['content']['status'] == 'ok'
reply = KC.get_shell_msg(timeout=TIMEOUT) # inspect
assert reply['content']['status'] == 'ok'
def test_user_expressions():
flush_channels()
msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
user_expressions = reply['user_expressions']
nt.assert_equal(user_expressions, {'foo': {
'status': 'ok',
'data': {'text/plain': '2'},
'metadata': {},
}})
def test_user_expressions_fail():
flush_channels()
msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
user_expressions = reply['user_expressions']
foo = user_expressions['foo']
assert foo['status'] == 'error'
assert foo['ename'] == 'NameError'
def test_oinfo():
flush_channels()
msg_id = KC.inspect('a')
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
def test_oinfo_found():
flush_channels()
msg_id, reply = execute(code='a=5')
msg_id = KC.inspect('a')
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert content['found']
text = content['data']['text/plain']
assert 'Type:' in text
assert 'Docstring:' in text
def test_oinfo_detail():
flush_channels()
msg_id, reply = execute(code='ip=get_ipython()')
msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert content['found']
text = content['data']['text/plain']
assert 'Signature:' in text
assert 'Source:' in text
def test_oinfo_not_found():
flush_channels()
msg_id = KC.inspect('dne')
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert not content['found']
def test_complete():
flush_channels()
msg_id, reply = execute(code="alpha = albert = 5")
msg_id = KC.complete('al', 2)
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'complete_reply', msg_id)
matches = reply['content']['matches']
for name in ('alpha', 'albert'):
assert name in matches
def test_kernel_info_request():
flush_channels()
msg_id = KC.kernel_info()
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'kernel_info_reply', msg_id)
def test_connect_request():
flush_channels()
    msg = KC.session.msg('connect_request')
    KC.shell_channel.send(msg)
    msg_id = msg['header']['msg_id']
    reply = get_reply(KC, msg_id, TIMEOUT)
    validate_message(reply, 'connect_reply', msg_id)
def test_comm_info_request():
flush_channels()
if not hasattr(KC, 'comm_info'):
raise SkipTest()
msg_id = KC.comm_info()
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'comm_info_reply', msg_id)
def test_single_payload():
"""
    We want to test that set_next_input is not triggered several times per cell.
    This is (was?) mostly due to the fact that `?` in a loop would trigger
    several set_next_input.
    I'm tempted to think that we actually want to _allow_ multiple
    set_next_input (that's the user's choice), but that `?` itself (and ?'s
    transform) should avoid setting multiple set_next_input.
"""
flush_channels()
msg_id, reply = execute(code="ip = get_ipython()\n"
"for i in range(3):\n"
" ip.set_next_input('Hello There')\n")
payload = reply['payload']
next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
assert len(next_input_pls) == 1
def test_is_complete():
flush_channels()
msg_id = KC.is_complete("a = 1")
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'is_complete_reply', msg_id)
def test_history_range():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
def test_history_tail():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
def test_history_search():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
reply = get_reply(KC, msg_id, TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
# IOPub channel
def test_stream():
flush_channels()
msg_id, reply = execute("print('hi')")
stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(stdout, 'stream', msg_id)
content = stdout['content']
assert content['text'] == 'hi\n'
def test_display_data():
flush_channels()
msg_id, reply = execute("from IPython.display import display; display(1)")
display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(display, 'display_data', parent=msg_id)
data = display['content']['data']
assert data['text/plain'] == '1'
|
[
"vice.roncevic@luxc.tech"
] |
vice.roncevic@luxc.tech
|
eeced7e4f26e641d78ffaa365396b40acb467179
|
aa09c3985477e3104448a9ca2f37286ffb4fbfbd
|
/NASA_Wind_Test/test_Programs/test_ROS.py
|
1ca950fa223c89933ff27a5237e665542b3fd532
|
[] |
no_license
|
samA94/Test_Files_Wind_Overshoot
|
10855a7b8347e8216cc4b0240d04104fa9467066
|
b2155d09da4e9c577c99d8f32c70c7ac02fc5b2a
|
refs/heads/master
| 2021-01-13T15:04:58.598141
| 2017-01-30T15:20:17
| 2017-01-30T15:20:17
| 76,274,216
| 0
| 0
| null | 2017-01-12T20:01:20
| 2016-12-12T16:24:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,272
|
py
|
import rospy
from geometry_msgs.msg import PoseWithCovariance
from mavros_msgs.srv import CommandBool, SetMode, StreamRate
from mavros_msgs.msg import OverrideRCIn
import time
rospy.init_node("send_Waypoints")
rospy.wait_for_service("mavros/set_stream_rate")
setRate = rospy.ServiceProxy("mavros/set_stream_rate", StreamRate)
setRate(0, 50, 1)
rospy.Rate(50.0)
def quad_Command(mode, armVar = False):
#initialize topics for arming quad
rospy.wait_for_service("mavros/cmd/arming")
armQuad = rospy.ServiceProxy("mavros/cmd/arming", CommandBool)
rospy.wait_for_service("mavros/set_mode")
modeSet = rospy.ServiceProxy("mavros/set_mode", SetMode)
#arm quadrotor and initialize proper mode
armQuad(armVar)
print "System Arm Status: ", armVar
time.sleep(3)
modeSet(mode[0], mode[1])
print "Mode set to: ", mode
mode_List = [0, "OFFBOARD"]
quad_Command(mode_List, True)
pub_Velocity = rospy.Publisher('/mavros/RC/override', OverrideRCIn)
i = 0
velocity = OverrideRCIn()
while i < 50:
if i%2 == 0:
velocity.channels[2] = 1200
else:
velocity.channels[2] = 1000
pub_Velocity.publish(velocity)
i = i + 1
time.sleep(0.1)
velocity.channels[2] = 900
pub_Velocity.publish(velocity)
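# Note (assumption, not from the original script): on a default ArduPilot RC
# mapping, channels[2] is the throttle channel, so the loop above pulses the
# throttle between 1000 and 1200 us PWM at ~10 Hz before settling at 900 us.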
|
[
"noreply@github.com"
] |
samA94.noreply@github.com
|
067e6698922e5217a1602d5a4f7363c299b73a15
|
e69b36ec130b76f422a32eff98a366661a41561e
|
/mercenarylair/common/accounts/models.py
|
c3785f39cfaaa4774c5c9c66d6594719b2b6f1dd
|
[] |
no_license
|
manuel-delverme/mercenaryLair
|
8c7001d00859d6045d51e921168059e46e67eab0
|
9171773904409e43588f614d4c2a82b50f699986
|
refs/heads/master
| 2021-01-01T19:11:49.272941
| 2013-08-24T14:02:10
| 2013-08-24T14:02:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 406
|
py
|
from django.db import models
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from userena.models import UserenaLanguageBaseProfile
class UserProfile(UserenaLanguageBaseProfile):
user = models.OneToOneField(User,unique=True,verbose_name=_('user'),related_name='my_profile')
feedback = models.CharField(_('feedback'),max_length=5)
|
[
"production@41.215.240.102"
] |
production@41.215.240.102
|
94e2d9ac109fe7b31e9dde5bcd721061f309f6dc
|
36d4c9a57b53f5e14acb512759b49fe44d9990d8
|
/python_essential_q/q10.py
|
28bd1c40de356fab38ca5b007fc0e628028b6f26
|
[] |
no_license
|
yosef8234/test
|
4a280fa2b27563c055b54f2ed3dfbc7743dd9289
|
8bb58d12b2837c9f8c7b1877206a365ab9004758
|
refs/heads/master
| 2021-05-07T22:46:06.598921
| 2017-10-16T18:11:26
| 2017-10-16T18:11:26
| 107,286,907
| 4
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,851
|
py
|
# Question 10
# Consider the following code, what will it output?
class A(object):
def go(self):
print("go A go!")
def stop(self):
print("stop A stop!")
def pause(self):
raise Exception("Not Implemented")
class B(A):
def go(self):
super(B, self).go()
print("go B go!")
class C(A):
def go(self):
super(C, self).go()
print("go C go!")
def stop(self):
super(C, self).stop()
print("stop C stop!")
class D(B,C):
def go(self):
super(D, self).go()
print("go D go!")
def stop(self):
super(D, self).stop()
print("stop D stop!")
def pause(self):
print("wait D wait!")
class E(B,C): pass
a = A()
b = B()
c = C()
d = D()
e = E()
# specify output from here onwards
a.go()
b.go()
c.go()
d.go()
e.go()
a.stop()
b.stop()
c.stop()
d.stop()
e.stop()
a.pause()
b.pause()
c.pause()
d.pause()
e.pause()
# Answer
# The output is specified in the comments in the segment below:
a.go()
# go A go!
b.go()
# go A go!
# go B go!
c.go()
# go A go!
# go C go!
d.go()
# go A go!
# go C go!
# go B go!
# go D go!
e.go()
# go A go!
# go C go!
# go B go!
a.stop()
# stop A stop!
b.stop()
# stop A stop!
c.stop()
# stop A stop!
# stop C stop!
d.stop()
# stop A stop!
# stop C stop!
# stop D stop!
e.stop()
# stop A stop!
# stop C stop!
a.pause()
# ... Exception: Not Implemented
b.pause()
# ... Exception: Not Implemented
c.pause()
# ... Exception: Not Implemented
d.pause()
# wait D wait!
e.pause()
# ...Exception: Not Implemented
# Why do we care?
# Because OO programming is really, really important. Really. Answering this question shows your understanding of inheritance and the use of Python's super function. Most of the time the order of resolution doesn't matter. Sometimes it does, it depends on your application.
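# A quick way to inspect the resolution order directly (illustrative addition,
# not part of the original question): Python exposes the C3-linearized MRO.
# print(D.__mro__)   # (D, B, C, A, object)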
|
[
"ekoz@protonmail.com"
] |
ekoz@protonmail.com
|
282f5a44c5c5feb9a18b032f70b02529bdb6567c
|
c54b6d130dbc229a2aec7432af9f8623acd1b760
|
/DL-ICP4/question1.py
|
3f6650a6649dbeccb0704106f8b35bec0cded60a
|
[] |
no_license
|
sabdj2020/CSEE-5590---Python-DeepLearning
|
58f5ce8ef39c5ea03882335accfe9db1d728911e
|
5a9ffa2d5f9b2d7f6f2860154acfba93557a63b1
|
refs/heads/master
| 2022-11-21T13:02:44.343148
| 2020-07-24T05:00:01
| 2020-07-24T05:00:01
| 271,330,420
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,856
|
py
|
# import libraries
import numpy
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Flatten
from keras.constraints import maxnorm
from keras.optimizers import SGD
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
from keras.models import load_model
import matplotlib.pyplot as plt
# K.set_image_data_format('channels_first')
# Load the CIFAR-10 dataset
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
# Cast train and test data to float32 and normalize pixel values to [0, 1]
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train = X_train / 255.0
X_test = X_test / 255.0
# Use to_categorical function from np_utils for making it into categorical value
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]
# QUESTION1
# sequential model
model = Sequential()
# Convolutional input layer, 32 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(32, (3, 3), input_shape=(32, 32, 3), padding='same', activation='relu', kernel_constraint=maxnorm(3)))
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Convolutional layer, 32 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(32, (3, 3), activation='relu', padding='same', kernel_constraint=maxnorm(3)))
# Max Pool layer with size 2×2.
model.add(MaxPooling2D(pool_size=(2, 2)))
# Convolutional layer, 64 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(64, (3, 3), padding='same', activation='relu', kernel_constraint=maxnorm(3)))
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Convolutional layer, 64 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(64, (3, 3), activation='relu', padding='same', kernel_constraint=maxnorm(3)))
# Max Pool layer with size 2×2.
model.add(MaxPooling2D(pool_size=(2, 2)))
#Convolutional layer, 128 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(128, (3, 3), padding='same', activation='relu', kernel_constraint=maxnorm(3)))
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Convolutional layer,128 feature maps with a size of 3×3 and a rectifier activation function.
model.add(Conv2D(128, (3, 3), activation='relu', padding='same', kernel_constraint=maxnorm(3)))
# Max Pool layer with size 2×2.
model.add(MaxPooling2D(pool_size=(2, 2)))
# Flatten layer.
model.add(Flatten())
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Fully connected layer with 1024 units and a rectifier activation function.
model.add(Dense(1024, activation='relu', kernel_constraint=maxnorm(3)))
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Fully connected layer with 512 units and a rectifier activation function.
model.add(Dense(512, activation='relu', kernel_constraint=maxnorm(3)))
# Dropout layer at 20%.
model.add(Dropout(0.2))
# Fully connected output layer with 10 units and a softmax activation function
model.add(Dense(num_classes, activation='softmax'))
# Compile model
epochs = 6
lrate = 0.01
decay = lrate/epochs
sgd = SGD(lr=lrate, momentum=0.9, decay=decay, nesterov=False)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
print(model.summary())
# Fit the model
model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=epochs, batch_size=128)
# save the model
model.save('cifar10.h5')
# Final evaluation of the model
model = load_model('cifar10.h5')
scores = model.evaluate(X_test, y_test, verbose=0)
print("Accuracy: %.2f%%" % (scores[1]*100))
|
[
"noreply@github.com"
] |
sabdj2020.noreply@github.com
|
f7c870c6b9331ebece86690f381856c76fc12e39
|
fb913727279d104311af5dffe7fbb2f7ab86c57c
|
/sentiment_analysis.py
|
db329c7bc551780545f1a126f5c41c6cc0bcd706
|
[] |
no_license
|
ariequeve/twitter_sentiment_analysis
|
b0aedebe0ca0cbe202998b7cca77305375355d5c
|
5519b275837a54154666a2e8cb959f24a3d6ce47
|
refs/heads/main
| 2023-07-16T22:20:42.642613
| 2021-08-25T17:55:34
| 2021-08-25T17:55:34
| 308,977,067
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,673
|
py
|
"""
Adapted from: https://www.freecodecamp.org/news/how-to-make-your-own-sentiment-analyzer-using-python-and-googles-natural-language-api-9e91e1c493e/
"""
import tweepy
import os
import json
from datetime import datetime, timedelta
import re
from nltk.tokenize import WordPunctTokenizer
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types
# Load the access keys for our Twitter App:
with open('Keys.json') as f:
data = json.load(f)
ACC_TOKEN = data["Access_token"]
ACC_SECRET = data["Access_token secret"]
CONS_KEY = data["API_key"]
CONS_SECRET = data["API_secret key"]
def authentication(cons_key, cons_secret, acc_token, acc_secret):
"""
Función para obtener acceso a una app de Twitter dadas las
claves.
Args:
cons_key: Consumer Key.
cons_secret: Consumer API Secret.
acc_token: Access Token.
acc_secret: Access Token Secret.
Returns:
api con el acceso garantizado.
"""
auth = tweepy.OAuthHandler(cons_key, cons_secret)
auth.set_access_token(acc_token, acc_secret)
api = tweepy.API(auth)
return api
def search_tweets(keyword, total_tweets):
"""
Función para buscar tweets en español dados un keyword y una cantidad total de tweets. En
este caso se limitan también a buscar en un periodo no mayor a 24 horas.
Args:
keyword: Palabra a buscar en Twitter.
total_tweets: Cantidad total de tweets a buscar.
Returns:
search result: Iterable con toda la información de los tweets encontrados.
"""
today_datetime = datetime.today().now()
yesterday_datetime = today_datetime - timedelta(days=1)
yesterday_date = yesterday_datetime.strftime('%Y-%m-%d')
api = authentication(CONS_KEY, CONS_SECRET, ACC_TOKEN, ACC_SECRET)
search_result = tweepy.Cursor(api.search,
q=keyword,
since=yesterday_date,
result_type='recent',
lang='es').items(total_tweets)
return search_result
def clean_tweets(tweet):
"""
Función para limpiar los tweets antes de ser enviados a la API de análisis de
sentimiento.
Nota: La API de Google es bastante flexible a la hora de realizar análisis de
sentimiento. No estoy seguro de que todas estas "limpiezas" sean del todo
necesarias.
Args:
tweet: Tweet (o texto) a limpiar.
Returns:
clean_tweet: Tweet ya limpio para proceder a realizar análisis de sentimiento.
"""
# Removemos el usuario en el tweet
user_removed = re.sub(r'@[A-Za-z0-9]+', '', tweet.decode('utf-8'))
# Removemos cualquier link presente en el tweet
link_removed = re.sub('https?://[A-Za-z0-9./]+', '', user_removed)
# llevamos todo a minúsculas
lower_case_tweet = link_removed.lower()
# Instanciamos un tokenizador y, de aucerdo a sus reglas, creamos la lista de tokens
tok = WordPunctTokenizer()
words = tok.tokenize(lower_case_tweet)
# Unimos los tokens para crear un único string a ser enviado
clean_tweet = (' '.join(words)).strip()
return clean_tweet
def get_sentiment_score(tweet):
"""
Función que utiliza la API NLP de Google para realizar análisis de sentimiento
sobre un texto.
Args:
tweet: Tweet (o texto) a realizar análisis de sentimiento.
Returns:
sentiment_score: Puntaje de sentimiento cuyo rango va desde -1.0 (negativo) hasta
1.0 (positivo).
Nota:
El análisis de sentimiento de Google también arroja un valor de magnitud ("magnitude").
Este valor es usado para determinar la "fuerza" general del sentimiento calculado. Para
mayor detalle consultar:
https://cloud.google.com/natural-language/docs/basics#interpreting_sentiment_analysis_values
"""
client = language.LanguageServiceClient()
document = types\
.Document(content=tweet, type=enums.Document.Type.PLAIN_TEXT)
sentiment_score = client\
.analyze_sentiment(document=document)\
.document_sentiment\
.score
return sentiment_score
def analyze_tweets(keyword, total_tweets):
"""
Función general para realizar el análisis de tweets, engloba las funciones anteriores.
Args:
keyword: Palabra a buscar en Twitter.
total_tweets: Cantidad total de tweets a buscar.
Returns:
final_score: Promedio del score de sentimiento entre los tweets analizados.
"""
score = 0
tweets = search_tweets(keyword, total_tweets)
lista_tweets = []
for tweet in tweets:
cleaned_tweet = clean_tweets(tweet.text.encode('utf-8'))
sentiment_score = get_sentiment_score(cleaned_tweet)
score += sentiment_score
no_link_tweet = re.sub('https?://[A-Za-z0-9./]+', '', tweet.text)
lista_tweets.append((no_link_tweet, sentiment_score))
final_score = round((score / float(total_tweets)), 2)
return final_score, lista_tweets
# Example cases:
# 1.- Testing specific comments:
""" bad_comment = get_sentiment_score('¡Esta lavadora no sirve para nada!')
good_comment = get_sentiment_score('Esta lavadora es buenisima')
neutral_comment = get_sentiment_score('Lavadora mas o menos')
print('bad_comment_score:', bad_comment)
print('good_comment_score:', good_comment)
print('neutral_comment_score:', neutral_comment) """
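# 2.- Hypothetical aggregate example (not in the original script; requires
# valid Twitter and Google Cloud credentials):
# final_score, lista_tweets = analyze_tweets('lavadora', 10)
# print('Average sentiment:', final_score)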
|
[
"noreply@github.com"
] |
ariequeve.noreply@github.com
|
1edbd04e266260b0d6e143036234a1f6dc851899
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayEbppInstserviceDeductConsultResponse.py
|
c95c7306da6e9fcd48e6f3c6a8c8e345a727b215
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,603
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayEbppInstserviceDeductConsultResponse(AlipayResponse):
def __init__(self):
super(AlipayEbppInstserviceDeductConsultResponse, self).__init__()
self._allow_sign = None
self._error_code = None
self._extend_field = None
self._pay_mode = None
@property
def allow_sign(self):
return self._allow_sign
@allow_sign.setter
def allow_sign(self, value):
self._allow_sign = value
@property
def error_code(self):
return self._error_code
@error_code.setter
def error_code(self, value):
self._error_code = value
@property
def extend_field(self):
return self._extend_field
@extend_field.setter
def extend_field(self, value):
self._extend_field = value
@property
def pay_mode(self):
return self._pay_mode
@pay_mode.setter
def pay_mode(self, value):
self._pay_mode = value
def parse_response_content(self, response_content):
response = super(AlipayEbppInstserviceDeductConsultResponse, self).parse_response_content(response_content)
if 'allow_sign' in response:
self.allow_sign = response['allow_sign']
if 'error_code' in response:
self.error_code = response['error_code']
if 'extend_field' in response:
self.extend_field = response['extend_field']
if 'pay_mode' in response:
self.pay_mode = response['pay_mode']
|
[
"jiandong.jd@antfin.com"
] |
jiandong.jd@antfin.com
|
11b220cc5aeec4a4d8cf8455718322627d9e49fc
|
13a35ef1c6a222b33ed6d1859d958533647aefda
|
/punchykickgravityflipwarz/sprite_sheet.py
|
23e6423f3a2de4f55bd5a4899ce5ababb69d483c
|
[
"MIT"
] |
permissive
|
BeerNCode/punchykickgravityflipwarz
|
ee6b4146e91391922b407177e9acf32e862209de
|
d6a2c3879f47c9ec9752c888b1f0b0deb713f0e3
|
refs/heads/master
| 2022-12-02T19:08:00.112910
| 2019-11-23T14:08:19
| 2019-11-23T14:08:19
| 223,481,859
| 0
| 1
|
MIT
| 2022-11-22T04:50:43
| 2019-11-22T20:32:16
|
Python
|
UTF-8
|
Python
| false
| false
| 723
|
py
|
import pygame
class SpriteSheet(object):
def __init__(self, filename):
self.sheet = None
self.filename = filename
def image_at(self, rectangle):
if self.sheet is None:
self.sheet = pygame.image.load(self.filename).convert_alpha()
rect = pygame.Rect(rectangle)
image = pygame.Surface(rect.size).convert()
image.blit(self.sheet, (0, 0), rect)
return image
def images_at(self, rects):
return [self.image_at(rect) for rect in rects]
def load_strip(self, rect, image_count, colorkey = None):
tups = [(rect[0]+rect[2]*x, rect[1], rect[2], rect[3]) for x in range(image_count)]
return self.images_at(tups)
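# Hypothetical usage sketch (file name and frame geometry are assumptions, and
# pygame's display must be initialized before convert() is called):
# sheet = SpriteSheet("player.png")
# walk_frames = sheet.load_strip((0, 0, 32, 32), image_count=8)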
|
[
"thomasjackdalby@gmail.com"
] |
thomasjackdalby@gmail.com
|
5b970b86771c3f3f30534af76e990e04bda051b8
|
53167f3a60bf8ef0ea76abae571ba2c8cf81d0fa
|
/Model/BackBone/ResNeXt3D.py
|
aa21f20f1a3eb52ac492b093d43474e89d651706
|
[] |
no_license
|
Magnety/UNetTransformer
|
c5cc546b576e181c2fa99c6ed5df4541394d173a
|
05114dee83dd16e1c078cb0ed22bdb85ca023a2a
|
refs/heads/master
| 2023-03-25T12:50:55.376053
| 2021-03-26T05:22:04
| 2021-03-26T05:22:04
| 351,639,457
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,554
|
py
|
from functools import partial
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def conv3x3x3(in_planes, out_planes, stride=1):
"""3x3x3 convolution with padding."""
return nn.Conv3d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=1,
bias=False)
def downsample_basic_block(x, planes, stride):
out = F.avg_pool3d(x, kernel_size=1, stride=stride)
zero_pads = torch.Tensor(
out.size(0), planes - out.size(1), out.size(2), out.size(3),
out.size(4)).zero_()
if isinstance(out.data, torch.cuda.FloatTensor):
zero_pads = zero_pads.cuda()
out = Variable(torch.cat([out.data, zero_pads], dim=1))
return out
class ResNeXtBottleneck(nn.Module):
expansion = 2
def __init__(self, inplanes, planes, cardinality, stride=1,
downsample=None):
super(ResNeXtBottleneck, self).__init__()
mid_planes = cardinality * int(planes / 32)
self.conv1 = nn.Conv3d(inplanes, mid_planes, kernel_size=1, bias=False)
self.gn1 = nn.GroupNorm(32, mid_planes)
# self.bn1 = nn.BatchNorm3d(mid_planes)
self.conv2 = nn.Conv3d(
mid_planes,
mid_planes,
kernel_size=3,
stride=stride,
padding=1,
groups=cardinality,
bias=False)
self.gn2 = nn.GroupNorm(32, mid_planes)
# self.bn2 = nn.BatchNorm3d(mid_planes)
self.conv3 = nn.Conv3d(
mid_planes, planes * self.expansion, kernel_size=1, bias=False)
# self.bn3 = nn.BatchNorm3d(planes * self.expansion)
self.gn3 = nn.GroupNorm(32, planes * self.expansion)
self.relu = nn.PReLU()
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.gn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
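# Worked example (illustrative): with cardinality=32 and planes=128, the grouped
# conv runs on mid_planes = 32 * (128 // 32) = 128 channels, and the block
# expands its output to planes * expansion = 256 channels.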
class ResNeXtDilatedBottleneck(nn.Module):
expansion = 2
def __init__(self, inplanes, planes, cardinality, stride=1,
downsample=None):
super(ResNeXtDilatedBottleneck, self).__init__()
mid_planes = cardinality * int(planes / 32)
self.conv1 = nn.Conv3d(inplanes, mid_planes, kernel_size=1, bias=False)
# self.bn1 = nn.BatchNorm3d(mid_planes)
self.gn1 = nn.GroupNorm(32, mid_planes)
self.conv2 = nn.Conv3d(
mid_planes,
mid_planes,
kernel_size=3,
stride=stride,
padding=2,
dilation=2,
groups=cardinality,
bias=False)
# self.bn2 = nn.BatchNorm3d(mid_planes)
self.gn2 = nn.GroupNorm(32, mid_planes)
self.conv3 = nn.Conv3d(
mid_planes, planes * self.expansion, kernel_size=1, bias=False)
# self.bn3 = nn.BatchNorm3d(planes * self.expansion)
self.gn3 = nn.GroupNorm(32, planes * self.expansion)
self.relu = nn.PReLU()
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.gn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNeXt3D(nn.Module):
def __init__(self, block, layers, shortcut_type='B', cardinality=32, num_classes=400):
self.inplanes = 64
super(ResNeXt3D, self).__init__()
self.conv1 = nn.Conv3d(1, 64, kernel_size=7, stride=(1, 2, 2), padding=(3, 3, 3), bias=False)
# self.bn1 = nn.BatchNorm3d(64)
self.gn1 = nn.GroupNorm(32, 64)
self.relu = nn.PReLU()
self.maxpool = nn.MaxPool3d(kernel_size=(3, 3, 3), stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0], shortcut_type, cardinality)
self.layer2 = self._make_layer(block, 128, layers[1], shortcut_type, cardinality, stride=(1, 2, 2))
self.layer3 = self._make_layer(ResNeXtDilatedBottleneck, 256, layers[2], shortcut_type, cardinality, stride=1)
self.layer4 = self._make_layer(ResNeXtDilatedBottleneck, 512, layers[3], shortcut_type, cardinality, stride=1)
self.avgpool = nn.AdaptiveAvgPool3d(1)
        self.fc = nn.Linear(512 * block.expansion, num_classes)  # layer4 outputs 512 * expansion channels
for m in self.modules():
if isinstance(m, nn.Conv3d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out')
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, shortcut_type, cardinality, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
if shortcut_type == 'A':
downsample = partial(
downsample_basic_block,
planes=planes * block.expansion,
stride=stride)
else:
downsample = nn.Sequential(
nn.Conv3d(
self.inplanes,
planes * block.expansion,
kernel_size=1,
stride=stride,
bias=False),
# nn.BatchNorm3d(planes * block.expansion)
nn.GroupNorm(32, planes * block.expansion),
)
layers = []
layers.append(
block(self.inplanes, planes, cardinality, stride, downsample))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
layers.append(block(self.inplanes, planes, cardinality))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.gn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def get_fine_tuning_parameters(model, ft_begin_index):
if ft_begin_index == 0:
return model.parameters()
ft_module_names = []
for i in range(ft_begin_index, 5):
ft_module_names.append('layer{}'.format(i))
ft_module_names.append('fc')
parameters = []
for k, v in model.named_parameters():
for ft_module in ft_module_names:
if ft_module in k:
parameters.append({'params': v})
break
else:
parameters.append({'params': v, 'lr': 0.0})
return parameters
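# Hypothetical usage (names assumed): fine-tune layer3, layer4 and fc while
# pinning everything earlier to lr=0, then hand the groups to an optimizer:
# params = get_fine_tuning_parameters(resnext3d50(num_classes=2), ft_begin_index=3)
# optimizer = torch.optim.SGD(params, lr=1e-3, momentum=0.9)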
def resnext3d10(**kwargs):
"""Constructs a ResNeXt3D-10 model."""
model = ResNeXt3D(ResNeXtBottleneck, [1, 1, 1, 1], **kwargs)
return model
def resnext3d18(**kwargs):
"""Constructs a ResNeXt3D-18 model."""
model = ResNeXt3D(ResNeXtBottleneck, [2, 2, 2, 2], **kwargs)
return model
def resnext3d34(**kwargs):
"""Constructs a ResNeXt3D-34 model."""
model = ResNeXt3D(ResNeXtBottleneck, [3, 4, 6, 3], **kwargs)
return model
def resnext3d50(**kwargs):
"""Constructs a ResNeXt3D-50 model."""
model = ResNeXt3D(ResNeXtBottleneck, [3, 4, 6, 3], **kwargs)
return model
def resnext3d101(**kwargs):
"""Constructs a ResNeXt3D-101 model."""
model = ResNeXt3D(ResNeXtBottleneck, [3, 4, 23, 3], **kwargs)
return model
def resnext3d152(**kwargs):
"""Constructs a ResNeXt3D-152 model."""
model = ResNeXt3D(ResNeXtBottleneck, [3, 8, 36, 3], **kwargs)
return model
def resnext3d200(**kwargs):
"""Constructs a ResNeXt3D-200 model."""
model = ResNeXt3D(ResNeXtBottleneck, [3, 24, 36, 3], **kwargs)
return model
|
[
"liuyiyao0916@163.com"
] |
liuyiyao0916@163.com
|
93dfe2f8a97410e4fcd89278abfcdf944a3425f2
|
16516732031deb7f7e074be9fe757897557eee2d
|
/yukicoder/No.537 ユーザーID.py
|
9576d19f8d5f00b37f4292691eb9cd8cda00ecd4
|
[] |
no_license
|
cale-i/atcoder
|
90a04d3228864201cf63c8f8fae62100a19aefa5
|
c21232d012191ede866ee4b9b14ba97eaab47ea9
|
refs/heads/master
| 2021-06-24T13:10:37.006328
| 2021-03-31T11:41:59
| 2021-03-31T11:41:59
| 196,288,266
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
# yukicoder No.537 ユーザーID ("User ID") 2020/01/28
n=int(input())
def divisors(n):
div=[]
for i in range(1,int(n**0.5)+1):
if n%i==0:
div.append(i)
if i!=n//i:
div.append(n//i)
div.sort()
return div
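# e.g. divisors(12) -> [1, 2, 3, 4, 6, 12]: the loop only runs up to sqrt(n),
# adding each divisor i together with its complement n//i.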
div=divisors(n)
ans=set()
for d in div:
ans.add(str(d)+str(n//d))
print(len(ans))
|
[
"calei078029@gmail.com"
] |
calei078029@gmail.com
|
8874c003d22d321b6138652f2967a1e13dbc8fbb
|
6f87f5db0fb8a86178f0cbf073d22ecfd9c92431
|
/__init__.py
|
5394c94a4a707ef1dbe2b52523f80718ea3b9efb
|
[] |
no_license
|
BlenderCN-Org/HDR_Lightmapper
|
fbea54db7cfa0ded3756fe63389845a499357d47
|
c8fa49dbaa14ce4e36608c861b5fe48599320b47
|
refs/heads/master
| 2020-07-19T16:09:53.235454
| 2019-09-04T18:31:32
| 2019-09-04T18:31:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85,268
|
py
|
bl_info = {
"name": "HDR Lightmapper",
"category": "Render",
"location": "Properties -> Render -> HDR Lightmapper",
"description": "HDR Lightmapping solution for Blender",
"author": "Alexander Kleemann",
"version": (0, 0, 1),
"blender": (2, 80, 0)
}
import bpy, math, os, platform, subprocess, sys, re, shutil, webbrowser, glob
from bpy.app.handlers import persistent
from bpy.props import *
from bpy.types import Menu, Panel, UIList
import numpy as np
from time import time
module_pip = False
module_opencv = False
module_armory = False
#bpy.context.object.update_tag({'OBJECT', 'DATA', 'TIME'})
#import pip OR install pip os.system('python path-to-get-pip')
#Check if python is set in environment variables
#Check if pip is installed
#system: pip install opencv-python
#system: pip install matplotlib
#install pip
#install opencv-python
#uninstall numpy
#install numpy
#TODO:
#CHECK IF TWO OBJECTS SHARE MATERIAL IF SO SPLIT [Privatize Materials]?? Add shared material support later on...
#Weighted lightmap [Fixed;Dimension] for [Selection/Volume/Collection]
#ADD MARGIN FOR UVUNWRAP
try:
import pip
module_pip = True
except ImportError:
module_pip = False
print("Pip not found")
try:
import cv2
module_opencv = True
except ImportError:
#pip
module_opencv = False
try:
import arm
module_armory = True
except ImportError:
module_armory = False
def ShowMessageBox(message = "", title = "Message Box", icon = 'INFO'):
def draw(self, context):
self.layout.label(text=message)
bpy.context.window_manager.popup_menu(draw, title = title, icon = icon)
class HDRLM_PT_Panel(bpy.types.Panel):
bl_label = "HDR Lightmapper"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
scene = context.scene
row = layout.row()
row.operator("hdrlm.build_lighting")
row = layout.row()
row.operator("hdrlm.build_lighting_selected")
#row = layout.row()
#row.operator("hdrlm.build_ao")
row = layout.row()
row.operator("hdrlm.enable_lighting")
row = layout.row()
row.operator("hdrlm.disable_lighting")
row = layout.row()
row.operator("hdrlm.clean_lighting")
row = layout.row()
row.operator("hdrlm.open_lightmap_folder")
class HDRLM_PT_MeshMenu(bpy.types.Panel):
bl_label = "HDR Lightmapper"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "object"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
scene = context.scene
obj = bpy.context.object
layout.use_property_split = True
layout.use_property_decorate = False
scene = context.scene
if obj.type == "MESH":
row = layout.row(align=True)
row.prop(obj, "hdrlm_mesh_lightmap_use")
if obj.hdrlm_mesh_lightmap_use:
#row = layout.row()
#row.prop(obj, "hdrlm_mesh_apply_after")
#row = layout.row()
#row.prop(obj, "hdrlm_mesh_emissive")
#row = layout.row()
#row.prop(obj, "hdrlm_mesh_emissive_shadow")
row = layout.row()
row.prop(obj, "hdrlm_mesh_lightmap_resolution")
row = layout.row()
row.prop(obj, "hdrlm_mesh_lightmap_unwrap_mode")
row = layout.row()
row.prop(obj, "hdrlm_mesh_unwrap_margin")
#row = layout.row()
#row.prop(obj, "hdrlm_mesh_bake_ao")
class HDRLM_PT_LightMenu(bpy.types.Panel):
bl_label = "HDR Lightmapper"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "data"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
scene = context.scene
obj = bpy.context.object
if obj == None:
return
if obj.type == "LIGHT":
row = layout.row(align=True)
row.prop(obj, "hdrlm_light_lightmap_use")
if obj.hdrlm_light_lightmap_use:
row = layout.row(align=True)
row.prop(obj, "hdrlm_light_type", expand=True)
row = layout.row(align=True)
row.prop(obj, "hdrlm_light_intensity_scale")
row = layout.row(align=True)
row.prop(obj, "hdrlm_light_casts_shadows")
class HDRLM_PT_Unwrap(bpy.types.Panel):
bl_label = "Settings"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
row = layout.row()
row.prop(scene, 'hdrlm_mode')
row = layout.row(align=True)
row.prop(scene, 'hdrlm_quality')
row = layout.row(align=True)
row.prop(scene, 'hdrlm_lightmap_scale', expand=True)
row = layout.row(align=True)
row.prop(scene, 'hdrlm_lightmap_savedir')
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_caching_mode")
row = layout.row(align=True)
row.prop(scene, 'hdrlm_dilation_margin')
row = layout.row(align=True)
row.prop(scene, 'hdrlm_apply_on_unwrap')
row = layout.row(align=True)
row.prop(scene, 'hdrlm_indirect_only')
row = layout.row(align=True)
row.prop(scene, 'hdrlm_keep_cache_files')
#row = layout.row(align=True)
#row.prop(scene, 'bpy.types.Scene.hdrlm_delete_cache')
class HDRLM_PT_Denoise(bpy.types.Panel):
bl_label = "Denoise"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw_header(self, context):
scene = context.scene
self.layout.prop(scene, "hdrlm_denoise_use", text="")
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
scene = context.scene
layout.active = scene.hdrlm_denoise_use
row = layout.row(align=True)
#row.prop(scene, "hdrlm_denoiser", expand=True)
#if scene.hdrlm_denoiser == "OIDN":
row = layout.row(align=True)
row.prop(scene, "hdrlm_oidn_path")
row = layout.row(align=True)
row.prop(scene, "hdrlm_oidn_verbose")
row = layout.row(align=True)
row.prop(scene, "hdrlm_oidn_threads")
row = layout.row(align=True)
row.prop(scene, "hdrlm_oidn_maxmem")
row = layout.row(align=True)
row.prop(scene, "hdrlm_oidn_affinity")
#if scene.hdrlm_denoiser == "Optix":
# row = layout.row(align=True)
# row.prop(scene, "hdrlm_optix_path")
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_oidn_use_albedo")
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_oidn_use_normal")
class HDRLM_PT_Filtering(bpy.types.Panel):
bl_label = "Filtering"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw_header(self, context):
scene = context.scene
self.layout.prop(scene, "hdrlm_filtering_use", text="")
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
#layout.active = scene.hdrlm_filtering_use
column = layout.column()
box = column.box()
if module_opencv:
box.label(text="OpenCV Installed", icon="INFO")
else:
box.label(text="Please restart Blender after installing")
box.operator("hdrlm.install_opencv",icon="PREFERENCES")
        layout.active = scene.hdrlm_filtering_use and module_opencv
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_filtering_gimp_path")
#split = box.split()
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_mode")
row = layout.row(align=True)
if scene.hdrlm_filtering_mode == "Gaussian":
row.prop(scene, "hdrlm_filtering_gaussian_strength")
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_iterations")
elif scene.hdrlm_filtering_mode == "Box":
row.prop(scene, "hdrlm_filtering_box_strength")
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_iterations")
elif scene.hdrlm_filtering_mode == "Bilateral":
row.prop(scene, "hdrlm_filtering_bilateral_diameter")
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_bilateral_color_deviation")
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_bilateral_coordinate_deviation")
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_iterations")
else:
row.prop(scene, "hdrlm_filtering_median_kernel", expand=True)
row = layout.row(align=True)
row.prop(scene, "hdrlm_filtering_iterations")
class HDRLM_PT_Encoding(bpy.types.Panel):
bl_label = "Encoding"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
row = layout.row(align=True)
row.prop(scene, "hdrlm_encoding_mode", expand=True)
if scene.hdrlm_encoding_mode == "RGBM" or scene.hdrlm_encoding_mode == "RGBD":
row = layout.row(align=True)
row.prop(scene, "hdrlm_encoding_range")
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_encoding_armory_setup")
#row = layout.row(align=True)
#row.prop(scene, "hdrlm_encoding_colorspace")
class HDRLM_PT_Compression(bpy.types.Panel):
bl_label = "Compression"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
if scene.hdrlm_encoding_mode == "RGBE":
layout.label(text="HDR compression not available for RGBE")
else:
row = layout.row(align=True)
row.prop(scene, "hdrlm_compression")
class HDRLM_PT_Additional(bpy.types.Panel):
bl_label = "Additional Armory Features"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
try:
import arm
module_armory = True
except ImportError:
module_armory = False
if module_armory:
row = layout.row(align=True)
#layout.label(text="Armory found! Hooray!")
row.operator("hdrlm.create_world_volume")
else:
layout.label(text="Armory not detected.")
class HDRLM_PT_LightmapList(bpy.types.Panel):
bl_label = "Lightmaps"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
bl_options = {'DEFAULT_CLOSED'}
bl_parent_id = "HDRLM_PT_Panel"
def draw(self, context):
layout = self.layout
scene = context.scene
layout.use_property_split = True
layout.use_property_decorate = False
row = layout.row(align=True)
row.operator("image.rgbm_encode")
#class HDRLM_PT_LightmapStatus:
# def __init__(self):
class HDRLM_CreateWorldVolume(bpy.types.Operator):
"""Create World Volume"""
bl_idname = "hdrlm.create_world_volume"
bl_label = "Create World Volume"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        try:
            import arm
        except ImportError:
            self.report({'INFO'}, "Armory not detected")
            return {'CANCELLED'}
        createWorldVolume(self, context, arm)
        return {'FINISHED'}
class HDRLM_BuildAO(bpy.types.Operator):
"""Builds the lighting"""
bl_idname = "hdrlm.build_ao"
bl_label = "Build Ambient Occlusion"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
HDRLM_Build_AO(self, context)
##HDRLM_Build(self, context)
return {'FINISHED'}
class HDRLM_BuildLighting(bpy.types.Operator):
"""Builds the lighting"""
bl_idname = "hdrlm.build_lighting"
bl_label = "Build Light"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
HDRLM_Build(self, context)
return {'FINISHED'}
class HDRLM_BuildLightingSelected(bpy.types.Operator):
"""Builds the lighting for a selected"""
bl_idname = "hdrlm.build_lighting_selected"
bl_label = "Build Light for selected"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
#TODO! SOME WAY TO TOGGLE ON AND OFF THE EXISTING OBJ WITH LIGHTMAPPING VALUES
#Array prev toggle = all with lightmap true
previousToggle = []
for obj in bpy.data.objects:
if(obj.hdrlm_mesh_lightmap_use):
previousToggle.append(obj.name)
obj.hdrlm_mesh_lightmap_use = False
for obj in bpy.context.selected_objects:
obj.hdrlm_mesh_lightmap_use = True
HDRLM_Build(self, context)
for obj in bpy.data.objects:
obj.hdrlm_mesh_lightmap_use = False
if obj.name in previousToggle:
obj.hdrlm_mesh_lightmap_use = True
return {'FINISHED'}
class HDRLM_ToggleEnableforSelection(bpy.types.Operator):
"""Toggle lightmapping for selection"""
bl_idname = "hdrlm.enable_lighting"
bl_label = "Enable for selection"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
for obj in bpy.context.selected_objects:
obj.hdrlm_mesh_lightmap_use = True
return {'FINISHED'}
class HDRLM_ToggleDisableforSelection(bpy.types.Operator):
"""Disable lightmapping for selection"""
bl_idname = "hdrlm.disable_lighting"
bl_label = "Disable for selection"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
for obj in bpy.context.selected_objects:
obj.hdrlm_mesh_lightmap_use = False
return {'FINISHED'}
class HDRLM_MakeUniqueMaterials(bpy.types.Operator):
"""Disable lightmapping for selection"""
bl_idname = "hdrlm.make_unique_materials"
bl_label = "Make Unique Materials"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
#for obj in bpy.context.selected_objects:
# obj.hdrlm_mesh_lightmap_use = False
return {'FINISHED'}
class HDRLM_CleanLighting(bpy.types.Operator):
"""Clean lightmap cache"""
bl_idname = "hdrlm.clean_lighting"
bl_label = "Clean Lightmap cache"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
for obj in bpy.data.objects:
if obj.type == "MESH":
if obj.hdrlm_mesh_lightmap_use:
for slot in obj.material_slots:
backup_material_restore(slot)
# for m in bpy.data.materials: #TODO - CHANGE INTO SPECIFIC MATERIAL
# nodetree = m.node_tree
# nodes = nodetree.nodes
# mainNode = nodetree.nodes[0].inputs[0].links[0].from_node
# for n in nodes:
# if "LM" in n.name:
# nodetree.links.new(n.outputs[0], mainNode.inputs[0])
# for n in nodes:
# if "Lightmap" in n.name:
# nodes.remove(n)
# for obj in bpy.data.objects:
# for slot in obj.material_slots:
# #Soft refresh
# #tempMat = bpy.data.materials.new(name='hdrlm_temporary_shift')
# #tempMat.use_nodes = True
# #mat = bpy.data.materials[slot.material.name]
# #slot.material = bpy.data.materials["AAA"]
# #slot.material = mat
# pass
# #mat = bpy.
# #mat = bpy.data.materials[slot.material.name]
# #slot.material = bpy.data.materials["hdrlm_temporary_shift"]
# #slot.material = mat
# for mat in bpy.data.materials:
# if mat.name.endswith('_baked') or mat.name.endswith('_temp'):
# bpy.data.materials.remove(mat, do_unlink=True)
scene = context.scene
filepath = bpy.data.filepath
dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.hdrlm_lightmap_savedir)
if os.path.isdir(dirpath):
pass
#CHECK IF PROCESS USES DIRECTORY
#shutil.rmtree(dirpath)
# for obj in bpy.data.objects:
# ###### MESH / BAKING
# if obj.type == "MESH":
# if obj.hdrlm_mesh_lightmap_use:
# if obj.type == "MESH":
# if "UVMap_baked" in obj.data.uv_layers:
# obj.data.uv_layers.remove(obj.data.uv_layers["UVMap_Lightmaps"])
# for slot in obj.material_slots:
# mat = slot.material
# # Remove temp material
# if mat.name.endswith('_temp'):
# old = slot.material
# slot.material = bpy.data.materials[old.name.split('_' + obj.name)[0]]
# bpy.data.materials.remove(old, do_unlink=True)
# for m in obj.material_slots:
# nodetree = bpy.data.materials[m.name].node_tree
# #Get the material output node
# OutputNode = nodetree.nodes[0]
# #Get the connected node (usually either principled bsdf or armory)
# mainNode = OutputNode.inputs[0].links[0].from_node
# hasPreviousBasecolor = False
# for n in nodetree.nodes:
# prefix = "Lightmap_"
# if n.name == prefix + "Image":
# nodetree.nodes.remove(nodetree.nodes[n.name])
# if n.name == prefix + "Multiplication":
# nodetree.nodes.remove(nodetree.nodes[n.name])
# if n.name == prefix + "UV":
# nodetree.nodes.remove(nodetree.nodes[n.name])
# if n.name == prefix + "RGBM_Decode":
# nodetree.nodes.remove(nodetree.nodes[n.name])
# if n.name == prefix + "BasecolorNode":
# hasPreviousBasecolor = True
# if hasPreviousBasecolor:
# nodetree.links.new(mainNode.inputs[0], nodetree.nodes[prefix+"BasecolorNode"].outputs[0])
# for mat in bpy.data.materials:
# if mat.name.endswith('_baked') or mat.name.endswith('_temp'):
# bpy.data.materials.remove(mat, do_unlink=True)
# for img in bpy.data.images:
# if not img.users:
# bpy.data.images.remove(img)
for mat in bpy.data.materials:
mat.update_tag()
        return {'FINISHED'}
class HDRLM_LightmapFolder(bpy.types.Operator):
"""Open Lightmap Folder"""
bl_idname = "hdrlm.open_lightmap_folder"
bl_label = "Explore lightmaps"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
scene = context.scene
if not bpy.data.is_saved:
self.report({'INFO'}, "Please save your file first")
return {"CANCELLED"}
filepath = bpy.data.filepath
dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.hdrlm_lightmap_savedir)
if os.path.isdir(dirpath):
webbrowser.open('file://' + dirpath)
else:
os.mkdir(dirpath)
webbrowser.open('file://' + dirpath)
return{'FINISHED'}
class HDRLM_InstallOpenCV(bpy.types.Operator):
"""Install OpenCV"""
bl_idname = "hdrlm.install_opencv"
bl_label = "Install OpenCV"
bl_description = "TODO"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
pythonbinpath = bpy.app.binary_path_python
if platform.system() == "Windows":
pythonlibpath = os.path.join(os.path.dirname(os.path.dirname(pythonbinpath)), "lib")
else:
pythonlibpath = os.path.join(os.path.dirname(os.path.dirname(pythonbinpath)), "lib", os.path.basename(pythonbinpath)[:-1])
ensurepippath = os.path.join(pythonlibpath, "ensurepip")
cmda = [pythonbinpath, ensurepippath, "--upgrade", "--user"]
pip = subprocess.run(cmda, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if pip.returncode == 0:
            print("Successfully installed pip!\n")
else:
print("Failed to install pip!\n")
return{'FINISHED'}
cmdb = [pythonbinpath, "-m", "pip", "install", "opencv-python"]
opencv = subprocess.run(cmdb, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if opencv.returncode == 0:
            print("Successfully installed OpenCV!\n")
else:
print("Failed to install OpenCV!\n")
return{'FINISHED'}
        global module_opencv
        module_opencv = True
ShowMessageBox("Please restart blender to enable OpenCV filtering", "Restart", 'PREFERENCES')
return{'FINISHED'}
# function to clamp float
def saturate(num, floats=True):
if num < 0:
num = 0
elif num > (1 if floats else 255):
num = (1 if floats else 255)
return num
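# RGBM packs HDR data into 8-bit RGBA: RGB is divided by a shared range
# (8.0 here) and by a per-pixel multiplier stored in alpha, so a decoder
# recovers the HDR color as rgb * alpha * range.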
class HDRLM_EncodeToRGBM(bpy.types.Operator):
"""Encodes the currently viewed HDR image to RGBM format"""
bl_idname = "image.rgbm_encode"
bl_label = "Encode HDR to RGBM"
bl_description = "Encode HDR/float image to RGBM format. Create new image with '_RGBM.png' prefix"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
sima = context.space_data
return sima.type == 'IMAGE_EDITOR' and sima.image and sima.image.is_float
def execute(self, context):
sima = context.space_data
# Image
ima = sima.image
ima_name = ima.name
if ima.colorspace_settings.name != 'Linear':
ima.colorspace_settings.name = 'Linear'
# Removing .exr or .hdr prefix
if ima_name[-4:] == '.exr' or ima_name[-4:] == '.hdr':
ima_name = ima_name[:-4]
target_ima = bpy.data.images.get(ima_name + '_RGBM.png')
if not target_ima:
target_ima = bpy.data.images.new(
name = ima_name + '_RGBM.png',
width = ima.size[0],
height = ima.size[1],
alpha = True,
float_buffer = False
)
num_pixels = len(ima.pixels)
result_pixel = list(ima.pixels)
# Encode to RGBM
for i in range(0,num_pixels,4):
for j in range(3):
result_pixel[i+j] *= 1.0 / 8.0
result_pixel[i+3] = saturate(max(result_pixel[i], result_pixel[i+1], result_pixel[i+2], 1e-6))
            result_pixel[i+3] = math.ceil(result_pixel[i+3] * 255.0) / 255.0
for j in range(3):
result_pixel[i+j] /= result_pixel[i+3]
target_ima.pixels = result_pixel
sima.image = target_ima
return {'FINISHED'}
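# Standalone RGBM encoder used by the build pipeline: writes a non-float
# '<name>_encoded' image and saves it next to outDir as a PNG at the
# requested quality.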
def encodeImageRGBM(self, image, maxRange, outDir, quality):
input_image = bpy.data.images[image.name]
image_name = input_image.name
if input_image.colorspace_settings.name != 'Linear':
input_image.colorspace_settings.name = 'Linear'
# Removing .exr or .hdr prefix
if image_name[-4:] == '.exr' or image_name[-4:] == '.hdr':
image_name = image_name[:-4]
target_image = bpy.data.images.get(image_name + '_encoded')
print(image_name + '_encoded')
if not target_image:
target_image = bpy.data.images.new(
name = image_name + '_encoded',
width = input_image.size[0],
height = input_image.size[1],
alpha = True,
float_buffer = False
)
num_pixels = len(input_image.pixels)
result_pixel = list(input_image.pixels)
for i in range(0,num_pixels,4):
for j in range(3):
            result_pixel[i+j] *= 1.0 / maxRange
result_pixel[i+3] = saturate(max(result_pixel[i], result_pixel[i+1], result_pixel[i+2], 1e-6))
result_pixel[i+3] = math.ceil(result_pixel[i+3] * 255.0) / 255.0
for j in range(3):
result_pixel[i+j] /= result_pixel[i+3]
target_image.pixels = result_pixel
input_image = target_image
#Save RGBM
input_image.filepath_raw = outDir + "_encoded.png"
input_image.file_format = "PNG"
bpy.context.scene.render.image_settings.quality = quality
input_image.save_render(filepath = input_image.filepath_raw, scene = bpy.context.scene)
#input_image.
#input_image.save()
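# RGBD variant: alpha stores a normalized divisor d instead of a multiplier,
# so a decoder recovers the HDR color as rgb * maxRange / (d * 255).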
def encodeImageRGBD(self, image, maxRange, outDir):
input_image = bpy.data.images[image.name]
image_name = input_image.name
if input_image.colorspace_settings.name != 'Linear':
input_image.colorspace_settings.name = 'Linear'
# Removing .exr or .hdr prefix
if image_name[-4:] == '.exr' or image_name[-4:] == '.hdr':
image_name = image_name[:-4]
target_image = bpy.data.images.get(image_name + '_encoded')
if not target_image:
target_image = bpy.data.images.new(
name = image_name + '_encoded',
width = input_image.size[0],
height = input_image.size[1],
alpha = True,
float_buffer = False
)
num_pixels = len(input_image.pixels)
result_pixel = list(input_image.pixels)
for i in range(0,num_pixels,4):
m = saturate(max(result_pixel[i], result_pixel[i+1], result_pixel[i+2], 1e-6))
d = max(maxRange / m, 1)
d = saturate( math.floor(d) / 255 )
result_pixel[i] = result_pixel[i] * d * 255 / maxRange
result_pixel[i+1] = result_pixel[i+1] * d * 255 / maxRange
result_pixel[i+2] = result_pixel[i+2] * d * 255 / maxRange
result_pixel[i+3] = d
target_image.pixels = result_pixel
input_image = target_image
#Save RGBD
input_image.filepath_raw = outDir + "_encoded.png"
input_image.file_format = "PNG"
input_image.save()
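# Linear interpolation helper, used below to place generated shader nodes
# halfway between two existing node locations.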
def lerpNodePoints(self, a, b, c):
return (a + c * (b - a))
def draw(self, context):
row = self.layout.row()
row.label(text="Convert:")
row = self.layout.row()
row.operator("image.rgbm_encode")
def load_pfm(file, as_flat_list=False):
"""
Load a PFM file into a Numpy array. Note that it will have
a shape of H x W, not W x H. Returns a tuple containing the
loaded image and the scale factor from the file.
Usage:
with open(r"path/to/file.pfm", "rb") as f:
data, scale = load_pfm(f)
"""
#start = time()
header = file.readline().decode("utf-8").rstrip()
if header == "PF":
color = True
elif header == "Pf":
color = False
else:
raise Exception("Not a PFM file.")
dim_match = re.match(r"^(\d+)\s(\d+)\s$", file.readline().decode("utf-8"))
if dim_match:
width, height = map(int, dim_match.groups())
else:
raise Exception("Malformed PFM header.")
scale = float(file.readline().decode("utf-8").rstrip())
if scale < 0: # little-endian
endian = "<"
scale = -scale
else:
endian = ">" # big-endian
data = np.fromfile(file, endian + "f")
shape = (height, width, 3) if color else (height, width)
if as_flat_list:
result = data
else:
result = np.reshape(data, shape)
#print("PFM import took %.3f s" % (time() - start))
return result, scale
def save_pfm(file, image, scale=1):
"""
Save a Numpy array to a PFM file.
Usage:
with open(r"/path/to/out.pfm", "wb") as f:
save_pfm(f, data)
"""
#start = time()
if image.dtype.name != "float32":
raise Exception("Image dtype must be float32 (got %s)" % image.dtype.name)
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: # greyscale
color = False
else:
raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")
file.write(b"PF\n" if color else b"Pf\n")
file.write(b"%d %d\n" % (image.shape[1], image.shape[0]))
endian = image.dtype.byteorder
if endian == "<" or endian == "=" and sys.byteorder == "little":
scale = -scale
file.write(b"%f\n" % scale)
image.tofile(file)
#print("PFM export took %.3f s" % (time() - start))
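# "Copy" caching mode: a duplicate with the "_Original" suffix is kept alive
# via a fake user; restore swaps it back into the material slot and deletes
# the modified copy.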
def backup_material_copy(slot):
material = slot.material
dup = material.copy()
dup.name = material.name + "_Original"
dup.use_fake_user = True
def backup_material_restore(slot):
material = slot.material
if material.name + "_Original" in bpy.data.materials:
original = bpy.data.materials[material.name + "_Original"]
slot.material = original
material.name = material.name + "_temp"
original.name = original.name[:-9]
original.use_fake_user = False
material.user_clear()
bpy.data.materials.remove(material)
else:
pass
#Check if material has nodes with lightmap prefix
def backup_material_cache():
filepath = bpy.data.filepath
dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.hdrlm_lightmap_savedir)
if not os.path.isdir(dirpath):
os.mkdir(dirpath)
cachefilepath = os.path.join(dirpath, "HDRLM_cache.blend")
bpy.ops.wm.save_as_mainfile(filepath=cachefilepath, copy=True)
def backup_material_cache_restore(matname):
from os.path import join
path_to_scripts = bpy.utils.script_paths()[0]
path_to_script = join(path_to_scripts, 'addons_contrib', 'io_material_loader')
material_locator = "\\Material\\"
file_name = "practicality.blend"
opath = "//" + file_name + material_locator + matname
dpath = join(path_to_script, file_name) + material_locator
bpy.ops.wm.link_append(
filepath=opath, # "//filename.blend\\Folder\\"
filename=matname, # "material_name
directory=dpath, # "fullpath + \\Folder\\
filemode=1,
link=False,
autoselect=False,
active_layer=True,
instance_groups=False,
relative_path=True)
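# AO build: hides everything else from the render, ensures each selected mesh
# has a material and a "UVMap_Lightmap" layer, bakes AO into an
# '<object>_bakedAO' image and wires it into each material via a mix node.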
def HDRLM_Build_AO(self, context):
scene = bpy.context.scene
scene.render.engine = "CYCLES"
prevObjRenderset = []
#for obj in bpy.context.selected_objects:
# obj.hdrlm_mesh_lightmap_use = True
#Store previous settings for hide_render
for obj in bpy.data.objects:
if obj.type == "MESH":
if not obj.hide_render:
prevObjRenderset.append(obj.name)
for obj in bpy.data.objects:
if obj.type == "MESH":
obj.hide_render = True
#Bake AO for selection
for obj in bpy.context.selected_objects:
if obj.type == "MESH":
obj.hide_render = False
for obj in bpy.context.selected_objects:
if obj.type == "MESH":
if len(obj.material_slots) == 0:
single = False
number = 0
while single == False:
matname = obj.name + ".00" + str(number)
if matname in bpy.data.materials:
single = False
number = number + 1
else:
mat = bpy.data.materials.new(name=matname)
mat.use_nodes = True
obj.data.materials.append(mat)
single = True
for mat in bpy.data.materials:
if mat.name.endswith('_bakedAO'):
bpy.data.materials.remove(mat, do_unlink=True)
for img in bpy.data.images:
if img.name == obj.name + "_bakedAO":
bpy.data.images.remove(img, do_unlink=True)
#Single user materials?
ob = obj
for slot in ob.material_slots:
# Temp material already exists
if slot.material.name.endswith('_tempAO'):
continue
n = slot.material.name + '_' + ob.name + '_tempAO'
if not n in bpy.data.materials:
slot.material = slot.material.copy()
slot.material.name = n
#Add images for baking
img_name = obj.name + '_bakedAO'
            res = int(obj.hdrlm_mesh_lightmap_resolution) // int(scene.hdrlm_lightmap_scale)
if img_name not in bpy.data.images or bpy.data.images[img_name].size[0] != res or bpy.data.images[img_name].size[1] != res:
img = bpy.data.images.new(img_name, res, res, alpha=False, float_buffer=False)
img.name = img_name
else:
img = bpy.data.images[img_name]
for slot in obj.material_slots:
mat = slot.material
mat.use_nodes = True
nodes = mat.node_tree.nodes
if "Baked AO Image" in nodes:
img_node = nodes["Baked AO Image"]
else:
img_node = nodes.new('ShaderNodeTexImage')
img_node.name = 'Baked AO Image'
img_node.location = (100, 100)
img_node.image = img
img_node.select = True
nodes.active = img_node
if scene.hdrlm_apply_on_unwrap:
bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
uv_layers = obj.data.uv_layers
if not "UVMap_Lightmap" in uv_layers:
uvmap = uv_layers.new(name="UVMap_Lightmap")
uv_layers.active_index = len(uv_layers) - 1
if obj.hdrlm_mesh_lightmap_unwrap_mode == "Lightmap":
bpy.ops.uv.lightmap_pack('EXEC_SCREEN', PREF_CONTEXT='ALL_FACES', PREF_MARGIN_DIV=obj.hdrlm_mesh_unwrap_margin)
elif obj.hdrlm_mesh_lightmap_unwrap_mode == "Smart Project":
bpy.ops.object.select_all(action='DESELECT')
obj.select_set(True)
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.uv.smart_project(angle_limit=45.0, island_margin=obj.hdrlm_mesh_unwrap_margin, user_area_weight=1.0, use_aspect=True, stretch_to_bounds=False)
else:
pass
else:
for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_Lightmap':
uv_layers.active_index = i
break
for slot in obj.material_slots:
#ONLY 1 MATERIAL PER OBJECT SUPPORTED FOR NOW!
nodetree = slot.material.node_tree
bpy.context.active_object.active_material = slot.material
n = slot.material.name[:-5] + '_bakedAO'
if not n in bpy.data.materials:
mat = bpy.data.materials.new(name=n)
mat.use_nodes = True
nodes = mat.node_tree.nodes
img_node = nodes.new('ShaderNodeTexImage')
img_node.name = "Baked AO Image"
img_node.location = (100, 100)
img_node.image = bpy.data.images[img_name]
mat.node_tree.links.new(img_node.outputs[0], nodes['Principled BSDF'].inputs[0])
else:
mat = bpy.data.materials[n]
nodes = mat.node_tree.nodes
nodes['Baked AO Image'].image = bpy.data.images[img_name]
for slot in obj.material_slots:
nodetree = bpy.data.materials[slot.name].node_tree
nodes = nodetree.nodes
mainNode = nodetree.nodes[0].inputs[0].links[0].from_node
for n in nodes:
if "LM" in n.name:
nodetree.links.new(n.outputs[0], mainNode.inputs[0])
for n in nodes:
if "Lightmap" in n.name:
#print("Remove")
nodes.remove(n)
print("Baking: " + bpy.context.view_layer.objects.active.name)
bpy.ops.object.bake(type="AO", margin=scene.hdrlm_dilation_margin)
for slot in obj.material_slots:
mat = slot.material
if mat.name.endswith('_tempAO'):
old = slot.material
slot.material = bpy.data.materials[old.name.split('_' + obj.name)[0]]
bpy.data.materials.remove(old, do_unlink=True)
uv_layers = obj.data.uv_layers
uv_layers.active_index = 0
for slot in obj.material_slots:
nodetree = bpy.data.materials[slot.name].node_tree
outputNode = nodetree.nodes[0]
mainNode = outputNode.inputs[0].links[0].from_node
if len(mainNode.inputs[0].links) == 0:
baseColorValue = mainNode.inputs[0].default_value
baseColorNode = nodetree.nodes.new(type="ShaderNodeRGB")
baseColorNode.outputs[0].default_value = baseColorValue
baseColorNode.location = ((mainNode.location[0]-500,mainNode.location[1]))
baseColorNode.name = "AO_BasecolorNode_A"
else:
baseColorNode = mainNode.inputs[0].links[0].from_node
baseColorNode.name = "AO_P"
nodePos1 = mainNode.location
nodePos2 = baseColorNode.location
mixNode = nodetree.nodes.new(type="ShaderNodeMixRGB")
mixNode.name = "AO_Multiplication"
mixNode.location = lerpNodePoints(self, nodePos1, nodePos2, 0.5)
if scene.hdrlm_indirect_only:
mixNode.blend_type = 'ADD'
else:
mixNode.blend_type = 'MULTIPLY'
mixNode.inputs[0].default_value = 1.0
LightmapNode = nodetree.nodes.new(type="ShaderNodeTexImage")
LightmapNode.location = ((baseColorNode.location[0]-300,baseColorNode.location[1] + 300))
LightmapNode.image = bpy.data.images[obj.name + "_bakedAO"]
LightmapNode.name = "AO_Image"
UVLightmap = nodetree.nodes.new(type="ShaderNodeUVMap")
UVLightmap.uv_map = "UVMap_Lightmap"
UVLightmap.name = "AO_UV"
UVLightmap.location = ((-1000, baseColorNode.location[1] + 300))
nodetree.links.new(baseColorNode.outputs[0], mixNode.inputs[1])
nodetree.links.new(LightmapNode.outputs[0], mixNode.inputs[2])
nodetree.links.new(mixNode.outputs[0], mainNode.inputs[0])
nodetree.links.new(UVLightmap.outputs[0], LightmapNode.inputs[0])
return{'FINISHED'}
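# World volume probe: renders the six cube faces from a temporary 90-degree
# camera at the 3D cursor, then runs cmft twice - once to merge the faces
# into a latlong HDR and once to extract spherical harmonics coefficients.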
def createWorldVolume(self, context, arm):
camera = bpy.data.cameras.new("WVCam")
cam_obj = bpy.data.objects.new("WVCamera", camera)
bpy.context.collection.objects.link(cam_obj)
cam_obj.location = bpy.context.scene.cursor.location
camera.angle = math.radians(90)
prevResx = bpy.context.scene.render.resolution_x
prevResy = bpy.context.scene.render.resolution_y
prevCam = bpy.context.scene.camera
prevEngine = bpy.context.scene.render.engine
bpy.context.scene.camera = cam_obj
bpy.context.scene.render.engine = "CYCLES"
bpy.context.scene.render.resolution_x = 512
bpy.context.scene.render.resolution_y = 512
savedir = os.path.join(os.path.dirname(bpy.data.filepath), bpy.context.scene.hdrlm_lightmap_savedir)
directory = os.path.join(savedir, "Probes")
t = 90
positions = {
"xp" : (math.radians(t),0,0),
"zp" : (math.radians(t),0,math.radians(t)),
"xm" : (math.radians(t),0,math.radians(t*2)),
"zm" : (math.radians(t),0,math.radians(-t)),
"yp" : (math.radians(t*2),0,math.radians(t)),
"ym" : (0,0,math.radians(t))
}
cam = cam_obj
image_settings = bpy.context.scene.render.image_settings
image_settings.file_format = "HDR"
image_settings.color_depth = '32'
for val in positions:
cam.rotation_euler = positions[val]
filename = os.path.join(directory, val) + ".hdr"
        bpy.context.scene.render.filepath = filename
bpy.ops.render.render(write_still=True)
sdk_path = arm.utils.get_sdk_path()
if arm.utils.get_os() == 'win':
cmft_path = sdk_path + '/lib/armory_tools/cmft/cmft.exe'
elif arm.utils.get_os() == 'mac':
cmft_path = '"' + sdk_path + '/lib/armory_tools/cmft/cmft-osx"'
else:
cmft_path = '"' + sdk_path + '/lib/armory_tools/cmft/cmft-linux64"'
output_file_irr = "COMBINED2.hdr"
posx = directory + "/" + "xp" + ".hdr"
negx = directory + "/" + "xm" + ".hdr"
posy = directory + "/" + "yp" + ".hdr"
negy = directory + "/" + "ym" + ".hdr"
posz = directory + "/" + "zp" + ".hdr"
negz = directory + "/" + "zm" + ".hdr"
output = directory + "/" + "combined"
if arm.utils.get_os() == 'win':
envpipe = [cmft_path,
'--inputFacePosX', posx,
'--inputFaceNegX', negx,
'--inputFacePosY', posy,
'--inputFaceNegY', negy,
'--inputFacePosZ', posz,
'--inputFaceNegZ', negz,
'--output0', output,
'--output0params',
'hdr,rgbe,latlong']
else:
        envpipe = [cmft_path + ' --inputFacePosX ' + posx
        + ' --inputFaceNegX ' + negx
        + ' --inputFacePosY ' + posy
        + ' --inputFaceNegY ' + negy
        + ' --inputFacePosZ ' + posz
        + ' --inputFaceNegZ ' + negz
        + ' --output0 ' + output
        + ' --output0params ' + 'hdr,rgbe,latlong']
subprocess.call(envpipe, shell=True)
input2 = output + ".hdr"
output2 = directory + "/" + "combined2"
if arm.utils.get_os() == 'win':
envpipe2 = [cmft_path,
'--input', input2,
'--filter', 'shcoeffs',
'--outputNum', '1',
'--output0', output2]
else:
        envpipe2 = [cmft_path
        + ' --input ' + input2
        + ' --filter ' + 'shcoeffs'
        + ' --outputNum ' + '1'
        + ' --output0 ' + output2]
subprocess.call(envpipe2, shell=True)
for obj in bpy.data.objects:
obj.select_set(False)
cam_obj.select_set(True)
bpy.ops.object.delete()
bpy.context.scene.render.resolution_x = prevResx
bpy.context.scene.render.resolution_y = prevResy
bpy.context.scene.camera = prevCam
bpy.context.scene.render.engine = prevEngine
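# Main bake pipeline: sanitizes names, switches to Cycles with a quality
# preset, bakes diffuse lighting per object into '<object>_baked' HDR images,
# then optionally denoises (OIDN/Optix), filters (OpenCV) and encodes
# (RGBM/RGBD) before wiring the lightmaps back in and restoring settings.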
def HDRLM_Build(self, context):
scene = context.scene
cycles = bpy.data.scenes[scene.name].cycles
if not bpy.data.is_saved:
self.report({'INFO'}, "Please save your file first")
return{'FINISHED'}
if scene.hdrlm_denoise_use:
if scene.hdrlm_oidn_path == "":
scriptDir = os.path.dirname(os.path.realpath(__file__))
if os.path.isdir(os.path.join(scriptDir,"OIDN")):
scene.hdrlm_oidn_path = os.path.join(scriptDir,"OIDN")
if scene.hdrlm_oidn_path == "":
self.report({'INFO'}, "No denoise OIDN path assigned")
return{'FINISHED'}
total_time = time()
for obj in bpy.data.objects:
if "_" in obj.name:
obj.name = obj.name.replace("_",".")
if " " in obj.name:
obj.name = obj.name.replace(" ",".")
if "[" in obj.name:
obj.name = obj.name.replace("[",".")
if "]" in obj.name:
obj.name = obj.name.replace("]",".")
# if len(obj.name) > 60:
# obj.name = "TooLongName"
# invalidNaming = True
for slot in obj.material_slots:
if "_" in slot.material.name:
slot.material.name = slot.material.name.replace("_",".")
if " " in slot.material.name:
slot.material.name = slot.material.name.replace(" ",".")
if "[" in slot.material.name:
slot.material.name = slot.material.name.replace("[",".")
if "[" in slot.material.name:
slot.material.name = slot.material.name.replace("]",".")
# if len(slot.material.name) > 60:
# slot.material.name = "TooLongName"
# invalidNaming = True
# if(invalidNaming):
# self.report({'INFO'}, "Naming errors")
# return{'FINISHED'}
prevCyclesSettings = [
cycles.samples,
cycles.max_bounces,
cycles.diffuse_bounces,
cycles.glossy_bounces,
cycles.transparent_max_bounces,
cycles.transmission_bounces,
cycles.volume_bounces,
cycles.caustics_reflective,
cycles.caustics_refractive,
cycles.device,
scene.render.engine
]
cycles.device = scene.hdrlm_mode
scene.render.engine = "CYCLES"
if scene.hdrlm_quality == "Preview":
cycles.samples = 32
cycles.max_bounces = 1
cycles.diffuse_bounces = 1
cycles.glossy_bounces = 1
cycles.transparent_max_bounces = 1
cycles.transmission_bounces = 1
cycles.volume_bounces = 1
cycles.caustics_reflective = False
cycles.caustics_refractive = False
elif scene.hdrlm_quality == "Medium":
cycles.samples = 64
cycles.max_bounces = 2
cycles.diffuse_bounces = 2
cycles.glossy_bounces = 2
cycles.transparent_max_bounces = 2
cycles.transmission_bounces = 2
cycles.volume_bounces = 2
cycles.caustics_reflective = False
cycles.caustics_refractive = False
elif scene.hdrlm_quality == "High":
cycles.samples = 256
cycles.max_bounces = 128
cycles.diffuse_bounces = 128
cycles.glossy_bounces = 128
cycles.transparent_max_bounces = 128
cycles.transmission_bounces = 128
cycles.volume_bounces = 128
cycles.caustics_reflective = False
cycles.caustics_refractive = False
elif scene.hdrlm_quality == "Production":
cycles.samples = 512
cycles.max_bounces = 256
cycles.diffuse_bounces = 256
cycles.glossy_bounces = 256
cycles.transparent_max_bounces = 256
cycles.transmission_bounces = 256
cycles.volume_bounces = 256
cycles.caustics_reflective = True
cycles.caustics_refractive = True
else:
pass
#Configure Lights
for obj in bpy.data.objects:
if obj.type == "LIGHT":
if obj.hdrlm_light_lightmap_use:
if obj.hdrlm_light_casts_shadows:
bpy.data.lights[obj.name].cycles.cast_shadow = True
else:
bpy.data.lights[obj.name].cycles.cast_shadow = False
bpy.data.lights[obj.name].energy = bpy.data.lights[obj.name].energy * obj.hdrlm_light_intensity_scale
#Configure World
for obj in bpy.data.objects:
pass
bakeNum = 0
currBakeNum = 0
for obj in bpy.data.objects:
if obj.type == "MESH":
if obj.hdrlm_mesh_lightmap_use:
bakeNum = bakeNum + 1
#Bake
for obj in bpy.data.objects:
if obj.type == "MESH":
if obj.hdrlm_mesh_lightmap_use:
currBakeNum = currBakeNum + 1
bpy.ops.object.select_all(action='DESELECT')
bpy.context.view_layer.objects.active = obj
obj.select_set(True)
obs = bpy.context.view_layer.objects
active = obs.active
if len(obj.material_slots) == 0:
single = False
number = 0
while single == False:
matname = obj.name + ".00" + str(number)
if matname in bpy.data.materials:
single = False
number = number + 1
else:
mat = bpy.data.materials.new(name=matname)
mat.use_nodes = True
obj.data.materials.append(mat)
single = True
#if len(obj.material_sl) > 1:
for slot in obj.material_slots:
mat = slot.material
if mat.users > 1:
copymat = mat.copy()
slot.material = copymat
# #Make sure there's one material available
# if len(obj.material_slots) == 0:
# if not "MaterialDefault" in bpy.data.materials:
# mat = bpy.data.materials.new(name='MaterialDefault')
# mat.use_nodes = True
# else:
# mat = bpy.data.materials['MaterialDefault']
# obj.data.materials.append(mat)
# --------- MATERIAL BACKUP
if scene.hdrlm_caching_mode == "Copy":
for slot in obj.material_slots:
matname = slot.material.name
originalName = matname + "_Original"
hasOriginal = False
if originalName in bpy.data.materials:
hasOriginal = True
else:
hasOriginal = False
if hasOriginal:
backup_material_restore(slot)
#Copy materials
backup_material_copy(slot)
else: #Cache blend
pass
# --------- MATERIAL BACKUP END
#Remove existing baked materials and images
for mat in bpy.data.materials:
if mat.name.endswith('_baked'):
bpy.data.materials.remove(mat, do_unlink=True)
for img in bpy.data.images:
if img.name == obj.name + "_baked":
bpy.data.images.remove(img, do_unlink=True)
#Single user materials? ONLY ONE MATERIAL SLOT?...
                #Fine so far
ob = obj
for slot in ob.material_slots:
# Temp material already exists
if slot.material.name.endswith('_temp'):
continue
n = slot.material.name + '_' + ob.name + '_temp'
if not n in bpy.data.materials:
slot.material = slot.material.copy()
slot.material.name = n
                #Fine so far...
#Add images for baking
img_name = obj.name + '_baked'
                res = int(obj.hdrlm_mesh_lightmap_resolution) // int(scene.hdrlm_lightmap_scale)
if img_name not in bpy.data.images or bpy.data.images[img_name].size[0] != res or bpy.data.images[img_name].size[1] != res:
img = bpy.data.images.new(img_name, res, res, alpha=False, float_buffer=True)
img.name = img_name
else:
img = bpy.data.images[img_name]
for slot in obj.material_slots:
mat = slot.material
mat.use_nodes = True
nodes = mat.node_tree.nodes
if "Baked Image" in nodes:
img_node = nodes["Baked Image"]
else:
img_node = nodes.new('ShaderNodeTexImage')
img_node.name = 'Baked Image'
img_node.location = (100, 100)
img_node.image = img
img_node.select = True
nodes.active = img_node
if scene.hdrlm_apply_on_unwrap:
bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
uv_layers = obj.data.uv_layers
if not "UVMap_Lightmap" in uv_layers:
uvmap = uv_layers.new(name="UVMap_Lightmap")
uv_layers.active_index = len(uv_layers) - 1
if obj.hdrlm_mesh_lightmap_unwrap_mode == "Lightmap":
bpy.ops.uv.lightmap_pack('EXEC_SCREEN', PREF_CONTEXT='ALL_FACES', PREF_MARGIN_DIV=obj.hdrlm_mesh_unwrap_margin)
elif obj.hdrlm_mesh_lightmap_unwrap_mode == "Smart Project":
bpy.ops.object.select_all(action='DESELECT')
obj.select_set(True)
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.uv.smart_project(angle_limit=45.0, island_margin=obj.hdrlm_mesh_unwrap_margin, user_area_weight=1.0, use_aspect=True, stretch_to_bounds=False)
else:
pass
else:
for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_Lightmap':
uv_layers.active_index = i
break
for slot in obj.material_slots:
#ONLY 1 MATERIAL PER OBJECT SUPPORTED FOR NOW!
nodetree = slot.material.node_tree
#HER FEIL?
#bpy.context.active_object.active_material = slot.material
n = slot.material.name[:-5] + '_baked'
if not n in bpy.data.materials:
mat = bpy.data.materials.new(name=n)
mat.use_nodes = True
nodes = mat.node_tree.nodes
img_node = nodes.new('ShaderNodeTexImage')
img_node.name = "Baked Image"
img_node.location = (100, 100)
img_node.image = bpy.data.images[img_name]
mat.node_tree.links.new(img_node.outputs[0], nodes['Principled BSDF'].inputs[0])
else:
mat = bpy.data.materials[n]
nodes = mat.node_tree.nodes
nodes['Baked Image'].image = bpy.data.images[img_name]
for slot in obj.material_slots:
nodetree = bpy.data.materials[slot.name].node_tree
nodes = nodetree.nodes
mainNode = nodetree.nodes[0].inputs[0].links[0].from_node
for n in nodes:
if "LM" in n.name:
nodetree.links.new(n.outputs[0], mainNode.inputs[0])
for n in nodes:
if "Lightmap" in n.name:
#print("Remove")
nodes.remove(n)
print("Baking: " + bpy.context.view_layer.objects.active.name + " | " + str(currBakeNum) + " out of " + str(bakeNum))
if scene.hdrlm_indirect_only:
bpy.ops.object.bake(type="DIFFUSE", pass_filter={"INDIRECT"}, margin=scene.hdrlm_dilation_margin)
else:
bpy.ops.object.bake(type="DIFFUSE", pass_filter={"DIRECT","INDIRECT"}, margin=scene.hdrlm_dilation_margin)
#Unlinked here?..
for mat in bpy.data.materials:
if mat.name.endswith('_baked'):
has_user = False
for obj in bpy.data.objects:
if obj.type == 'MESH' and mat.name.endswith('_' + obj.name + '_baked'):
has_user = True
break
if not has_user:
bpy.data.materials.remove(mat, do_unlink=True)
filepath = bpy.data.filepath
dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.hdrlm_lightmap_savedir)
print("Checking for: " + dirpath)
if not os.path.isdir(dirpath):
os.mkdir(dirpath)
#Save
for obj in bpy.data.objects:
if obj.type == "MESH":
if obj.hdrlm_mesh_lightmap_use:
img_name = obj.name + '_baked'
bakemap_path = os.path.join(dirpath, img_name)
bpy.data.images[img_name].filepath_raw = bakemap_path + ".hdr"
bpy.data.images[img_name].file_format = "HDR"
bpy.data.images[img_name].save()
#Denoise here
if scene.hdrlm_denoise_use:
if scene.hdrlm_denoiser == "OIDN":
image = bpy.data.images[img_name]
width = image.size[0]
height = image.size[1]
                        image_output_array = np.array(image.pixels)
                        image_output_array = image_output_array.reshape(height, width, 4)
                        image_output_array = np.float32(image_output_array[:,:,:3])
image_output_destination = bakemap_path + ".pfm"
with open(image_output_destination, "wb") as fileWritePFM:
save_pfm(fileWritePFM, image_output_array)
denoise_output_destination = bakemap_path + "_denoised.pfm"
Scene = context.scene
verbose = Scene.hdrlm_oidn_verbose
affinity = Scene.hdrlm_oidn_affinity
if verbose:
v = "3"
else:
v = "0"
if affinity:
a = "1"
else:
a = "0"
threads = str(Scene.hdrlm_oidn_threads)
maxmem = str(Scene.hdrlm_oidn_maxmem)
if platform.system() == 'Windows':
oidnPath = os.path.join(bpy.path.abspath(scene.hdrlm_oidn_path),"denoise-win.exe")
pipePath = [oidnPath, '-hdr', image_output_destination, '-o', denoise_output_destination, '-verbose', v, '-threads', threads, '-affinity', a, '-maxmem', maxmem]
elif platform.system() == 'Darwin':
oidnPath = os.path.join(bpy.path.abspath(scene.hdrlm_oidn_path),"denoise-osx")
                            pipePath = [oidnPath + ' -hdr ' + image_output_destination + ' -o ' + denoise_output_destination + ' -verbose ' + v]
else:
oidnPath = os.path.join(bpy.path.abspath(scene.hdrlm_oidn_path),"denoise-linux")
                            pipePath = [oidnPath + ' -hdr ' + image_output_destination + ' -o ' + denoise_output_destination + ' -verbose ' + v]
if not verbose:
denoisePipe = subprocess.Popen(pipePath, stdout=subprocess.PIPE, stderr=None, shell=True)
else:
denoisePipe = subprocess.Popen(pipePath, shell=True)
denoisePipe.communicate()[0]
with open(denoise_output_destination, "rb") as f:
denoise_data, scale = load_pfm(f)
ndata = np.array(denoise_data)
                        ndata2 = np.dstack( (ndata, np.ones((height, width)) ) )
img_array = ndata2.ravel()
bpy.data.images[image.name].pixels = img_array
bpy.data.images[image.name].filepath_raw = bakemap_path + "_denoised.hdr"
bpy.data.images[image.name].file_format = "HDR"
bpy.data.images[image.name].save()
elif scene.hdrlm_denoiser == "Optix":
image_output_destination = bakemap_path + ".hdr"
denoise_output_destination = bakemap_path + "_denoised.hdr"
if platform.system() == 'Windows':
optixPath = os.path.join(bpy.path.abspath(scene.hdrlm_optix_path),"Denoiser.exe")
pipePath = [optixPath, '-i', image_output_destination, '-o', denoise_output_destination]
elif platform.system() == 'Darwin':
print("Mac for Optix is still unsupported")
else:
print("Linux for Optix is still unsupported")
denoisePipe = subprocess.Popen(pipePath, stdout=subprocess.PIPE, stderr=None, shell=True)
#if not verbose:
# denoisePipe = subprocess.Popen(pipePath, stdout=subprocess.PIPE, stderr=None, shell=True)
#else:
# denoisePipe = subprocess.Popen(pipePath, shell=True)
denoisePipe.communicate()[0]
else:
                        print("Error: unknown denoiser selected: " + scene.hdrlm_denoiser)
if scene.hdrlm_filtering_use:
if scene.hdrlm_denoise_use:
filter_file_input = img_name + "_denoised.hdr"
else:
filter_file_input = img_name + ".hdr"
if all([module_pip, module_opencv]):
filter_file_output = img_name + "_finalized.hdr"
os.chdir(os.path.dirname(bakemap_path))
opencv_process_image = cv2.imread(filter_file_input, -1)
if scene.hdrlm_filtering_mode == "Box":
if scene.hdrlm_filtering_box_strength % 2 == 0:
kernel_size = (scene.hdrlm_filtering_box_strength + 1,scene.hdrlm_filtering_box_strength + 1)
else:
kernel_size = (scene.hdrlm_filtering_box_strength,scene.hdrlm_filtering_box_strength)
opencv_bl_result = cv2.blur(opencv_process_image, kernel_size)
if scene.hdrlm_filtering_iterations > 1:
for x in range(scene.hdrlm_filtering_iterations):
opencv_bl_result = cv2.blur(opencv_bl_result, kernel_size)
elif scene.hdrlm_filtering_mode == "Gaussian":
if scene.hdrlm_filtering_gaussian_strength % 2 == 0:
kernel_size = (scene.hdrlm_filtering_gaussian_strength + 1,scene.hdrlm_filtering_gaussian_strength + 1)
else:
kernel_size = (scene.hdrlm_filtering_gaussian_strength,scene.hdrlm_filtering_gaussian_strength)
sigma_size = 0
opencv_bl_result = cv2.GaussianBlur(opencv_process_image, kernel_size, sigma_size)
if scene.hdrlm_filtering_iterations > 1:
for x in range(scene.hdrlm_filtering_iterations):
opencv_bl_result = cv2.GaussianBlur(opencv_bl_result, kernel_size, sigma_size)
elif scene.hdrlm_filtering_mode == "Bilateral":
diameter_size = scene.hdrlm_filtering_bilateral_diameter
sigma_color = scene.hdrlm_filtering_bilateral_color_deviation
sigma_space = scene.hdrlm_filtering_bilateral_coordinate_deviation
opencv_bl_result = cv2.bilateralFilter(opencv_process_image, diameter_size, sigma_color, sigma_space)
if scene.hdrlm_filtering_iterations > 1:
for x in range(scene.hdrlm_filtering_iterations):
opencv_bl_result = cv2.bilateralFilter(opencv_bl_result, diameter_size, sigma_color, sigma_space)
else:
if scene.hdrlm_filtering_median_kernel % 2 == 0:
kernel_size = (scene.hdrlm_filtering_median_kernel + 1 , scene.hdrlm_filtering_median_kernel + 1)
else:
kernel_size = (scene.hdrlm_filtering_median_kernel, scene.hdrlm_filtering_median_kernel)
opencv_bl_result = cv2.medianBlur(opencv_process_image, kernel_size[0])
if scene.hdrlm_filtering_iterations > 1:
for x in range(scene.hdrlm_filtering_iterations):
opencv_bl_result = cv2.medianBlur(opencv_bl_result, kernel_size[0])
cv2.imwrite(filter_file_output, opencv_bl_result)
bpy.ops.image.open(filepath=os.path.join(os.path.dirname(bakemap_path),filter_file_output))
bpy.data.images[obj.name+"_baked"].name = obj.name + "_temp"
bpy.data.images[obj.name+"_baked_finalized.hdr"].name = obj.name + "_baked"
bpy.data.images.remove(bpy.data.images[obj.name+"_temp"])
else:
print("Modules missing...")
if scene.hdrlm_encoding_mode == "RGBM":
encodeImageRGBM(self, bpy.data.images[obj.name+"_baked"], 6.0, bakemap_path, scene.hdrlm_compression)
bpy.data.images[obj.name+"_baked"].name = obj.name + "_temp"
bpy.data.images[obj.name+"_baked_encoded"].name = obj.name + "_baked"
bpy.data.images.remove(bpy.data.images[obj.name+"_temp"])
elif scene.hdrlm_encoding_mode == "RGBD":
                    encodeImageRGBD(self, bpy.data.images[obj.name+"_baked"], 6.0, bakemap_path)
bpy.data.images[obj.name+"_baked"].name = obj.name + "_temp"
bpy.data.images[obj.name+"_baked_encoded"].name = obj.name + "_baked"
bpy.data.images.remove(bpy.data.images[obj.name+"_temp"])
#Apply and restore materials
for obj in bpy.data.objects:
if obj.type == "MESH":
if obj.hdrlm_mesh_lightmap_use:
for slot in obj.material_slots:
mat = slot.material
if mat.name.endswith('_temp'):
old = slot.material
slot.material = bpy.data.materials[old.name.split('_' + obj.name)[0]]
bpy.data.materials.remove(old, do_unlink=True)
uv_layers = obj.data.uv_layers
uv_layers.active_index = 0
for slot in obj.material_slots:
#if(scene.hdrlm_encoding_armory_setup):
# print("Setup Armory")
nodetree = bpy.data.materials[slot.name].node_tree
outputNode = nodetree.nodes[0]
mainNode = outputNode.inputs[0].links[0].from_node
if len(mainNode.inputs[0].links) == 0:
baseColorValue = mainNode.inputs[0].default_value
baseColorNode = nodetree.nodes.new(type="ShaderNodeRGB")
baseColorNode.outputs[0].default_value = baseColorValue
baseColorNode.location = ((mainNode.location[0]-500,mainNode.location[1]))
baseColorNode.name = "Lightmap_BasecolorNode_A"
else:
baseColorNode = mainNode.inputs[0].links[0].from_node
baseColorNode.name = "LM_P"
nodePos1 = mainNode.location
nodePos2 = baseColorNode.location
mixNode = nodetree.nodes.new(type="ShaderNodeMixRGB")
mixNode.name = "Lightmap_Multiplication"
mixNode.location = lerpNodePoints(self, nodePos1, nodePos2, 0.5)
if scene.hdrlm_indirect_only:
mixNode.blend_type = 'ADD'
else:
mixNode.blend_type = 'MULTIPLY'
mixNode.inputs[0].default_value = 1.0
LightmapNode = nodetree.nodes.new(type="ShaderNodeTexImage")
LightmapNode.location = ((baseColorNode.location[0]-300,baseColorNode.location[1] + 300))
LightmapNode.image = bpy.data.images[obj.name + "_baked"]
LightmapNode.name = "Lightmap_Image"
UVLightmap = nodetree.nodes.new(type="ShaderNodeUVMap")
UVLightmap.uv_map = "UVMap_Lightmap"
UVLightmap.name = "Lightmap_UV"
UVLightmap.location = ((-1000, baseColorNode.location[1] + 300))
nodetree.links.new(baseColorNode.outputs[0], mixNode.inputs[1])
nodetree.links.new(LightmapNode.outputs[0], mixNode.inputs[2])
nodetree.links.new(mixNode.outputs[0], mainNode.inputs[0])
nodetree.links.new(UVLightmap.outputs[0], LightmapNode.inputs[0])
#for mat in bpy.data.materials:
# for node in mat.node_tree.nodes:
# if node.type == "RGB":
# mat.node_tree.nodes.remove(node)
for mat in bpy.data.materials:
if mat.name.endswith('_baked'):
bpy.data.materials.remove(mat, do_unlink=True)
if not scene.hdrlm_keep_cache_files:
filepath = bpy.data.filepath
dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.hdrlm_lightmap_savedir)
if os.path.isdir(dirpath):
list = os.listdir(dirpath)
for file in list:
if file.endswith(".pfm"):
os.remove(os.path.join(dirpath,file))
if file.endswith("denoised.hdr"):
os.remove(os.path.join(dirpath,file))
#for img in bpy.data.images:
# if not img.users:
# bpy.data.images.remove(img)
#pass
#Post bake
cycles.samples = prevCyclesSettings[0]
cycles.max_bounces = prevCyclesSettings[1]
cycles.diffuse_bounces = prevCyclesSettings[2]
cycles.glossy_bounces = prevCyclesSettings[3]
cycles.transparent_max_bounces = prevCyclesSettings[4]
cycles.transmission_bounces = prevCyclesSettings[5]
cycles.volume_bounces = prevCyclesSettings[6]
cycles.caustics_reflective = prevCyclesSettings[7]
cycles.caustics_refractive = prevCyclesSettings[8]
cycles.device = prevCyclesSettings[9]
scene.render.engine = prevCyclesSettings[10]
for mat in bpy.data.materials:
mat.update_tag()
print("The whole ordeal took: %.3f s" % (time() - total_time))
return{'FINISHED'}
def register():
bpy.utils.register_class(HDRLM_EncodeToRGBM)
bpy.utils.register_class(HDRLM_BuildAO)
bpy.utils.register_class(HDRLM_BuildLighting)
bpy.utils.register_class(HDRLM_BuildLightingSelected)
bpy.utils.register_class(HDRLM_ToggleEnableforSelection)
bpy.utils.register_class(HDRLM_ToggleDisableforSelection)
bpy.utils.register_class(HDRLM_CleanLighting)
bpy.utils.register_class(HDRLM_LightmapFolder)
bpy.utils.register_class(HDRLM_PT_Panel)
bpy.utils.register_class(HDRLM_PT_Unwrap)
bpy.utils.register_class(HDRLM_PT_Denoise)
bpy.utils.register_class(HDRLM_PT_Filtering)
bpy.utils.register_class(HDRLM_PT_Encoding)
bpy.utils.register_class(HDRLM_PT_Compression)
#bpy.utils.register_class(HDRLM_PT_Additional)
bpy.utils.register_class(HDRLM_CreateWorldVolume)
#bpy.utils.register_class(HDRLM_PT_LightmapList)
bpy.utils.register_class(HDRLM_PT_MeshMenu)
bpy.utils.register_class(HDRLM_PT_LightMenu)
bpy.utils.register_class(HDRLM_InstallOpenCV)
bpy.types.IMAGE_PT_image_properties.append(draw)
bpy.types.Scene.hdrlm_quality = EnumProperty(
items = [('Preview', 'Preview', 'TODO'),
('Medium', 'Medium', 'TODO'),
('High', 'High', 'TODO'),
('Production', 'Production', 'TODO'),
('Custom', 'Custom', 'TODO')],
name = "Lightmapping Quality", description="TODO", default='Preview')
bpy.types.Scene.hdrlm_lightmap_scale = EnumProperty(
items = [('16', '1/16', 'TODO'),
('8', '1/8', 'TODO'),
('4', '1/4', 'TODO'),
('2', '1/2', 'TODO'),
('1', '1/1', 'TODO')],
name = "Lightmap Resolution scale", description="TODO", default="1")
bpy.types.Scene.hdrlm_lightmap_savedir = StringProperty(name="Lightmap Directory", description="TODO", default="Lightmaps", subtype="FILE_PATH")
bpy.types.Scene.hdrlm_mode = EnumProperty(
items = [('CPU', 'CPU', 'TODO'),
('GPU', 'GPU', 'TODO')],
name = "Device", description="TODO", default="CPU")
bpy.types.Scene.hdrlm_apply_on_unwrap = BoolProperty(name="Apply scale", description="TODO", default=False)
bpy.types.Scene.hdrlm_indirect_only = BoolProperty(name="Indirect Only", description="TODO", default=False)
bpy.types.Scene.hdrlm_keep_cache_files = BoolProperty(name="Keep cache files", description="TODO", default=False)
bpy.types.Scene.hdrlm_dilation_margin = IntProperty(name="Dilation margin", default=16, min=1, max=64, subtype='PIXEL')
bpy.types.Scene.hdrlm_delete_cache = BoolProperty(name="Delete cache", description="TODO", default=True)
bpy.types.Scene.hdrlm_denoise_use = BoolProperty(name="Enable denoising", description="TODO", default=False)
bpy.types.Scene.hdrlm_oidn_path = StringProperty(name="OIDN Path", description="TODO", default="", subtype="FILE_PATH")
bpy.types.Scene.hdrlm_oidn_verbose = BoolProperty(name="Verbose", description="TODO")
bpy.types.Scene.hdrlm_oidn_threads = IntProperty(name="Threads", default=0, min=0, max=64, description="Amount of threads to use. Set to 0 for auto-detect.")
    bpy.types.Scene.hdrlm_oidn_maxmem = IntProperty(name="Tiling max Memory", default=0, min=0, max=32768, description="Use tiling for memory conservation. Set to 0 to disable tiling.")
bpy.types.Scene.hdrlm_oidn_affinity = BoolProperty(name="Set Affinity", description="TODO")
bpy.types.Scene.hdrlm_oidn_use_albedo = BoolProperty(name="Use albedo map", description="TODO")
bpy.types.Scene.hdrlm_oidn_use_normal = BoolProperty(name="Use normal map", description="TODO")
bpy.types.Scene.hdrlm_denoiser = EnumProperty(
items = [('OIDN', 'OIDN', 'TODO.'),
('Optix', 'Optix', 'TODO.')],
name = "Denoiser", description="TODO", default='OIDN')
bpy.types.Scene.hdrlm_optix_path = StringProperty(name="Optix Path", description="TODO", default="", subtype="FILE_PATH")
bpy.types.Scene.hdrlm_filtering_use = BoolProperty(name="Enable filtering", description="TODO", default=False)
#bpy.types.Scene.hdrlm_filtering_gimp_path = StringProperty(name="Gimp Path", description="TODO", default="", subtype="FILE_PATH")
bpy.types.Scene.hdrlm_filtering_mode = EnumProperty(
items = [('Box', 'Box', 'TODO'),
('Gaussian', 'Gaussian', 'TODO'),
('Bilateral', 'Bilateral', 'TODO'),
('Median', 'Median', 'TODO')],
name = "Filter", description="TODO", default='Gaussian')
bpy.types.Scene.hdrlm_caching_mode = EnumProperty(
items = [('Copy', 'Copy Material', 'TODO'),
('Cache', 'Blend Cache', 'TODO')],
name = "Caching mode", description="TODO", default='Copy')
bpy.types.Scene.hdrlm_filtering_gaussian_strength = IntProperty(name="Gaussian Strength", default=3, min=1, max=50)
bpy.types.Scene.hdrlm_filtering_iterations = IntProperty(name="Filter Iterations", default=1, min=1, max=50)
bpy.types.Scene.hdrlm_filtering_box_strength = IntProperty(name="Box Strength", default=1, min=1, max=50)
bpy.types.Scene.hdrlm_filtering_bilateral_diameter = IntProperty(name="Pixel diameter", default=3, min=1, max=50)
bpy.types.Scene.hdrlm_filtering_bilateral_color_deviation = IntProperty(name="Color deviation", default=75, min=1, max=100)
    bpy.types.Scene.hdrlm_filtering_bilateral_coordinate_deviation = IntProperty(name="Coordinate deviation", default=75, min=1, max=100)
bpy.types.Scene.hdrlm_filtering_median_kernel = IntProperty(name="Median kernel", default=3, min=1, max=5)
bpy.types.Scene.hdrlm_encoding_mode = EnumProperty(
items = [('RGBM', 'RGBM', '8-bit HDR encoding. Good for compatibility, good for memory but has banding issues.'),
('RGBD', 'RGBD', '8-bit HDR encoding. Same as RGBM, but better for highlights and stylized looks.'),
('RGBE', 'RGBE', '32-bit HDR RGBE encoding. Best quality, but high memory usage and not compatible with all devices.')],
name = "Encoding Mode", description="TODO", default='RGBE')
bpy.types.Scene.hdrlm_encoding_range = IntProperty(name="Encoding range", description="Higher gives a larger HDR range, but also gives more banding.", default=6, min=1, max=10)
bpy.types.Scene.hdrlm_encoding_armory_setup = BoolProperty(name="Use Armory decoder", description="TODO", default=True)
bpy.types.Scene.hdrlm_encoding_colorspace = EnumProperty(
items = [('XYZ', 'XYZ', 'TODO'),
('sRGB', 'sRGB', 'TODO'),
('Raw', 'Raw', 'TODO'),
('Non-Color', 'Non-Color', 'TODO'),
('Linear ACES', 'Linear ACES', 'TODO'),
('Linear', 'Linear', 'TODO'),
('Filmic Log', 'Filmic Log', 'TODO')],
name = "Color Space", description="TODO", default='Linear')
bpy.types.Scene.hdrlm_on_identical_mat = EnumProperty(
items = [('Create', 'Create', 'TODO.'),
('Share', 'Share', 'TODO.')],
name = "On identical materials", description="TODO", default='Create')
bpy.types.Scene.hdrlm_baking_mode = EnumProperty(
items = [('Sequential', 'Sequential', 'TODO.'),
('Invoked', 'Invoked', 'TODO.')],
        name = "Baking mode", description="TODO", default='Sequential')
bpy.types.Scene.hdrlm_lightmap_mode = EnumProperty(
items = [('Only Light', 'Only Light', 'TODO.'),
('With Albedo', 'With Albedo', 'TODO.'),
('Full', 'Full', 'TODO.')],
        name = "Lightmap mode", description="TODO", default='Full')
bpy.types.Scene.hdrlm_compression = IntProperty(name="PNG Compression", description="0 = No compression. 100 = Maximum compression.", default=0, min=0, max=100)
bpy.types.Object.hdrlm_mesh_lightmap_use = BoolProperty(name="Enable Lightmapping", description="TODO", default=False)
bpy.types.Object.hdrlm_mesh_apply_after = BoolProperty(name="Apply after build", description="TODO", default=False)
bpy.types.Object.hdrlm_mesh_emissive = BoolProperty(name="Include emissive light", description="TODO", default=False)
bpy.types.Object.hdrlm_mesh_emissive_shadow = BoolProperty(name="Emissive casts shadows", description="TODO", default=False)
bpy.types.Object.hdrlm_mesh_lightmap_resolution = EnumProperty(
items = [('32', '32', 'TODO'),
('64', '64', 'TODO'),
('128', '128', 'TODO'),
('256', '256', 'TODO'),
('512', '512', 'TODO'),
('1024', '1024', 'TODO'),
('2048', '2048', 'TODO'),
('4096', '4096', 'TODO'),
('8192', '8192', 'TODO')],
name = "Lightmap Resolution", description="TODO", default='256')
bpy.types.Object.hdrlm_mesh_lightmap_unwrap_mode = EnumProperty(
items = [('Lightmap', 'Lightmap', 'TODO'),
('Smart Project', 'Smart Project', 'TODO'),
('Copy Existing', 'Copy Existing', 'TODO')],
name = "Unwrap Mode", description="TODO", default='Smart Project')
bpy.types.Object.hdrlm_mesh_unwrap_margin = FloatProperty(name="Unwrap Margin", default=0.1, min=0.0, max=1.0, subtype='FACTOR')
bpy.types.Object.hdrlm_mesh_bake_ao = BoolProperty(name="Bake AO", description="TODO", default=False)
bpy.types.Object.hdrlm_light_lightmap_use = BoolProperty(name="Enable for Lightmapping", description="TODO", default=True)
bpy.types.Object.hdrlm_light_type = EnumProperty(
items = [('Static', 'Static', 'Static baked light with both indirect and direct. Hidden after baking.'),
('Stationary', 'Stationary', 'Semi dynamic light. Indirect baked, but can be moved, change intensity and color.')],
name = "Light Type", description="TODO", default='Static')
bpy.types.Object.hdrlm_light_intensity_scale = FloatProperty(name="Intensity Scale", default=1.0, min=0.0, max=10.0, subtype='FACTOR')
bpy.types.Object.hdrlm_light_casts_shadows = BoolProperty(name="Casts shadows", description="TODO", default=True)
def unregister():
bpy.utils.unregister_class(HDRLM_EncodeToRGBM)
bpy.utils.unregister_class(HDRLM_BuildAO)
bpy.utils.unregister_class(HDRLM_BuildLighting)
bpy.utils.unregister_class(HDRLM_BuildLightingSelected)
bpy.utils.unregister_class(HDRLM_ToggleEnableforSelection)
bpy.utils.unregister_class(HDRLM_ToggleDisableforSelection)
bpy.utils.unregister_class(HDRLM_CleanLighting)
bpy.utils.unregister_class(HDRLM_LightmapFolder)
bpy.utils.unregister_class(HDRLM_PT_Panel)
bpy.utils.unregister_class(HDRLM_PT_Unwrap)
bpy.utils.unregister_class(HDRLM_PT_Denoise)
bpy.utils.unregister_class(HDRLM_PT_Filtering)
bpy.utils.unregister_class(HDRLM_PT_Encoding)
bpy.utils.unregister_class(HDRLM_PT_Compression)
#bpy.utils.unregister_class(HDRLM_PT_Additional)
bpy.utils.unregister_class(HDRLM_CreateWorldVolume)
#bpy.utils.unregister_class(HDRLM_PT_LightmapList)
bpy.utils.unregister_class(HDRLM_PT_MeshMenu)
bpy.utils.unregister_class(HDRLM_PT_LightMenu)
bpy.utils.unregister_class(HDRLM_InstallOpenCV)
bpy.types.IMAGE_PT_image_properties.remove(draw)
if __name__ == "__main__":
register()
#OPTION!:
#BAKE WITH ALBEDO
#BAKE WITH DESIGNATED MATERIAL
#Mesh name follows object name..
#
#import bpy
#
#for obj in bpy.data.objects:
# obj.data.name = obj.name
|
[
"KleemannAlexander@gmail.com"
] |
KleemannAlexander@gmail.com
|
52ccceca34d6d4cec9966afc584abee44795bdae
|
10e4e5f3b66f39a3949557aab86e95ae6e219556
|
/evil/test/test_parser.py
|
756880c651a7e05604d556e2e6e77d0dfec957de
|
[] |
no_license
|
dextero/evilvm
|
9afca07d51ed9560f4e6023b2c23704255a6607f
|
b3a9ab10947d10628dce3ce81861a5266d33bd38
|
refs/heads/master
| 2020-03-20T07:17:34.912643
| 2018-06-29T20:22:53
| 2018-06-29T20:22:53
| 137,277,415
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,575
|
py
|
import unittest
from evil.parser import *
from evil.cpu import Operations
class ParserTest(unittest.TestCase):
def test_parse_label(self):
self.assertEqual(Label('foo'), Statement.parse('foo:'))
def test_parse_constant_definition(self):
self.assertEqual(ConstantDefinition('FOO', NumericExpression(1)),
Statement.parse('FOO = 1'))
self.assertEqual(ConstantDefinition('FOO', CharacterExpression('a')),
Statement.parse("FOO = 'a'"))
self.assertEqual(ConstantDefinition('FOO', CharacterExpression('\x42')),
Statement.parse("FOO = '\x42'"))
self.assertEqual(ConstantDefinition('FOO', ConstantExpression('BAR')),
Statement.parse('FOO = BAR'))
self.assertEqual(ConstantDefinition('FOO',
BinaryExpression(NumericExpression(1),
'+',
NumericExpression(2))),
Statement.parse('FOO = 1 + 2'))
def test_parse_data(self):
self.assertEqual(Data(DataType.from_fmt('b'),
ExpressionList([NumericExpression(1),
NumericExpression(2),
NumericExpression(3)])),
Statement.parse('db 1, 2, 3'))
self.assertEqual(Data(DataType.from_fmt('w'),
ExpressionList([NumericExpression(1),
NumericExpression(2),
NumericExpression(3)])),
Statement.parse('dw 1, 2, 3'))
self.assertEqual(Data(DataType.from_fmt('b'),
ExpressionList([CharacterExpression('a'),
CharacterExpression('b'),
CharacterExpression('\x42')])),
Statement.parse('db "ab\x42"'))
def test_parse_instruction(self):
self.assertEqual(Instruction(Operations.movb_i2r,
ArgumentList([Register.A,
NumericExpression(1)])),
Statement.parse('movb.i2r a, 1'))
self.assertEqual(Instruction(Operations.movb_i2r,
ArgumentList([Register.A,
ConstantExpression('WIDTH')])),
Statement.parse('movb.i2r a, WIDTH'))
self.assertEqual(Instruction(Operations.movb_i2r,
ArgumentList([Register.A,
BinaryExpression(ConstantExpression('WIDTH'),
'-',
NumericExpression(1))])),
Statement.parse('movb.i2r a, WIDTH - 1'))
def test_parse_expression_with_parens(self):
self.assertEqual(Instruction(Operations.movb_i2r,
ArgumentList([NumericExpression(1),
BinaryExpression(NumericExpression(2),
'+',
NumericExpression(3))])),
Statement.parse('movb.i2r (1), (2+3)'))
self.assertEqual(Instruction(Operations.movb_i2r,
ArgumentList([
NumericExpression(1),
BinaryExpression(
NumericExpression(2),
'+',
BinaryExpression(
NumericExpression(3),
'-',
BinaryExpression(
NumericExpression(4),
'*',
NumericExpression(5))))
])),
Statement.parse('movb.i2r (((1))), (2+(3-(4)*5))'))
|
[
"marcin@mradomski.pl"
] |
marcin@mradomski.pl
|
713245f2eed71c403e73486f52448030d77a1350
|
b4e28b35e134018bd5514f17552f5c4522faf61a
|
/profiles/views.py
|
6900f5d5db954c72eac50e6e69c53129fb44be37
|
[] |
no_license
|
mmrshohan/hubblelook-app
|
ccdd403026d237f66f110f5dcefff1bfe56ee9aa
|
51a25335633a488c0081f791b21040c99e1cd69f
|
refs/heads/master
| 2021-07-12T00:09:26.560552
| 2019-02-05T03:17:09
| 2019-02-05T03:17:09
| 142,327,506
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,736
|
py
|
from django.shortcuts import render
from django.urls import reverse
from django.urls import reverse_lazy
from django.shortcuts import redirect
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.views.generic import ListView, TemplateView
from django.views.generic.base import ContextMixin
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
# Models from Mianhubblelook app
from mainhubblelook.models import MainModel, OfficalLetter
# froms Mianhubblelook app
from mainhubblelook.forms import Official_Letter_Form
from profiles.models import Profile, Team
from .forms import Edit_Profile_Form, TeamForm
from mainhubblelook.forms import Quick_word_form, Add_product_view, Article_form
user = get_user_model()
class ProfileView(ListView):
template_name = "profile.html"
queryset = MainModel.objects.all()
context_object_name = 'posts'
    def get_context_data(self, **kwargs):
        context = super(ProfileView, self).get_context_data(**kwargs)
        context['official_letter'] = OfficalLetter.objects.all()
        context['edit'] = Profile.objects.all().first()
        return context
class ProfileCreateView(CreateView): #Profile create view
model = Profile
form_class = Edit_Profile_Form
template_name = "edit-profile.html"
success_url = '/portfolio/'
# --------- thought update and delete view ---------------------
#Micro Thoughts update view microthought update view
class MicroThoughtsUpdateView(UpdateView):
model = MainModel
form_class = Quick_word_form
template_name = 'forms/quickword.html'
success_url = '/'
#Micro Thoughts delete View microthought delete view
class MicroThoughtsDeleteView(DeleteView):
model = MainModel
form_class = Quick_word_form
template_name = 'profile.html'
success_url = '/'
# --------- thought update and delete view end here ---------------------
# --------- product update and delete view ---------------------
#Product Update view
class AddProductUpdateView(UpdateView):
model = MainModel
form_class = Add_product_view
template_name = 'forms/addproduct_form.html'
success_url = '/'
#product delete view
class AddProductDeleteView(DeleteView):
model = MainModel
form_class = Add_product_view
template_name = 'profile.html'
success_url = "/"
# --------- product update and delete view end here ---------------------
#Add product update UpdateView Article update view
class ArticleUpdateView(UpdateView):
model = MainModel
form_class = Article_form
template_name = 'forms/article-form.html'
success_url = '/'
#Add prodcuct delete View Article delete view
class ArticleDeleteView(DeleteView):
model = MainModel
form_class = Article_form
template_name = 'profile.html'
success_url = reverse_lazy('profiles:portfolio')
# --------- All views for official letter ---------------------
class Offical_Letter_View(ListView): # Official letter list view
template_name = 'offical-letter-list-view.html'
queryset = OfficalLetter.objects.all()
context_object_name = 'letter_list'
class Offical_Letter_Create_View(CreateView): # official letter create view
form_class = Official_Letter_Form
model = OfficalLetter
template_name = "official-letter-form.html"
success_url = "/"
class Offical_Letter_Update_View(UpdateView): # official letter update view
model = OfficalLetter
form_class = Official_Letter_Form
template_name = 'official-letter-form.html'
success_url = "/"
class Offical_Letter_Delete_View(DeleteView): # official letter delete view
model = OfficalLetter
form_class = Official_Letter_Form
template_name = 'offical-letter-list-view.html'
success_url = "/"
# --------- All views for official letter end here ---------------------
# --------- post's sort view ---------------------
class MicroThoughtsSortView(ListView):
template_name= "Sort-model/micro-thought-sort.html"
queryset = MainModel.objects.all()
context_object_name = 'posts'
class ProductSortView(ListView):
template_name= "Sort-model/product-sort.html"
queryset = MainModel.objects.all()
context_object_name = 'posts'
class ArticleSortView(ListView):
template_name= "Sort-model/article-sort.html"
queryset = MainModel.objects.all()
context_object_name = 'posts'
# --------- post's sort view end here ---------------------
# --------- Team page create and edit view ------------
class TeamPageView(TemplateView):
template_name = "team/team.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['team'] = Team.objects.all().first()
return context
class Team_Create_View(CreateView):
form_class = TeamForm
model = Team
template_name = "team/team-form.html"
success_url = "/"
class Team_Update_View(UpdateView):
model = Team
form_class = TeamForm
template_name = 'team/team-form.html'
success_url = "/"
class Team_Delete_View(DeleteView):
model = Team
form_class = TeamForm
template_name = 'team/team.html'
success_url = "/"
#------- Team view end here ---------------
class Customer_Care_View(TemplateView):
template_name = 'customer-care.html'
class Settings_View(TemplateView):
template_name = 'settings.html'
|
[
"mmrshohan09@gmail.com"
] |
mmrshohan09@gmail.com
|
87b5b866783807ecb5033d508c8c6fa54c82f4a6
|
eba82ef06bf93061b88aa7f0794c63814149c943
|
/data/urls.py
|
88f57d096a705cbf2abf963fdf18f4e82a73af71
|
[] |
no_license
|
NuDron/FanVue
|
bf55642b6fcf282d43e83e13f260a4d90f92f28a
|
ece5090eb6753868e660050683c397d581850617
|
refs/heads/main
| 2023-04-08T13:51:35.093544
| 2021-04-27T09:39:18
| 2021-04-27T09:39:18
| 362,057,334
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 739
|
py
|
from django.urls import path, include
from rest_framework.routers import SimpleRouter
from .serializers import ArtistSerializer, AlbumSerializer
from .views import ArtistViewSet
from . import views
router = SimpleRouter()
router.register("api_artists", ArtistViewSet)
urlpatterns = [
path('v1/', include((router.urls, 'api_v1'))),
path('api_artist/<id>/', views.ArtistDetail.as_view(), name='artist_detail'),
path('api_album/', views.AlbumDetail.as_view(), name='album_detail'),
path('artist/<pk>/', views.art_view.as_view(), name='artist'),
path('album/<pk>/', views.album_view.as_view(), name='album'),
path('genre/<pk>/', views.genre_view.as_view(), name='genre'),
path('', views.home_view, name='home'),
]
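# Note: the SimpleRouter registration above generates the standard list and
# detail routes for ArtistViewSet (assuming it is a ModelViewSet), so under
# the 'v1/' include they resolve as, for example:
#   /v1/api_artists/        -> list / create
#   /v1/api_artists/<pk>/   -> retrieve / update / destroy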
|
[
"noreply@github.com"
] |
NuDron.noreply@github.com
|
920a7385980152c3fb1832f4d7d2be695cc0b3e3
|
6f044a0541ddf467bb6251645c3d8107df5f5756
|
/message/serialize.py
|
983e91b05f2a947332a1b91d868c065f3d308468
|
[] |
no_license
|
tpvt99/new-social-network-backend
|
04ae9f0551c09eceb5fd6b4bcf50430243e53199
|
a18d6279a27ba0ce3af1f5d6e985b4b147a4233a
|
refs/heads/master
| 2021-09-04T01:50:43.430961
| 2018-01-14T08:59:41
| 2018-01-14T08:59:41
| 117,415,992
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,156
|
py
|
from django.db.models.query import QuerySet
from django.core.exceptions import ObjectDoesNotExist
from user.serialize import serialize_user_basic
from account.models import OnlineTime
def friend_serialize(friend):
data_serialize = []
if type(friend) == QuerySet:
for i in friend:
try:
time = i.friend.onlinetime.time
except ObjectDoesNotExist:
time = i.friend.last_login
data = {
'friend': serialize_user_basic(i.friend),
'time': time.isoformat()
}
data_serialize.append(data)
    else:
        try:
            time = friend.friend.onlinetime.time
        except ObjectDoesNotExist:
            time = friend.friend.last_login
data = {
'friend': serialize_user_basic(friend.friend),
'time': time.isoformat()
}
data_serialize.append(data)
return data_serialize
def on_friend_serialize(friend):
data_serialize = []
if type(friend) == QuerySet:
for i in friend:
try:
time = i.friend.onlinetime.time
except ObjectDoesNotExist:
time = i.friend.last_login
data = {
'f': i.friend.user_id if type(i.friend.user_id) is str else i.friend.user_id.hex,
't': time.isoformat()
}
data_serialize.append(data)
    else:
        try:
            time = friend.friend.onlinetime.time
        except ObjectDoesNotExist:
            time = friend.friend.last_login
data = {
'f': friend.friend.user_id if type(friend.friend.user_id) is str else friend.friend.user_id.hex,
't': time.isoformat()
}
data_serialize.append(data)
return data_serialize
def message_serialize(mess):
data_se = []
if type(mess) == QuerySet:
for i in mess:
data = {
'message': {
'text': i.text
},
'us': i.user_send.user_id if type(i.user_send.user_id) is str else i.user_send.user_id.hex,
'time': i.create_time.isoformat()
}
data_se.append(data)
else:
data = {
'message': {
'text': mess.text
},
'us': mess.user_send.user_id if type(mess.user_send.user_id) is str else mess.user_send.user_id.hex,
'time': mess.create_time.isoformat()
}
data_se.append(data)
return data_se
def pollmessage_serialize(a):
#a is list of MessageUserInfo
data = []
for m in a:
frame = m.frame
# saving the fetch
m.fetch = True
m.save()
# done saving the fetch
user_request_id = m.user.user_id.hex
users_id = [i.hex for i in list(frame.users.all().values_list('user_id', flat = True))]
users_id.remove(user_request_id)
data_se = {
'target': frame.message_type,
'total': len(users_id) + 1,
'allExp': users_id,
'to': user_request_id,
'fid': frame.id
}
data.append(data_se)
return data
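# A minimal usage sketch for the serializers above; the model names and the
# relation shapes (a `friend` FK carrying `onlinetime`/`last_login`, a
# `user_send` FK on messages) are inferred from the attribute accesses and
# are assumptions, not confirmed by this file:
#   friends_payload = friend_serialize(Friend.objects.filter(user=me))
#   chat_payload = message_serialize(Message.objects.filter(frame=frame))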
|
[
"tranphong96.hbk@gmail.com"
] |
tranphong96.hbk@gmail.com
|
c79dba71f7ebe424f45d1b0b78d05bff552e4d3f
|
a25bcf8ef670790b04bb24141d986c6542143e23
|
/main/settings.py
|
95e98bad6fb131d6f01604c179868cbe54fc1a9e
|
[] |
no_license
|
kwonsujee/django_facebook
|
e27110412ddf399a7164042dd1e01ff829f56121
|
8505d6c251ba1c9ee3bf50056166006735fc82bf
|
refs/heads/master
| 2022-12-08T05:40:58.880887
| 2020-09-07T09:42:46
| 2020-09-07T09:42:46
| 293,034,343
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,123
|
py
|
"""
Django settings for main project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dn=qf=cf&8rimsp%p9lk)h#3cp^m1$hh=1%3m0nw-k!e^50q9^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost','127.0.0.1','.pythonanywhere.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'facebook'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'main.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'main.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'ko'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
|
[
"lkdj1205@naver.com"
] |
lkdj1205@naver.com
|
f030b47e84a5b1a150befc9521d28ac44b0c9c2b
|
53b67db91791f91b712cff3d1b2add1cd1aa8d36
|
/manage.py
|
083aaf0037f8117577915ec4023802b837355dff
|
[] |
no_license
|
florije1988/django_rest_demo
|
af95f9ca17237748b8136db0100051538c697a8f
|
a63f9eba27125e900a27605bcc7cae9c8be13b0c
|
refs/heads/master
| 2016-09-16T02:03:26.512039
| 2014-11-12T10:11:42
| 2014-11-12T10:11:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_rest_demo.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
[
"florije1988@gmail.com"
] |
florije1988@gmail.com
|
8bb72338276793c2c478b7c0cb117fd31eeb6b17
|
ffa26f16bf90ace7bcac003144e58d3a485f5b72
|
/snippets/urls.py
|
6c2300c171831ac5a87eb2870f2fb26d4272d2b6
|
[] |
no_license
|
nouhben/test-repo
|
f2675b301777b8c5eefee8dac7b8502aae735c9c
|
985071c3d168d5c0dac3d76614b617b9f144f0c3
|
refs/heads/main
| 2023-02-03T05:01:51.759224
| 2020-12-17T10:04:56
| 2020-12-17T10:04:56
| 322,244,743
| 0
| 0
| null | 2020-12-17T10:04:58
| 2020-12-17T09:28:19
|
Python
|
UTF-8
|
Python
| false
| false
| 328
|
py
|
from django.urls import path, include
from . import views
from rest_framework.urlpatterns import format_suffix_patterns
urlpatterns = [
path('snippets/', views.snippet_list, name='snippests'),
path('snippets/<int:pk>/', views.snippet_detail, name='snippest-detail'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
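# format_suffix_patterns wraps each route with an optional format suffix, so
# for example both of the following resolve to snippet_detail:
#   /snippets/1/
#   /snippets/1.json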
|
[
"benkadi.nouh@icloud.com"
] |
benkadi.nouh@icloud.com
|
172e89cae1e9a80fe0e9e1f674f9bfd31c911a0c
|
fd88ba1c649539d17191ca118b9704b6ed0031bf
|
/全站图片链接抓取.py
|
38dbe32e84ab0e3842df787a7daa379bc5b6a28d
|
[] |
no_license
|
ayuday/pachong
|
f542a8ee0f069c69a2628c035ff3e76155fd9e1a
|
fe8768ac162a7ac00697cb8c20e521ee064bf81e
|
refs/heads/master
| 2020-06-18T00:42:11.060909
| 2018-09-20T15:32:41
| 2018-09-20T15:32:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,361
|
py
|
import requests
import re
from bs4 import BeautifulSoup
from time import sleep
# data-original="(https://i.loli.net\S+)"
urls_path = r'C:\Users\owen\Desktop\urls.txt'
image_path = r'C:\Users\owen\Desktop\res.txt'
pattern = r'data-original="(https://i.loli.net\S+)"'
def get_image_url(text):
    '''
    Extract the hotlinked image addresses; returns the set of image links.
    '''
    result = re.findall(pattern, text)
    return set(result)
def get_content(url, num=3):
    '''
    Fetch the page and return the article content, retrying up to `num` times.
    '''
    if num <= 0:
        # Retries exhausted: fall back to returning the URL itself.
        return [url]
    response = requests.get(url, timeout=5)
    if response.status_code == requests.codes.ok:
        response.encoding = 'utf-8'
        Soup = BeautifulSoup(response.text, 'html.parser')
        post = Soup.select('#article-post')
        return str(post[0])
    else:
        sleep(1)
        return get_content(url, num=num-1)
def save_to_text(urls):
    '''
    Append the image links to the result file.
    '''
    with open(image_path, 'a+') as res:
        for url in urls:
            res.write(str(url)+'\n')
if __name__ == '__main__':
    with open(urls_path, 'r') as urls:
        for url in urls:
            try:
                content = get_content(url)
                save_to_text(get_image_url(content))
                print(str(url) + ' saved.')
            except Exception:
                print(url)
            sleep(1)
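# A quick sanity check for get_image_url, assuming the hotlink pattern defined
# above; the sample markup is illustrative, not taken from the crawled site:
#   >>> get_image_url('<img data-original="https://i.loli.net/a.png">')
#   {'https://i.loli.net/a.png'}
# urls.txt is assumed to hold one article URL per line.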
|
[
"qcgzxw@qq.com"
] |
qcgzxw@qq.com
|
2d09ba06fefef1fd6fc5b38917cd579402a12f58
|
c9579ecc197a3213fff8c3f46f425db74e759977
|
/pratice7.5.py
|
4282c4e06f0882ece3c48f08ed22aaf2fb2172a5
|
[] |
no_license
|
onlycau/core_python_programming2
|
d4b1379bf34e1c7db78cc7f21c7e75d98427bac4
|
e20c7bee7b6ff1771e20c876bc62668f5c73f593
|
refs/heads/master
| 2020-06-09T09:38:34.713087
| 2019-06-28T07:36:08
| 2019-06-28T07:36:08
| 193,417,806
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,330
|
py
|
#!/usr/bin/env python3
import time
import os
class login(object):
def __init__(self):
self.users = self.users_get()
def users_get(self):
with open('names', 'r') as f:
names = []
for line in f:
names.append(line.strip())
with open('passwords', 'r') as f:
passwords = []
for line in f:
passwords.append(line.strip())
return dict(zip(names, passwords))
def new_usr(self):
username = ''
password_1 = ''
        while True:
            username = input('please enter a name: ').lower()
            if not self.users.get(username):
                break
            print('that username already exists, please try another.')
while True:
password_1 = input('please enter your password:').lower()
password_2 = input('please enter your password again:').lower()
if password_1 == password_2:
break
else:
print('different password, please try again.')
with open('names', 'a+') as f:
f.write(os.linesep + username)
with open('passwords', 'a+') as f:
f.write(os.linesep + password_1)
        print('name: %s\npassword: %s\nnew user created successfully.' % (username, password_1))
def old_usr(self, username):
while True:
password = input('enter your password:').lower()
if self.users.get(username) == password:
print('login succeded.')
return True
else:
print('wrong password')
    def time_log(self):
        pass
    def new_or_old(self):
        while True:
            usr = input('please enter your user name:').lower()
            if usr == 'onlycau':
                self.administration()
            elif usr in self.users:
                return usr
            else:
                q = input('are you a new user?(y or n)').lower()
                if q == 'y':
                    return False
                else:
                    print("that's a wrong username")
    def administration(self):
        pass
def test():
ob = login()
usr = ob.new_or_old()
if usr:
ob.old_usr(usr)
else:
ob.new_usr()
if __name__ == '__main__':
test()
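# Note: the script assumes two plain-text files, 'names' and 'passwords', in
# the working directory, where line i of 'passwords' belongs to line i of
# 'names'. A minimal sketch to bootstrap them for a first run (the sample
# credentials are illustrative only):
#   with open('names', 'w') as f:
#       f.write('alice')
#   with open('passwords', 'w') as f:
#       f.write('secret')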
|
[
"990246984@qq.com"
] |
990246984@qq.com
|
fe5fde3f921f53a66c6654807bc0ffcea9e11437
|
16028f3868ae934bb97474bc888f18069eba2ac9
|
/playground/ella-mypage-75b28d7/tests/unit_project/test_utils/test_settings.py
|
6349f8b12d490370ac5b66ffccc9ee52f05ace4c
|
[] |
no_license
|
veilevil/test
|
d536d2a459aa250538f2e9bcac88739e22b2ea11
|
18469d66690e05b5ec973f6a89371419498bcedb
|
refs/heads/master
| 2020-12-30T10:50:01.160357
| 2012-12-18T15:38:32
| 2012-12-18T15:38:32
| 3,152,088
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,043
|
py
|
import sys
from unittest import TestCase
from django import conf
from mypage.utils.settings import Settings
class DummyModule(object):
SAMPLE_CONFIG_OPTION = 'sample-config-value'
class TestSettings(TestCase):
def setUp(self):
dummy_module = DummyModule()
sys.modules['tmp_tests_someapp_conf'] = dummy_module
self.settings = Settings('tmp_tests_someapp_conf')
def test_value_only_in_app_settings(self):
self.failUnlessEqual(self.settings.SAMPLE_CONFIG_OPTION, 'sample-config-value')
def test_value_overriden_via_django_conf_settings(self):
self.failUnlessEqual(self.settings.SITE_ID, 1)
def test_value_defined_nowhere(self):
self.failUnlessRaises(AttributeError, lambda:self.settings.UNDEFINED_VALUE)
def test_value_prefixed_constants(self):
settings = Settings('tmp_tests_someapp_conf', prefix='SITE_')
self.failUnlessEqual(settings.SAMPLE_CONFIG_OPTION, 'sample-config-value')
self.failUnlessEqual(settings.ID, conf.settings.SITE_ID)
|
[
"ve@ve-PC.(none)"
] |
ve@ve-PC.(none)
|
509e4f61eb8ec5503c3b29d7e5356670fe5e43ed
|
aca2d9a5fd9ec3811d95aa8152aef71e9d2e15c4
|
/tasks/migrations/0001_initial.py
|
90b78707bb4bfbac84997977ae539221050808ae
|
[] |
no_license
|
celestinoObrabo/trabalhodeCC
|
2eb66c56fe83296518acb74b3f13df73583bf16e
|
68b007a859ce4d0ccbdc2e1e23603a71ebd395d9
|
refs/heads/master
| 2023-03-06T08:01:39.249978
| 2021-02-08T19:01:33
| 2021-02-08T19:01:33
| 337,175,135
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,203
|
py
|
# Generated by Django 3.0.2 on 2020-11-28 14:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('orientacao', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('description', models.TextField()),
('done', models.CharField(choices=[('doing', 'Doing'), ('done', 'Done')], max_length=5)),
('created_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
('start_date', models.DateField(blank=True, null=True, verbose_name='Data de Entrega')),
('orientacao', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='orientacao.Orientacao')),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('user', models.CharField(max_length=100)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.Task')),
],
),
migrations.CreateModel(
name='Arquivos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Nome do que esta enviando')),
('arquivo', models.FileField(upload_to='')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Enviado em: ')),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.Task')),
],
),
]
|
[
"celehte1@hotmail.com"
] |
celehte1@hotmail.com
|
c085bb6f2ab2a2c4590becd5e464d927fb09e39a
|
317c44e7c5bd759512157f1623ccc995690aaa95
|
/chapter10/thread_sync.py
|
52ef2cee00bb6ceafc0a53596eae348173460e6c
|
[] |
no_license
|
Echo002/AdvancePython
|
83ae4abb91beb164320c77b67a18428a46564d5d
|
198eaf68cd9c7559b10ff3a3e94238f079371842
|
refs/heads/master
| 2023-02-11T17:51:42.785039
| 2021-01-06T08:11:42
| 2021-01-06T08:11:42
| 313,544,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 585
|
py
|
from threading import Lock
import threading
# import dis
# def add(a):
# a = a + 1
# return a
#
# print(dis.dis(add))
#
total = 0
lock = Lock()
def add():
global total
global lock
for i in range(1000000):
lock.acquire()
total += 1
lock.release()
def desc():
global total
global lock
for i in range(1000000):
lock.acquire()
total -= 1
lock.release()
thread1 = threading.Thread(target=add)
thread2 = threading.Thread(target=desc)
thread1.start()
thread2.start()
thread1.join()
thread2.join()
print(total)
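# Note: the Lock is what guarantees the printed total is 0. Even under
# CPython's GIL, `total += 1` compiles to separate LOAD/ADD/STORE bytecodes,
# so the two threads can interleave between them; without acquire()/release()
# the result is nondeterministic. An equivalent, more idiomatic form:
#   with lock:
#       total += 1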
|
[
"xugaohero@163.com"
] |
xugaohero@163.com
|
abb0e461ca3d7b43b49aa46e2c5e8ae4736b620a
|
37fdc797f0060a67c1e9318032bc7102d4fd9ecd
|
/spider/beautifulsoup_test/lib/python3.7/site-packages/twisted/trial/test/test_script.py
|
9356d140416a706d9d43af3731a7de771d62bb0a
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
Change0224/PycharmProjects
|
8fa3d23b399c5fb55661a79ca059f3da79847feb
|
818ba4fd5dd8bcdaacae490ed106ffda868b6ca4
|
refs/heads/master
| 2021-02-06T15:37:16.653849
| 2020-03-03T14:30:44
| 2020-03-03T14:30:44
| 243,927,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 30,600
|
py
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import absolute_import, division
import gc
import re
import sys
import textwrap
import types
from twisted.python import util
from twisted.python.compat import NativeStringIO
from twisted.python.filepath import FilePath
from twisted.python.usage import UsageError
from twisted.scripts import trial
from twisted.trial import unittest
from twisted.trial._dist.disttrial import DistTrialRunner
from twisted.trial.runner import TestLoader
from twisted.trial.runner import TrialRunner, TestSuite, DestructiveTestSuite
from twisted.trial.test.test_loader import testNames
pyunit = __import__('unittest')
def sibpath(filename):
"""
For finding files in twisted/trial/threading_test
"""
return util.sibpath(__file__, filename)
class ForceGarbageCollectionTests(unittest.SynchronousTestCase):
"""
Tests for the --force-gc option.
"""
def setUp(self):
self.config = trial.Options()
self.log = []
self.patch(gc, 'collect', self.collect)
test = pyunit.FunctionTestCase(self.simpleTest)
self.test = TestSuite([test, test])
def simpleTest(self):
"""
A simple threading_test method that records that it was run.
"""
self.log.append('threading_test')
def collect(self):
"""
A replacement for gc.collect that logs calls to itself.
"""
self.log.append('collect')
def makeRunner(self):
"""
Return a L{TrialRunner} object that is safe to use in tests.
"""
runner = trial._makeRunner(self.config)
runner.stream = NativeStringIO()
return runner
def test_forceGc(self):
"""
Passing the --force-gc option to the trial script forces the garbage
collector to run before and after each threading_test.
"""
self.config['force-gc'] = True
self.config.postOptions()
runner = self.makeRunner()
runner.run(self.test)
self.assertEqual(self.log, ['collect', 'threading_test', 'collect',
'collect', 'threading_test', 'collect'])
def test_unforceGc(self):
"""
By default, no garbage collection is forced.
"""
self.config.postOptions()
runner = self.makeRunner()
runner.run(self.test)
self.assertEqual(self.log, ['threading_test', 'threading_test'])
class SuiteUsedTests(unittest.SynchronousTestCase):
"""
Check the category of tests suite used by the loader.
"""
def setUp(self):
"""
Create a trial configuration object.
"""
self.config = trial.Options()
def test_defaultSuite(self):
"""
By default, the loader should use L{DestructiveTestSuite}
"""
loader = trial._getLoader(self.config)
self.assertEqual(loader.suiteFactory, DestructiveTestSuite)
def test_untilFailureSuite(self):
"""
The C{until-failure} configuration uses the L{TestSuite} to keep
instances alive across runs.
"""
self.config['until-failure'] = True
loader = trial._getLoader(self.config)
self.assertEqual(loader.suiteFactory, TestSuite)
class TestModuleTests(unittest.SynchronousTestCase):
def setUp(self):
self.config = trial.Options()
def tearDown(self):
self.config = None
def test_testNames(self):
"""
Check that the testNames helper method accurately collects the
names of tests in suite.
"""
self.assertEqual(testNames(self), [self.id()])
def assertSuitesEqual(self, test1, names):
loader = TestLoader()
names1 = testNames(test1)
names2 = testNames(TestSuite(map(loader.loadByName, names)))
names1.sort()
names2.sort()
self.assertEqual(names1, names2)
def test_baseState(self):
self.assertEqual(0, len(self.config['tests']))
def test_testmoduleOnModule(self):
"""
Check that --testmodule loads a suite which contains the tests
referred to in threading_test-case-name inside its parameter.
"""
self.config.opt_testmodule(sibpath('moduletest.py'))
self.assertSuitesEqual(trial._getSuite(self.config),
['twisted.trial.threading_test.test_log'])
def test_testmoduleTwice(self):
"""
When the same module is specified with two --testmodule flags, it
should only appear once in the suite.
"""
self.config.opt_testmodule(sibpath('moduletest.py'))
self.config.opt_testmodule(sibpath('moduletest.py'))
self.assertSuitesEqual(trial._getSuite(self.config),
['twisted.trial.threading_test.test_log'])
def test_testmoduleOnSourceAndTarget(self):
"""
If --testmodule is specified twice, once for module A and once for
a module which refers to module A, then make sure module A is only
added once.
"""
self.config.opt_testmodule(sibpath('moduletest.py'))
self.config.opt_testmodule(sibpath('test_log.py'))
self.assertSuitesEqual(trial._getSuite(self.config),
['twisted.trial.threading_test.test_log'])
def test_testmoduleOnSelfModule(self):
"""
When given a module that refers to *itself* in the threading_test-case-name
variable, check that --testmodule only adds the tests once.
"""
self.config.opt_testmodule(sibpath('moduleself.py'))
self.assertSuitesEqual(trial._getSuite(self.config),
['twisted.trial.threading_test.moduleself'])
def test_testmoduleOnScript(self):
"""
Check that --testmodule loads tests referred to in threading_test-case-name
buffer variables.
"""
self.config.opt_testmodule(sibpath('scripttest.py'))
self.assertSuitesEqual(trial._getSuite(self.config),
['twisted.trial.threading_test.test_log',
'twisted.trial.threading_test.test_runner'])
def test_testmoduleOnNonexistentFile(self):
"""
Check that --testmodule displays a meaningful error message when
passed a non-existent filename.
"""
buffy = NativeStringIO()
stderr, sys.stderr = sys.stderr, buffy
filename = 'test_thisbetternoteverexist.py'
try:
self.config.opt_testmodule(filename)
self.assertEqual(0, len(self.config['tests']))
self.assertEqual("File %r doesn't exist\n" % (filename,),
buffy.getvalue())
finally:
sys.stderr = stderr
def test_testmoduleOnEmptyVars(self):
"""
Check that --testmodule adds no tests to the suite for modules
which lack threading_test-case-name buffer variables.
"""
self.config.opt_testmodule(sibpath('novars.py'))
self.assertEqual(0, len(self.config['tests']))
def test_testmoduleOnModuleName(self):
"""
Check that --testmodule does *not* support module names as arguments
and that it displays a meaningful error message.
"""
buffy = NativeStringIO()
stderr, sys.stderr = sys.stderr, buffy
moduleName = 'twisted.trial.threading_test.test_script'
try:
self.config.opt_testmodule(moduleName)
self.assertEqual(0, len(self.config['tests']))
self.assertEqual("File %r doesn't exist\n" % (moduleName,),
buffy.getvalue())
finally:
sys.stderr = stderr
def test_parseLocalVariable(self):
declaration = '-*- threading_test-case-name: twisted.trial.threading_test.test_tests -*-'
localVars = trial._parseLocalVariables(declaration)
self.assertEqual({'threading_test-case-name':
'twisted.trial.threading_test.test_tests'},
localVars)
def test_trailingSemicolon(self):
declaration = '-*- threading_test-case-name: twisted.trial.threading_test.test_tests; -*-'
localVars = trial._parseLocalVariables(declaration)
self.assertEqual({'threading_test-case-name':
'twisted.trial.threading_test.test_tests'},
localVars)
def test_parseLocalVariables(self):
declaration = ('-*- threading_test-case-name: twisted.trial.threading_test.test_tests; '
'foo: bar -*-')
localVars = trial._parseLocalVariables(declaration)
self.assertEqual({'threading_test-case-name':
'twisted.trial.threading_test.test_tests',
'foo': 'bar'},
localVars)
def test_surroundingGuff(self):
declaration = ('## -*- threading_test-case-name: '
'twisted.trial.threading_test.test_tests -*- #')
localVars = trial._parseLocalVariables(declaration)
self.assertEqual({'threading_test-case-name':
'twisted.trial.threading_test.test_tests'},
localVars)
def test_invalidLine(self):
self.assertRaises(ValueError, trial._parseLocalVariables,
'foo')
def test_invalidDeclaration(self):
self.assertRaises(ValueError, trial._parseLocalVariables,
'-*- foo -*-')
self.assertRaises(ValueError, trial._parseLocalVariables,
'-*- foo: bar; qux -*-')
self.assertRaises(ValueError, trial._parseLocalVariables,
'-*- foo: bar: baz; qux: qax -*-')
def test_variablesFromFile(self):
localVars = trial.loadLocalVariables(sibpath('moduletest.py'))
self.assertEqual({'threading_test-case-name':
'twisted.trial.threading_test.test_log'},
localVars)
def test_noVariablesInFile(self):
localVars = trial.loadLocalVariables(sibpath('novars.py'))
self.assertEqual({}, localVars)
def test_variablesFromScript(self):
localVars = trial.loadLocalVariables(sibpath('scripttest.py'))
self.assertEqual(
{'threading_test-case-name': ('twisted.trial.threading_test.test_log,'
'twisted.trial.threading_test.test_runner')},
localVars)
def test_getTestModules(self):
modules = trial.getTestModules(sibpath('moduletest.py'))
self.assertEqual(modules, ['twisted.trial.threading_test.test_log'])
def test_getTestModules_noVars(self):
modules = trial.getTestModules(sibpath('novars.py'))
self.assertEqual(len(modules), 0)
def test_getTestModules_multiple(self):
modules = trial.getTestModules(sibpath('scripttest.py'))
self.assertEqual(set(modules),
set(['twisted.trial.threading_test.test_log',
'twisted.trial.threading_test.test_runner']))
def test_looksLikeTestModule(self):
for filename in ['test_script.py', 'twisted/trial/threading_test/test_script.py']:
self.assertTrue(trial.isTestFile(filename),
"%r should be a threading_test file" % (filename,))
for filename in ['twisted/trial/threading_test/moduletest.py',
sibpath('scripttest.py'), sibpath('test_foo.bat')]:
self.assertFalse(trial.isTestFile(filename),
"%r should *not* be a threading_test file" % (filename,))
class WithoutModuleTests(unittest.SynchronousTestCase):
"""
Test the C{without-module} flag.
"""
def setUp(self):
"""
Create a L{trial.Options} object to be used in the tests, and save
C{sys.modules}.
"""
self.config = trial.Options()
self.savedModules = dict(sys.modules)
def tearDown(self):
"""
Restore C{sys.modules}.
"""
for module in ('imaplib', 'smtplib'):
if module in self.savedModules:
sys.modules[module] = self.savedModules[module]
else:
sys.modules.pop(module, None)
def _checkSMTP(self):
"""
Try to import the C{smtplib} module, and return it.
"""
import smtplib
return smtplib
def _checkIMAP(self):
"""
Try to import the C{imaplib} module, and return it.
"""
import imaplib
return imaplib
def test_disableOneModule(self):
"""
Check that after disabling a module, it can't be imported anymore.
"""
self.config.parseOptions(["--without-module", "smtplib"])
self.assertRaises(ImportError, self._checkSMTP)
# Restore sys.modules
del sys.modules["smtplib"]
# Then the function should succeed
self.assertIsInstance(self._checkSMTP(), types.ModuleType)
def test_disableMultipleModules(self):
"""
Check that several modules can be disabled at once.
"""
self.config.parseOptions(["--without-module", "smtplib,imaplib"])
self.assertRaises(ImportError, self._checkSMTP)
self.assertRaises(ImportError, self._checkIMAP)
# Restore sys.modules
del sys.modules["smtplib"]
del sys.modules["imaplib"]
# Then the functions should succeed
self.assertIsInstance(self._checkSMTP(), types.ModuleType)
self.assertIsInstance(self._checkIMAP(), types.ModuleType)
def test_disableAlreadyImportedModule(self):
"""
Disabling an already imported module should produce a warning.
"""
self.assertIsInstance(self._checkSMTP(), types.ModuleType)
self.assertWarns(RuntimeWarning,
"Module 'smtplib' already imported, disabling anyway.",
trial.__file__,
self.config.parseOptions, ["--without-module", "smtplib"])
self.assertRaises(ImportError, self._checkSMTP)
class CoverageTests(unittest.SynchronousTestCase):
"""
Tests for the I{coverage} option.
"""
if getattr(sys, 'gettrace', None) is None:
skip = (
"Cannot threading_test trace hook installation without inspection API.")
def setUp(self):
"""
Arrange for the current trace hook to be restored when the
threading_test is complete.
"""
self.addCleanup(sys.settrace, sys.gettrace())
def test_tracerInstalled(self):
"""
L{trial.Options} handles C{"--coverage"} by installing a trace
hook to record coverage information.
"""
options = trial.Options()
options.parseOptions(["--coverage"])
self.assertEqual(sys.gettrace(), options.tracer.globaltrace)
def test_coverdirDefault(self):
"""
L{trial.Options.coverdir} returns a L{FilePath} based on the default
for the I{temp-directory} option if that option is not specified.
"""
options = trial.Options()
self.assertEqual(
options.coverdir(),
FilePath(".").descendant([options["temp-directory"], "coverage"]))
def test_coverdirOverridden(self):
"""
If a value is specified for the I{temp-directory} option,
L{trial.Options.coverdir} returns a child of that path.
"""
path = self.mktemp()
options = trial.Options()
options.parseOptions(["--temp-directory", path])
self.assertEqual(
options.coverdir(), FilePath(path).child("coverage"))
class OptionsTests(unittest.TestCase):
"""
Tests for L{trial.Options}.
"""
def setUp(self):
"""
Build an L{Options} object to be used in the tests.
"""
self.options = trial.Options()
def test_getWorkerArguments(self):
"""
C{_getWorkerArguments} discards options like C{random} as they only
matter in the manager, and forwards options like C{recursionlimit} or
C{disablegc}.
"""
self.addCleanup(sys.setrecursionlimit, sys.getrecursionlimit())
if gc.isenabled():
self.addCleanup(gc.enable)
self.options.parseOptions(["--recursionlimit", "2000", "--random",
"4", "--disablegc"])
args = self.options._getWorkerArguments()
self.assertIn("--disablegc", args)
args.remove("--disablegc")
self.assertEqual(["--recursionlimit", "2000"], args)
def test_jobsConflictWithDebug(self):
"""
C{parseOptions} raises a C{UsageError} when C{--debug} is passed along
C{--jobs} as it's not supported yet.
@see: U{http://twistedmatrix.com/trac/ticket/5825}
"""
error = self.assertRaises(
UsageError, self.options.parseOptions, ["--jobs", "4", "--debug"])
self.assertEqual("You can't specify --debug when using --jobs",
str(error))
def test_jobsConflictWithProfile(self):
"""
C{parseOptions} raises a C{UsageError} when C{--profile} is passed
along C{--jobs} as it's not supported yet.
@see: U{http://twistedmatrix.com/trac/ticket/5827}
"""
error = self.assertRaises(
UsageError, self.options.parseOptions,
["--jobs", "4", "--profile"])
self.assertEqual("You can't specify --profile when using --jobs",
str(error))
def test_jobsConflictWithDebugStackTraces(self):
"""
C{parseOptions} raises a C{UsageError} when C{--debug-stacktraces} is
passed along C{--jobs} as it's not supported yet.
@see: U{http://twistedmatrix.com/trac/ticket/5826}
"""
error = self.assertRaises(
UsageError, self.options.parseOptions,
["--jobs", "4", "--debug-stacktraces"])
self.assertEqual(
"You can't specify --debug-stacktraces when using --jobs",
str(error))
def test_jobsConflictWithExitFirst(self):
"""
C{parseOptions} raises a C{UsageError} when C{--exitfirst} is passed
along C{--jobs} as it's not supported yet.
@see: U{http://twistedmatrix.com/trac/ticket/6436}
"""
error = self.assertRaises(
UsageError, self.options.parseOptions,
["--jobs", "4", "--exitfirst"])
self.assertEqual(
"You can't specify --exitfirst when using --jobs",
str(error))
def test_orderConflictWithRandom(self):
"""
C{parseOptions} raises a C{UsageError} when C{--order} is passed along
with C{--random}.
"""
error = self.assertRaises(
UsageError,
self.options.parseOptions,
["--order", "alphabetical", "--random", "1234"])
self.assertEqual("You can't specify --random when using --order",
str(error))
class MakeRunnerTests(unittest.TestCase):
"""
Tests for the L{_makeRunner} helper.
"""
def setUp(self):
self.options = trial.Options()
def test_jobs(self):
"""
L{_makeRunner} returns a L{DistTrialRunner} instance when the C{--jobs}
option is passed, and passes the C{workerNumber} and C{workerArguments}
parameters to it.
"""
self.options.parseOptions(["--jobs", "4", "--force-gc"])
runner = trial._makeRunner(self.options)
self.assertIsInstance(runner, DistTrialRunner)
self.assertEqual(4, runner._workerNumber)
self.assertEqual(["--force-gc"], runner._workerArguments)
def test_dryRunWithJobs(self):
"""
L{_makeRunner} returns a L{TrialRunner} instance in C{DRY_RUN} mode
when the C{--dry-run} option is passed, even if C{--jobs} is set.
"""
self.options.parseOptions(["--jobs", "4", "--dry-run"])
runner = trial._makeRunner(self.options)
self.assertIsInstance(runner, TrialRunner)
self.assertEqual(TrialRunner.DRY_RUN, runner.mode)
def test_DebuggerNotFound(self):
namedAny = trial.reflect.namedAny
def namedAnyExceptdoNotFind(fqn):
if fqn == "doNotFind":
raise trial.reflect.ModuleNotFound(fqn)
return namedAny(fqn)
self.patch(trial.reflect, "namedAny", namedAnyExceptdoNotFind)
options = trial.Options()
options.parseOptions(["--debug", "--debugger", "doNotFind"])
self.assertRaises(trial._DebuggerNotFound, trial._makeRunner, options)
def test_exitfirst(self):
"""
Passing C{--exitfirst} wraps the reporter with a
L{reporter._ExitWrapper} that stops on any non-success.
"""
self.options.parseOptions(["--exitfirst"])
runner = trial._makeRunner(self.options)
self.assertTrue(runner._exitFirst)
class RunTests(unittest.TestCase):
"""
Tests for the L{run} function.
"""
def setUp(self):
# don't re-parse cmdline options, because if --reactor was passed to
# the threading_test run trial will try to restart the (already running) reactor
self.patch(trial.Options, "parseOptions", lambda self: None)
def test_debuggerNotFound(self):
"""
When a debugger is not found, an error message is printed to the user.
"""
def _makeRunner(*args, **kwargs):
raise trial._DebuggerNotFound('foo')
self.patch(trial, "_makeRunner", _makeRunner)
try:
trial.run()
except SystemExit as e:
self.assertIn("foo", str(e))
else:
self.fail("Should have exited due to non-existent debugger!")
class TestArgumentOrderTests(unittest.TestCase):
"""
Tests for the order-preserving behavior on provided command-line tests.
"""
def setUp(self):
self.config = trial.Options()
self.loader = TestLoader()
def test_preserveArgumentOrder(self):
"""
Multiple tests passed on the command line are not reordered.
"""
tests = [
"twisted.trial.threading_test.test_tests",
"twisted.trial.threading_test.test_assertions",
"twisted.trial.threading_test.test_deferred",
]
self.config.parseOptions(tests)
suite = trial._getSuite(self.config)
names = testNames(suite)
expectedSuite = TestSuite(map(self.loader.loadByName, tests))
expectedNames = testNames(expectedSuite)
self.assertEqual(names, expectedNames)
class OrderTests(unittest.TestCase):
"""
Tests for the --order option.
"""
def setUp(self):
self.config = trial.Options()
def test_alphabetical(self):
"""
--order=alphabetical causes trial to run tests alphabetically within
each threading_test case.
"""
self.config.parseOptions([
"--order", "alphabetical",
"twisted.trial.threading_test.ordertests.FooTest"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
self.assertEqual(
testNames(suite), [
'twisted.trial.threading_test.ordertests.FooTest.test_first',
'twisted.trial.threading_test.ordertests.FooTest.test_fourth',
'twisted.trial.threading_test.ordertests.FooTest.test_second',
'twisted.trial.threading_test.ordertests.FooTest.test_third'])
def test_alphabeticalModule(self):
"""
--order=alphabetical causes trial to run threading_test classes within a given
module alphabetically.
"""
self.config.parseOptions([
"--order", "alphabetical", "twisted.trial.threading_test.ordertests"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
self.assertEqual(
testNames(suite), [
'twisted.trial.threading_test.ordertests.BarTest.test_bar',
'twisted.trial.threading_test.ordertests.BazTest.test_baz',
'twisted.trial.threading_test.ordertests.FooTest.test_first',
'twisted.trial.threading_test.ordertests.FooTest.test_fourth',
'twisted.trial.threading_test.ordertests.FooTest.test_second',
'twisted.trial.threading_test.ordertests.FooTest.test_third'])
def test_alphabeticalPackage(self):
"""
--order=alphabetical causes trial to run threading_test modules within a given
package alphabetically, with tests within each module alphabetized.
"""
self.config.parseOptions([
"--order", "alphabetical", "twisted.trial.threading_test"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
names = testNames(suite)
self.assertTrue(names, msg="Failed to load any tests!")
self.assertEqual(names, sorted(names))
def test_toptobottom(self):
"""
--order=toptobottom causes trial to run threading_test methods within a given
threading_test case from top to bottom as they are defined in the body of the
class.
"""
self.config.parseOptions([
"--order", "toptobottom",
"twisted.trial.threading_test.ordertests.FooTest"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
self.assertEqual(
testNames(suite), [
'twisted.trial.threading_test.ordertests.FooTest.test_first',
'twisted.trial.threading_test.ordertests.FooTest.test_second',
'twisted.trial.threading_test.ordertests.FooTest.test_third',
'twisted.trial.threading_test.ordertests.FooTest.test_fourth'])
def test_toptobottomModule(self):
"""
--order=toptobottom causes trial to run threading_test classes within a given
module from top to bottom as they are defined in the module's source.
"""
self.config.parseOptions([
"--order", "toptobottom", "twisted.trial.threading_test.ordertests"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
self.assertEqual(
testNames(suite), [
'twisted.trial.threading_test.ordertests.FooTest.test_first',
'twisted.trial.threading_test.ordertests.FooTest.test_second',
'twisted.trial.threading_test.ordertests.FooTest.test_third',
'twisted.trial.threading_test.ordertests.FooTest.test_fourth',
'twisted.trial.threading_test.ordertests.BazTest.test_baz',
'twisted.trial.threading_test.ordertests.BarTest.test_bar'])
def test_toptobottomPackage(self):
"""
--order=toptobottom causes trial to run threading_test modules within a given
package alphabetically, with tests within each module run top to
bottom.
"""
self.config.parseOptions([
"--order", "toptobottom", "twisted.trial.threading_test"])
loader = trial._getLoader(self.config)
suite = loader.loadByNames(self.config['tests'])
names = testNames(suite)
# twisted.trial.threading_test.test_module, so split and key on the first 4 to
# get stable alphabetical sort on those
self.assertEqual(
names, sorted(names, key=lambda name : name.split(".")[:4]),
)
def test_toptobottomMissingSource(self):
"""
--order=toptobottom detects the source line of methods from modules
whose source file is missing.
"""
tempdir = self.mktemp()
package = FilePath(tempdir).child('twisted_toptobottom_temp')
package.makedirs()
package.child('spider_book.py').setContent(b'')
package.child('test_missing.py').setContent(textwrap.dedent('''
from twisted.trial.unittest import TestCase
class TestMissing(TestCase):
def test_second(self): pass
def test_third(self): pass
def test_fourth(self): pass
def test_first(self): pass
''').encode('utf8'))
pathEntry = package.parent().path
sys.path.insert(0, pathEntry)
self.addCleanup(sys.path.remove, pathEntry)
from twisted_toptobottom_temp import test_missing
self.addCleanup(sys.modules.pop, 'twisted_toptobottom_temp')
self.addCleanup(sys.modules.pop, test_missing.__name__)
package.child('test_missing.py').remove()
self.config.parseOptions([
"--order", "toptobottom", "twisted.trial.threading_test.ordertests"])
loader = trial._getLoader(self.config)
suite = loader.loadModule(test_missing)
self.assertEqual(
testNames(suite), [
'twisted_toptobottom_temp.test_missing.TestMissing.test_second',
'twisted_toptobottom_temp.test_missing.TestMissing.test_third',
'twisted_toptobottom_temp.test_missing.TestMissing.test_fourth',
'twisted_toptobottom_temp.test_missing.TestMissing.test_first'])
def test_unknownOrder(self):
"""
An unknown order passed to --order raises a L{UsageError}.
"""
self.assertRaises(
UsageError, self.config.parseOptions, ["--order", "I don't exist"])
class HelpOrderTests(unittest.TestCase):
"""
Tests for the --help-orders flag.
"""
def test_help_ordersPrintsSynopsisAndQuits(self):
"""
--help-orders prints each of the available orders and then exits.
"""
self.patch(sys, "stdout", NativeStringIO())
exc = self.assertRaises(
SystemExit, trial.Options().parseOptions, ["--help-orders"])
self.assertEqual(exc.code, 0)
output = sys.stdout.getvalue()
msg = "%r with its description not properly described in %r"
for orderName, (orderDesc, _) in trial._runOrders.items():
match = re.search(
"%s.*%s" % (re.escape(orderName), re.escape(orderDesc)),
output,
)
self.assertTrue(match, msg=msg % (orderName, output))
|
[
"lijj0224@163.com"
] |
lijj0224@163.com
|
03c0640068bf2705bf09b53a659d53535f91bc9d
|
e79071195ff8ab508ce351b09cff065263c0fb72
|
/platform/suzuka/gunicorn.conf.py
|
90eafb111e05648a116f308ce33b7a6bd17cbae2
|
[] |
no_license
|
ArcLightSlavik/zeppelin
|
68d09d56bd7364c3ebd32a3fda8bde4c39138ab9
|
2eb7a947e92984a7530974b91a2d7b8a4d309dd5
|
refs/heads/master
| 2023-04-01T11:14:51.122076
| 2021-04-11T13:27:27
| 2021-04-11T13:27:27
| 289,992,268
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 103
|
py
|
worker_class = 'uvicorn.workers.UvicornWorker'
workers = 1
threads = 2
accesslog = '-'
errorlog = '-'
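# A sketch of how this config is typically consumed; the `app.main:app`
# module:callable target is an assumption, not taken from this repository:
#   gunicorn -c gunicorn.conf.py app.main:app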
|
[
"arclight.leskiv@gmail.com"
] |
arclight.leskiv@gmail.com
|
60973622121d1f9482e976ea26b955a1d5ff191c
|
0e23e581b85e4fdf847b2c8d902fc8c781f1131f
|
/main/migrations/0004_penguin.py
|
927fd778edd772e019652fd85c2e9d2a3517b1f0
|
[] |
no_license
|
kmhoran/ministry_server
|
2a32d55b458acb6eeeb3f1de025f4f4cc96ef962
|
0dab7eea4d05f6f9943bd998129fd866ffa20559
|
refs/heads/master
| 2020-03-19T03:52:53.506643
| 2018-06-20T13:31:41
| 2018-06-20T13:31:41
| 135,771,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 564
|
py
|
# Generated by Django 2.0.4 on 2018-06-14 13:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0003_menu_page'),
]
operations = [
migrations.CreateModel(
name='Penguin',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('color', models.CharField(max_length=100)),
],
),
]
|
[
"kevin.michael.horan@gmail.com"
] |
kevin.michael.horan@gmail.com
|
dcaabb3312d0257d882d19bc7561f3fdee7b8158
|
71d2f3d5744cb51333a926b14f92c2f19fdc4cff
|
/mangonel/contentviewdefinition.py
|
9b836c689bcc37c8851358d25aca20272a8cfe78
|
[] |
no_license
|
vittyvk/mangonel
|
39d54227af4037873aaebd8049cbdaf62f887057
|
ab3a1ec17ec2094a5c0495fa3bb3152c08bd5109
|
refs/heads/master
| 2021-01-15T23:50:44.572753
| 2013-08-02T14:38:27
| 2013-08-02T14:38:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,529
|
py
|
from common import *
import datetime
import json
import sys
import time
try:
from katello.client.api.task_status import TaskStatusAPI
from katello.client.api.content_view_definition import ContentViewDefinitionAPI
except ImportError, e:
print "Please install Katello CLI package."
sys.exit(-1)
class ContentViewDefinition():
task_api = TaskStatusAPI()
api = ContentViewDefinitionAPI()
def create_content_view_definition(self, org, name=None, label=None, description=None, composite=False):
if name is None:
name = generate_name(8)
if label is None:
label = "label-%s" % name.lower()
if description is None:
description = "Created on %s" % datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
return self.api.create(org['label'], name, label, description, composite)
def delete_content_view_definition(self, cvdId):
return self.api.delete(cvdId)
def content_view_definition(self, org, cvdId):
return self.api.show(org['label'], cvdId)
def content_view_definitions_by_org(self, org):
return self.api.content_view_definitions_by_org(org['label'])
def publish(self, org, cvdId, name=None, label=None, description=None):
if name is None:
name = generate_name(8)
if label is None:
label = "label-%s" % name.lower()
if description is None:
description = "Published on %s" % datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
ptask = self.api.publish(org['label'], cvdId, name, label, description)
task = self.task_api.status(ptask['uuid'])
        while task['state'] != 'finished':
            print "Publishing content view definition %s" % name
            time.sleep(1)
            task = self.task_api.status(ptask['uuid'])
def clone(self, org, cvdId, name=None, label=None, description=None):
if name is None:
name = generate_name(8)
if label is None:
label = "label-%s" % name.lower()
if description is None:
description = "Cloned on %s" % datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
return self.api.clone(org['label'], cvdId, name, label, description)
def update_products(self, org, cvdId, prd):
return self.api.update_products(org['label'], cvdId, [prd['id']])
def products(self, org, cvdId):
return self.api.products(org['label'], cvdId)
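# A minimal usage sketch (Python 2, matching the code above); the shape of the
# `org` dict is inferred from the org['label'] accesses and the label value is
# illustrative only:
#   cvd_api = ContentViewDefinition()
#   org = {'label': 'ACME_Corporation'}
#   cvd = cvd_api.create_content_view_definition(org, name='base-os')
#   cvd_api.publish(org, cvd['id'])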
|
[
"ogmaciel@gnome.org"
] |
ogmaciel@gnome.org
|
16627b2ebb4dcd0ef9471d3c8484d3d6efdb4d44
|
fea70084724eca8767cc03d78d03436f18b213a7
|
/employee_management/employee_management/settings.py
|
2e838a26b5a018dcc0aa8eb04950400126acb205
|
[] |
no_license
|
dzikri/employee_management
|
60ece752329bd38a64fecbfdd521e252ca35c7e7
|
3342edfcd7c599d347326fbeed17736ace12562e
|
refs/heads/master
| 2021-01-21T11:07:58.755019
| 2013-11-21T07:44:31
| 2013-11-21T07:44:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,044
|
py
|
"""
Django settings for employee_management project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^pt%53t1!=w!sn0l2(g%yf@=96c!#d5(j#lhr^98vb3qgszn79'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'employee',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'employee_management.urls'
WSGI_APPLICATION = 'employee_management.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'ja'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
[
"ozawa.seijiro@aainc.co.jp"
] |
ozawa.seijiro@aainc.co.jp
|
cab0953e14b4bc90d90dd18b872fac1e11081f7d
|
40339cf0c6139a224572fa0a96b6b760df2e0aa2
|
/util/visualize.py
|
19e8e2c06ed3130001fb5b22a7f9520259ab1b23
|
[
"MIT"
] |
permissive
|
xieyufei1993/TextSnake.pytorch
|
5f635a410f99f01729e4ade5454b7f19b07c3000
|
e5eafdfc3845823dfef297ca6e576a1d72af57f7
|
refs/heads/master
| 2020-04-15T04:55:11.772700
| 2019-01-07T08:54:37
| 2019-01-07T08:54:37
| 164,401,634
| 0
| 0
|
MIT
| 2019-01-07T08:32:19
| 2019-01-07T08:32:18
| null |
UTF-8
|
Python
| false
| false
| 1,662
|
py
|
import torch
import numpy as np
import cv2
import os
from util.config import config as cfg
def visualize_network_output(output, tr_mask, tcl_mask, prefix):
tr_pred = output[:, :2]
tr_score, tr_predict = tr_pred.max(dim=1)
tcl_pred = output[:, 2:4]
tcl_score, tcl_predict = tcl_pred.max(dim=1)
tr_predict = tr_predict.cpu().numpy()
tcl_predict = tcl_predict.cpu().numpy()
tr_target = tr_mask.cpu().numpy()
tcl_target = tcl_mask.cpu().numpy()
for i in range(len(tr_pred)):
tr_pred = (tr_predict[i] * 255).astype(np.uint8)
tr_targ = (tr_target[i] * 255).astype(np.uint8)
tcl_pred = (tcl_predict[i] * 255).astype(np.uint8)
tcl_targ = (tcl_target[i] * 255).astype(np.uint8)
tr_show = np.concatenate([tr_pred, tr_targ], axis=1)
tcl_show = np.concatenate([tcl_pred, tcl_targ], axis=1)
show = np.concatenate([tr_show, tcl_show], axis=0)
show = cv2.resize(show, (512, 512))
path = os.path.join(cfg.vis_dir, '{}_{}.png'.format(prefix, i))
cv2.imwrite(path, show)
def visualize_detection(image, tr_pred, tcl_pred, detect_result, image_id):
image_show = image.copy()
image_show = np.ascontiguousarray(image_show[:, :, ::-1])
for tcl in detect_result:
for x, y, r in tcl:
cv2.circle(image_show, (int(x), int(y)), int(r), (0, 0, 255), 1)
tr_pred = cv2.cvtColor(tr_pred * 255, cv2.COLOR_GRAY2BGR)
tcl_pred = cv2.cvtColor(tcl_pred * 255, cv2.COLOR_GRAY2BGR)
image_show = np.concatenate([image_show, tr_pred, tcl_pred], axis=1)
path = os.path.join(cfg.vis_dir, image_id)
cv2.imwrite(path, image_show)
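# A minimal usage sketch; the tensor shapes are inferred from the slicing
# above (output: (N, 4, H, W); tr_mask/tcl_mask: (N, H, W)) and are
# assumptions, not confirmed by this file:
#   visualize_network_output(output, tr_mask, tcl_mask, prefix='step_100')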
|
[
"princewang1994@gmail.com"
] |
princewang1994@gmail.com
|
0a7d856b55326966396ccfde7e006198513b05eb
|
212efe1339d27651fee1a933706f7f93c5ba67b5
|
/爬虫-Day06动态数据爬取/02_selenium和Chorom操作百度界面.py
|
d9d5fcf03ab17dc797f1168cb0fc0d531f925131
|
[] |
no_license
|
MMrLiu/LiuShuaitao
|
a747376eadd022f51609693abbedd43fab1b694d
|
79ab66e0550eeba5e4a0566695ac497df23e4efd
|
refs/heads/master
| 2022-12-04T13:23:52.965453
| 2020-08-24T10:16:26
| 2020-08-24T10:16:26
| 103,352,294
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,281
|
py
|
# Import packages
from selenium import webdriver
import time
# Option to keep the current Chrome instance from rendering a window
from selenium.webdriver.chrome.options import Options
# Create a configuration object
option = Options()
# option.headless = True  # headless (no window)
# [1] Start the Chrome driver - with a visible window - slower
driver = webdriver.Chrome(options=option)
# [2] Request a website
driver.get('https://www.i4.cn/')
# [3] Get the page source
# data = driver.page_source
# print(data)
# file = open('baidu.html', 'wb', 1)
# file.write(data)
# file.close()
# [5] Read some data from the page
# 5.1 Look an element up by ID - when the tag wraps text it can be read directly via .text
t1 = driver.find_element_by_id('su')
print('[5.1--]', t1)
# Read the value of an attribute on the returned element
print("[t1--]--value :", t1.get_attribute('value'))
# t2 = driver.find_element_by_class_name('mnav c-font-normal c-color-t').text
# print('[5.2]--', t2)
# [6] Import the By module
from selenium.webdriver.common.by import By
t3 = driver.find_element(By.ID, 'su')
print('[6]--', t3.get_attribute('value'))
# Find the search input box and type a query into it
driver.find_element_by_id('kw').send_keys("千锋教育")
# Click the "Baidu Search" button
driver.find_element_by_id('su').click()
# Elements can also be located by other attributes
time.sleep(5)
# [4] Quit the browser
driver.quit()
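# Note: time.sleep(5) is a fixed wait; an explicit wait is usually more
# robust. A sketch using WebDriverWait - the 'content_left' locator for
# Baidu's result container is an assumption here:
#   from selenium.webdriver.support.ui import WebDriverWait
#   from selenium.webdriver.support import expected_conditions as EC
#   WebDriverWait(driver, 10).until(
#       EC.presence_of_element_located((By.ID, 'content_left')))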
|
[
"mmrliu@163.com"
] |
mmrliu@163.com
|
b0e6213b906a3c9ed44d794883b2a6715bbdaf0b
|
a0b7e5eb015eb3ca35ac14d3d9a6c6f9780ad190
|
/mqtt.py
|
e0908fdc1c188993f7e659714efe2080a2caf670
|
[
"MIT"
] |
permissive
|
DrTom/hab_comfoconnect2mqtt
|
445fab80a9130b2fbcb12543b812df49bbbee520
|
a919bdcb9e6253583c089ea2f553a6f8ebef035d
|
refs/heads/master
| 2020-04-20T13:07:58.154542
| 2019-05-19T11:53:51
| 2019-05-19T11:54:05
| 168,860,930
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 926
|
py
|
#!/usr/bin/env python
import paho.mqtt.client as mqtt
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/test/#")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
print(msg.topic+" "+str(msg.payload))
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("hab", 1883, 60)
client.publish("/test/foo",payload=70.5)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
# client.loop_forever()
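# Note: with every loop*() call commented out, inbound traffic (including the
# CONNACK) is never processed and the publish above may not be flushed before
# the script exits. A minimal sketch that publishes reliably and then stops:
#   client.loop_start()
#   info = client.publish("/test/foo", payload=70.5)
#   info.wait_for_publish()
#   client.loop_stop()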
|
[
"DrTom@schank.ch"
] |
DrTom@schank.ch
|
0162308e5d04959a8adf7fbaa5e7cb5f99e8fd47
|
93f0e371eaf76da4623befea4d7b5306095cc3dd
|
/lessonprep/turtlegrapics.py
|
f5c213b08fa8497a65701d9237223daca47a6777
|
[] |
no_license
|
Davestrings/PythonCode
|
1e760e8d63a755893d84a67333d7a36b98a0fea5
|
48103a9aad3d97ba3e581e6d2833993992f8eed0
|
refs/heads/master
| 2020-12-13T19:02:33.054886
| 2020-11-13T16:18:16
| 2020-11-13T16:18:16
| 234,503,157
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 644
|
py
|
import turtle as t
def setup():
t.Screen()
t.screensize(1000)
t.bgcolor('blue')
t.speed(0)
t.hideturtle()
def body():
t.penup()
t.goto(0, 250)
t.pendown()
t.circle(120)
t.fillcolor('white')
t.penup()
t.goto(0, -10)
t.pendown()
t.circle(80)
    t.penup()
    t.goto(0, 150)
    t.pendown()
    # circle() requires a radius; the original call omitted it and would raise
    # a TypeError, so 100 is an assumed value here
    t.circle(100)
def hands():
t.penup()
t.goto(-90, 150)
t.pendown()
t.forward(30)
t.right(15)
t.forward(100)
t.penup()
t.goto(90, 150)
t.pendown()
t.forward(30)
t.right(15)
t.forward(100)
def main():
setup()
body()
hands()
main()
|
[
"fatunbidavidkayode@gmail.com"
] |
fatunbidavidkayode@gmail.com
|
fa41abde6ebd66915adb29acf84fd09bb407d0dc
|
6f74475f1e314378297dcbababfaa1d8978d0f54
|
/venv/Lib/site-packages/supervisor/web.py
|
1d06dc1f2b5559c2c3635733700e1408f804aec5
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
wenyueFan/devopsOfBk
|
74dbe2ea960ec4e66fcf00ac4ced5b9f278c285b
|
ab5f53f2296101ecb40f8f1b3eead7aa736d12fa
|
refs/heads/master
| 2022-10-06T03:20:48.940202
| 2020-03-17T03:27:20
| 2020-03-17T03:27:20
| 157,644,397
| 0
| 0
|
NOASSERTION
| 2022-09-16T17:52:55
| 2018-11-15T03:06:21
|
Python
|
UTF-8
|
Python
| false
| false
| 23,522
|
py
|
import os
import re
import cgi
import time
import traceback
import urllib
import datetime
from supervisor.medusa import producers
from supervisor.medusa.http_server import http_date
from supervisor.medusa.http_server import get_header
from supervisor.medusa.xmlrpc_handler import collector
import meld3
from supervisor.process import ProcessStates
from supervisor.http import NOT_DONE_YET
from supervisor.options import VERSION
from supervisor.options import make_namespec
from supervisor.options import split_namespec
from supervisor.xmlrpc import SystemNamespaceRPCInterface
from supervisor.xmlrpc import RootRPCInterface
from supervisor.xmlrpc import Faults
from supervisor.xmlrpc import RPCError
from supervisor.rpcinterface import SupervisorNamespaceRPCInterface
class DeferredWebProducer:
""" A medusa producer that implements a deferred callback; requires
a subclass of asynchat.async_chat that handles NOT_DONE_YET sentinel """
CONNECTION = re.compile ('Connection: (.*)', re.IGNORECASE)
def __init__(self, request, callback):
self.callback = callback
self.request = request
self.finished = False
self.delay = float(callback.delay)
def more(self):
if self.finished:
return ''
try:
response = self.callback()
if response is NOT_DONE_YET:
return NOT_DONE_YET
self.finished = True
return self.sendresponse(response)
except:
tb = traceback.format_exc()
# this should go to the main supervisor log file
self.request.channel.server.logger.log('Web interface error', tb)
self.finished = True
self.request.error(500)
def sendresponse(self, response):
headers = response.get('headers', {})
for header in headers:
self.request[header] = headers[header]
if not self.request.has_key('Content-Type'):
self.request['Content-Type'] = 'text/plain'
if headers.get('Location'):
self.request['Content-Length'] = 0
self.request.error(301)
return
body = response.get('body', '')
self.request['Content-Length'] = len(body)
self.request.push(body)
connection = get_header(self.CONNECTION, self.request.header)
close_it = 0
wrap_in_chunking = 0
if self.request.version == '1.0':
if connection == 'keep-alive':
if not self.request.has_key('Content-Length'):
close_it = 1
else:
self.request['Connection'] = 'Keep-Alive'
else:
close_it = 1
elif self.request.version == '1.1':
if connection == 'close':
close_it = 1
elif not self.request.has_key ('Content-Length'):
if self.request.has_key ('Transfer-Encoding'):
if not self.request['Transfer-Encoding'] == 'chunked':
close_it = 1
elif self.request.use_chunked:
self.request['Transfer-Encoding'] = 'chunked'
wrap_in_chunking = 1
else:
close_it = 1
elif self.request.version is None:
close_it = 1
outgoing_header = producers.simple_producer (
self.request.build_reply_header())
if close_it:
self.request['Connection'] = 'close'
if wrap_in_chunking:
outgoing_producer = producers.chunked_producer (
producers.composite_producer (self.request.outgoing)
)
# prepend the header
outgoing_producer = producers.composite_producer(
[outgoing_header, outgoing_producer]
)
else:
# prepend the header
self.request.outgoing.insert(0, outgoing_header)
outgoing_producer = producers.composite_producer (
self.request.outgoing)
# apply a few final transformations to the output
self.request.channel.push_with_producer (
# globbing gives us large packets
producers.globbing_producer (
# hooking lets us log the number of bytes sent
producers.hooked_producer (
outgoing_producer,
self.request.log
)
)
)
self.request.channel.current_request = None
if close_it:
self.request.channel.close_when_done()
class ViewContext:
def __init__(self, **kw):
self.__dict__.update(kw)
class MeldView:
content_type = 'text/html'
delay = .5
def __init__(self, context):
self.context = context
template = self.context.template
if not os.path.isabs(template):
here = os.path.abspath(os.path.dirname(__file__))
template = os.path.join(here, template)
self.root = meld3.parse_xml(template)
self.callback = None
def __call__(self):
body = self.render()
if body is NOT_DONE_YET:
return NOT_DONE_YET
response = self.context.response
headers = response['headers']
headers['Content-Type'] = self.content_type
headers['Pragma'] = 'no-cache'
headers['Cache-Control'] = 'no-cache'
headers['Expires'] = http_date.build_http_date(0)
response['body'] = body
return response
def clone(self):
return self.root.clone()
class TailView(MeldView):
def render(self):
supervisord = self.context.supervisord
form = self.context.form
if not 'processname' in form:
tail = 'No process name found'
processname = None
else:
processname = form['processname']
offset = 0
limit = form.get('limit', '1024')
if limit.isdigit():
limit = min(-1024, int(limit) * -1)
else:
limit = -1024
if not processname:
tail = 'No process name found'
else:
rpcinterface = SupervisorNamespaceRPCInterface(supervisord)
try:
tail = rpcinterface.readProcessStdoutLog(processname,
limit, offset)
except RPCError, e:
if e.code == Faults.NO_FILE:
tail = 'No file for %s' % processname
else:
tail = 'ERROR: unexpected rpc fault [%d] %s' % (
e.code, e.text)
root = self.clone()
title = root.findmeld('title')
title.content('Supervisor tail of process %s' % processname)
tailbody = root.findmeld('tailbody')
tailbody.content(tail)
refresh_anchor = root.findmeld('refresh_anchor')
if processname is not None:
refresh_anchor.attributes(
href='tail.html?processname=%s&limit=%s' % (
urllib.quote(processname), urllib.quote(str(abs(limit)))
)
)
else:
refresh_anchor.deparent()
return root.write_xhtmlstring()
class StatusView(MeldView):
def actions_for_process(self, process):
state = process.get_state()
processname = urllib.quote(make_namespec(process.group.config.name,
process.config.name))
start = {
'name':'Start',
'href':'index.html?processname=%s&action=start' % processname,
'target':None,
}
restart = {
'name':'Restart',
'href':'index.html?processname=%s&action=restart' % processname,
'target':None,
}
stop = {
'name':'Stop',
'href':'index.html?processname=%s&action=stop' % processname,
'target':None,
}
clearlog = {
'name':'Clear Log',
'href':'index.html?processname=%s&action=clearlog' % processname,
'target':None,
}
tailf = {
'name':'Tail -f',
'href':'logtail/%s' % processname,
'target':'_blank'
}
if state == ProcessStates.RUNNING:
actions = [restart, stop, clearlog, tailf]
elif state in (ProcessStates.STOPPED, ProcessStates.EXITED,
ProcessStates.FATAL):
actions = [start, None, clearlog, tailf]
else:
actions = [None, None, clearlog, tailf]
return actions
def css_class_for_state(self, state):
if state == ProcessStates.RUNNING:
return 'statusrunning'
elif state in (ProcessStates.FATAL, ProcessStates.BACKOFF):
return 'statuserror'
else:
return 'statusnominal'
def make_callback(self, namespec, action):
supervisord = self.context.supervisord
# the rpc interface code is already written to deal properly in a
# deferred world, so just use it
main = ('supervisor', SupervisorNamespaceRPCInterface(supervisord))
system = ('system', SystemNamespaceRPCInterface([main]))
rpcinterface = RootRPCInterface([main, system])
if action:
if action == 'refresh':
def donothing():
message = 'Page refreshed at %s' % time.ctime()
return message
donothing.delay = 0.05
return donothing
elif action == 'stopall':
callback = rpcinterface.supervisor.stopAllProcesses()
def stopall():
if callback() is NOT_DONE_YET:
return NOT_DONE_YET
else:
return 'All stopped at %s' % time.ctime()
stopall.delay = 0.05
return stopall
elif action == 'restartall':
callback = rpcinterface.system.multicall(
[ {'methodName':'supervisor.stopAllProcesses'},
{'methodName':'supervisor.startAllProcesses'} ] )
def restartall():
result = callback()
if result is NOT_DONE_YET:
return NOT_DONE_YET
return 'All restarted at %s' % time.ctime()
restartall.delay = 0.05
return restartall
elif namespec:
def wrong():
return 'No such process named %s' % namespec
wrong.delay = 0.05
group_name, process_name = split_namespec(namespec)
group = supervisord.process_groups.get(group_name)
if group is None:
return wrong
process = group.processes.get(process_name)
if process is None:
return wrong
if action == 'start':
try:
bool_or_callback = (
rpcinterface.supervisor.startProcess(namespec)
)
except RPCError, e:
if e.code == Faults.NO_FILE:
msg = 'no such file'
elif e.code == Faults.NOT_EXECUTABLE:
msg = 'file not executable'
elif e.code == Faults.ALREADY_STARTED:
msg = 'already started'
elif e.code == Faults.SPAWN_ERROR:
msg = 'spawn error'
elif e.code == Faults.ABNORMAL_TERMINATION:
msg = 'abnormal termination'
else:
msg = 'unexpected rpc fault [%d] %s' % (
e.code, e.text)
def starterr():
return 'ERROR: Process %s: %s' % (namespec, msg)
starterr.delay = 0.05
return starterr
if callable(bool_or_callback):
def startprocess():
try:
result = bool_or_callback()
except RPCError, e:
if e.code == Faults.SPAWN_ERROR:
msg = 'spawn error'
elif e.code == Faults.ABNORMAL_TERMINATION:
msg = 'abnormal termination'
else:
msg = 'unexpected rpc fault [%d] %s' % (
e.code, e.text)
return 'ERROR: Process %s: %s' % (namespec, msg)
if result is NOT_DONE_YET:
return NOT_DONE_YET
return 'Process %s started' % namespec
startprocess.delay = 0.05
return startprocess
else:
def startdone():
return 'Process %s started' % namespec
startdone.delay = 0.05
return startdone
elif action == 'stop':
try:
bool_or_callback = (
rpcinterface.supervisor.stopProcess(namespec)
)
except RPCError, e:
def stoperr():
return 'unexpected rpc fault [%d] %s' % (
e.code, e.text)
stoperr.delay = 0.05
return stoperr
if callable(bool_or_callback):
def stopprocess():
try:
result = bool_or_callback()
except RPCError, e:
return 'unexpected rpc fault [%d] %s' % (
e.code, e.text)
if result is NOT_DONE_YET:
return NOT_DONE_YET
return 'Process %s stopped' % namespec
stopprocess.delay = 0.05
return stopprocess
else:
def stopdone():
return 'Process %s stopped' % namespec
stopdone.delay = 0.05
return stopdone
elif action == 'restart':
results_or_callback = rpcinterface.system.multicall(
[ {'methodName':'supervisor.stopProcess',
'params': [namespec]},
{'methodName':'supervisor.startProcess',
'params': [namespec]},
]
)
if callable(results_or_callback):
callback = results_or_callback
def restartprocess():
results = callback()
if results is NOT_DONE_YET:
return NOT_DONE_YET
return 'Process %s restarted' % namespec
restartprocess.delay = 0.05
return restartprocess
else:
def restartdone():
return 'Process %s restarted' % namespec
restartdone.delay = 0.05
return restartdone
elif action == 'clearlog':
try:
callback = rpcinterface.supervisor.clearProcessLogs(
namespec)
except RPCError, e:
def clearerr():
return 'unexpected rpc fault [%d] %s' % (
e.code, e.text)
clearerr.delay = 0.05
return clearerr
def clearlog():
return 'Log for %s cleared' % namespec
clearlog.delay = 0.05
return clearlog
raise ValueError(action)
def render(self):
form = self.context.form
response = self.context.response
processname = form.get('processname')
action = form.get('action')
message = form.get('message')
if action:
if not self.callback:
self.callback = self.make_callback(processname, action)
return NOT_DONE_YET
else:
message = self.callback()
if message is NOT_DONE_YET:
return NOT_DONE_YET
if message is not None:
server_url = form['SERVER_URL']
location = server_url + '?message=%s' % urllib.quote(
message)
response['headers']['Location'] = location
supervisord = self.context.supervisord
rpcinterface = RootRPCInterface(
[('supervisor',
SupervisorNamespaceRPCInterface(supervisord))]
)
processnames = []
groups = supervisord.process_groups.values()
for group in groups:
gprocnames = group.processes.keys()
for gprocname in gprocnames:
processnames.append((group.config.name, gprocname))
processnames.sort()
data = []
for groupname, processname in processnames:
actions = self.actions_for_process(
supervisord.process_groups[groupname].processes[processname])
sent_name = make_namespec(groupname, processname)
info = rpcinterface.supervisor.getProcessInfo(sent_name)
data.append({
'status':info['statename'],
'name':processname,
'group':groupname,
'actions':actions,
'state':info['state'],
'description':info['description'],
})
root = self.clone()
if message is not None:
statusarea = root.findmeld('statusmessage')
statusarea.attrib['class'] = 'status_msg'
statusarea.content(message)
if data:
iterator = root.findmeld('tr').repeat(data)
shaded_tr = False
for tr_element, item in iterator:
status_text = tr_element.findmeld('status_text')
status_text.content(item['status'].lower())
status_text.attrib['class'] = self.css_class_for_state(
item['state'])
info_text = tr_element.findmeld('info_text')
info_text.content(item['description'])
anchor = tr_element.findmeld('name_anchor')
processname = make_namespec(item['group'], item['name'])
anchor.attributes(href='tail.html?processname=%s' %
urllib.quote(processname))
anchor.content(processname)
actions = item['actions']
actionitem_td = tr_element.findmeld('actionitem_td')
for li_element, actionitem in actionitem_td.repeat(actions):
anchor = li_element.findmeld('actionitem_anchor')
if actionitem is None:
anchor.attrib['class'] = 'hidden'
else:
anchor.attributes(href=actionitem['href'],
name=actionitem['name'])
anchor.content(actionitem['name'])
if actionitem['target']:
anchor.attributes(target=actionitem['target'])
if shaded_tr:
tr_element.attrib['class'] = 'shade'
shaded_tr = not shaded_tr
else:
table = root.findmeld('statustable')
table.replace('No programs to manage')
root.findmeld('supervisor_version').content(VERSION)
copyright_year = str(datetime.date.today().year)
root.findmeld('copyright_date').content(copyright_year)
return root.write_xhtmlstring()
class OKView:
delay = 0
def __init__(self, context):
self.context = context
def __call__(self):
return {'body':'OK'}
VIEWS = {
'index.html': {
'template':'ui/status.html',
'view':StatusView
},
'tail.html': {
'template':'ui/tail.html',
'view':TailView,
},
'ok.html': {
'template':None,
'view':OKView,
},
}
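# Hypothetical illustration (not part of supervisor itself): supervisor_ui_handler
# below resolves pages by looking the request path up in VIEWS, so a template-less
# page could in principle be registered the same way ok.html/OKView is, e.g.:
#
#   class PingView(OKView):
#       def __call__(self):
#           return {'body': 'pong'}
#
#   VIEWS['ping.html'] = {'template': None, 'view': PingView}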
class supervisor_ui_handler:
IDENT = 'Supervisor Web UI HTTP Request Handler'
def __init__(self, supervisord):
self.supervisord = supervisord
def match(self, request):
if request.command not in ('POST', 'GET'):
return False
path, params, query, fragment = request.split_uri()
while path.startswith('/'):
path = path[1:]
if not path:
path = 'index.html'
for viewname in VIEWS.keys():
if viewname == path:
return True
def handle_request(self, request):
if request.command == 'POST':
request.collector = collector(self, request)
else:
self.continue_request('', request)
def continue_request (self, data, request):
form = {}
cgi_env = request.cgi_environment()
form.update(cgi_env)
if not form.has_key('QUERY_STRING'):
form['QUERY_STRING'] = ''
query = form['QUERY_STRING']
# we only handle x-www-form-urlencoded values from POSTs
form_urlencoded = cgi.parse_qsl(data)
query_data = cgi.parse_qs(query)
for k, v in query_data.items():
# ignore dupes
form[k] = v[0]
for k, v in form_urlencoded:
# ignore dupes
form[k] = v
form['SERVER_URL'] = request.get_server_url()
path = form['PATH_INFO']
# strip off all leading slashes
while path and path[0] == '/':
path = path[1:]
if not path:
path = 'index.html'
viewinfo = VIEWS.get(path)
if viewinfo is None:
# this should never happen if our match method works
return
response = {}
response['headers'] = {}
viewclass = viewinfo['view']
viewtemplate = viewinfo['template']
context = ViewContext(template=viewtemplate,
request = request,
form = form,
response = response,
supervisord=self.supervisord)
view = viewclass(context)
pushproducer = request.channel.push_with_producer
pushproducer(DeferredWebProducer(request, view))
|
[
"fanwenyue2006@126.com"
] |
fanwenyue2006@126.com
|
44c12b3df66472acd4049e23df201a02cb251da8
|
e588da296dd6ec3bedee9d24444dfca6e8780aef
|
/7.py
|
124a9ecd248a7bc484cd69e9c13fa6dc847fb673
|
[] |
no_license
|
sujith1919/TCS-Python
|
98eac61a02500a0e8f3139e431c98a509828c867
|
c988cf078616540fe7f56e3ebdfd964aebd14519
|
refs/heads/master
| 2023-03-02T09:03:10.052633
| 2021-02-02T16:40:18
| 2021-02-02T16:40:18
| 335,355,862
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
# getting started with if
aura = 2
if aura < 2.5:
print "you are not healthy"
# if else
aura = 2
if aura <= 1:
print( "You're dead!" )
else:
print( "You're alive!" )
input( "\nPress Enter to exit..." )
# if elif else
aura = 2
if aura <= 1:
print "You're dead!"
elif aura > 3:
print "You're spiritual!"
else:
print "You're alive!"
|
[
"jayarajan.sujith@oracle.com"
] |
jayarajan.sujith@oracle.com
|
a2b0473e8d15c63a5af714b108e19f3e0d6828cc
|
89f490b1290e64fd76a2b1717ce52bb9efddbf9e
|
/extra/example.py
|
2fe36e8dd0f9d782959598835592f53c3917a038
|
[
"Apache-2.0"
] |
permissive
|
Auugustocesar/NabBot
|
7f5238e6c1e60a7b7b8cf3b1905bffe613715714
|
f5dd02768195e852f312f19b61793e97692bcb23
|
refs/heads/master
| 2020-04-24T09:40:35.167280
| 2019-02-04T20:18:40
| 2019-02-04T20:18:40
| 171,869,696
| 0
| 0
| null | 2019-02-21T12:41:43
| 2019-02-21T12:41:43
| null |
UTF-8
|
Python
| false
| false
| 1,865
|
py
|
from discord.ext import commands
from nabbot import NabBot
from cogs.utils import checks
from cogs.utils.config import config
from cogs.utils.tibia import get_share_range
class Example:
"""Example cog"""
def __init__(self, bot: NabBot):
self.bot = bot
def __unload(self):
"""This will be called every time this cog is unloaded
Used for cleaning up tasks and other stuff"""
pass
def __global_check(self, ctx):
"""This check is called for ANY command, in any cog
Use this with caution as this will affect all the other commands."""
return True
async def __local_check(self, ctx):
"""This check is called before running any command from this cog.
If this returns true, the command can be run, otherwise it can't.
        This is also called when /help is invoked, to check whether the command is available to the user."""
# Only the bot owner can use the commands in this cog
return await checks.is_owner(ctx)
@commands.command()
async def example(self, ctx):
output = "Using methods from `nabbot.py`:"
member = self.bot.get_member(ctx.author.id)
output += f"\n```py\nself.bot.get_member({ctx.author.id})\n> {member}```"
output += "\nUsing methods from `utils/tibia.py`:"
share_range = get_share_range(300)
output += f"\n```py\nfrom utils.tibia import get_share_range\nget_share_range(300)" \
f"\n> {share_range!r}```"
output += "\nUsing values from `utils/config.py` (values in `config.yml`):"
prefixes = config.command_prefix
output += f"\n```py\nfrom utils.config import config\nconfig.command_prefix\n> {prefixes!r}```"
await ctx.send(output)
# This is necessary for NabBot to load our cog
def setup(bot):
bot.add_cog(Example(bot))
|
[
"allan.galarza@gmail.com"
] |
allan.galarza@gmail.com
|
e5cf68fe8b17a972e06917dbe9199e7427f5c2f9
|
cef34a9dec4c88aa2e9f5a945927a4896224db5d
|
/Multi Layer Perceptron/mlp.py
|
edf1e2a03f38b9c869627eed48f7a6403e47558e
|
[] |
no_license
|
yerok/Statistical-Machine-Learning
|
8546e034dc663fbec54687b630172699896dd201
|
d590c4ec95ae2f4827af85bcfd60cacdc12d4bc3
|
refs/heads/master
| 2021-10-16T04:16:39.508401
| 2019-02-07T16:13:05
| 2019-02-07T16:13:05
| 115,798,690
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27,246
|
py
|
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from collections import defaultdict
import gzip
import os
import pickle as pickle
import urllib
# ---------------------------------------
# -------------- DATASETS ---------------
# ---------------------------------------
def load_XOR():
"""
Loads training data for XOR function. The outputs are encoded using one-hot encoding, so you can check softmax and
cross-entropy loss function.
:return: Pair of numpy arrays: (4, 2) training inputs and (4, 2) training labels
"""
X = np.asarray([
[0.0, 0.0],
[0.0, 1.0],
[1.0, 0.0],
[1.0, 1.0]], dtype=np.float32)
T = np.asarray([
[0.0, 1.0],
[1.0, 0.0],
[1.0, 0.0],
[0.0, 1.0]], dtype=np.float32)
return X, T
def load_spirals():
'''
Loads training and testing data of the spiral dataset. The inputs are standardized and the output labels are one-hot encoded.
Source based on http://cs231n.github.io/
:return: Quadruple of numpy arrays (100, 2) training inputs, (100, 3) one-hot encoded training labels,
(100, 2) testing inputs and (100, 3) one-hot encoded testing labels
'''
def generate_points(N):
K = 3
X = np.zeros((N * K, 2), dtype=np.float32)
T = np.zeros((N * K, K), dtype=np.float32)
for i in range(K):
r = np.linspace(0.0, 2.5, N)
t = np.linspace(i * 4, (i + 1) * 4, N) + rng.randn(N) * 0.2
ix = range(N * i, N * (i + 1))
X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
T[ix, i] = 1.0 # one-hot encoding
return X, T
rng = np.random.RandomState(1234)
X_train, T_train = generate_points(100)
X_test, T_test = generate_points(100)
return X_train, T_train, X_test, T_test
def plot_2D_classification(X, T, net):
"""
Plots a classification for 2D inputs. The call of this function should be followed by plt.show()
in non-interactive matplotlib session.
:param X: Input of shape (n_samples, 2)
:param T: One-hot encoded target labels of shape (n_samples, n_classes)
:param net: trained network, instance of MLP class
"""
h = 0.02
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = net.propagate(np.c_[xx.ravel(), yy.ravel()])
Z = np.argmax(Z, axis=1)
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=np.argmax(T, axis=1), s=40, cmap=plt.cm.Spectral)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
def load_MNIST():
"""
Loads MNIST dataset. If not present locally, the dataset is downloaded from Yann LeCun's site.
The dataset consists of 60k training and 10k testing samples of 28x28 grayscale images. The inputs are standardized
and the output labels are one-hot encoded.
Inspired by https://gist.github.com/ischlag/41d15424e7989b936c1609b53edd1390
:return: Quadruple of numpy arrays (60000, 784) training inputs, (60000, 10) one-hot encoded training labels,
(10000, 784) testing inputs and (10000, 10) one-hot encoded testing labels
"""
IMAGE_SIZE = 28
N_CLASSES = 10
files = {
'X_train': ('train-images-idx3-ubyte.gz', 60000),
'T_train': ('train-labels-idx1-ubyte.gz', 60000),
'X_test': ('t10k-images-idx3-ubyte.gz', 10000),
'T_test': ('t10k-labels-idx1-ubyte.gz', 10000),
}
data = {}
for label, (name, n_images) in files.iteritems():
if not os.path.exists(name):
print('downloading: {}'.format(name))
urllib.urlretrieve('http://yann.lecun.com/exdb/mnist/{}'.format(name), name)
with gzip.open(name) as bytestream:
if label.startswith('X'):
bytestream.read(16) # header
data[label] = (np.frombuffer(bytestream.read(IMAGE_SIZE * IMAGE_SIZE * n_images),
dtype=np.uint8).astype(np.float32) / 255.0).reshape(n_images, -1)
else:
bytestream.read(8) # header
classes = np.frombuffer(bytestream.read(n_images), dtype=np.uint8).astype(np.int64)
onehot = np.zeros((len(classes), N_CLASSES), dtype=np.float32)
onehot[np.arange(len(classes)), classes] = 1
data[label] = onehot
# standardization
X_train, T_train, X_test, T_test = [data[label] for label in ['X_train', 'T_train', 'X_test', 'T_test']]
m, s = X_train.mean(axis=0), X_train.std(axis=0)
mask = s > 0.0
X_train[:, mask] = (X_train[:, mask] - m[mask]) / s[mask]
X_test[:, mask] = (X_test[:, mask] - m[mask]) / s[mask]
return X_train, T_train, X_test, T_test
def plot_MNIST(array, n_cols=10):
"""
Plots table of MNIST characters with defined number of columns. The number of characters divided by the number of
    columns, i.e. the number of rows, must be an integer. The call of this function should be followed by plt.show()
    in a non-interactive matplotlib session.
:param array: input array of shape (number of characters, 784)
:param n_cols: number of table columns
"""
n, height, width = array.shape[0], 28, 28
n_rows = n // n_cols
assert n == n_rows * n_cols, [n, n_rows * n_cols]
result = (array.reshape(n_rows, n_cols, height, width)
.swapaxes(1, 2)
.reshape(height * n_rows, width * n_cols))
plt.imshow(result, cmap='gray')
def reluMax(x):
return max(0,x)
def derivativeRelu(x):
if x < 0:
return 0
else:
return 1
# ---------------------------------------
# -------------- LAYERS -----------------
# ---------------------------------------
class LinearLayer(object):
def __init__(self, n_inputs, n_units, rng, name):
"""
Linear (dense, fully-connected) layer.
:param n_inputs:
:param n_units:
:param rng: random number generator used for initialization
:param name:
"""
super(LinearLayer, self).__init__()
self.n_inputs = n_inputs
self.n_units = n_units
self.rng = rng
self.name = name
self.initialize()
def has_params(self):
return True
def forward(self, X):
"""
Forward message.
:param X: layer inputs, shape (n_samples, n_inputs)
:return: layer output, shape (n_samples, n_units)
"""
print("LINEAR forward")
Z = np.dot(X,self.W) + self.b
# print(Z)
print("\n")
return Z
    def delta(self, Y, delta_next):
        """
        Computes delta (dl/d(layer inputs)), based on delta from the following layer. The computations involve backward
        message.
        :param Y: output of this layer (i.e., input of the next), shape (n_samples, n_units)
        :param delta_next: delta vector backpropagated from the following layer, shape (n_samples, n_units)
        :return: delta vector from this layer, shape (n_samples, n_inputs)
        """
        print("LINEAR backprop")
        # backward message: delta w.r.t. the inputs is delta_next times W transposed
        return np.dot(delta_next, self.W.T)
    def grad(self, X, delta_next):
        """
        Gradient averaged over all samples. The computations involve parameter message.
        :param X: layer input, shape (n_samples, n_inputs)
        :param delta_next: delta vector backpropagated from the following layer, shape (n_samples, n_units)
        :return: a list of two arrays [dW, db] corresponding to gradients of loss w.r.t. weights and biases, the shapes
        of dW and db are the same as the shapes of the actual parameters (self.W, self.b)
        """
        n_samples = X.shape[0]
        # parameter message, averaged over the batch
        dW = np.dot(X.T, delta_next) / n_samples
        # the bias gradient is the backpropagated delta itself, averaged over samples
        db = np.mean(delta_next, axis=0)
        return [dW, db]
def initialize(self):
"""
Perform He's initialization (https://arxiv.org/pdf/1502.01852.pdf). This method is tuned for ReLU activation
function. Biases are initialized to 1 increasing probability that ReLU is not initially turned off.
"""
scale = np.sqrt(2.0 / self.n_inputs)
self.W = self.rng.normal(loc=0.0, scale=scale, size=(self.n_inputs, self.n_units))
self.b = np.ones(self.n_units)
def update_params(self, dtheta):
"""
Updates weighs and biases.
:param dtheta: contains a two element list of weight and bias updates the shapes of which corresponds to self.W
and self.b
"""
assert len(dtheta) == 2, len(dtheta)
        dW, db = dtheta
assert dW.shape == self.W.shape, dW.shape
assert db.shape == self.b.shape, db.shape
self.W += dW
self.b += db
class ReLULayer(object):
def __init__(self, name):
super(ReLULayer, self).__init__()
self.name = name
def has_params(self):
return False
    def forward(self, X):
        print("RELU forward")
        # element-wise max(0, x); np.maximum applies it to the whole array at once
        return np.maximum(X, 0.0)
    def delta(self, Y, delta_next):
        print("RELU backprop")
        # the ReLU derivative is 1 where the output was positive and 0 elsewhere,
        # so the backward message is an element-wise product, not a dot product
        return delta_next * (Y > 0)
class SoftmaxLayer(object):
def __init__(self, name):
super(SoftmaxLayer, self).__init__()
self.name = name
def has_params(self):
return False
    def forward(self, X):
        print("SOFTMAX forward")
        # subtract the row-wise max before exponentiating for numerical stability
        shifted = X - np.max(X, axis=1, keepdims=True)
        return np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)
    def delta(self, Y, delta_next):
        print("SOFTMAX backprop")
        # per-sample Jacobian-vector product with J = diag(y) - y y^T,
        # written into a fresh array instead of mutating last_output in place
        out = np.empty_like(Y)
        for i in range(len(Y)):
            y = Y[i].reshape((-1, 1))
            out[i] = np.dot(np.diag(Y[i]) - np.dot(y, y.T), delta_next[i])
        return out
class LossCrossEntropy(object):
def __init__(self, name):
super(LossCrossEntropy, self).__init__()
self.name = name
    def forward(self, X, T):
        """
        Forward message.
        :param X: loss inputs (outputs of the previous layer), shape (n_samples, n_inputs), n_inputs is the same as
        the number of classes
        :param T: one-hot encoded targets, shape (n_samples, n_inputs)
        :return: layer output, shape (n_samples, 1)
        """
        # categorical cross-entropy, one value per sample as the docstring requires
        return -np.sum(T * np.log(X), axis=1, keepdims=True)
    def delta(self, X, T):
        """
        Computes delta vector for the output layer.
        :param X: loss inputs (outputs of the previous layer), shape (n_samples, n_inputs), n_inputs is the same as
        the number of classes
        :param T: one-hot encoded targets, shape (n_samples, n_inputs)
        :return: delta vector from the loss layer, shape (n_samples, n_inputs)
        """
        # derivative of -sum(T * log(X)) with respect to X
        return -T / X
class LossCrossEntropyForSoftmaxLogits(object):
def __init__(self, name):
super(LossCrossEntropyForSoftmaxLogits, self).__init__()
self.name = name
    def forward(self, X, T):
        # cross-entropy computed straight from logits via a numerically stable
        # log-softmax
        shifted = X - np.max(X, axis=1, keepdims=True)
        log_probs = shifted - np.log(np.sum(np.exp(shifted), axis=1, keepdims=True))
        return -np.sum(T * log_probs, axis=1, keepdims=True)

    def delta(self, X, T):
        # the gradient of softmax + cross-entropy w.r.t. the logits collapses
        # to (softmax(X) - T)
        shifted = X - np.max(X, axis=1, keepdims=True)
        probs = np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)
        return probs - T
# ---------------------------------------
# -------------- MLP --------------------
# ---------------------------------------
class MLP(object):
def __init__(self, n_inputs, layers, loss, output_layers=[]):
"""
MLP constructor.
:param n_inputs:
:param layers: list of layers
:param loss: loss function layer
:param output_layers: list of layers appended to "layers" in evaluation phase, parameters of these are not used
in training phase
"""
self.n_inputs = n_inputs
self.layers = layers
self.output_layers = output_layers
self.loss = loss
self.first_param_layer = layers[-1]
for l in layers:
if l.has_params():
self.first_param_layer = l
break
def propagate(self, X, output_layers=True, last_layer=None):
"""
        Feedforward network propagation.
        :param X: input data, shape (n_samples, n_inputs)
        :param output_layers: controls whether the self.output_layers are appended to the self.layers in evaluation
:param last_layer: if not None, the propagation will stop at layer with this name
:return: propagated inputs, shape (n_samples, n_units_of_the_last_layer)
"""
layers = self.layers + (self.output_layers if output_layers else [])
if last_layer is not None:
assert isinstance(last_layer, basestring)
layer_names = map(lambda layer: layer.name, layers)
layers = layers[0: layer_names.index(last_layer) + 1]
for layer in layers:
print('{}\npropagate: {}\n->'.format(X, layer.name))
X = layer.forward(X)
print("X")
print(X)
layer.last_output = X
print('{}\n'.format(X))
return X
def evaluate(self, X, T):
"""
Computes loss.
:param X: input data, shape (n_samples, n_inputs)
:param T: target labels, shape (n_samples, n_outputs)
:return:
"""
return self.loss.forward(self.propagate(X, output_layers=False), T)
def gradient(self, X, T):
"""
Computes gradient of loss w.r.t. all network parameters.
:param X: input data, shape (n_samples, n_inputs)
:param T: target labels, shape (n_samples, n_outputs)
:return: a dict of records in which key is the layer.name and value the output of grad function
"""
        grads = defaultdict(list)
        layer_count = len(self.layers)
        # run a forward pass first so each layer's last_output corresponds to this batch
        self.propagate(X, output_layers=False)
        delta = self.loss.delta(self.layers[layer_count - 1].last_output, T)
        for i in range(layer_count - 1, -1, -1):
            layer = self.layers[i]
            if layer.has_params():
                if i == 0:
                    grads[layer.name] = layer.grad(X, delta)
                else:
                    left_layer = self.layers[i - 1]
                    grads[layer.name] = layer.grad(left_layer.last_output, delta)
            delta = layer.delta(layer.last_output, delta)
        return grads
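# Hedged debugging aid (not part of the original file): a finite-difference check
# of a single LinearLayer weight against the analytic gradient computed above.
# `net` and `layer` are assumed to be an MLP instance and one of its LinearLayers.
def gradient_check(net, layer, X, T, i=0, j=0, eps=1e-5):
    analytic = net.gradient(X, T)[layer.name][0][i, j]
    old = layer.W[i, j]
    layer.W[i, j] = old + eps
    loss_plus = np.mean(net.evaluate(X, T))
    layer.W[i, j] = old - eps
    loss_minus = np.mean(net.evaluate(X, T))
    layer.W[i, j] = old  # restore the probed weight
    numeric = (loss_plus - loss_minus) / (2.0 * eps)
    return analytic, numeric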
# ---------------------------------------
# -------------- TRAINING ---------------
# ---------------------------------------
def accuracy(Y, T):
p = np.argmax(Y, axis=1)
t = np.argmax(T, axis=1)
return np.mean(p == t)
def train(net, X_train, T_train, batch_size=1, n_epochs=2, eta=0.1, X_test=None, T_test=None, verbose=False):
"""
Trains a network using vanilla gradient descent.
:param net:
:param X_train:
:param T_train:
:param batch_size:
:param n_epochs:
:param eta: learning rate
:param X_test:
:param T_test:
:param verbose: prints evaluation for each epoch if True
:return:
"""
n_samples = X_train.shape[0]
assert T_train.shape[0] == n_samples
assert batch_size <= n_samples
run_info = defaultdict(list)
def process_info(epoch):
loss_test, acc_test = np.nan, np.nan
Y = net.propagate(X_train)
loss_train = net.loss.forward(Y, T_train)
acc_train = accuracy(Y, T_train)
run_info['loss_train'].append(loss_train)
run_info['acc_train'].append(acc_train)
if X_test is not None:
Y = net.propagate(X_test)
loss_test = net.loss.forward(Y, T_test)
acc_test = accuracy(Y, T_test)
run_info['loss_test'].append(loss_test)
run_info['acc_test'].append(acc_test)
if verbose:
print('epoch: {}, loss: {}/{} accuracy: {}/{}'.format(epoch, np.mean(loss_train), np.nanmean(loss_test),
np.nanmean(acc_train), np.nanmean(acc_test)))
process_info('initial')
for epoch in range(1, n_epochs + 1):
offset = 0
while offset < n_samples:
last = min(offset + batch_size, n_samples)
if verbose:
print('.', end='')
grads = net.gradient(np.asarray(X_train[offset:last]), np.asarray(T_train[offset:last]))
for layer in net.layers:
if layer.has_params():
gs = grads[layer.name]
dtheta = list(map(lambda g: -eta * g, gs))
layer.update_params(dtheta)
offset += batch_size
# return
if verbose:
print()
process_info(epoch)
return run_info
# ---------------------------------------
# -------------- EXPERIMENTS ------------
# ---------------------------------------
def plot_convergence(run_info):
plt.plot(run_info['acc_train'], label='train')
plt.plot(run_info['acc_test'], label='test')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
def plot_test_accuracy_comparison(run_info_dict):
keys = sorted(run_info_dict.keys())
for key in keys:
plt.plot(run_info_dict[key]['acc_test'], label=key)
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
def experiment_XOR():
X, T = load_XOR()
rng = np.random.RandomState(1234)
net = MLP(n_inputs=2,
layers=[
LinearLayer(n_inputs=2, n_units=4, rng=rng, name='Linear_1'),
ReLULayer(name='ReLU_1'),
LinearLayer(n_inputs=4, n_units=2, rng=rng, name='Linear_OUT'),
SoftmaxLayer(name='Softmax_OUT')
],
loss=LossCrossEntropy(name='CE'),
)
# print(X)
# print(T)
run_info = train(net, X, T, batch_size=4, eta=0.1, n_epochs=100, verbose=True)
plot_convergence(run_info)
plt.show()
print(net.propagate(X))
plot_2D_classification(X, T, net)
plt.show()
def experiment_spirals():
X_train, T_train, X_test, T_test = load_spirals()
experiments = (
('eta = 0.2', 0.2),
('eta = 1', 1.0),
('eta = 5', 5.0),
)
run_info_dict = {}
for name, eta in experiments:
rng = np.random.RandomState(1234)
net = MLP(n_inputs=2,
layers=[
LinearLayer(n_inputs=2, n_units=10, rng=rng, name='Linear_1'),
ReLULayer(name='ReLU_1'),
LinearLayer(n_inputs=10, n_units=3, rng=rng, name='Linear_OUT'),
SoftmaxLayer(name='Softmax_OUT')
],
loss=LossCrossEntropy(name='CE'),
)
run_info = train(net, X_train, T_train, batch_size=len(X_train), eta=eta, X_test=X_test, T_test=T_test,
n_epochs=1000, verbose=True)
run_info_dict[name] = run_info
# plot_spirals(X_train, T_train, net)
# plt.show()
# plot_convergence(run_info)
# plt.show()
plot_test_accuracy_comparison(run_info_dict)
plt.show()
# plt.savefig('spiral.pdf') # you can instead save figure to file
def experiment_MNIST():
X_train, T_train, X_test, T_test = load_MNIST()
np.seterr(all='raise', under='warn', over='warn')
rng = np.random.RandomState(1234)
net = MLP(n_inputs=28 * 28,
layers=[
LinearLayer(n_inputs=28 * 28, n_units=64, rng=rng, name='Linear_1'),
ReLULayer(name='ReLU_1'),
LinearLayer(n_inputs=64, n_units=64, rng=rng, name='Linear_2'),
ReLULayer(name='ReLU_2'),
LinearLayer(n_inputs=64, n_units=64, rng=rng, name='Linear_3'),
ReLULayer(name='ReLU_3'),
LinearLayer(n_inputs=64, n_units=64, rng=rng, name='Linear_4'),
ReLULayer(name='ReLU_4'),
LinearLayer(n_inputs=64, n_units=64, rng=rng, name='Linear_5'),
ReLULayer(name='ReLU_5'),
LinearLayer(n_inputs=64, n_units=10, rng=rng, name='Linear_OUT'),
],
loss=LossCrossEntropyForSoftmaxLogits(name='CE'),
output_layers=[SoftmaxLayer(name='Softmax_OUT')]
)
run_info = train(net, X_train, T_train, batch_size=3000, eta=1e-1, X_test=X_test, T_test=T_test, n_epochs=100,
verbose=True)
# plot_convergence(run_info)
# plt.show()
with open('MNIST_run_info.p', 'w') as f:
pickle.dump(run_info, f)
if __name__ == '__main__':
experiment_XOR()
# experiment_spirals()
# experiment_MNIST()
|
[
"jimmy.leblanc@etu.univ-nantes.fr"
] |
jimmy.leblanc@etu.univ-nantes.fr
|
6f280b2ebcba4084589b60a8f6f9e3f93de48dc3
|
8ad1a86ee4a2ea9a391fcaad02086af781a6b384
|
/cartoonify.py
|
409f7a2dc9434e9656e7bedb75a00b02b076a630
|
[
"MIT"
] |
permissive
|
adl1995/image-processing-filters
|
5238278594256b1cbce6dc218f53fd36bb6ee5e2
|
850e4a6e23ef0f3843cc306cf1e42569f705f07e
|
refs/heads/master
| 2020-04-06T04:27:13.163935
| 2017-02-23T09:16:31
| 2017-02-23T09:16:31
| 82,903,043
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,411
|
py
|
#!/usr/bin/env python
__author__ = "Adeel Ahmad"
__email__ = "adeelahmad14@hotmail.com"
__status__ = "Production"
import matplotlib.pyplot as plt
import numpy as np
def cartoonify(im, display=False):
"""
    Receives an image, computes its gradient magnitude, and adds it back
    to the original image to return a semi-cartoon image.
    Note: You will have to scale the gradient-magnitude image
    before adding it back to the input image.
Input:
im: input image to cartoonify
display: whether to display image or not...
NOTE: This function expects a gaussian filtered image
"""
kernel, kern_size = np.array([[-1,-1,-1] ,[0,0,0] ,[1,1,1]]), 3
gx, gy = np.zeros_like(im, dtype=float), np.zeros_like(im, dtype=float)
for i in range(im.shape[0] - (kern_size-1)):
for j in range(im.shape[1] - (kern_size-1)):
window = im[i:i + kern_size, j:j + kern_size]
gx[i,j], gy[i,j] = np.sum(window * kernel.T), np.sum(window * kernel)
magnitude = np.sqrt(gx**2 + gy**2)
magnitude = magnitude.astype(np.int64, copy=False)
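    # NOTE: the docstring asks for the magnitude to be scaled before it is added
    # back; one plausible choice (an assumption, not in the original) would be
    # magnitude * (im.max() / max(magnitude.max(), 1))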
cartoon = im + (im + magnitude)
    if display:
plt.imshow(cartoon, cmap='gray')
plt.suptitle('Cartoon')
plt.show()
return cartoon
|
[
"="
] |
=
|
9f426f14a9c50f481bb32c219bebb40e5e099ba0
|
ec8b04108de6eee3d1676df2d560045458c23b66
|
/bchscript/bchopcodes.py
|
1fda707d29cf5ff17553f3c18d109fb2278d9d61
|
[
"MIT"
] |
permissive
|
gandrewstone/bchscript
|
d8679d29603720d41c99f44a9ce7ae5501465582
|
c73a69f259b4cfb71546903c736e4a94fd0680d7
|
refs/heads/master
| 2021-07-15T15:29:58.218262
| 2020-05-13T19:39:19
| 2020-05-13T19:39:19
| 135,616,739
| 7
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,799
|
py
|
opcode2bin = {
# push value
"OP_0": 0x00,
"OP_FALSE": 0x00,
"OP_PUSHDATA1": 0x4c,
"OP_PUSHDATA2": 0x4d,
"OP_PUSHDATA4": 0x4e,
"OP_1NEGATE": 0x4f,
"OP_RESERVED": 0x50,
"OP_1": 0x51,
"OP_TRUE": 0x51,
"OP_2": 0x52,
"OP_3": 0x53,
"OP_4": 0x54,
"OP_5": 0x55,
"OP_6": 0x56,
"OP_7": 0x57,
"OP_8": 0x58,
"OP_9": 0x59,
"OP_10": 0x5a,
"OP_11": 0x5b,
"OP_12": 0x5c,
"OP_13": 0x5d,
"OP_14": 0x5e,
"OP_15": 0x5f,
"OP_16": 0x60,
# control
"OP_NOP": 0x61,
"OP_VER": 0x62,
"OP_IF": 0x63,
"OP_NOTIF": 0x64,
"OP_VERIF": 0x65,
"OP_VERNOTIF": 0x66,
"OP_ELSE": 0x67,
"OP_ENDIF": 0x68,
"OP_VERIFY": 0x69,
"OP_RETURN": 0x6a,
# stack ops
"OP_TOALTSTACK": 0x6b,
"OP_FROMALTSTACK": 0x6c,
"OP_2DROP": 0x6d,
"OP_2DUP": 0x6e,
"OP_3DUP": 0x6f,
"OP_2OVER": 0x70,
"OP_2ROT": 0x71,
"OP_2SWAP": 0x72,
"OP_IFDUP": 0x73,
"OP_DEPTH": 0x74,
"OP_DROP": 0x75,
"OP_DUP": 0x76,
"OP_NIP": 0x77,
"OP_OVER": 0x78,
"OP_PICK": 0x79,
"OP_ROLL": 0x7a,
"OP_ROT": 0x7b,
"OP_SWAP": 0x7c,
"OP_TUCK": 0x7d,
# splice ops
"OP_CAT": 0x7e,
"OP_SPLIT": 0x7f,
"OP_NUM2BIN": 0x80,
"OP_BIN2NUM": 0x81,
"OP_SIZE": 0x82,
# bit logic
"OP_INVERT": 0x83,
"OP_AND": 0x84,
"OP_OR": 0x85,
"OP_XOR": 0x86,
"OP_EQUAL": 0x87,
"OP_EQUALVERIFY": 0x88,
"OP_RESERVED1": 0x89,
"OP_RESERVED2": 0x8a,
# numeric
"OP_1ADD": 0x8b,
"OP_1SUB": 0x8c,
"OP_2MUL": 0x8d,
"OP_2DIV": 0x8e,
"OP_NEGATE": 0x8f,
"OP_ABS": 0x90,
"OP_NOT": 0x91,
"OP_0NOTEQUAL": 0x92,
"OP_ADD": 0x93,
"OP_SUB": 0x94,
"OP_MUL": 0x95,
"OP_DIV": 0x96,
"OP_MOD": 0x97,
"OP_LSHIFT": 0x98,
"OP_RSHIFT": 0x99,
"OP_BOOLAND": 0x9a,
"OP_BOOLOR": 0x9b,
"OP_NUMEQUAL": 0x9c,
"OP_NUMEQUALVERIFY": 0x9d,
"OP_NUMNOTEQUAL": 0x9e,
"OP_LESSTHAN": 0x9f,
"OP_GREATERTHAN": 0xa0,
"OP_LESSTHANOREQUAL": 0xa1,
"OP_GREATERTHANOREQUAL": 0xa2,
"OP_MIN": 0xa3,
"OP_MAX": 0xa4,
"OP_WITHIN": 0xa5,
# crypto
"OP_RIPEMD160": 0xa6,
"OP_SHA1": 0xa7,
"OP_SHA256": 0xa8,
"OP_HASH160": 0xa9,
"OP_HASH256": 0xaa,
"OP_CODESEPARATOR": 0xab,
"OP_CHECKSIG": 0xac,
"OP_CHECKSIGVERIFY": 0xad,
"OP_CHECKMULTISIG": 0xae,
"OP_CHECKMULTISIGVERIFY": 0xaf,
# expansion
"OP_NOP1": 0xb0,
"OP_CHECKLOCKTIMEVERIFY": 0xb1,
"OP_CHECKSEQUENCEVERIFY": 0xb2,
"OP_NOP4": 0xb3,
"OP_NOP5": 0xb4,
"OP_NOP6": 0xb5,
"OP_NOP7": 0xb6,
"OP_NOP8": 0xb7,
"OP_NOP9": 0xb8,
"OP_NOP10": 0xb9,
#"OP_DATASIGVERIFY": 0xbb
"OP_CHECKDATASIG": 0xba,
"OP_CHECKDATASIGVERIFY": 0xbb
}
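# Hedged usage sketch (illustrative only, not part of the original module):
# mapping a sequence of opcode names to raw script bytes with the table above,
# e.g. assemble(["OP_DUP", "OP_HASH160"]) -> b'\x76\xa9'.
def assemble(opcode_names):
    return bytes(bytearray(opcode2bin[name] for name in opcode_names))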
|
[
"g.andrew.stone@gmail.com"
] |
g.andrew.stone@gmail.com
|
e717e626c031df9a19ca4ed3c60e1dc17c7e7a9d
|
d4f3089dfba275c82e1d3733d8318658ac594d32
|
/step5.1-compareNetworkWithGoldStandard.py
|
c715d7c9d3e651828545a0bf004c11171c49c60c
|
[] |
no_license
|
chemokine/python-scripts
|
34680a812ad55ad97976c1fcfc0930b2ffc18241
|
f74c0ec5ca1a154931c71c5975c5b412ce224b77
|
refs/heads/master
| 2021-01-25T14:39:32.317179
| 2016-08-17T18:13:11
| 2016-08-17T18:13:11
| 33,831,989
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,847
|
py
|
import os
import pandas as pd
import time
import sys
import pylab as pl
from sklearn import metrics
from sklearn.metrics import roc_curve, auc
resultFolder = "/global/scratch/xiaoxi/projects/GEO/scripts/result/network/cancer/Homo_sapiens__numberOfSimulation-100__numberOfDatasetsToSample-150__correlationMethod-"
def createPairName (pairName):
pairNameOut = sorted(pairName[[0,1]])
pairNameOut = pairNameOut[0]+"<==>"+pairNameOut[1]
return(pairNameOut)
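# e.g. name1="TP53", name2="BRCA1" (illustrative values) yields "BRCA1<==>TP53",
# so a pair gets the same key regardless of column order.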
def generateROC (correlationFolder,pvalue):
FilteredNetwork = []
goldStandard ="/global/scratch/xiaoxi/projects/GEO/scripts/data/CervicalCancer/3161.GenePairs_0.1_and_0.001_and_tumor.csv"
files = os.listdir(correlationFolder)
n=0
goldStandardData = pd.io.parsers.read_csv(goldStandard)[["name1","name2","t.median.corr"]]
pairNames = goldStandardData.apply (createPairName,axis=1)
goldStandardData.index = pairNames
for file in files:
n+=1
file = file.rstrip()
print file
# read in data
table = pd.io.parsers.read_csv(correlationFolder+"/"+file)
# get significant pairs , extract name1, name2, correlation coefficient, pvalue
table = table[table["Homo_sapiens.count.pvalue"]<pvalue].iloc[:,[1,2,4,5]]
        # some subsets do not have any significant pair, so move on to the next file; otherwise the empty dataframe will cause an error
if table.shape[0]<1:
continue
# set rownames, use them to find correlation coefficient in gold standard
myPairNames = table.apply (createPairName,axis=1)
table.index = myPairNames
# find correlation coefficient in gold standard
goldStandardCorrelation = goldStandardData.loc[myPairNames,"t.median.corr"]
        # for each pair in our network, label it as positive based on 1) the pair exists in the gold standard and 2) the correlation direction in our network matches the gold standard
label = goldStandardCorrelation*table["Homo_sapiens.count.correlation.Coefficient"]/abs(goldStandardCorrelation*table["Homo_sapiens.count.correlation.Coefficient"]) # the pairs with different correlation direction will be labeled -1
# the NAs are those pairs in our network but not in gold standard, so change the label to -1
label[label!=label] = -1
# concatenate subsets
if len(FilteredNetwork)==0:
FilteredNetwork = table
outLable = pd.DataFrame({"a":label})
else:
FilteredNetwork = FilteredNetwork.append(table, ignore_index=False, verify_integrity=False)
outLable = outLable.append(pd.DataFrame({"a":label}), ignore_index=False, verify_integrity=False)
# calculate ROC
FilteredNetwork["label"] = outLable
fpr, tpr, thresholds = metrics.roc_curve(FilteredNetwork["label"], FilteredNetwork["Homo_sapiens.count.pvalue"], pos_label=1)
roc_auc = auc(fpr, tpr)
print "Area under the ROC curve : %f" % roc_auc
# get how many pairs are predicted under this pvalue cutoff
numberPredicted = FilteredNetwork.shape[0]
# Plot ROC curve
pl.clf()
pl.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc)
pl.plot([0, 1], [0, 1], 'k--')
pl.xlim([0.0, 1.0])
pl.ylim([0.0, 1.0])
pl.xlabel('False Positive Rate')
pl.ylabel('True Positive Rate')
pl.title('under pvalue'+str(pvalue)+'numberPredicted is:'+str(numberPredicted)+"\nCancerPaperHas 3162")
pl.legend(loc="lower right")
#pl.show()
pl.savefig(correlationFolder+'-pvalue'+str(pvalue)+'.png')
#for folder in ["kendall","spearman","pearson"]:
for folder in ["useBinary-0.67-1.5"]:
for pvalue in [0.00001,0.0001,0.001,0.01,0.05,0.1]:
generateROC (resultFolder+folder,pvalue)
|
[
"dfdongxiaoxi@gmail.com"
] |
dfdongxiaoxi@gmail.com
|
b09bf4b28d6b594b0e715effe5bc7c1c947ca501
|
335eb3cd12e97581dd6a83296953b89170f67302
|
/rock_paper_scissors.py
|
32d4fbc2e980d9cfedc63dcc7ec30d1a02dec2d5
|
[] |
no_license
|
eirkostop/Python
|
c4b6881dedf6d7af16311887a285f038d30ebc5f
|
e132403818ebb904fb07f4da35e3b11c21af482e
|
refs/heads/master
| 2020-05-25T03:49:13.106826
| 2019-11-16T09:38:49
| 2019-11-16T09:38:49
| 187,612,731
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,099
|
py
|
import random
choices = ['rock', 'paper', 'scissors']
player_points = 0
computer_points = 0
while player_points < 3 and computer_points < 3:
player_choice = input('Choose rock, paper or scissors: ')
player_choice = player_choice.lower()
while player_choice not in choices:
player_choice = input('Choose rock, paper or scissors: ')
computer_choice = random.choice(choices)
print('Computer chose', computer_choice)
if (player_choice == 'rock' and computer_choice == 'scissors' or
player_choice == 'paper' and computer_choice == 'rock' or
player_choice == 'scissors' and computer_choice == 'paper'):
player_points = player_points + 1
print('Player wins the round.')
elif player_choice == computer_choice:
print('It\'s a draw')
else:
computer_points = computer_points + 1
print('Computer wins the round.')
print('Score - Player:', player_points, 'Computer:', computer_points)
if player_points == 3:
print('Player wins the game.')
else:
print('Computer wins the game.')
|
[
"lysistrat@hotmail.com"
] |
lysistrat@hotmail.com
|
21b078718717a70a60a2c48e1176daaf13f729a3
|
fd238c847c0cfc71a1114ee09bde350e93f1b5b8
|
/practice1/practice1-9.py
|
1129cb79bc0759f03eb57d49b5c22ab440c977a5
|
[] |
no_license
|
ApplauseWow/IT_new_technique_assignment
|
2f934dab39be4a2b1bc9afb39a89841c96817994
|
cd6236459c3c78e757cde80998379f5d7d4c3399
|
refs/heads/master
| 2021-10-27T05:09:29.133907
| 2019-04-16T03:19:58
| 2019-04-16T03:19:58
| 180,495,470
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,002
|
py
|
# -*-coding:utf-8-*-
# created by HolyKwok 201610414206
# count the people who worked on each movie
# practice1-9
import numpy as np


def static_num(top_dic, who):
    """
    Count one appearance.
    :param top_dic: tally dictionary
    :param who: the person being counted
    :return: none
    """
    top_dic[who] = top_dic.get(who, 0) + 1


def list_top(statistic_list):
    """
    Show the top of the tally.
    :param statistic_list: the tally dictionary
    :return: none
    """
    sorted_list = sorted(statistic_list.items(), key=lambda x: x[1], reverse=True)
    if_more = list(filter(lambda x: sorted_list[0][1] == x[1], sorted_list))
    for i in if_more:
        print(i[0], "has the most", ">>> total:", i[1], "films")


movie_info = list()  # movie staff table
# read the data and build the staff table
with open('movie_staff', 'r') as f:
    lines = f.readlines()  # read all lines at once
    for line in lines:  # process each line
        line = line.split()
        movie_info.append(line)
# find the director with the most films and the actor with the most appearances
conduct_num = dict()  # number of films per director
actor_num = dict()  # number of appearances per actor
actor_list = list()  # actor table
for l in movie_info:  # iterate over every movie
    static_num(conduct_num, l[1])  # tally the director's films
    # unlike Python 2, Python 3's map() and filter() return iterators
    '''map -> Make an iterator that computes the function using arguments from each of the iterables.
    Stops when the shortest iterable is exhausted.'''
    # at this point the iterator has not been traversed; something must consume it by calling __next__()
    list(map(lambda x: static_num(actor_num, x), l[2:]))  # tally actor appearances
    list(map(actor_list.append, [a for a in l[2:] if a not in actor_list]))  # build the actor table ['a1', 'a2', ...]
# find the two actors who co-starred the most
# build the adjacency matrix of an undirected graph; it is symmetric
# numpy.zeros takes a single shape sequence, not separate length arguments
matrix = np.zeros((len(actor_list), len(actor_list)))  # build the adjacency matrix
# matrix = [[0 for i in range(len(actor_list))] for j in range(len(actor_list))]
for l in movie_info:
    # unique co-starring pairs in this movie, ordered by actor-table index, so (1, 2) never also appears as (2, 1)
    actor_couple = [(x, y) for x in l[2:] for y in l[2:] if x is not y and actor_list.index(x) < actor_list.index(y)]
    def add_one(couple):  # a closure to use with map
        matrix[actor_list.index(couple[0])][actor_list.index(couple[1])] += 1
    list(map(add_one, actor_couple))  # tally co-starring counts
list_top(conduct_num)  # show the director with the most films
list_top(actor_num)  # show the actor with the most appearances
# print(matrix)  # show the co-starring counts
top_couple = max([max(a) for a in matrix])  # highest co-starring count
couples = np.argwhere(matrix == top_couple)  # indices of every entry equal to the maximum
# print(type(couples), couples[0][0], couples[0][1])
for c in couples:  # show the pairs that co-starred the most
    print(actor_list[c[0]], "and", actor_list[c[1]], "co-starred the most")
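# A tiny hedged demo of the laziness noted above: in Python 3, m = map(print, [1, 2])
# prints nothing until the iterator is consumed, e.g. via list(m).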
|
[
"holykwok@163.com"
] |
holykwok@163.com
|
6d9ac4b8268198a0baea9753583be94ef5bf5a41
|
41cd308910049a9af29896550e94c4fa5ac7bd19
|
/practice37_pass.py
|
111fa0e82633b2b92fabc82acf2d0db35422eeac
|
[] |
no_license
|
OO-b/python_basic
|
627de5994d7cb7c91b2b4dea8ba6a2ab2b80317c
|
9f9aa62c11bb5f6cf67e107693b6c5386fc480e5
|
refs/heads/main
| 2023-06-19T23:07:01.932347
| 2021-07-18T14:37:55
| 2021-07-18T14:37:55
| 344,044,396
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,344
|
py
|
# pass
#: does nothing and just moves on
# generic unit
class Unit:
    def __init__(self, name, hp, speed):
        self.name = name
        self.hp = hp
        self.speed = speed

    def move(self, location):
        print("[ground unit moving]")
        print("{0} : moving toward {1}. [speed {2}]"\
            .format(self.name, location, self.speed))

# attack unit
class AttackUnit(Unit):
    def __init__(self, name, hp, speed, damage):
        Unit.__init__(self, name, hp, speed)
        self.damage = damage

    def attack(self, location):
        print("{0} : attacking the enemy toward {1}. [attack power {2}]"\
            .format(self.name, location, self.damage))

    def damaged(self, damage):
        print("{0} : took {1} damage".format(self.name, damage))
        self.hp -= damage
        print("{0} : current HP is {1}.".format(self.name, self.hp))
        if self.hp <= 0:
            print("{0} : destroyed.".format(self.name))

# dropship: air unit, a transport. Carries marines / firebats / tanks etc. No attack capability.
# class providing the ability to fly
class Flyable:
    def __init__(self, flying_speed):
        self.flying_speed = flying_speed

    def fly(self, name, location):
        print("{0} : flying toward {1}. [speed {2}]"\
            .format(name, location, self.flying_speed))

# air attack unit that can fly
class FlyableAttackUnit(AttackUnit, Flyable):
    def __init__(self, name, hp, damage, flying_speed):  # multiple inheritance: AttackUnit / Flyable
        AttackUnit.__init__(self, name, hp, 0, damage)  # ground speed 0, matching AttackUnit(name, hp, speed, damage)
        Flyable.__init__(self, flying_speed)

    def move(self, location):
        print("[air unit moving]")
        self.fly(self.name, location)

# building
class BuildingUnit(Unit):
    def __init__(self, name, hp, location):
        pass  # does nothing and just moves on

# supply depot: a building; 1 building = 8 more units can be produced
supply_depot = BuildingUnit("Supply Depot", 500, "7 o'clock")

def game_start():
    print("[notice] Starting a new game.")

def game_over():
    pass

game_start()  # [notice] Starting a new game.
game_over()  # simply skipped
|
[
"baeiiyy@gmail.com"
] |
baeiiyy@gmail.com
|
78053329357a80d9505891224f3f336f78edc62a
|
9517f00e8a84b3a59c395af8b0b8e045c04674f9
|
/covidData/manage.py
|
fbc808e6e6e3065cd6e0ad4d9df79866810a77c5
|
[] |
no_license
|
nkrishx/openAppRepo
|
b9a1d182ad4a13b6dc131a2690c40d365a96850b
|
9897ed640cc36dc8259a232ef980cd3ddc1221ee
|
refs/heads/master
| 2023-09-05T05:02:49.799799
| 2021-11-17T11:20:11
| 2021-11-17T11:20:11
| 427,826,249
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 665
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'covidData.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"naveen.gowda.rm@gmail.com"
] |
naveen.gowda.rm@gmail.com
|
84a465ac06841641b8d85e9347c56bd55734883f
|
cb087f1f5ce9d2a64e75614284fe2e38cdb85233
|
/src/cayenne_client.py
|
3cf7e3ba9e886e1c01a162128d9de5e828ba09f2
|
[
"MIT"
] |
permissive
|
dsidor/home-iot
|
e7fe6aea1695dd2e39718f9c22338df8eb2448c7
|
03313de5c5b7470e52d391d407af63e35db9e3c0
|
refs/heads/master
| 2022-04-26T23:22:55.598875
| 2022-03-16T15:34:30
| 2022-03-16T15:34:30
| 83,122,035
| 0
| 0
|
MIT
| 2022-03-16T15:34:32
| 2017-02-25T09:44:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,441
|
py
|
import cayenne.client
from cayenne.client import CayenneMessage, TYPE_RELATIVE_HUMIDITY, UNIT_PERCENT
import time
#from drivers.hts221 import Hts221
# Cayenne authentication info. This should be obtained from the Cayenne Dashboard.
MQTT_CLIENT_ID = '31b6c4c0-facd-11e6-a722-0dd60d860c87'
MQTT_USERNAME = 'f22327f0-a8eb-11e6-a7c1-b395fc8a1540'
MQTT_PASSWORD = '8d58b83e86d1ef3a0c676af8eff332963c8f190c'
# The callback for when a message is received from Cayenne.
def on_message(message):
"""
:param CayenneMessage message:
:return:
"""
print("message received: " + str(message))
print 'I should set channel', message.channel, 'to', message.value
# If there is an error processing the message return an error string, otherwise return nothing.
return None
client = cayenne.client.CayenneMQTTClient()
client.on_message = on_message
client.begin(MQTT_USERNAME, MQTT_PASSWORD, MQTT_CLIENT_ID)
i = 0
timestamp = 0
channels = {
'temperature': 1,
'humidity': 2,
}
PERIOD_S = 10
class SensorMock(object):
def read_temperature_and_humidity(self):
return 12, 23
while True:
client.loop()
time.sleep(PERIOD_S)
#sensor = Hts221()
sensor = SensorMock()
temperature, humidity = sensor.read_temperature_and_humidity()
client.celsiusWrite(channels['temperature'], temperature)
client.virtualWrite(channels['humidity'], humidity, TYPE_RELATIVE_HUMIDITY, UNIT_PERCENT)
|
[
"damian@kontakt.io"
] |
damian@kontakt.io
|
03354dc6ce266c9bba9011d61e93cc92ed2d2041
|
78bb44c98f241d16a853e318cdbe9573d307d3e1
|
/google/appengine/tools/devappserver2/devappserver2.py
|
6c15f6f360755323a22c83db4010b7bf4fade2c4
|
[
"Apache-2.0"
] |
permissive
|
vicmortelmans/catholicmissale
|
2a8129eef31142b43e66fbd72620c7454b3c929b
|
b36916181d87f4f31f5bbbb976a7e88f55296986
|
refs/heads/master
| 2021-01-17T11:36:49.768808
| 2020-11-17T18:14:16
| 2020-11-17T18:14:16
| 11,186,395
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 40,546
|
py
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The main entry point for the new development server."""
import argparse
import errno
import getpass
import itertools
import logging
import os
import sys
import tempfile
import time
from google.appengine.api import appinfo
from google.appengine.api import request_info
from google.appengine.datastore import datastore_stub_util
from google.appengine.tools import boolean_action
from google.appengine.tools.devappserver2 import api_server
from google.appengine.tools.devappserver2 import application_configuration
from google.appengine.tools.devappserver2 import dispatcher
from google.appengine.tools.devappserver2 import gcd_application
from google.appengine.tools.devappserver2 import login
from google.appengine.tools.devappserver2 import runtime_config_pb2
from google.appengine.tools.devappserver2 import shutdown
from google.appengine.tools.devappserver2 import update_checker
from google.appengine.tools.devappserver2 import wsgi_request_info
from google.appengine.tools.devappserver2.admin import admin_server
# Initialize logging early -- otherwise some library packages may
# pre-empt our log formatting. NOTE: the level is provisional; it may
# be changed in main() based on the --debug flag.
logging.basicConfig(
level=logging.INFO,
format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
# Valid choices for --log_level and their corresponding constants in
# runtime_config_pb2.Config.stderr_log_level.
_LOG_LEVEL_TO_RUNTIME_CONSTANT = {
'debug': 0,
'info': 1,
'warning': 2,
'error': 3,
'critical': 4,
}
# Valid choices for --dev_appserver_log_level and their corresponding Python
# logging levels
_LOG_LEVEL_TO_PYTHON_CONSTANT = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL,
}
# The default encoding used by the production interpreter.
_PROD_DEFAULT_ENCODING = 'ascii'
# The environment variable exposed in the devshell.
_DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'
def _generate_storage_paths(app_id):
"""Yield an infinite sequence of possible storage paths."""
if sys.platform == 'win32':
# The temp directory is per-user on Windows so there is no reason to add
# the username to the generated directory name.
user_format = ''
else:
try:
user_name = getpass.getuser()
except Exception: # The possible set of exceptions is not documented.
user_format = ''
else:
user_format = '.%s' % user_name
tempdir = tempfile.gettempdir()
yield os.path.join(tempdir, 'appengine.%s%s' % (app_id, user_format))
for i in itertools.count(1):
yield os.path.join(tempdir, 'appengine.%s%s.%d' % (app_id, user_format, i))
def _get_storage_path(path, app_id):
"""Returns a path to the directory where stub data can be stored."""
_, _, app_id = app_id.replace(':', '_').rpartition('~')
if path is None:
for path in _generate_storage_paths(app_id):
try:
os.mkdir(path, 0700)
except OSError, e:
if e.errno == errno.EEXIST:
          # Check that the directory is only accessible by the current user to
# protect against an attacker creating the directory in advance in
# order to access any created files. Windows has per-user temporary
# directories and st_mode does not include per-user permission
# information so assume that it is safe.
if sys.platform == 'win32' or (
(os.stat(path).st_mode & 0777) == 0700 and os.path.isdir(path)):
return path
else:
continue
raise
else:
return path
elif not os.path.exists(path):
os.mkdir(path)
return path
elif not os.path.isdir(path):
raise IOError('the given storage path %r is a file, a directory was '
'expected' % path)
else:
return path
class PortParser(object):
"""A parser for ints that represent ports."""
def __init__(self, allow_port_zero=True):
self._min_port = 0 if allow_port_zero else 1
def __call__(self, value):
try:
port = int(value)
except ValueError:
raise argparse.ArgumentTypeError('Invalid port: %r' % value)
if port < self._min_port or port >= (1 << 16):
raise argparse.ArgumentTypeError('Invalid port: %d' % port)
return port
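# A quick sanity sketch of PortParser (illustrative, not from the original source):
#   PortParser()('8080')                    -> 8080
#   PortParser()('0')                       -> 0   (port zero allowed by default)
#   PortParser(allow_port_zero=False)('0')  raises argparse.ArgumentTypeError
#   PortParser()('70000')                   raises argparse.ArgumentTypeError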
def parse_per_module_option(
value, value_type, value_predicate,
single_bad_type_error, single_bad_predicate_error,
multiple_bad_type_error, multiple_bad_predicate_error,
multiple_duplicate_module_error):
"""Parses command line options that may be specified per-module.
Args:
value: A str containing the flag value to parse. Two formats are supported:
      1. A universal value (may not contain a colon as that is used to
indicate a per-module value).
2. Per-module values. One or more comma separated module-value pairs.
Each pair is a module_name:value. An empty module-name is shorthand
for "default" to match how not specifying a module name in the yaml
is the same as specifying "module: default".
value_type: a callable that converts the string representation of the value
to the actual value. Should raise ValueError if the string can not
be converted.
value_predicate: a predicate to call on the converted value to validate
the converted value. Use "lambda _: True" if all values are valid.
single_bad_type_error: the message to use if a universal value is provided
and value_type throws a ValueError. The message must consume a single
format parameter (the provided value).
single_bad_predicate_error: the message to use if a universal value is
provided and value_predicate returns False. The message does not
get any format parameters.
multiple_bad_type_error: the message to use if a per-module value
either does not have two values separated by a single colon or if
        value_type throws a ValueError on the second string. The message must
consume a single format parameter (the module_name:value pair).
    multiple_bad_predicate_error: the message to use if value_predicate
        returns False for a per-module value. The message must consume a single
        format parameter (the module name).
multiple_duplicate_module_error: the message to use if the same module is
        repeated. The message must consume a single format parameter (the
module name).
Returns:
Either a single value of value_type for universal values or a dict of
str->value_type for per-module values.
Raises:
argparse.ArgumentTypeError: the value is invalid.
"""
if ':' not in value:
try:
single_value = value_type(value)
except ValueError:
raise argparse.ArgumentTypeError(single_bad_type_error % value)
else:
if not value_predicate(single_value):
raise argparse.ArgumentTypeError(single_bad_predicate_error)
return single_value
else:
module_to_value = {}
for module_value in value.split(','):
try:
module_name, single_value = module_value.split(':')
single_value = value_type(single_value)
except ValueError:
raise argparse.ArgumentTypeError(multiple_bad_type_error % module_value)
else:
module_name = module_name.strip()
if not module_name:
module_name = appinfo.DEFAULT_MODULE
if module_name in module_to_value:
raise argparse.ArgumentTypeError(
multiple_duplicate_module_error % module_name)
if not value_predicate(single_value):
raise argparse.ArgumentTypeError(
multiple_bad_predicate_error % module_name)
module_to_value[module_name] = single_value
return module_to_value
def parse_max_module_instances(value):
"""Returns the parsed value for the --max_module_instances flag.
Args:
value: A str containing the flag value for parse. The format should follow
one of the following examples:
1. "5" - All modules are limited to 5 instances.
2. "default:3,backend:20" - The default module can have 3 instances,
"backend" can have 20 instances and all other modules are
unaffected. An empty name (i.e. ":3") is shorthand for default
to match how not specifying a module name in the yaml is the
same as specifying "module: default".
Returns:
The parsed value of the max_module_instances flag. May either be an int
(for values of the form "5") or a dict of str->int (for values of the
form "default:3,backend:20").
Raises:
argparse.ArgumentTypeError: the value is invalid.
"""
return parse_per_module_option(
value, int, lambda instances: instances > 0,
'Invalid max instance count: %r',
'Max instance count must be greater than zero',
'Expected "module:max_instance_count": %r',
'Max instance count for module %s must be greater than zero',
'Duplicate max instance count for module %s')
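# A quick sanity sketch of the two accepted formats (illustrative values, not
# from the original source):
#   parse_max_module_instances('5')
#       -> 5                              (a universal limit for all modules)
#   parse_max_module_instances('default:3,backend:20')
#       -> {'default': 3, 'backend': 20}  (per-module limits)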
def parse_threadsafe_override(value):
"""Returns the parsed value for the --threadsafe_override flag.
Args:
value: A str containing the flag value for parse. The format should follow
one of the following examples:
1. "False" - All modules override the YAML threadsafe configuration
as if the YAML contained False.
2. "default:False,backend:True" - The default module overrides the
YAML threadsafe configuration as if the YAML contained False, the
"backend" module overrides with a value of True and all other
modules use the value in the YAML file. An empty name (i.e.
":True") is shorthand for default to match how not specifying a
module name in the yaml is the same as specifying
"module: default".
Returns:
The parsed value of the threadsafe_override flag. May either be a bool
(for values of the form "False") or a dict of str->bool (for values of the
form "default:False,backend:True").
Raises:
argparse.ArgumentTypeError: the value is invalid.
"""
return parse_per_module_option(
value, boolean_action.BooleanParse, lambda _: True,
'Invalid threadsafe override: %r',
None,
'Expected "module:threadsafe_override": %r',
None,
'Duplicate threadsafe override value for module %s')
def parse_path(value):
"""Returns the given path with ~ and environment variables expanded."""
return os.path.expanduser(os.path.expandvars(value))
def create_command_line_parser():
"""Returns an argparse.ArgumentParser to parse command line arguments."""
# TODO: Add more robust argument validation. Consider what flags
# are actually needed.
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
arg_name = 'yaml_path'
arg_help = 'Path to a yaml file, or a directory containing yaml files'
if application_configuration.java_supported():
arg_name = 'yaml_or_war_path'
arg_help += ', or a directory containing WEB-INF/web.xml'
parser.add_argument(
'config_paths', metavar=arg_name, nargs='+', help=arg_help)
if _DEVSHELL_ENV in os.environ:
default_server_host = '0.0.0.0'
else:
default_server_host = 'localhost'
common_group = parser.add_argument_group('Common')
common_group.add_argument(
'-A', '--application', action='store', dest='app_id',
help='Set the application, overriding the application value from the '
'app.yaml file.')
common_group.add_argument(
'--host', default=default_server_host,
help='host name to which application modules should bind')
common_group.add_argument(
'--port', type=PortParser(), default=8080,
help='lowest port to which application modules should bind')
common_group.add_argument(
'--admin_host', default=default_server_host,
help='host name to which the admin server should bind')
common_group.add_argument(
'--admin_port', type=PortParser(), default=8000,
help='port to which the admin server should bind')
# TODO: Change this. Eventually we want a way to associate ports
# with external modules, with default values. For now we allow only one
# external module, with a port number that must be passed in here.
common_group.add_argument(
'--external_port', type=PortParser(), default=None,
help=argparse.SUPPRESS)
common_group.add_argument(
'--auth_domain', default='gmail.com',
help='name of the authorization domain to use')
common_group.add_argument(
'--storage_path', metavar='PATH',
type=parse_path,
help='path to the data (datastore, blobstore, etc.) associated with the '
'application.')
common_group.add_argument(
'--log_level', default='info',
choices=_LOG_LEVEL_TO_RUNTIME_CONSTANT.keys(),
help='the log level below which logging messages generated by '
'application code will not be displayed on the console')
common_group.add_argument(
'--max_module_instances',
type=parse_max_module_instances,
help='the maximum number of runtime instances that can be started for a '
      'particular module - the value can be an integer, in which case all '
      'modules are limited to that number of instances or a comma-separated '
'list of module:max_instances e.g. "default:5,backend:3"')
common_group.add_argument(
'--use_mtime_file_watcher',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='use mtime polling for detecting source code changes - useful if '
'modifying code from a remote machine using a distributed file system')
common_group.add_argument(
'--threadsafe_override',
type=parse_threadsafe_override,
help='override the application\'s threadsafe configuration - the value '
'can be a boolean, in which case all modules threadsafe setting will '
'be overridden or a comma-separated list of module:threadsafe_override '
'e.g. "default:False,backend:True"')
common_group.add_argument('--enable_mvm_logs',
action=boolean_action.BooleanAction,
const=True,
default=False,
help=argparse.SUPPRESS)
# PHP
php_group = parser.add_argument_group('PHP')
php_group.add_argument('--php_executable_path', metavar='PATH',
type=parse_path,
help='path to the PHP executable')
php_group.add_argument('--php_remote_debugging',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='enable XDebug remote debugging')
php_group.add_argument('--php_gae_extension_path', metavar='PATH',
type=parse_path,
help='path to the GAE PHP extension')
php_group.add_argument('--php_xdebug_extension_path', metavar='PATH',
type=parse_path,
help='path to the xdebug extension')
# App Identity
appidentity_group = parser.add_argument_group('Application Identity')
appidentity_group.add_argument(
'--appidentity_email_address',
help='email address associated with a service account that has a '
'downloadable key. May be None for no local application identity.')
appidentity_group.add_argument(
'--appidentity_private_key_path',
help='path to private key file associated with service account '
'(.pem format). Must be set if appidentity_email_address is set.')
  # Suppressing the help text, as it is unlikely any typical user outside
# of Google has an appropriately set up test oauth server that devappserver2
# could talk to.
# URL to the oauth server that devappserver2 should use to authenticate the
  # appidentity private key (defaults to the standard Google production server).
appidentity_group.add_argument(
'--appidentity_oauth_url',
help=argparse.SUPPRESS)
# Python
python_group = parser.add_argument_group('Python')
python_group.add_argument(
'--python_startup_script',
help='the script to run at the startup of new Python runtime instances '
      '(useful for tools such as debuggers).')
python_group.add_argument(
'--python_startup_args',
help='the arguments made available to the script specified in '
'--python_startup_script.')
# Java
java_group = parser.add_argument_group('Java')
java_group.add_argument(
'--jvm_flag', action='append',
help='additional arguments to pass to the java command when launching '
'an instance of the app. May be specified more than once. Example: '
'--jvm_flag=-Xmx1024m --jvm_flag=-Xms256m')
# Custom
custom_group = parser.add_argument_group('Custom VM Runtime')
custom_group.add_argument(
'--custom_entrypoint',
help='specify an entrypoint for custom runtime modules. This is '
'required when such modules are present. Include "{port}" in the '
'string (without quotes) to pass the port number in as an argument. For '
'instance: --custom_entrypoint="gunicorn -b localhost:{port} '
'mymodule:application"',
default='')
# Blobstore
blobstore_group = parser.add_argument_group('Blobstore API')
blobstore_group.add_argument(
'--blobstore_path',
type=parse_path,
help='path to directory used to store blob contents '
'(defaults to a subdirectory of --storage_path if not set)',
default=None)
# TODO: Remove after the Files API is really gone.
blobstore_group.add_argument(
'--blobstore_warn_on_files_api_use',
action=boolean_action.BooleanAction,
const=True,
default=True,
help=argparse.SUPPRESS)
blobstore_group.add_argument(
'--blobstore_enable_files_api',
action=boolean_action.BooleanAction,
const=True,
default=False,
help=argparse.SUPPRESS)
# Cloud SQL
cloud_sql_group = parser.add_argument_group('Cloud SQL')
cloud_sql_group.add_argument(
'--mysql_host',
default='localhost',
help='host name of a running MySQL server used for simulated Google '
'Cloud SQL storage')
cloud_sql_group.add_argument(
'--mysql_port', type=PortParser(allow_port_zero=False),
default=3306,
help='port number of a running MySQL server used for simulated Google '
'Cloud SQL storage')
cloud_sql_group.add_argument(
'--mysql_user',
default='',
help='username to use when connecting to the MySQL server specified in '
'--mysql_host and --mysql_port or --mysql_socket')
cloud_sql_group.add_argument(
'--mysql_password',
default='',
help='password to use when connecting to the MySQL server specified in '
'--mysql_host and --mysql_port or --mysql_socket')
cloud_sql_group.add_argument(
'--mysql_socket',
help='path to a Unix socket file to use when connecting to a running '
'MySQL server used for simulated Google Cloud SQL storage')
# Datastore
datastore_group = parser.add_argument_group('Datastore API')
datastore_group.add_argument(
'--datastore_path',
type=parse_path,
default=None,
help='path to a file used to store datastore contents '
'(defaults to a file in --storage_path if not set)',)
datastore_group.add_argument('--clear_datastore',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='clear the datastore on startup')
datastore_group.add_argument(
'--datastore_consistency_policy',
default='time',
choices=['consistent', 'random', 'time'],
help='the policy to apply when deciding whether a datastore write should '
'appear in global queries')
datastore_group.add_argument(
'--require_indexes',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='generate an error on datastore queries that '
'requires a composite index not found in index.yaml')
datastore_group.add_argument(
'--auto_id_policy',
default=datastore_stub_util.SCATTERED,
choices=[datastore_stub_util.SEQUENTIAL,
datastore_stub_util.SCATTERED],
help='the type of sequence from which the datastore stub '
'assigns automatic IDs. NOTE: Sequential IDs are '
'deprecated. This flag will be removed in a future '
'release. Please do not rely on sequential IDs in your '
'tests.')
datastore_group.add_argument(
'--enable_cloud_datastore',
action=boolean_action.BooleanAction,
const=True,
default=False,
help=argparse.SUPPRESS #'enable the Google Cloud Datastore API.'
)
# Logs
logs_group = parser.add_argument_group('Logs API')
logs_group.add_argument(
'--logs_path', default=None,
help='path to a file used to store request logs (defaults to a file in '
'--storage_path if not set)',)
# Mail
mail_group = parser.add_argument_group('Mail API')
mail_group.add_argument(
'--show_mail_body',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='logs the contents of e-mails sent using the Mail API')
mail_group.add_argument(
'--enable_sendmail',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='use the "sendmail" tool to transmit e-mail sent '
'using the Mail API (ignored if --smtp_host is set)')
mail_group.add_argument(
'--smtp_host', default='',
help='host name of an SMTP server to use to transmit '
'e-mail sent using the Mail API')
mail_group.add_argument(
'--smtp_port', default=25,
type=PortParser(allow_port_zero=False),
help='port number of an SMTP server to use to transmit '
'e-mail sent using the Mail API (ignored if --smtp_host '
'is not set)')
mail_group.add_argument(
'--smtp_user', default='',
help='username to use when connecting to the SMTP server '
'specified in --smtp_host and --smtp_port')
mail_group.add_argument(
'--smtp_password', default='',
help='password to use when connecting to the SMTP server '
'specified in --smtp_host and --smtp_port')
mail_group.add_argument(
'--smtp_allow_tls',
action=boolean_action.BooleanAction,
const=True,
default=True,
help='Allow TLS to be used when the SMTP server announces TLS support '
'(ignored if --smtp_host is not set)')
# Matcher
prospective_search_group = parser.add_argument_group('Prospective Search API')
prospective_search_group.add_argument(
'--prospective_search_path', default=None,
type=parse_path,
help='path to a file used to store the prospective '
'search subscription index (defaults to a file in '
'--storage_path if not set)')
prospective_search_group.add_argument(
'--clear_prospective_search',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='clear the prospective search subscription index')
# Search
search_group = parser.add_argument_group('Search API')
search_group.add_argument(
'--search_indexes_path', default=None,
type=parse_path,
help='path to a file used to store search indexes '
'(defaults to a file in --storage_path if not set)',)
search_group.add_argument(
'--clear_search_indexes',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='clear the search indexes')
# Taskqueue
taskqueue_group = parser.add_argument_group('Task Queue API')
taskqueue_group.add_argument(
'--enable_task_running',
action=boolean_action.BooleanAction,
const=True,
default=True,
help='run "push" tasks created using the taskqueue API automatically')
# Misc
misc_group = parser.add_argument_group('Miscellaneous')
misc_group.add_argument(
'--allow_skipped_files',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='make files specified in the app.yaml "skip_files" or "static" '
      'handlers readable by the application.')
# No help to avoid lengthening help message for rarely used feature:
# host name to which the server for API calls should bind.
misc_group.add_argument(
'--api_host', default=default_server_host,
help=argparse.SUPPRESS)
misc_group.add_argument(
'--api_port', type=PortParser(), default=0,
help='port to which the server for API calls should bind')
misc_group.add_argument(
'--automatic_restart',
action=boolean_action.BooleanAction,
const=True,
default=True,
help=('restart instances automatically when files relevant to their '
'module are changed'))
misc_group.add_argument(
'--dev_appserver_log_level', default='info',
choices=_LOG_LEVEL_TO_PYTHON_CONSTANT.keys(),
help='the log level below which logging messages generated by '
'the development server will not be displayed on the console (this '
'flag is more useful for diagnosing problems in dev_appserver.py rather '
'than in application code)')
misc_group.add_argument(
'--skip_sdk_update_check',
action=boolean_action.BooleanAction,
const=True,
default=False,
help='skip checking for SDK updates (if false, use .appcfg_nag to '
'decide)')
misc_group.add_argument(
'--default_gcs_bucket_name', default=None,
help='default Google Cloud Storage bucket name')
return parser
PARSER = create_command_line_parser()
def _clear_datastore_storage(datastore_path):
"""Delete the datastore storage file at the given path."""
# lexists() returns True for broken symlinks, where exists() returns False.
if os.path.lexists(datastore_path):
try:
os.remove(datastore_path)
except OSError, e:
logging.warning('Failed to remove datastore file %r: %s',
datastore_path,
e)
def _clear_prospective_search_storage(prospective_search_path):
"""Delete the perspective search storage file at the given path."""
# lexists() returns True for broken symlinks, where exists() returns False.
if os.path.lexists(prospective_search_path):
try:
os.remove(prospective_search_path)
except OSError, e:
logging.warning('Failed to remove prospective search file %r: %s',
prospective_search_path,
e)
def _clear_search_indexes_storage(search_index_path):
"""Delete the search indexes storage file at the given path."""
# lexists() returns True for broken symlinks, where exists() returns False.
if os.path.lexists(search_index_path):
try:
os.remove(search_index_path)
except OSError, e:
logging.warning('Failed to remove search indexes file %r: %s',
search_index_path,
e)
def _setup_environ(app_id):
"""Sets up the os.environ dictionary for the front-end server and API server.
This function should only be called once.
Args:
app_id: The id of the application.
"""
os.environ['APPLICATION_ID'] = app_id
class DevelopmentServer(object):
"""Encapsulates the logic for the development server.
Only a single instance of the class may be created per process. See
_setup_environ.
"""
def __init__(self):
# A list of servers that are currently running.
self._running_modules = []
self._module_to_port = {}
self._dispatcher = None
def module_to_address(self, module_name, instance=None):
"""Returns the address of a module."""
if module_name is None:
return self._dispatcher.dispatch_address
return self._dispatcher.get_hostname(
module_name,
self._dispatcher.get_default_version(module_name),
instance)
def start(self, options):
"""Start devappserver2 servers based on the provided command line arguments.
Args:
options: An argparse.Namespace containing the command line arguments.
"""
logging.getLogger().setLevel(
_LOG_LEVEL_TO_PYTHON_CONSTANT[options.dev_appserver_log_level])
configuration = application_configuration.ApplicationConfiguration(
options.config_paths, options.app_id)
if options.enable_cloud_datastore:
# This requires the oauth server stub to return that the logged in user
# is in fact an admin.
os.environ['OAUTH_IS_ADMIN'] = '1'
gcd_module = application_configuration.ModuleConfiguration(
gcd_application.generate_gcd_app(configuration.app_id.split('~')[1]))
configuration.modules.append(gcd_module)
if options.skip_sdk_update_check:
logging.info('Skipping SDK update check.')
else:
update_checker.check_for_updates(configuration)
# There is no good way to set the default encoding from application code
# (it needs to be done during interpreter initialization in site.py or
# sitecustomize.py) so just warn developers if they have a different
# encoding than production.
if sys.getdefaultencoding() != _PROD_DEFAULT_ENCODING:
logging.warning(
'The default encoding of your local Python interpreter is set to %r '
'while App Engine\'s production environment uses %r; as a result '
'your code may behave differently when deployed.',
sys.getdefaultencoding(), _PROD_DEFAULT_ENCODING)
if options.port == 0:
logging.warn('DEFAULT_VERSION_HOSTNAME will not be set correctly with '
'--port=0')
_setup_environ(configuration.app_id)
self._dispatcher = dispatcher.Dispatcher(
configuration,
options.host,
options.port,
options.auth_domain,
_LOG_LEVEL_TO_RUNTIME_CONSTANT[options.log_level],
self._create_php_config(options),
self._create_python_config(options),
self._create_java_config(options),
self._create_custom_config(options),
self._create_cloud_sql_config(options),
self._create_vm_config(options),
self._create_module_to_setting(options.max_module_instances,
configuration, '--max_module_instances'),
options.use_mtime_file_watcher,
options.automatic_restart,
options.allow_skipped_files,
self._create_module_to_setting(options.threadsafe_override,
configuration, '--threadsafe_override'),
options.external_port)
request_data = wsgi_request_info.WSGIRequestInfo(self._dispatcher)
storage_path = _get_storage_path(options.storage_path, configuration.app_id)
# TODO: Remove after the Files API is really gone.
api_server.set_filesapi_enabled(options.blobstore_enable_files_api)
if options.blobstore_warn_on_files_api_use:
api_server.enable_filesapi_tracking(request_data)
apis = self._create_api_server(
request_data, storage_path, options, configuration)
apis.start()
self._running_modules.append(apis)
self._dispatcher.start(options.api_host, apis.port, request_data)
xsrf_path = os.path.join(storage_path, 'xsrf')
admin = admin_server.AdminServer(options.admin_host, options.admin_port,
self._dispatcher, configuration, xsrf_path)
admin.start()
self._running_modules.append(admin)
try:
default = self._dispatcher.get_module_by_name('default')
apis.set_balanced_address(default.balanced_address)
except request_info.ModuleDoesNotExistError:
logging.warning('No default module found. Ignoring.')
def stop(self):
"""Stops all running devappserver2 modules."""
while self._running_modules:
self._running_modules.pop().quit()
if self._dispatcher:
self._dispatcher.quit()
@staticmethod
def _create_api_server(request_data, storage_path, options, configuration):
datastore_path = options.datastore_path or os.path.join(storage_path,
'datastore.db')
logs_path = options.logs_path or os.path.join(storage_path, 'logs.db')
search_index_path = options.search_indexes_path or os.path.join(
storage_path, 'search_indexes')
prospective_search_path = options.prospective_search_path or os.path.join(
storage_path, 'prospective-search')
blobstore_path = options.blobstore_path or os.path.join(storage_path,
'blobs')
if options.clear_datastore:
_clear_datastore_storage(datastore_path)
if options.clear_prospective_search:
_clear_prospective_search_storage(prospective_search_path)
if options.clear_search_indexes:
_clear_search_indexes_storage(search_index_path)
if options.auto_id_policy==datastore_stub_util.SEQUENTIAL:
logging.warn("--auto_id_policy='sequential' is deprecated. This option "
"will be removed in a future release.")
application_address = '%s' % options.host
if options.port and options.port != 80:
application_address += ':' + str(options.port)
user_login_url = '/%s?%s=%%s' % (login.LOGIN_URL_RELATIVE,
login.CONTINUE_PARAM)
user_logout_url = '%s&%s=%s' % (user_login_url, login.ACTION_PARAM,
login.LOGOUT_ACTION)
if options.datastore_consistency_policy == 'time':
consistency = datastore_stub_util.TimeBasedHRConsistencyPolicy()
elif options.datastore_consistency_policy == 'random':
consistency = datastore_stub_util.PseudoRandomHRConsistencyPolicy()
elif options.datastore_consistency_policy == 'consistent':
consistency = datastore_stub_util.PseudoRandomHRConsistencyPolicy(1.0)
else:
assert 0, ('unknown consistency policy: %r' %
options.datastore_consistency_policy)
api_server.maybe_convert_datastore_file_stub_data_to_sqlite(
configuration.app_id, datastore_path)
api_server.setup_stubs(
request_data=request_data,
app_id=configuration.app_id,
application_root=configuration.modules[0].application_root,
# The "trusted" flag is only relevant for Google administrative
# applications.
trusted=getattr(options, 'trusted', False),
appidentity_email_address=options.appidentity_email_address,
appidentity_private_key_path=os.path.abspath(
options.appidentity_private_key_path)
if options.appidentity_private_key_path else None,
blobstore_path=blobstore_path,
datastore_path=datastore_path,
datastore_consistency=consistency,
datastore_require_indexes=options.require_indexes,
datastore_auto_id_policy=options.auto_id_policy,
images_host_prefix='http://%s' % application_address,
logs_path=logs_path,
mail_smtp_host=options.smtp_host,
mail_smtp_port=options.smtp_port,
mail_smtp_user=options.smtp_user,
mail_smtp_password=options.smtp_password,
mail_enable_sendmail=options.enable_sendmail,
mail_show_mail_body=options.show_mail_body,
mail_allow_tls=options.smtp_allow_tls,
matcher_prospective_search_path=prospective_search_path,
search_index_path=search_index_path,
taskqueue_auto_run_tasks=options.enable_task_running,
taskqueue_default_http_server=application_address,
user_login_url=user_login_url,
user_logout_url=user_logout_url,
default_gcs_bucket_name=options.default_gcs_bucket_name,
appidentity_oauth_url=options.appidentity_oauth_url)
return api_server.APIServer(options.api_host, options.api_port,
configuration.app_id)
@staticmethod
def _create_php_config(options):
php_config = runtime_config_pb2.PhpConfig()
if options.php_executable_path:
php_config.php_executable_path = os.path.abspath(
options.php_executable_path)
php_config.enable_debugger = options.php_remote_debugging
if options.php_gae_extension_path:
php_config.gae_extension_path = os.path.abspath(
options.php_gae_extension_path)
if options.php_xdebug_extension_path:
php_config.xdebug_extension_path = os.path.abspath(
options.php_xdebug_extension_path)
return php_config
@staticmethod
def _create_python_config(options):
python_config = runtime_config_pb2.PythonConfig()
if options.python_startup_script:
python_config.startup_script = os.path.abspath(
options.python_startup_script)
if options.python_startup_args:
python_config.startup_args = options.python_startup_args
return python_config
@staticmethod
def _create_java_config(options):
java_config = runtime_config_pb2.JavaConfig()
if options.jvm_flag:
java_config.jvm_args.extend(options.jvm_flag)
return java_config
@staticmethod
def _create_custom_config(options):
custom_config = runtime_config_pb2.CustomConfig()
custom_config.custom_entrypoint = options.custom_entrypoint
return custom_config
@staticmethod
def _create_cloud_sql_config(options):
cloud_sql_config = runtime_config_pb2.CloudSQL()
cloud_sql_config.mysql_host = options.mysql_host
cloud_sql_config.mysql_port = options.mysql_port
cloud_sql_config.mysql_user = options.mysql_user
cloud_sql_config.mysql_password = options.mysql_password
if options.mysql_socket:
cloud_sql_config.mysql_socket = options.mysql_socket
return cloud_sql_config
@staticmethod
def _create_vm_config(options):
vm_config = runtime_config_pb2.VMConfig()
vm_config.enable_logs = options.enable_mvm_logs
return vm_config
@staticmethod
def _create_module_to_setting(setting, configuration, option):
"""Create a per-module dictionary configuration.
Creates a dictionary that maps a module name to a configuration
setting. Used in conjunction with parse_per_module_option.
Args:
setting: a value that can be None, a dict of str->type or a single value.
configuration: an ApplicationConfiguration object.
option: the option name the setting came from.
Returns:
A dict of str->type.
"""
if setting is None:
return {}
module_names = [module_configuration.module_name
for module_configuration in configuration.modules]
if isinstance(setting, dict):
# Warn and remove a setting if the module name is unknown.
module_to_setting = {}
for module_name, value in setting.items():
if module_name in module_names:
module_to_setting[module_name] = value
else:
logging.warning('Unknown module %r for %r', module_name, option)
return module_to_setting
# Create a dict with an entry for every module.
return {module_name: setting for module_name in module_names}
def main():
shutdown.install_signal_handlers()
# The timezone must be set in the devappserver2 process rather than just in
# the runtime so printed log timestamps are consistent and the taskqueue stub
# expects the timezone to be UTC. The runtime inherits the environment.
os.environ['TZ'] = 'UTC'
if hasattr(time, 'tzset'):
    # time.tzset() should be called on Unix, but doesn't exist on Windows.
time.tzset()
options = PARSER.parse_args()
dev_server = DevelopmentServer()
try:
dev_server.start(options)
shutdown.wait_until_shutdown()
finally:
dev_server.stop()
if __name__ == '__main__':
main()
|
[
"vicmortelmans@gmail.com"
] |
vicmortelmans@gmail.com
|
548e71f94d58e9cc020a8279722b8113107429fe
|
57a70f402b555061b540da26e072d3227e2f9f1b
|
/Electshelf/testcase/test_018_send_delGoods_package.py
|
51e8a90636ea470a39368094549a728c0c9a0c39
|
[] |
no_license
|
yushen1993/yushen
|
46caa8fea19e71eb62fee53be8c28861a27112c5
|
97bd73032ba7141091b073722508bd48fe54a795
|
refs/heads/master
| 2020-11-30T15:32:43.094176
| 2020-08-07T02:56:19
| 2020-08-07T02:56:19
| 230,431,226
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,101
|
py
|
#-*- coding:utf-8 -*-
# @Author : yushen
# @Time : 2020-04-01 10:06
from electshelf_Common_Lib.common_libs import *
def test_018_send_delGoods_package():
    '''Delete a goods item under a category'''
    # Before deleting, list the goods in the category to obtain the case id
    response = commodityManage_queryCategoryCommodity()
    assert response['meta']['errno'] == 0
    assert response['meta']['msg'] == 'success'
    try:
        assert response['result']['data'][0]['name'] == '1203商品'
        assert response['result']['data'][0]['shopId'] == '5c2c2adbb80a8117ea575ea2'
        assert response['result']['data'][0]['shopName'] == '语过添晴'
        assert response['result']['data'][0]['sourceList'] != None
    except:
        print("The goods item has already been deleted; please check whether it exists")
    getShop_caseID = response['result']['data'][0]['id']
    if getShop_caseID != None:
        deleteCommodity = eshelfCaseLayer_deleteCommodity(int(getShop_caseID))
        assert deleteCommodity['meta']['errno'] == 0
        assert deleteCommodity['meta']['msg'] == 'success'
    # After deleting, list the goods again to verify the deletion
    listShop = commodityManage_queryCategoryCommodity()
    assert listShop['meta']['errno'] == 0
    assert listShop['meta']['msg'] == 'success'
    assert listShop['result']['data'][0]['name'] != '1203商品'
    # After the data change, check whether the one-click update button is enabled
    cheack_updata = updateData_judgeHasUpdate()
    assert cheack_updata['meta']['errno'] == 0
    assert cheack_updata['meta']['msg'] == 'success'
    if cheack_updata['result']['data']['hasUpdate'] == True:  # True means an update is available
        send_package = updateData_packUpdate()  # trigger the one-click update
        assert send_package['meta']['errno'] == 0
    else:
        print("No data change; no update needed")
    # After the one-click update, check that the update button is greyed out again
    cheack_end_updata = updateData_judgeHasUpdate()
    assert cheack_end_updata['meta']['errno'] == 0
    assert cheack_end_updata['meta']['msg'] == 'success'
    assert cheack_end_updata['result']['data']['hasUpdate'] == False
|
[
"1140656553@qq.com"
] |
1140656553@qq.com
|
d4afff4bf853ce2396e510f7b56cc75c24dabd65
|
6dac326bc43ac671748b1866b109a808cc7dcf39
|
/backend/wallet/transaction_pool.py
|
453a5f95b373ff069e8fc84afd67ceaf19e17b62
|
[] |
no_license
|
gautyverma/Da-Coins
|
bbffee93983e647c6ff6a530465643ae94e9ebf2
|
fe2b96abeb28a2dfa8188325664df542565278c7
|
refs/heads/master
| 2022-11-15T07:45:46.428835
| 2020-07-09T16:02:18
| 2020-07-09T16:02:18
| 278,345,257
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,231
|
py
|
class TransactionPool:
def __init__(self):
self.transaction_map = {}
def set_transaction(self, transaction):
"""
Set a transaction in the transaction pool.
"""
self.transaction_map[transaction.id] = transaction
def existing_transaction(self, address):
"""
        Find a transaction generated by the given address in the transaction pool.
"""
for transaction in self.transaction_map.values():
if transaction.input['address'] == address:
return transaction
def transaction_data(self):
"""
        Return the transactions of the transaction pool represented in their
json serialized form.
"""
return list(
map(lambda transaction: transaction.to_json(),
self.transaction_map.values()
))
def clear_blockchain_transactions(self, blockchain):
"""
        Delete blockchain-recorded transactions from the transaction pool.
"""
for block in blockchain.chain:
for transaction in block.data:
try:
del self.transaction_map[transaction['id']]
except KeyError:
pass
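# A minimal usage sketch (illustrative; the `transaction` and `blockchain`
# objects come from the rest of this project and are assumed here):
#
#   pool = TransactionPool()
#   pool.set_transaction(transaction)
#   pool.existing_transaction(transaction.input['address'])  # -> transaction
#   pool.transaction_data()             # -> [transaction.to_json(), ...]
#   pool.clear_blockchain_transactions(blockchain)  # drop already-mined entries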
|
[
"32440155+gautyverma@users.noreply.github.com"
] |
32440155+gautyverma@users.noreply.github.com
|
d8c998171350105a1f7df527c014dcc42b7a88d6
|
e493f7696a81ed31bb2de908ea6f9c05a4669be9
|
/autoshots/__init__.py
|
b36eb36e832971b589f059da0b43530bf2b24ddc
|
[] |
no_license
|
cz4rny/AutoShots
|
597d0eaeb2f537963f087b6b6fba6e90cbbbed14
|
301dadd19317ceb76297959e760842faeccc764c
|
refs/heads/master
| 2021-01-10T20:32:15.905313
| 2011-09-09T19:00:50
| 2011-09-09T19:00:50
| 2,355,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 84
|
py
|
# -*- coding: utf-8 -*-
import autoshots
import job
__all__ = ['autoshots', 'job']
|
[
"cezary.krzyzanowski@gmail.com"
] |
cezary.krzyzanowski@gmail.com
|
72c9c9813eee51be77582a53539c011c085177f0
|
3d599a1c9110388d2f2e60be687b7d01bc254feb
|
/doc/examples/python/forum/router_spin.py
|
53182acb16833723f0455652ad8cc4f3f702f784
|
[
"MIT"
] |
permissive
|
jacksontj/dataman
|
4bed72c6d15de618271163a3c59f9f3a1c52246a
|
49e54609ed1c4232d940bd60b76ebb79edc6d4af
|
refs/heads/master
| 2020-05-23T19:33:44.202445
| 2019-02-22T03:47:12
| 2019-02-22T03:50:06
| 84,783,502
| 12
| 7
|
MIT
| 2019-02-22T03:50:08
| 2017-03-13T04:19:04
|
Go
|
UTF-8
|
Python
| false
| false
| 1,541
|
py
|
import random
import json
import tornado.ioloop
import tornado.httpclient
http_client = tornado.httpclient.AsyncHTTPClient()
schema_json = json.load(open('example_forum_sharded.json'))
schema_json['name'] = 'example_forum'
# set the datastore id
schema_json['datastores'][0]['datastore_id'] = 54
@tornado.gen.coroutine
def ensure_database():
request = tornado.httpclient.HTTPRequest(
'http://127.0.0.1:8079/v1/database/example_forum',
method='POST',
body=json.dumps(schema_json),
connect_timeout=9999999,
request_timeout=9999999,
)
try:
ret = yield http_client.fetch(request)
print 'add database (', ret.request.method, ret.request.url, ')'
print ret.request_time
finally:
spawn_callback()
@tornado.gen.coroutine
def remove_database():
request = tornado.httpclient.HTTPRequest(
'http://127.0.0.1:8079/v1/database/example_forum',
method='DELETE',
connect_timeout=9999999,
request_timeout=9999999,
)
try:
ret = yield http_client.fetch(request)
print 'remove database (', ret.request.method, ret.request.url, ')'
print ret.request_time
finally:
spawn_callback()
funcs = [ensure_database, remove_database]
def spawn_callback():
ioloop.spawn_callback(random.choice(funcs))
def main():
for x in xrange(10):
spawn_callback()
if __name__ == '__main__':
ioloop = tornado.ioloop.IOLoop.current()
ioloop.spawn_callback(main)
ioloop.start()
|
[
"jacksontj.89@gmail.com"
] |
jacksontj.89@gmail.com
|
eab0d7e8ecde7d24751b1fd7c972a81e7ba3a896
|
22b19bb6404dae3f9e2f8340972c81efbd532e3b
|
/NetSCAN/netscan.py
|
20cffcca4674cc53519abfe1bca36167d777adc2
|
[] |
no_license
|
Talessil/parallelized-dc
|
0a75e10d4f0d4b87aaa62531b3fb8798f6a38d5d
|
0222d52879aaabde107e5ef9d90278f1a3f206aa
|
refs/heads/master
| 2021-02-11T08:54:55.469545
| 2021-01-24T00:05:10
| 2021-01-24T00:05:10
| 244,474,721
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,098
|
py
|
from neo4j import GraphDatabase
import pandas as pd
import csv
import time
graphdb = GraphDatabase.driver(uri="bolt://localhost:7687",auth=("neo4j", "1234"))
if __name__ == "__main__":
start_time = time.time()
session = graphdb.session()
session.run("Load csv with headers from 'file:/graph.csv' as csvline MERGE (u:Pessoa {idpessoa: toInteger(csvline.author_id1)}) ON CREATE SET u.idpessoa = toInteger(csvline.author_id1)")
session.run("Load csv with headers from 'file:/graph.csv' as csvline MERGE (u:Pessoa {idpessoa: toInteger(csvline.author_id2)}) ON CREATE SET u.idpessoa = toInteger(csvline.author_id2)")
session.run("Load csv with headers from 'file:/graph.csv' as csvline MATCH (a:Pessoa {idpessoa: toInteger(csvline.author_id1)}),(b:Pessoa {idpessoa: toInteger(csvline.author_id2)}) create (a)-[r:Publicou{total:toFloat(csvline.count)}]->(b)")
session.close()
session = graphdb.session()
session.run("CALL netscan.find_communities('Pessoa','Publicou','idpessoa','total', 0.5, 5, 1)")
session.close()
print("--- %s seconds ---" % (time.time() - start_time))
|
[
"noreply@github.com"
] |
Talessil.noreply@github.com
|
9bd5c03b079f38c49033ce349f05d09a582df453
|
e505339761042a6ab3e4c42157c365d53fdb28f3
|
/Language/Python3/Python编程从入门到实践/Chap8-函数/01_Album.py
|
19a77ee933e6fe15687fc40024ba7cf734741cd7
|
[
"Apache-2.0"
] |
permissive
|
edmontdants/Programming-Practice-Everyday
|
e3d56a36e543d763f0f632b34ace3ad00ffef8ef
|
029138569f455c22c0d093ab1cdb3962d71ca701
|
refs/heads/master
| 2023-02-04T00:30:29.860568
| 2020-12-19T12:40:59
| 2020-12-19T12:40:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,265
|
py
|
"""
@Author: huuuuusy
@GitHub: https://github.com/huuuuusy
系统: Ubuntu 18.04
IDE: VS Code 1.35.1
工具: python == 3.7.3
任务: 1. 编写一个名为make_album()的函数,它创建一个描述音乐专辑的字典
这个函数应接受歌手的名字和专辑名,并返回一个包含这两项信息的字典
使用这个函数创建三个表示不同专辑的字典,并打印每个返回的值,以核实字典正确地存储了专辑的信息
2. 给函数make_album()添加一个可选形参,以便能够存储专辑包含的歌曲数
如果调用这个函数时指定了歌曲数,就将这个值添加到表示专辑的字典中
调用这个函数,并至少在一次调用中指定专辑包含的歌曲数
"""
def make_album(artist, title, tracks=0):
album_dict = {
'artist': artist.title(),
'title': title.title(),
}
if tracks:
album_dict['tracks'] = tracks
return album_dict
album = make_album('metallica', 'ride the lightning')
print(album)
album = make_album('beethoven', 'ninth symphony')
print(album)
album = make_album('willie nelson', 'red-headed stranger')
print(album)
album = make_album('iron maiden', 'piece of mind', tracks=8)
print(album)
|
[
"hushiyu1995@qq.com"
] |
hushiyu1995@qq.com
|
34ca2b289959dcce975ee9aadfc5c93342a2b0a0
|
a4fdeb66f18ff6126a89309a256467f6f0310586
|
/modulo1_ordenacao/src/grafo.py
|
f3718197f70366114c15c4a2c6592346b69fb66f
|
[
"Apache-2.0"
] |
permissive
|
FelipeAguiar1/Works-ED2-2019.2
|
741d21c90fa7890e4afdb191618f50befbd95890
|
15220687c175fe1a50db83420cbaeaf6efcc561f
|
refs/heads/master
| 2020-08-06T08:51:35.489577
| 2019-10-04T23:06:19
| 2019-10-04T23:06:19
| 212,913,669
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,393
|
py
|
import json
import copy
class Grafo(object):
def __init__(self):
self.algoritimoDeOrdenacao = None
self.vertices = None
self.arestas = None
return
def _algortmoDeOrdencaoErro(self):
if self.algoritimoDeOrdenacao is None:
            print('Sorting algorithm is None, terminating the program.')
raise ValueError
def estabelecerAlgoritmoDeOrdencao(self, algoritimoDeOrdenacao):
self.algoritimoDeOrdenacao = algoritimoDeOrdenacao
def executarKruskal(self):
self._algortmoDeOrdencaoErro()
return self._kruskal()
def _conectaDuasArvoresDiferentes(self, floresta, aresta):
for arvore in floresta:
if aresta['source'] in arvore and aresta['target'] in arvore:
return False
return True
def _concatenaArvores(self, floresta, aresta):
arvoreA = None
arvoreB = None
for arvore in floresta:
if(aresta['source'] in arvore):
arvoreA = arvore
if(aresta['target'] in arvore):
arvoreB = arvore
if arvoreA is not None and arvoreB is not None:
if arvoreA != arvoreB:
novaArvore = arvoreA + arvoreB
floresta.remove(arvoreA)
floresta.remove(arvoreB)
floresta.append(novaArvore)
def _kruskal(self):
        print('Running Kruskal, please wait...')
floresta = [ [vertice['id'] ] for vertice in self.vertices]
arvoreGeradoraMinima = []
        # Edge sorting begins
        arestasOrdenadas = self.algoritimoDeOrdenacao.ordenar(copy.copy(self.arestas))
        # Edge sorting finished
pop = 0
while len(arestasOrdenadas) > pop:
aresta = arestasOrdenadas[pop]
pop+=1
if(self._conectaDuasArvoresDiferentes(floresta, aresta)):
arvoreGeradoraMinima.append(aresta)
self._concatenaArvores(floresta, aresta)
return arvoreGeradoraMinima
def carregarGrafo(self, arquivoJson):
        print('Loading graph, please wait...')
with open(arquivoJson) as arquivo:
grafo_json = json.loads(arquivo.read())
self.vertices = (grafo_json['graph']['nodes'])
self.arestas = (grafo_json['graph']['edges'])
return True
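# A minimal usage sketch (assumes a sorting-strategy object exposing an
# ordenar(edges) method, as estabelecerAlgoritmoDeOrdencao expects; the
# BuiltinSort class and the 'weight' edge key below are illustrative
# assumptions, not part of this module):
#
#   class BuiltinSort(object):
#       def ordenar(self, arestas):
#           return sorted(arestas, key=lambda aresta: aresta.get('weight', 0))
#
#   grafo = Grafo()
#   grafo.carregarGrafo('grafo.json')
#   grafo.estabelecerAlgoritmoDeOrdencao(BuiltinSort())
#   arvore = grafo.executarKruskal()  # edges of the minimum spanning tree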
|
[
"noreply@github.com"
] |
FelipeAguiar1.noreply@github.com
|
ecaf9b7b1a288e185e38861e2fd16d215ddb80a2
|
2a02e4e4bd05e0a6c71c302feb723e5817964b17
|
/caps_project/caps_project/asgi.py
|
bcd593334a8b8593afabb0ee4f02ccd20cfd7f16
|
[] |
no_license
|
dishankkalra23/blog-website
|
f92f0a529d2f601464f1c0027cebec2fd450b566
|
2c217a16b1afafd881f64b6d5b2752a5d6a782db
|
refs/heads/master
| 2023-04-21T21:22:39.611933
| 2021-05-02T07:15:14
| 2021-05-02T07:15:14
| 361,186,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
"""
ASGI config for caps_project project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'caps_project.settings')
application = get_asgi_application()
|
[
"dishankalra@gmail.com"
] |
dishankalra@gmail.com
|
73413f54b7519e3ecf3e8f8b3abd31abc01d4147
|
3c9d472aa6b02c2e0ae6a5ef1566fc266558249a
|
/test/augmentation/test_container.py
|
ca72bb148d7ed53333515458f77bb9ab563df26f
|
[
"Apache-2.0"
] |
permissive
|
unite-deals/kornia
|
bf23eb835443f4a19ff745b7795f3474b746c691
|
397e2e4928387004b42c66ebf614069dd619183c
|
refs/heads/master
| 2023-06-11T03:49:43.925818
| 2021-06-27T08:54:01
| 2021-06-27T08:54:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,618
|
py
|
import pytest
import torch
from torch.testing import assert_allclose
import kornia
import kornia.augmentation as K
from kornia.constants import BorderType
from kornia.geometry.transform import bbox_to_mask
class TestVideoSequential:
@pytest.mark.parametrize('shape', [(3, 4), (2, 3, 4), (2, 3, 5, 6), (2, 3, 4, 5, 6, 7)])
@pytest.mark.parametrize('data_format', ["BCTHW", "BTCHW"])
def test_exception(self, shape, data_format, device, dtype):
aug_list = K.VideoSequential(K.ColorJitter(0.1, 0.1, 0.1, 0.1), data_format=data_format, same_on_frame=True)
with pytest.raises(AssertionError):
input = torch.randn(*shape, device=device, dtype=dtype)
output = aug_list(input)
@pytest.mark.parametrize(
'augmentation',
[
K.RandomAffine(360, p=1.0),
K.CenterCrop((3, 3), p=1.0),
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0),
K.RandomCrop((5, 5), p=1.0),
K.RandomErasing(p=1.0),
K.RandomGrayscale(p=1.0),
K.RandomHorizontalFlip(p=1.0),
K.RandomVerticalFlip(p=1.0),
K.RandomPerspective(p=1.0),
K.RandomResizedCrop((5, 5), p=1.0),
K.RandomRotation(360.0, p=1.0),
K.RandomSolarize(p=1.0),
K.RandomPosterize(p=1.0),
K.RandomSharpness(p=1.0),
K.RandomEqualize(p=1.0),
K.RandomMotionBlur(3, 35.0, 0.5, p=1.0),
K.Normalize(torch.tensor([0.5, 0.5, 0.5]), torch.tensor([0.5, 0.5, 0.5]), p=1.0),
K.Denormalize(torch.tensor([0.5, 0.5, 0.5]), torch.tensor([0.5, 0.5, 0.5]), p=1.0),
],
)
@pytest.mark.parametrize('data_format', ["BCTHW", "BTCHW"])
def test_augmentation(self, augmentation, data_format, device, dtype):
input = torch.randint(255, (1, 3, 3, 5, 6), device=device, dtype=dtype).repeat(2, 1, 1, 1, 1) / 255.0
torch.manual_seed(21)
aug_list = K.VideoSequential(augmentation, data_format=data_format, same_on_frame=True)
output = aug_list(input)
@pytest.mark.parametrize('augmentations', [[K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5), K.RandomAffine(360, p=0.5)]])
@pytest.mark.parametrize('data_format', ["BCTHW", "BTCHW"])
def test_p_half(self, augmentations, data_format, device, dtype):
input = torch.randn(1, 3, 3, 5, 6, device=device, dtype=dtype).repeat(2, 1, 1, 1, 1)
torch.manual_seed(21)
aug_list = K.VideoSequential(*augmentations, data_format=data_format, same_on_frame=True)
output = aug_list(input)
assert not (output[0] == input[0]).all()
assert (output[1] == input[1]).all()
@pytest.mark.parametrize(
'augmentations',
[
[K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0), K.RandomAffine(360, p=1.0)],
[K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0)],
[K.RandomAffine(360, p=1.0), kornia.color.BgrToRgb()],
[K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.0), K.RandomAffine(360, p=0.0)],
[K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.0)],
[K.RandomAffine(360, p=0.0)],
],
)
@pytest.mark.parametrize('data_format', ["BCTHW", "BTCHW"])
def test_same_on_frame(self, augmentations, data_format, device, dtype):
aug_list = K.VideoSequential(*augmentations, data_format=data_format, same_on_frame=True)
if data_format == 'BCTHW':
input = torch.randn(2, 3, 1, 5, 6, device=device, dtype=dtype).repeat(1, 1, 4, 1, 1)
output = aug_list(input)
assert (output[:, :, 0] == output[:, :, 1]).all()
assert (output[:, :, 1] == output[:, :, 2]).all()
assert (output[:, :, 2] == output[:, :, 3]).all()
if data_format == 'BTCHW':
input = torch.randn(2, 1, 3, 5, 6, device=device, dtype=dtype).repeat(1, 4, 1, 1, 1)
output = aug_list(input)
assert (output[:, 0] == output[:, 1]).all()
assert (output[:, 1] == output[:, 2]).all()
assert (output[:, 2] == output[:, 3]).all()
@pytest.mark.parametrize(
'augmentations', [[K.RandomAffine(360, p=1.0)], [K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0)]]
)
@pytest.mark.parametrize('data_format', ["BCTHW", "BTCHW"])
def test_against_sequential(self, augmentations, data_format, device, dtype):
aug_list_1 = K.VideoSequential(*augmentations, data_format=data_format, same_on_frame=False)
aug_list_2 = torch.nn.Sequential(*augmentations)
if data_format == 'BCTHW':
input = torch.randn(2, 3, 1, 5, 6, device=device, dtype=dtype).repeat(1, 1, 4, 1, 1)
if data_format == 'BTCHW':
input = torch.randn(2, 1, 3, 5, 6, device=device, dtype=dtype).repeat(1, 4, 1, 1, 1)
torch.manual_seed(0)
output_1 = aug_list_1(input)
param_1 = list(aug_list_1.children())[0]._params
torch.manual_seed(0)
if data_format == 'BCTHW':
input = input.transpose(1, 2)
output_2 = aug_list_2(input.reshape(-1, 3, 5, 6))
param_2 = list(aug_list_2.children())[0]._params
output_2 = output_2.view(2, 4, 3, 5, 6)
if data_format == 'BCTHW':
output_2 = output_2.transpose(1, 2)
assert (output_1 == output_2).all()
@pytest.mark.jit
@pytest.mark.skip(reason="turn off due to Union Type")
def test_jit(self, device, dtype):
B, C, D, H, W = 2, 3, 5, 4, 4
img = torch.ones(B, C, D, H, W, device=device, dtype=dtype)
op = K.VideoSequential(K.ColorJitter(0.1, 0.1, 0.1, 0.1), same_on_frame=True)
op_jit = torch.jit.script(op)
assert_allclose(op(img), op_jit(img))
class TestSequential:
@pytest.mark.parametrize('same_on_batch', [True, False, None])
@pytest.mark.parametrize("return_transform", [True, False, None])
@pytest.mark.parametrize("keepdim", [True, False, None])
def test_construction(self, same_on_batch, return_transform, keepdim):
K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0),
K.RandomAffine(360, p=1.0),
same_on_batch=same_on_batch,
return_transform=return_transform,
keepdim=keepdim,
)
@pytest.mark.parametrize("return_transform", [True, False, None])
def test_forward(self, return_transform, device, dtype):
inp = torch.randn(1, 3, 30, 30, device=device, dtype=dtype)
aug = K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0),
kornia.filters.MedianBlur((3, 3)),
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0, return_transform=True),
K.RandomAffine(360, p=1.0),
return_transform=return_transform,
)
out = aug(inp)
if isinstance(out, (tuple,)):
assert out[0].shape == inp.shape
else:
assert out.shape == inp.shape
class TestAugmentationSequential:
@pytest.mark.parametrize(
'data_keys', ["input", ["mask", "input"], ["input", "bbox_yxyx"], [0, 10], [BorderType.REFLECT]]
)
@pytest.mark.parametrize("augmentation_list", [K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0)])
def test_exception(self, augmentation_list, data_keys, device, dtype):
        with pytest.raises(Exception):  # AssertionError and NotImplementedError
K.AugmentationSequential(augmentation_list, data_keys=data_keys)
def test_forward_and_inverse(self, device, dtype):
inp = torch.randn(1, 3, 1000, 500, device=device, dtype=dtype)
bbox = torch.tensor([[[355, 10], [660, 10], [660, 250], [355, 250]]], device=device, dtype=dtype)
keypoints = torch.tensor([[[465, 115], [545, 116]]], device=device, dtype=dtype)
mask = bbox_to_mask(
torch.tensor([[[155, 0], [900, 0], [900, 400], [155, 400]]], device=device, dtype=dtype), 1000, 500
)[:, None].float()
aug = K.AugmentationSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0),
K.RandomAffine(360, p=1.0),
data_keys=["input", "mask", "bbox", "keypoints"],
)
out = aug(inp, mask, bbox, keypoints)
assert out[0].shape == inp.shape
assert out[1].shape == mask.shape
assert out[2].shape == bbox.shape
assert out[3].shape == keypoints.shape
out_inv = aug.inverse(*out)
assert out_inv[0].shape == inp.shape
assert out_inv[1].shape == mask.shape
assert out_inv[2].shape == bbox.shape
assert out_inv[3].shape == keypoints.shape
def test_individual_forward_and_inverse(self, device, dtype):
inp = torch.randn(1, 3, 1000, 500, device=device, dtype=dtype)
bbox = torch.tensor([[[355, 10], [660, 10], [660, 250], [355, 250]]], device=device, dtype=dtype)
keypoints = torch.tensor([[[465, 115], [545, 116]]], device=device, dtype=dtype)
mask = bbox_to_mask(
torch.tensor([[[155, 0], [900, 0], [900, 400], [155, 400]]], device=device, dtype=dtype), 1000, 500
)[:, None].float()
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=True))
assert aug(inp, data_keys=['input'])[0].shape == inp.shape
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=False))
assert aug(inp, data_keys=['input']).shape == inp.shape
assert aug(mask, data_keys=['mask']).shape == mask.shape
assert aug(bbox, data_keys=['bbox']).shape == bbox.shape
assert aug(keypoints, data_keys=['keypoints']).shape == keypoints.shape
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=True))
assert aug.inverse(inp, data_keys=['input']).shape == inp.shape
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=True))
assert aug.inverse(bbox, data_keys=['bbox']).shape == bbox.shape
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=True))
assert aug.inverse(keypoints, data_keys=['keypoints']).shape == keypoints.shape
aug = K.AugmentationSequential(K.RandomAffine(360, p=1.0, return_transform=True))
assert aug.inverse(mask, data_keys=['mask']).shape == mask.shape
def test_forward_and_inverse_return_transform(self, device, dtype):
inp = torch.randn(1, 3, 1000, 500, device=device, dtype=dtype)
bbox = torch.tensor([[[355, 10], [660, 10], [660, 250], [355, 250]]], device=device, dtype=dtype)
keypoints = torch.tensor([[[465, 115], [545, 116]]], device=device, dtype=dtype)
mask = bbox_to_mask(
torch.tensor([[[155, 0], [900, 0], [900, 400], [155, 400]]], device=device, dtype=dtype), 1000, 500
)[:, None].float()
aug = K.AugmentationSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0, return_transform=True),
K.RandomAffine(360, p=1.0, return_transform=True),
data_keys=["input", "mask", "bbox", "keypoints"],
)
out = aug(inp, mask, bbox, keypoints)
assert out[0][0].shape == inp.shape
assert out[1].shape == mask.shape
assert out[2].shape == bbox.shape
assert out[3].shape == keypoints.shape
out_inv = aug.inverse(*out)
assert out_inv[0].shape == inp.shape
assert out_inv[1].shape == mask.shape
assert out_inv[2].shape == bbox.shape
assert out_inv[3].shape == keypoints.shape
def test_inverse_and_forward_return_transform(self, device, dtype):
inp = torch.randn(1, 3, 1000, 500, device=device, dtype=dtype)
bbox = torch.tensor([[[355, 10], [660, 10], [660, 250], [355, 250]]], device=device, dtype=dtype)
keypoints = torch.tensor([[[465, 115], [545, 116]]], device=device, dtype=dtype)
mask = bbox_to_mask(
torch.tensor([[[155, 0], [900, 0], [900, 400], [155, 400]]], device=device, dtype=dtype), 1000, 500
)[:, None].float()
aug = K.AugmentationSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0, return_transform=True),
K.RandomAffine(360, p=1.0, return_transform=True),
data_keys=["input", "mask", "bbox", "keypoints"],
)
out_inv = aug.inverse(inp, mask, bbox, keypoints)
assert out_inv[0].shape == inp.shape
assert out_inv[1].shape == mask.shape
assert out_inv[2].shape == bbox.shape
assert out_inv[3].shape == keypoints.shape
out = aug(inp, mask, bbox, keypoints)
assert out[0][0].shape == inp.shape
assert out[1].shape == mask.shape
assert out[2].shape == bbox.shape
assert out[3].shape == keypoints.shape
@pytest.mark.jit
@pytest.mark.skip(reason="turn off due to Union Type")
def test_jit(self, device, dtype):
B, C, H, W = 2, 3, 4, 4
img = torch.ones(B, C, H, W, device=device, dtype=dtype)
op = K.AugmentationSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0), K.RandomAffine(360, p=1.0), same_on_batch=True
)
op_jit = torch.jit.script(op)
assert_allclose(op(img), op_jit(img))
class TestPatchSequential:
@pytest.mark.parametrize('shape', [(2, 3, 24, 24)])
@pytest.mark.parametrize('padding', ["same", "valid"])
@pytest.mark.parametrize('patchwise_apply', [True, False])
@pytest.mark.parametrize('same_on_batch', [True, False, None])
@pytest.mark.parametrize('keepdim', [True, False, None])
def test_forward(self, shape, padding, patchwise_apply, same_on_batch, keepdim, device, dtype):
seq = K.PatchSequential(
K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5),
K.RandomPerspective(0.2, p=0.5),
K.RandomSolarize(0.1, 0.1, p=0.5),
),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5),
K.RandomPerspective(0.2, p=0.5),
K.RandomSolarize(0.1, 0.1, p=0.5),
),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
grid_size=(2, 2),
padding=padding,
patchwise_apply=patchwise_apply,
same_on_batch=same_on_batch,
keepdim=keepdim,
)
input = torch.randn(*shape, device=device, dtype=dtype)
trans = torch.randn(shape[0], 3, 3, device=device, dtype=dtype)
out = seq(input)
assert out.shape[-3:] == input.shape[-3:]
out = seq((input, trans))
assert out[0].shape[-3:] == input.shape[-3:]
assert out[1].shape == trans.shape
def test_intensity_only(self):
seq = K.PatchSequential(
K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5),
K.RandomPerspective(0.2, p=0.5),
K.RandomSolarize(0.1, 0.1, p=0.5),
),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
K.ImageSequential(
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5),
K.RandomPerspective(0.2, p=0.5),
K.RandomSolarize(0.1, 0.1, p=0.5),
),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
grid_size=(2, 2),
)
assert not seq.is_intensity_only()
seq = K.PatchSequential(
K.ImageSequential(K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5)),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=0.5),
K.ColorJitter(0.1, 0.1, 0.1, 0.1),
grid_size=(2, 2),
)
assert seq.is_intensity_only()
|
[
"noreply@github.com"
] |
unite-deals.noreply@github.com
|
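For context, the smallest end-to-end use of the container exercised in the tests above (shapes and parameter values are illustrative; the calls mirror those in the test file itself):

import torch
import kornia.augmentation as K

aug = K.AugmentationSequential(
    K.ColorJitter(0.1, 0.1, 0.1, 0.1, p=1.0),
    K.RandomAffine(360, p=1.0),
    data_keys=["input"],
)
img = torch.randn(1, 3, 32, 32)
out = aug(img)
assert out.shape == img.shape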
fcdd07b215cc8020fc9283218ec26953827e5584
|
38bd2c22647e0ac575e48ca52f9c5365bf32733d
|
/List-2/sum13.py
|
6bf6818b382249eb505d15b6f3f6e4033e3a3d8a
|
[] |
no_license
|
KasperOmari/Coding-Bat-Solutions
|
dd3bc2f4b847d7e85e8b91868834c0a8014acef3
|
c1a21b6346b0901b34b683554235c5527a3c217f
|
refs/heads/master
| 2022-12-27T22:38:50.011476
| 2020-10-09T15:37:03
| 2020-10-09T15:37:03
| 150,594,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 182
|
py
|
def sum13(nums):
    # sum the list, skipping every 13 and the element right after it
    cnt = 0
    for i in range(len(nums)):
        if nums[i] != 13:
            cnt += nums[i]
        if i < len(nums) - 1 and nums[i] == 13:
            # zero out the 13 and its successor so neither is counted
            nums[i] = 0
            nums[i + 1] = 0
    return cnt
|
[
"mohammadomari852@hotmail.com"
] |
mohammadomari852@hotmail.com
|
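A quick sanity check for the sum13 snippet above (a sketch; the expected values follow the usual CodingBat spec, where a 13 excludes itself and the number immediately after it from the sum):

def _check_sum13():
    # note: sum13 mutates its argument, so fresh lists are passed each time
    assert sum13([1, 2, 2, 1]) == 6
    assert sum13([1, 1]) == 2
    assert sum13([1, 2, 2, 1, 13]) == 6
    assert sum13([13, 1, 2, 13, 2, 1, 13]) == 3
    print("all sum13 checks passed")

_check_sum13()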
9661a9d0dc51343f6a8a8b2309a834244ba67b1b
|
7ebd6061a5152f537b9d1838ecfd3a326089ee70
|
/src/ant/plus/rower.py
|
73545d9d9450e8dda7c6a596903ed3ac7b9965b8
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
mch/python-ant
|
13e629de185ecd5bb4f6ffd5520d5034a37d0ef7
|
02e045825434a17ffe113a82cc8191683223ea5f
|
refs/heads/master
| 2022-02-17T07:00:35.360513
| 2022-02-01T04:02:16
| 2022-02-01T04:02:16
| 89,795,395
| 21
| 14
|
MIT
| 2022-02-01T04:02:17
| 2017-04-29T15:46:59
|
Python
|
UTF-8
|
Python
| false
| false
| 1,785
|
py
|
# -*- coding: utf-8 -*-
from .plus import DeviceProfile
from .genericFEC import genericFEC
#################################################################################################
class rower(DeviceProfile):
channelPeriod = 8192
deviceType = 0x11 #FE-C
name = 'Rower'
def __init__(self, node, network, callbacks=None):
super(rower, self).__init__(node, network, callbacks)
self.page16 = genericFEC()
self._elapsedTime = 0.0
self._distanceTraveled = 0
self._instantaneousSpeed = 0.0
self._kmSpeed = 0.0
self._cadence = 0
self._power = 0
self._detected_device = None
def event_time_correction(self, time_difference):
return time_difference * 1000 / 1024
def processData(self, data):
with self.lock:
dataPageNumber = data[0]
if(dataPageNumber == 16):
self.page16.p16(data)
self._elapsedTime = self.page16.elapsedTime
self._distanceTraveled = self.page16.distanceTraveled
self._instantaneousSpeed = self.page16.instantaneousSpeed
self._kmSpeed = self.page16.kmSpeed
if(dataPageNumber == 22):
self._cadence = data[4]
self._power = data[5] + (256 * data[6])
if (self._power == 65535): ## FFFF invalid
self._power = 0.0
self._cadence = 0
#################################################################################
callback = self.callbacks.get('onRower')
if callback:
callback(self._elapsedTime, self._distanceTraveled, self._instantaneousSpeed, self._kmSpeed, self._cadence, self._power)
|
[
"noreply@github.com"
] |
mch.noreply@github.com
|
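A minimal usage sketch for the rower profile above. It assumes an ANT+ node and network have already been created and started via the surrounding python-ant library; the names node and network below are placeholders for those objects, not definitions from this file.

def on_rower(elapsed_time, distance, speed, km_speed, cadence, power):
    # invoked at the end of processData() once page 16 or 22 has been parsed
    print("%.1fs %dm %.2fkm/h cadence=%d power=%dW"
          % (elapsed_time, distance, km_speed, cadence, power))

profile = rower(node, network, callbacks={'onRower': on_rower})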
22677b52a969991dfb9411ba83a44021d785fc65
|
c6ebec2a2849baa82bc6528a480818329c6c1f38
|
/RoboND_Mapping/build/common_msgs/shape_msgs/catkin_generated/pkg.develspace.context.pc.py
|
ff07f30c170bc8dbf07f867d034b68a6b067bf2a
|
[] |
no_license
|
ykim104/udacityRoboND
|
01b7ffbadde28a210831dc6eda4cae73418b1b10
|
d755d1c089bd2b0923dd0af3fe5aed581cc7de9d
|
refs/heads/master
| 2020-03-16T04:19:05.701579
| 2018-06-02T02:30:09
| 2018-06-02T02:30:09
| 132,508,403
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 483
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/nvidia/catkin_ws/devel/include".split(';') if "/home/nvidia/catkin_ws/devel/include" != "" else []
PROJECT_CATKIN_DEPENDS = "geometry_msgs;message_runtime;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "shape_msgs"
PROJECT_SPACE_DIR = "/home/nvidia/catkin_ws/devel"
PROJECT_VERSION = "1.12.6"
|
[
"kimyejin2000@gmail.com"
] |
kimyejin2000@gmail.com
|
2768addb0155e27ca34a39ccb331b205053d5bc9
|
5ae0ca549a9355afe19c06896dd799aabae8b5b0
|
/Util_Cmd/StringContent.py
|
b8b128659a6657dd51cab844460069d35f1632d5
|
[] |
no_license
|
Jack-GVDL/Scheduler-Backend
|
b4aa2a353a4802ef0dcf934baf99722ee7e86571
|
2d61a00fcd46a9958c072490d9eecc0ee57c3f47
|
refs/heads/main
| 2023-08-28T01:33:53.806244
| 2021-04-07T13:36:41
| 2021-04-07T13:36:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,564
|
py
|
from typing import *
class StringContent:
def __init__(self, content: str, color_fore: str = None, color_back: str = None) -> None:
super().__init__()
# data
self.content: str = ""
self.color_fore: str = None
self.color_back: str = None
# operation
self.content = content
self.color_fore = color_fore
self.color_back = color_back
def __del__(self) -> None:
return
# Operation
# ...
class Control_StringContent:
def __init__(self) -> None:
super().__init__()
# data
self.content_list: List[StringContent] = []
        self.func_output: Optional[Callable[[StringContent], None]] = None  # an empty list here would never satisfy the None check in output() and is not callable
# operation
# ...
def __del__(self) -> None:
return
# Operation
def addContent(self, content: StringContent) -> bool:
self.content_list.append(content)
return True
def addString(self, string: str) -> bool:
self.content_list.append(StringContent(string))
return True
# TODO: not yet completed
def rmContent(self, content: StringContent) -> bool:
return False
def output(self) -> bool:
if self.func_output is None:
return False
# output all content (one-by-one)
for content in self.content_list:
self.func_output(content)
# output will clear the output buffer (content_list)
self.content_list.clear()
return True
# Operator Overloading
def __iadd__(self, other):
# check type
if type(other) == Control_StringContent:
self.content_list.extend(other.content_list)
elif type(other) == StringContent:
self.addContent(other)
elif type(other) == str:
self.addString(other)
return self
|
[
"33114105+Jack-GVDL@users.noreply.github.com"
] |
33114105+Jack-GVDL@users.noreply.github.com
|
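A short usage sketch for Control_StringContent: point func_output at a sink (plain print here), buffer a few items with +=, then flush with output().

ctrl = Control_StringContent()
ctrl.func_output = lambda c: print(c.content)

ctrl += "hello"                                   # a str is wrapped automatically
ctrl += StringContent("world", color_fore="red")  # explicit content object
ctrl.output()                                     # prints both lines and clears the buffer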
cb7b43959e66eccc6a45b38d394ee5813bf926a6
|
d73a3ac0e7da9aece5d0144189a10d711490ca38
|
/Exercise Files/Chap14/modules.py
|
882ee6b17655b669c3ac997cfa1c12dd7c027c26
|
[] |
no_license
|
YancongLi/Python_Essential_Training_ExerciseFiles
|
f55fe7870ba9516410cb0aaf91c7d471bc117b1a
|
7405b014888fb463587aaf66c2dbf0b795deb21f
|
refs/heads/master
| 2020-03-12T12:26:19.415089
| 2018-08-06T21:30:35
| 2018-08-06T21:30:35
| 130,618,076
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
#!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/
import sys
import os
import random
import datetime
def main():
v = sys.version_info
print('Python version {}.{}.{}'.format(*v))
print(os.name)
print(os.getenv('PATH'))
print(os.getcwd())
print(os.urandom(2).hex())
print(random.randint(1,10))
x = list(range(20))
print(x)
random.shuffle(x)
print(x)
now = datetime.datetime.now()
print(now)
print(now.year)
print(now.month)
print(now.day)
if __name__ == '__main__': main()
|
[
"yancong.li@alumni.ubc.ca"
] |
yancong.li@alumni.ubc.ca
|
9c8f03a0ffd05d3f984cc70d72601e55f3a6a65a
|
6069d7014aeb8070375a4b8af77190f79e62831e
|
/data.py
|
0e8330dd52eb43240626bfb01dcd176c3874e67f
|
[] |
no_license
|
0k/report_xml_crafting_libs
|
626de616de7fc343c71b65428b4c0fdf1b52f2d6
|
f1ef6ff7ddf59bae7a50aa108ddd2e625f516b77
|
refs/heads/master
| 2021-01-02T08:52:31.257203
| 2013-04-03T16:27:20
| 2013-04-03T16:27:20
| 9,198,994
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
py
|
"""Data manipulation functions
"""
from openerp.addons.report_xml.mako_tools import register, MakoParsable
@register
def group_by(elts, key):
heaps = {}
for elt in elts:
k = key(elt)
heaps[k] = heaps.get(k, []) + [elt]
return heaps
|
[
"valentin.lab@kalysto.org"
] |
valentin.lab@kalysto.org
|
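For illustration, the same grouping logic as a standalone function (without the OpenERP @register decorator), plus a small example:

def group_by(elts, key):
    heaps = {}
    for elt in elts:
        heaps.setdefault(key(elt), []).append(elt)
    return heaps

rows = [('Paris', 1), ('Lyon', 2), ('Paris', 3)]
print(group_by(rows, key=lambda r: r[0]))
# {'Paris': [('Paris', 1), ('Paris', 3)], 'Lyon': [('Lyon', 2)]}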
d6b8e853f9e2dcfadc229abb2dea4c6529b52973
|
e9a5ab0267b49e2d2268db2807ac4fdf35ed3d54
|
/Python/barrage/barrage_test.py
|
3b1a32bb5b156842c2593771675f10260087f6f4
|
[] |
no_license
|
masterkeze/oldprojects
|
73f0303fc0e96ee566d6a235fecaef34e32c45e4
|
0460d1a0db5b0d4e5c223adcc2244e24ff8e53b5
|
refs/heads/master
| 2022-11-25T18:02:29.252241
| 2020-08-02T09:22:57
| 2020-08-02T09:22:57
| 284,399,246
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,179
|
py
|
from tkinter import *
import random,time,math
height = 400
width = 600
root = Tk()
root.title('Barrage test')
root.geometry('%sx%s+100+100'%(width,height))
C = Canvas(root, bg='gray', height=height,width=width)
C.place(x=-2,y=-2)
class bullet():
def __init__(self, diameter = 5, x=width/2, y=height/2, direction = 0, speed = 2.5, tag="default", color="blue"):
self.diameter = diameter
self.radius = self.diameter/2
self.x = x
self.y = y
self.coord = self.x-self.radius, self.y-self.radius,\
self.x+self.radius, self.y+self.radius
self.direction = direction
self.speed = speed
self.tag = tag
self.color = color
C.create_oval(self.coord, fill = self.color, outline = self.color, tag = self.tag)
C.update()
def keep_going(self):
C.delete(self.tag)
angle = self.direction*math.pi/180
self.x = self.x + math.cos(angle)*self.speed
self.y = self.y + math.sin(angle)*self.speed
self.coord = self.x-self.radius, self.y-self.radius,\
self.x+self.radius, self.y+self.radius
if self.x > 0 and self.x < width and self.y > 0 and self.y < height:
C.create_oval(self.coord, fill = self.color, outline = self.color, tag = self.tag)
C.update()
#print("moving")
return True
else:
return False
center = bullet(15,width/2,height/2,0,0,"default","white")
sharpnel = 4
angle = 0
angular_vec = 10
angular_acc = 0.1
update_list = list()
for t in range(5000):
if (t%5 == 0):
angle = angle + angular_vec
angular_vec = angular_vec + angular_acc
        for n in range(sharpnel):
            # exec() builds uniquely named bullet variables and canvas tags;
            # appending bullet(...) to update_list directly would be simpler
            exec("bullet%s_%s = bullet(5,width/2,height/2,angle+n*360/sharpnel,2.5,\"bullet%s_%s\")"%(t,n,t,n))
            exec("update_list.append(bullet%s_%s)"%(t,n))
index = 0
while(len(update_list) > index):
flag = update_list[index].keep_going()
if flag:
            index = index + 1
else:
update_list.pop(index)
time.sleep(0.0166)
|
[
"861168745@qq.com"
] |
861168745@qq.com
|
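The keep_going method above converts a heading in degrees into a per-frame displacement. One frame worked out by hand for a bullet heading 30 degrees at speed 2.5:

import math

direction, speed = 30, 2.5
angle = direction * math.pi / 180
dx = math.cos(angle) * speed   # ~2.165 px to the right
dy = math.sin(angle) * speed   # 1.25 px down (canvas y grows downward)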
826424a346ad68a935553be2dea4bccd54fafd34
|
9f1fb85f723496724c4c1360029cc75e0c1ed109
|
/elgamal/elgamal.py
|
0489fd1d52736dfcc49596d78d710f928465aa59
|
[] |
no_license
|
mrybarczyk/krypto-projects
|
541dcf3fbc2802823c08d051f820c0c52c700b3b
|
bbb17a3d6c93c56cca5eed07c2942db23af5f0db
|
refs/heads/master
| 2023-03-14T16:55:37.866137
| 2021-03-31T15:44:01
| 2021-03-31T15:44:01
| 353,400,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,980
|
py
|
import sys
import random
import math  # math.gcd replaces fractions.gcd, which was removed in Python 3.9
def error():
print("Podany parametr nie jest obslugiwany.")
def keygen():
while True:
try:
elgamal = open("elgamal.txt", "r")
except IOError:
print("Nie znaleziono pliku elgamal.txt")
break
p = int(elgamal.readline())
g = int(elgamal.readline())
# b = int(random.uniform(1, 100))
b = int(random.uniform(10000, 1000000))
beta = pow(g, b, p)
private = open("private.txt", "w")
private.write(str(p))
private.write("\n")
private.write(str(g))
private.write("\n")
private.write(str(b))
public = open("public.txt", "w")
public.write(str(p))
public.write("\n")
public.write(str(g))
public.write("\n")
public.write(str(beta))
private.close()
public.close()
elgamal.close()
break
def enc():
while True:
try:
plain = open("plain.txt", "r")
except IOError:
print("Nie znaleziono pliku plain.txt")
break
try:
public = open("public.txt", "r")
except IOError:
print("Nie znaleziono pliku public.txt")
break
p = int(public.readline())
g = int(public.readline())
beta = int(public.readline())
m = int(plain.read())
# k = int(random.uniform(1, 100))
k = int(random.uniform(10000, 1000000))
if m >= p:
print("Blad: niespelniony warunek m < p")
break
s1 = pow(g, k, p)
s2 = (m * pow(beta, k, p)) % p
crypto = open("crypto.txt", "w")
crypto.write(str(s1))
crypto.write("\n")
crypto.write(str(s2))
crypto.close()
break
def dec():
while True:
try:
crypto = open("crypto.txt", "r")
except IOError:
print("Nie znaleziono pliku crypto.txt")
break
try:
private = open("private.txt", "r")
except IOError:
print("Nie znaleziono pliku private.txt")
break
p = int(private.readline())
g = int(private.readline())
b = int(private.readline())
gk = int(crypto.readline())
mbetak = int(crypto.readline())
print("Zaczekaj do zakonczenia zadania.")
x = 1
while True:
if pow(g, x, p) == gk:
break
x += 1
mk = pow(g, b, p)
key = pow(mk, x, p)
y = pow(key, p-2, p)
m = (mbetak * y) % p
decrypt = open("decrypt.txt", "w")
decrypt.write(str(m))
decrypt.close()
private.close()
crypto.close()
print("Gotowe!")
break
def inv(k2, p2):
i = 0
j = 1
mem = p2
if p2 == 1:
j = 0
else:
while k2 > 1:
            m = k2 // p2  # integer division; '/' yields a float in Python 3 and breaks the inverse
temp = p2
p2 = k2 % p2
k2 = temp
temp = i
i = j - m * i
j = temp
if j < 0:
j += mem
return j
def signature():
while True:
try:
private = open("private.txt", "r")
except IOError:
print("Nie znaleziono pliku private.txt")
break
try:
plain = open("plain.txt", "r")
except IOError:
print("Nie znaleziono pliku plain.txt")
break
m = int(plain.read())
p = int(private.readline())
g = int(private.readline())
b = int(private.readline())
while True:
k = random.randrange(1, p)
            if math.gcd(k, p-1) == 1:
break
r = pow(g, k, p)
x = int(((m - (b*r)) * inv(k, p-1)) % (p - 1))
s = open("signature.txt", "w")
s.write(str(r))
s.write("\n")
s.write(str(x))
s.close()
private.close()
plain.close()
break
def verifying():
while True:
try:
s = open("signature.txt", "r")
except IOError:
print("Nie znaleziono pliku signature.txt")
break
try:
public = open("public.txt", "r")
except IOError:
print("Nie znaleziono pliku public.txt")
break
try:
plain = open("plain.txt", "r")
except IOError:
print("Nie znaleziono pliku plain.txt")
break
r = int(s.readline())
x = int(s.readline())
p = int(public.readline())
g = int(public.readline())
beta = int(public.readline())
m = int(plain.read())
no1 = pow(g, m, p)
print(str(no1))
no2 = (pow(beta, r, p) * pow(r, x, p)) % p
print(str(no2))
if p > r >= 1 and no1 == no2:
test = "OK"
else:
test = "SIGNATURES DO NOT MATCH"
print(test)
v = open("verify.txt", "w")
v.write(test)
v.close()
s.close()
public.close()
plain.close()
break
if __name__ == "__main__":
a = str(sys.argv[1])
if a.lower() == "-k":
keygen()
elif a.lower() == "-e":
enc()
elif a.lower() == "-d":
dec()
elif a.lower() == "-s":
signature()
elif a.lower() == "-v":
verifying()
elif a.lower() == "--help":
print("/// Kryptografia zadanie 5 - program ElGamal ///")
print("\t-k: Generowanie kluczy")
print("\t-e: Szyfrowanie")
print("\t-d: Odszyfrowanie")
print("\t-s: Generowanie podpisu")
print("\t-v: Weryfikacja podpisu")
print("/// Autor: Marta Rybarczyk ///")
else:
error()
|
[
"martarybarczyk98@gmail.com"
] |
martarybarczyk98@gmail.com
|
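The dec() routine above recovers the ephemeral exponent by brute force. For reference, here is a round-trip with toy parameters that decrypts directly from the private key b instead (the numbers are made up for illustration; p must be prime and m < p):

p, g = 467, 2          # toy public parameters
b = 127                # private key
beta = pow(g, b, p)    # public key
m, k = 100, 213        # message and ephemeral key

s1, s2 = pow(g, k, p), (m * pow(beta, k, p)) % p       # encrypt
recovered = (s2 * pow(pow(s1, b, p), p - 2, p)) % p    # m = s2 / s1^b mod p
assert recovered == m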
72e84d55486531cd727781a7e20e14c5748f4fb7
|
ea0d8963a12d6a4c0691adcf2418868681c6c267
|
/main.py
|
2f9fefbaea02091e32283a51b756f84f0de5c7a5
|
[] |
no_license
|
ericmcmullen/Extreme-Weather-Alert-System
|
748d00f01b63b1990060b6a8ec1dbb0f4e1ed910
|
7d23ba37acb09e52181eefa7812a197d5d2da327
|
refs/heads/master
| 2020-05-15T10:21:13.016735
| 2019-04-19T20:23:11
| 2019-04-19T20:23:11
| 182,190,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,474
|
py
|
# Extreme Weather Alert System
# 2019 Eric McMullen
# Author: Eric McMullen <ericmcmullen.com>
# URL: <https://github.com/ericmcmullen/Extreme-Weather-Alert-System/>
# An Extreme Weather Alert System. Used openweathermap.org api,
# SMTP email sending and hosted for free on pythonanywhere.com.
# The program recieves current weather information from the
# openweathermap.org api and sends this information by email.
##//////////////////////////////////////////////////////
## Extreme Weather Alert System
##//////////////////////////////////////////////////////
import requests
import time
def weather_data(query):
    res = requests.get('http://api.openweathermap.orgXXXXXXXXXXXXXXXXXX')  # rest of URL/API key redacted in the source; the original line was missing the closing quote
return res.json();
def print_weather(result,city):
hot = ("{}".format(result['main']['temp'])) # Isolating the current temperature from the API output.
return hot
def main():
city='Toronto'
recieve= 'XXXX@gmail.com' # Create a list of all emails which will recieve an alert.
print()
try:
query='q='+city;
w_data=weather_data(query);
tmp = print_weather(w_data,city)
#print(city,"is currently",float(tmp),"C")
#print (float(tmp))
        extreme = float(35.00)  # Set what the extreme temperature value is.
while (float(tmp) <= extreme):
print("Checking... the weather is not dangerously hot. Its only",float(tmp))
time.sleep(30) # Test for Current Temperature > 35 deg C every 30 seconds.
main()
except:
print('City name not found.')
if (float(tmp) >= extreme): #If current temperature is above 35 deg C.
import smtplib, ssl
def read_creds():
user = passw = ""
user = 'XXX@gmail.com' # Email address for the sender's email.
passw = 'XXXXXXXXXX' # Password for the sender's email.
return user, passw
port = 465
sender, password = read_creds()
sender = 'XXXXXXXXXXXX@gmail.com' # Email account for the sender's email.
subject = "Subject: Extreme Heat Alert!"
text = " It is currently " + str(tmp) + " deg C in " + str(city) + ". Stay hydrated!"
message = subject + text
context = ssl.create_default_context()
print("Starting to send")
with smtplib.SMTP_SSL("smtp.gmail.com", port, context=context) as server:
server.login(sender, password)
server.sendmail(sender, recieve, message)
print("Sent.")
exit() # Ending loop once the alert is sent. This stops numerous alerts.
main()
exit()
|
[
"noreply@github.com"
] |
ericmcmullen.noreply@github.com
|
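Calling main() from inside its own while loop makes the call stack grow by one frame per poll. A non-recursive polling sketch with the same behaviour (the function and city names reuse those defined above; the 35 C threshold is the one from the script):

def poll_until_extreme(city='Toronto', threshold=35.0, interval=30):
    while True:
        tmp = float(print_weather(weather_data('q=' + city), city))
        if tmp >= threshold:
            return tmp          # caller then sends the alert email once
        time.sleep(interval)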
9673be696228ffcd9432425eb21b08e390e2649b
|
1331557a8e49c42841d8cb8950a6b8d76b72f2fb
|
/labs/image_manipulation/lab_16.py
|
d82116a249481791e4239570d3b6d3cd687edf29
|
[] |
no_license
|
mfroehlich1/Bootcamp_Froehlich
|
9d6273d5e07102367773a5ff1ba5df8dd444004f
|
a2d80fd438ebb1575a63da2d7888847eaf56acf4
|
refs/heads/master
| 2020-05-04T05:39:17.400810
| 2019-05-10T00:57:31
| 2019-05-10T00:57:31
| 178,989,710
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 321
|
py
|
from PIL import Image
img = Image.open("Lenna_(test_image).png") # must be in same folder
width, height = img.size
pixels = img.load()
for x in range(width):
    for y in range(height):
        r, g, b = pixels[x, y]
        brightness = int(0.299*r + 0.587*g + 0.114*b)  # computed but never used below
        pixels[x, y] = (b, r, g)  # rotates the colour channels rather than grayscaling
img.show()
|
[
"mitchel.froehlich@gmail.com"
] |
mitchel.froehlich@gmail.com
|
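The loop above computes a luminance value and then discards it. If the exercise intended grayscale conversion, the inner loop would use the brightness instead (a guess at the intent, not the original behaviour):

for x in range(width):
    for y in range(height):
        r, g, b = pixels[x, y]
        brightness = int(0.299*r + 0.587*g + 0.114*b)
        pixels[x, y] = (brightness, brightness, brightness)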
0c09866708a9484b26d01272c5a19419dc93c290
|
77e9243eae852d3890bac2ff408b8332f91613f6
|
/FUnIE-GAN-master/TF-Keras/nets/funieGAN.py
|
570e905b44f329342a1025837855a39afe67f3ff
|
[
"MIT"
] |
permissive
|
pj-solver/UnderWater-image-restored
|
1702aff4ca069dd12108ce949dc239699a873870
|
3130b516392ed561061b2b74102c1f480bb1be7c
|
refs/heads/master
| 2023-08-17T22:52:35.279874
| 2021-09-28T06:16:16
| 2021-09-28T06:16:16
| 411,114,771
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,511
|
py
|
"""
# > FUnIE-GAN architecture
# - Paper: https://arxiv.org/pdf/1903.09766.pdf
"""
from __future__ import print_function, division
## python libs
import os
import numpy as np
## tf-Keras libs
import tensorflow as tf
import keras.backend as K
from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dropout, Concatenate
#from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D, Conv2D
from keras.layers import BatchNormalization, Activation, MaxPooling2D
from keras.applications import vgg19
def VGG19_Content(dataset='imagenet'):
# Load VGG, trained on imagenet data
vgg = vgg19.VGG19(include_top=False, weights=dataset)
vgg.trainable = False
content_layers = ['block5_conv2']
content_outputs = [vgg.get_layer(name).output for name in content_layers]
return Model(vgg.input, content_outputs)
class FUNIE_GAN():
def __init__(self, imrow=256, imcol=256, imchan=3, loss_meth='wgan'):
## input image shape
self.img_rows, self.img_cols, self.channels = imrow, imcol, imchan
self.img_shape = (self.img_rows, self.img_cols, self.channels)
## input images and their conditioning images
img_A = Input(shape=self.img_shape)
img_B = Input(shape=self.img_shape)
## conv 5_2 content from vgg19 network
self.vgg_content = VGG19_Content()
## output shape of D (patchGAN)
self.disc_patch = (16, 16, 1)
## number of filters in the first layer of G and D
self.gf, self.df = 32, 32
optimizer = Adam(0.0003, 0.5)
## Build and compile the discriminator
self.discriminator = self.FUNIE_discriminator()
self.discriminator.compile(loss='mse', optimizer=optimizer, metrics=['accuracy'])
## Build the generator
self.generator = self.FUNIE_generator2()
## By conditioning on B generate a fake version of A
fake_A = self.generator(img_B)
## For the combined model we will only train the generator
self.discriminator.trainable = False
## Discriminators determines validity of translated images / condition pairs
valid = self.discriminator([fake_A, img_B])
        ## compute the combined loss
self.combined = Model(inputs=[img_A, img_B], outputs=[valid, fake_A])
self.combined.compile(loss=['mse', self.total_gen_loss], loss_weights=[0.2, 0.8], optimizer=optimizer)
def wasserstein_loss(self, y_true, y_pred):
# for wasserstein GAN loss
return K.mean(y_true * y_pred)
def perceptual_distance(self, y_true, y_pred):
"""
Calculating perceptual distance
Thanks to github.com/wandb/superres
"""
y_true = (y_true+1.0)*127.5 # [-1,1] -> [0, 255]
y_pred = (y_pred+1.0)*127.5 # [-1,1] -> [0, 255]
rmean = (y_true[:, :, :, 0] + y_pred[:, :, :, 0]) / 2
r = y_true[:, :, :, 0] - y_pred[:, :, :, 0]
g = y_true[:, :, :, 1] - y_pred[:, :, :, 1]
b = y_true[:, :, :, 2] - y_pred[:, :, :, 2]
return K.mean(K.sqrt((((512+rmean)*r*r)/256) + 4*g*g + (((767-rmean)*b*b)/256)))
def total_gen_loss(self, org_content, gen_content):
# custom perceptual loss function
vgg_org_content = self.vgg_content(org_content)
vgg_gen_content = self.vgg_content(gen_content)
content_loss = K.mean(K.square(vgg_org_content - vgg_gen_content), axis=-1)
mae_gen_loss = K.mean(K.abs(org_content-gen_content))
perceptual_loss = self.perceptual_distance(org_content, gen_content)
gen_total_err = 0.7*mae_gen_loss+0.3*content_loss # v1
# updated loss function in v2
#gen_total_err = 0.6*mae_gen_loss+0.3*content_loss+0.1*perceptual_loss
return gen_total_err
def FUNIE_generator1(self):
"""
Inspired by the U-Net Generator with skip connections
This is a much simpler architecture with fewer parameters (faster inference)
"""
def conv2d(layer_input, filters, f_size=3, bn=True):
## for downsampling
d = Conv2D(filters, kernel_size=f_size, strides=2, padding='same')(layer_input)
#d = LeakyReLU(alpha=0.2)(d)
d = Activation('relu')(d)
if bn: d = BatchNormalization(momentum=0.8)(d)
return d
def deconv2d(layer_input, skip_input, filters, f_size=3, dropout_rate=0):
## for upsampling
u = UpSampling2D(size=2)(layer_input)
u = Conv2D(filters, kernel_size=f_size, strides=1, padding='same', activation='relu')(u)
if dropout_rate: u = Dropout(dropout_rate)(u)
u = BatchNormalization(momentum=0.8)(u)
u = Concatenate()([u, skip_input])
return u
## input
d0 = Input(shape=self.img_shape); print(d0)
## downsample
d1 = conv2d(d0, self.gf*1, f_size=5, bn=False)
d2 = conv2d(d1, self.gf*4, f_size=4, bn=True)
d3 = conv2d(d2, self.gf*8, f_size=4, bn=True)
d4 = conv2d(d3, self.gf*8, f_size=3, bn=True)
d5 = conv2d(d4, self.gf*8, f_size=3, bn=True)
## upsample
u1 = deconv2d(d5, d4, self.gf*8)
u2 = deconv2d(u1, d3, self.gf*8)
u3 = deconv2d(u2, d2, self.gf*4)
u4 = deconv2d(u3, d1, self.gf*1)
u5 = UpSampling2D(size=2)(u4)
output_img = Conv2D(self.channels, kernel_size=4, strides=1, padding='same', activation='tanh')(u5)
print(output_img); print();
return Model(d0, output_img)
def FUNIE_generator2(self):
"""
Inspired by the U-Net Generator with skip connections
This is a much simpler architecture with fewer parameters (faster inference)
"""
def conv2d(layer_input, filters, f_size=3, bn=True):
## for downsampling
d = Conv2D(filters, kernel_size=f_size, padding='same')(layer_input)
#d = LeakyReLU(alpha=0.2)(d)
d = Activation('relu')(d)
if bn: d = BatchNormalization(momentum=0.75)(d)
return d
def deconv2d(layer_input, skip_input, filters, f_size=3, dropout_rate=0):
## for upsampling
u = UpSampling2D(size=2)(layer_input)
u = Conv2D(filters, kernel_size=f_size, strides=1, padding='same', activation='relu')(u)
if dropout_rate: u = Dropout(dropout_rate)(u)
u = BatchNormalization(momentum=0.8)(u)
u = Concatenate()([u, skip_input])
return u
## input
d0 = Input(shape=self.img_shape); print(d0)
## downsample
d1 = conv2d(d0, self.gf*1, f_size=5, bn=False)
d1a = MaxPooling2D(pool_size=(2, 2))(d1)
d2 = conv2d(d1a, self.gf*2, f_size=4, bn=True)
d3 = conv2d(d2, self.gf*2, f_size=4, bn=True)
d3a = MaxPooling2D(pool_size=(2, 2))(d3)
d4 = conv2d(d3a, self.gf*4, f_size=3, bn=True)
d5 = conv2d(d4, self.gf*4, f_size=3, bn=True)
d5a = MaxPooling2D(pool_size=(2, 2))(d5)
d6 = conv2d(d5a, self.gf*8, f_size=3, bn=True)
## upsample
u1 = deconv2d(d6, d5, self.gf*8)
u2 = deconv2d(u1, d3, self.gf*8)
u3 = deconv2d(u2, d1, self.gf*4)
u4 = conv2d(u3, self.gf*4, f_size=3)
u5 = conv2d(u4, self.gf*8, f_size=3)
output_img = Conv2D(self.channels, kernel_size=4, strides=1, padding='same', activation='tanh')(u5)
return Model(d0, output_img)
def FUNIE_discriminator(self):
"""
Inspired by the pix2pix discriminator
"""
def d_layer(layer_input, filters, strides_=2,f_size=3, bn=True):
## Discriminator layers
d = Conv2D(filters, kernel_size=f_size, strides=strides_, padding='same')(layer_input)
#d = LeakyReLU(alpha=0.2)(d)
d = Activation('relu')(d)
if bn: d = BatchNormalization(momentum=0.8)(d)
return d
img_A = Input(shape=self.img_shape)
img_B = Input(shape=self.img_shape)
## input
combined_imgs = Concatenate(axis=-1)([img_A, img_B])
## Discriminator layers
d1 = d_layer(combined_imgs, self.df, bn=False)
d2 = d_layer(d1, self.df*2)
d3 = d_layer(d2, self.df*4)
d4 = d_layer(d3, self.df*8)
validity = Conv2D(1, kernel_size=4, strides=1, padding='same')(d4)
# return model
return Model([img_A, img_B], validity)
|
[
"you@example.com"
] |
you@example.com
|
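A sketch of one pix2pix-style training step using the model above. imgs_A (ground-truth) and imgs_B (distorted input) are assumed to be numpy batches of shape (N, 256, 256, 3) scaled to [-1, 1]; none of this appears in the file itself, so treat it as an outline rather than the authors' training loop.

import numpy as np

gan = FUNIE_GAN()
N = imgs_A.shape[0]
valid = np.ones((N,) + gan.disc_patch)   # patchGAN "real" targets
fake = np.zeros((N,) + gan.disc_patch)   # patchGAN "fake" targets

fake_A = gan.generator.predict(imgs_B)
d_loss_real = gan.discriminator.train_on_batch([imgs_A, imgs_B], valid)
d_loss_fake = gan.discriminator.train_on_batch([fake_A, imgs_B], fake)
g_loss = gan.combined.train_on_batch([imgs_A, imgs_B], [valid, imgs_A])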
c93af26af9114e779d077516b4c930f1987d131c
|
93bf4bbafe0524335ea1216f7f2941348c2cd1bd
|
/tensorflow/examples/skflow/iris.py
|
ea44428d541ba09cea34e50db11e6a0be56b3d15
|
[
"Apache-2.0"
] |
permissive
|
sachinpro/sachinpro.github.io
|
c4951734b09588cad58711a76fe657f110163c11
|
c3bbd8d89818f5d8bb7296c851ed5e52c19728e3
|
refs/heads/master
| 2022-12-23T10:00:13.902459
| 2016-06-27T13:18:27
| 2016-06-27T13:25:58
| 25,289,839
| 1
| 1
|
Apache-2.0
| 2022-12-15T00:45:03
| 2014-10-16T06:44:30
|
C++
|
UTF-8
|
Python
| false
| false
| 1,313
|
py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from sklearn import metrics, cross_validation
from tensorflow.contrib import learn
# Load dataset.
iris = learn.datasets.load_dataset('iris')
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
test_size=0.2, random_state=42)
# Build 3 layer DNN with 10, 20, 10 units respectively.
classifier = learn.TensorFlowDNNClassifier(hidden_units=[10, 20, 10],
n_classes=3, steps=200)
# Fit and predict.
classifier.fit(X_train, y_train)
score = metrics.accuracy_score(y_test, classifier.predict(X_test))
print('Accuracy: {0:f}'.format(score))
|
[
"x0234443@ti.com"
] |
x0234443@ti.com
|
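sklearn.cross_validation and learn.TensorFlowDNNClassifier both predate the 1.x APIs and are long gone. A rough modern equivalent of the split step, for anyone rerunning this (an approximation, not part of the original file):

from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation

X_train, X_test, y_train, y_test = train_test_split(
    iris.data, iris.target, test_size=0.2, random_state=42)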
bbfb7ee7500a0b560a1bdffd3c10e2ab2d3b2efa
|
d64a443604ca21f7bc7da696ba53dc0861be92dd
|
/twitter_alike/twitter_alike/asgi.py
|
5a9108db232f7edd749b2364281371fdc2ced83b
|
[] |
no_license
|
tsuji1/django_twitter_tsuji1
|
bd7362ee0311d0af03312019c816da9e89b065c8
|
7e4d793d4fe6115cc892c04c909ff49b936ddc1c
|
refs/heads/master
| 2023-08-04T22:09:48.073169
| 2021-10-02T08:16:20
| 2021-10-02T08:16:20
| 405,905,434
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
"""
ASGI config for twitter_alike project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'twitter_alike.settings')
application = get_asgi_application()
|
[
"tyobigunngennsui@yahoo.co.jp"
] |
tyobigunngennsui@yahoo.co.jp
|
a505fe43518e4a7a7b8b40f574a5d25741e3b63b
|
ec9b9bc9f5a91d51489d27b2ee3f90087a94a4c5
|
/graph_linear_regression.py
|
c277cd2f8bb832c60c97e0343ca78191cc715d7f
|
[] |
no_license
|
xuetongwei/YelpReviwsAnalysis
|
e099b26def8b6c2f2ec90fee7ec09bf680a66544
|
fec47509abeaaede477e94618c70180ea97077ff
|
refs/heads/master
| 2021-08-23T12:28:35.749569
| 2017-12-04T22:38:08
| 2017-12-04T22:38:08
| 113,102,663
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,168
|
py
|
import matplotlib.pyplot as plt;
import numpy as np
width = 0.2
if __name__ == "__main__":
print("LINEAR REGRESSION")
x = 0.3155
y = -0.0733
objects = ("Ambience 1", "Ambience 2")
position = np.arange(len(objects))
performance = (x,y)
sample = plt.bar(position,performance,width,align="center",alpha = 0.5)
plt.xticks(position,objects)
plt.ylabel('Sentiment Coefficient')
plt.title("Star 5")
for rect in sample:
height = rect.get_height()
        plt.text(rect.get_x() + rect.get_width()/2.0, height, '%.2f' % height, ha='center', va='bottom')
a = 1.0548
b = 2.0916
objects = ("Food 1", "Food 2")
position = np.arange(len(objects))
performance = (a,b)
sample = plt.bar(position+width,performance,width,align="center",alpha = 0.5)
plt.xticks(position,objects)
plt.ylabel('Sentiment Coefficient')
plt.title("Star 5")
for rect in sample:
height = rect.get_height()
        plt.text(rect.get_x() + rect.get_width()/2.0, height, '%.2f' % height, ha='center', va='bottom')
m = -0.0705
n = 0.2214
objects = ("Service 1", "Service 2")
position = np.arange(len(objects))
performance = (m,n)
sample = plt.bar(position+2*width,performance,width,align="center",alpha = 0.5)
plt.xticks(position,objects)
plt.ylabel('Sentiment Coefficient')
plt.title("Star 5")
for rect in sample:
height = rect.get_height()
        plt.text(rect.get_x() + rect.get_width()/2.0, height, '%.2f' % height, ha='center', va='bottom')
p = 2.0944
q = 2.0944
objects = ("Positive", "Negative")
position = np.arange(len(objects))
performance = (p,q)
sample = plt.bar(position+3*width,performance,width,align="center",alpha = 0.5)
plt.xticks(position,objects)
plt.ylabel('Sentiment Coefficient')
plt.title("Star 5")
for rect in sample:
height = rect.get_height()
        plt.text(rect.get_x() + rect.get_width()/2.0, height, '%.2f' % height, ha='center', va='bottom')
plt.legend("AFSI")
plt.show()
|
[
"noreply@github.com"
] |
xuetongwei.noreply@github.com
|
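The script repeats the bar/label/loop block four times; note that the original '%d' % int(height) labels truncate every coefficient to 0 or -1. A more compact sketch producing one grouped chart from the same coefficients (the group labels are inferred from the script and may not match the paper's terminology):

labels = ['Ambience', 'Food', 'Service', 'Pos/Neg']
coef_1 = [0.3155, 1.0548, -0.0705, 2.0944]
coef_2 = [-0.0733, 2.0916, 0.2214, 2.0944]
pos = np.arange(len(labels))

plt.bar(pos - 0.1, coef_1, 0.2, alpha=0.5, label='aspect 1')
plt.bar(pos + 0.1, coef_2, 0.2, alpha=0.5, label='aspect 2')
plt.xticks(pos, labels)
plt.ylabel('Sentiment Coefficient')
plt.title('Star 5')
plt.legend()
plt.show()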
b9f3c730e9b405fd15290052511631684da3d575
|
1ef354d23be4f3fbd6edecfe4edd720ee775f11c
|
/snippets/serializers.py
|
7ec53a42ef3ef097d0e58e5858602cc360e54f17
|
[
"MIT"
] |
permissive
|
damiclem/django-rest-tutorial
|
8d8fb945fa1adede242c47ab5a3a73fd4226e873
|
a8bfce3e94cd8c8d7b1bc2d8ed851980e38b86fa
|
refs/heads/main
| 2023-08-21T11:30:57.209332
| 2021-10-18T15:09:45
| 2021-10-18T15:09:45
| 417,547,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,365
|
py
|
# Import serializers library
from rest_framework import serializers
# Import snippet module
from snippets.models import Snippet, LANGUAGE_CHOICES, STYLE_CHOICES
# Import user module
from django.contrib.auth.models import User
# class SnippetSerializer(serializers.Serializer):
# class SnippetSerializer(serializers.ModelSerializer):
# NOTE differently from ModelSerializer, HyperlinkedModelSerializer:
# 1. does not include id by default;
# 2. includes an URL field by default;
# 3. uses HyperlinkedRelatedField for relationships.
class SnippetSerializer(serializers.HyperlinkedModelSerializer):
# # ID of the snippet
# id = serializers.IntegerField(read_only=True)
# # Title of the snippet
# title = serializers.CharField(required=False, allow_blank=True, max_length=100)
# # Code contained in the snippet
# code = serializers.CharField(style={'base_template': 'textarea.html'})
# # Other snippet parameter
# linenos = serializers.BooleanField(required=False)
# language = serializers.ChoiceField(choices=LANGUAGE_CHOICES, default='python')
# style = serializers.ChoiceField(choices=STYLE_CHOICES, default='friendly')
# NOTE This allows to read owner's username
# The untyped ReadOnlyField is always read-only, and will be used for serialized representations,
# but will not be used for updating model instances when they are deserialized
owner = serializers.ReadOnlyField(source='owner.username')
# Define highlighted piece of code
# NOTE it defines the name of the view
highlight = serializers.HyperlinkedIdentityField(view_name='snippet-highlight', format='html')
# Define meta-class
    # NOTE ModelSerializer is just a shortcut for predefined fields (by model) and default create() and update() methods
class Meta:
# Define referenced model
model = Snippet
# Define fields in referenced model
fields = ['url', 'id', 'highlight', 'owner',
'title', 'code', 'linenos', 'language', 'style', ]
# def create(self, validated_data):
# """
# Create and return a new `Snippet` instance, given the validated data.
# """
# return Snippet.objects.create(**validated_data)
# def update(self, instance, validated_data):
# """
# Update and return an existing `Snippet` instance, given the validated data.
# """
# instance.title = validated_data.get('title', instance.title)
# instance.code = validated_data.get('code', instance.code)
# instance.linenos = validated_data.get('linenos', instance.linenos)
# instance.language = validated_data.get('language', instance.language)
# instance.style = validated_data.get('style', instance.style)
# instance.save()
# return instance
# Define serializer for User
# class UserSerializer(serializers.ModelSerializer):
class UserSerializer(serializers.HyperlinkedModelSerializer):
# Define snippets
# snippets = serializers.PrimaryKeyRelatedField(many=True, queryset=Snippet.objects.all())
snippets = serializers.HyperlinkedRelatedField(many=True, view_name='snippet-detail', read_only=True)
# Define metadata
class Meta:
# Define related model
model = User
# Define fields to serialize
fields = ['url', 'id', 'username', 'snippets']
|
[
"damiano.clementel@gmail.com"
] |
damiano.clementel@gmail.com
|
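Because both serializers are hyperlinked, they need the current request in their serializer context to build absolute URLs; DRF's generic views inject it automatically, but manual use looks like this (a usage note, not part of the file):

serializer = SnippetSerializer(snippet, context={'request': request})
serializer.data  # includes 'url' and the 'highlight' hyperlink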
be637a40b4825a8a91e3ae57ee0fe1e0afdde849
|
453d9e4f703d44d9f915b8f31f8a386476c76838
|
/LIstings/migrations/0007_auto_20191124_0437.py
|
0f0835de9d9986211747e3f2f4a9c4aded09abf9
|
[] |
no_license
|
narendraiungo/IUNGOInteriors
|
0fb7fe5ce60edcd6ff77c178f719a342b6a86758
|
19aa01ae4d3992088d3f3e0461682683bf218856
|
refs/heads/master
| 2020-09-17T00:56:24.387522
| 2019-11-25T12:04:52
| 2019-11-25T12:04:52
| 223,938,492
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,140
|
py
|
# Generated by Django 2.2.7 on 2019-11-24 04:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('LIstings', '0006_remove_iungouser_profile_image'),
]
operations = [
migrations.DeleteModel(
name='Architects',
),
migrations.DeleteModel(
name='Architects_InteriorDesigners',
),
migrations.DeleteModel(
name='Contractors',
),
migrations.DeleteModel(
name='InteriorDsigners',
),
migrations.RemoveField(
model_name='iungouser',
name='client_category',
),
migrations.RemoveField(
model_name='iungouser',
name='client_type',
),
migrations.AddField(
model_name='iungouser',
name='ClientType',
field=models.CharField(choices=[('Arch', 'Architects'), ('IntDes', 'InteriorDesiginers'), ('ArchInte', 'Architercts_InteriorDesigners'), ('Contr', 'Contractors')], default='', max_length=30),
preserve_default=False,
),
]
|
[
"narendra@iungoadvantec.com"
] |
narendra@iungoadvantec.com
|
ef0e043eabe1dd4b86656981f0f292e482495b8f
|
a4ce81b1bea451cf9ebe633f0aebc67d36efdfa3
|
/task_23/todo_list_02_用户登录/routes.py
|
fadc71321aa8b0cc6745790efe8bc7d781d05dae
|
[] |
no_license
|
DaZhiZi/python.davizi
|
c0618beb0fb9d86553ed281a72bdb12babe9841f
|
0fc0753754b9800b6544c7e04507cf74993f13e2
|
refs/heads/master
| 2022-10-25T12:59:14.362193
| 2020-06-11T03:04:59
| 2020-06-11T03:04:59
| 240,255,974
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,193
|
py
|
from utils import log
from models import Message
from models import User
import random
# This list holds all the messages
message_list = []
# Keeping sessions server-side makes it possible to expire them
session = {}
def random_str():
"""
生成一个随机的字符串
"""
seed = 'abcdefjsad89234hdsfkljasdkjghigaksldf89weru'
s = ''
for i in range(16):
# 这里 len(seed) - 2 是因为我懒得去翻文档来确定边界了
random_index = random.randint(0, len(seed) - 2)
s += seed[random_index]
return s
def template(name):
"""
根据名字读取 templates 文件夹里的一个文件并返回
"""
path = 'templates/' + name
with open(path, 'r', encoding='utf-8') as f:
return f.read()
def current_user(request):
session_id = request.cookies.get('user', '')
    username = session.get(session_id, '[Guest]')
# username = request.cookies.get('user', '【游客】')
return username
def route_index(request):
"""
主页的处理函数, 返回主页的响应
"""
header = 'HTTP/1.1 210 VERY OK\r\nContent-Type: text/html\r\n'
body = template('index.html')
username = current_user(request)
body = body.replace('{{username}}', username)
r = header + '\r\n' + body
return r.encode(encoding='utf-8')
def response_with_headers(headers, code=200):
"""
Content-Type: text/html
Set-Cookie: user=gua
"""
header = 'HTTP/1.1 {} VERY OK\r\n'.format(code)
header += ''.join(['{}: {}\r\n'.format(k, v)
for k, v in headers.items()])
return header
def redirect(url):
"""
浏览器在收到 302 响应的时候
会自动在 HTTP header 里面找 Location 字段并获取一个 url
然后自动请求新的 url
"""
headers = {
'Location': url,
}
# 增加 Location 字段并生成 HTTP 响应返回
# 注意, 没有 HTTP body 部分
r = response_with_headers(headers, 302) + '\r\n'
return r.encode('utf-8')
"""
HTTP/1.1 302 xxx
Location: /
"""
def route_login(request):
"""
登录页面的路由函数
"""
headers = {
'Content-Type': 'text/html',
# 'Set-Cookie': 'height=169; gua=1; pwd=2; Path=/',
}
# log('login, headers', request.headers)
log('login, cookies', request.cookies)
username = current_user(request)
if request.method == 'POST':
form = request.form()
u = User.new(form)
if u.validate_login():
            # Use a random string as the session token
            session_id = random_str()
            session[session_id] = u.username
            headers['Set-Cookie'] = 'user={}'.format(session_id)
            # The commented line below stored the username in the cookie directly
            # headers['Set-Cookie'] = 'user={}'.format(u.username)
            result = 'Login successful'
        else:
            result = 'Wrong username or password'
else:
result = ''
body = template('login.html')
body = body.replace('{{result}}', result)
body = body.replace('{{username}}', username)
header = response_with_headers(headers)
r = header + '\r\n' + body
    log('login response', r)
return r.encode(encoding='utf-8')
def route_register(request):
"""
注册页面的路由函数
"""
header = 'HTTP/1.1 210 VERY OK\r\nContent-Type: text/html\r\n'
if request.method == 'POST':
form = request.form()
u = User.new(form)
if u.validate_register():
u.save()
            result = 'Registration successful<br> <pre>{}</pre>'.format(User.all())
        else:
            result = 'Username and password must each be longer than 2 characters'
else:
result = ''
body = template('register.html')
body = body.replace('{{result}}', result)
r = header + '\r\n' + body
return r.encode(encoding='utf-8')
def route_message(request):
"""
消息页面的路由函数
"""
username = current_user(request)
# 如果是未登录的用户, 重定向到 '/'+
if username == '【游客】':
log("**debug, route msg 未登录")
return redirect('/')
log('本次请求的 method', request.method)
if request.method == 'POST':
form = request.form()
msg = Message.new(form)
log('post', form)
message_list.append(msg)
    # message_list should be persisted here
    header = 'HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n'
    # body = '<h1>Message board</h1>'
body = template('html_basic.html')
msgs = '<br>'.join([str(m) for m in message_list])
body = body.replace('{{messages}}', msgs)
r = header + '\r\n' + body
return r.encode(encoding='utf-8')
def route_static(request):
"""
静态资源的处理函数, 读取图片并生成响应返回
"""
filename = request.query.get('file', 'doge.gif')
path = 'static/' + filename
with open(path, 'rb') as f:
header = b'HTTP/1.1 200 OK\r\nContent-Type: image/gif\r\n\r\n'
img = header + f.read()
return img
'''
5. Add a route /admin/users.
   Only the user with id 1 may access this page; other users are redirected to /login.
   The page lists all users, including id, username and password.
6. Add a form to the /admin/users page.
   The form contains two inputs: id and password.
   The admin can enter an id and a new password to change that user's password.
   The form sends a POST request to /admin/user/update,
   so a new route function is needed to implement updating the password.
'''
def route_admin(request):
log('debug route_admin in')
username = current_user(request)
log('username', username)
u = User.find_by(username=username)
log('u', u)
if u is None or u.role != 1:
return redirect('/login')
else:
header = 'HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n'
        # body = '<h1>Message board</h1>'
body = template('admin_user.html')
r = header + '\r\n' + body
return r.encode(encoding='utf-8')
pass
def route_admin_update(request):
    # New route function implementing the password update
    # Receive the data, parse it, modify it, save it
username = current_user(request)
u = User.find_by(username=username)
if u is None or u.role != 1:
return redirect('/login')
if request.method == 'POST':
        # Modify and save the user
form = request.form()
id = int(form.get('id', -1))
update_user = User.find_by(id=id)
log('update_user', update_user)
password = form.get('password', -1)
update_user.password = password
update_user.save()
        # After the posted data has been handled, redirect to the admin page
        # so the browser sees the updated data when it requests the page again
return redirect('/admin/users')
pass
# Route dictionary
# key is the route (the route is just the path)
# value is the route handler function (which produces the response)
route_dict = {
'/': route_index,
'/login': route_login,
'/register': route_register,
'/messages': route_message,
# admin
'/admin/users': route_admin,
'/admin/user/update': route_admin_update,
}
|
[
"2799987642@qq.com"
] |
2799987642@qq.com
|
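The login flow above swaps a username for a random token: the token goes into the cookie, the username stays server-side in the session dict. The round trip, in miniature (names taken from the file; 'gua' is just an example username):

session_id = random_str()
session[session_id] = 'gua'               # stored at login
# next request arrives with "Cookie: user=<session_id>" ...
assert session.get(session_id) == 'gua'   # ... and current_user() resolves it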
ac4141a56f2e09c75309c00199565753190c690b
|
81e6c0e33c7d249090c121fe943572463f9eff3b
|
/Dustin Ch 5.11 Math.py
|
52a4980dcb8f13d7ff4d80aaf224d4aecd3ecd6f
|
[] |
no_license
|
DustinSmith37/Python-Stuffs
|
a42d5da555db90d036b54a3164231868ec9a57ab
|
021cc95ba9ea8f33bbc1d7db33d61ffad30e3960
|
refs/heads/master
| 2020-04-09T18:55:14.572959
| 2018-12-05T14:45:15
| 2018-12-05T14:45:15
| 160,528,252
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,392
|
py
|
#Dustin Smith
#Problem 11
#Random math problems
#import random
import random
#question counter
question = 1
#input check
def inputCheckerInt(inputTopic):
while True:
try:
userInput = int(input(inputTopic))
return userInput
except ValueError:
print("Invalid input")
#repeat input check
def inputCheckerRepeat(inputTopic):
while True:
try:
userInput = input(inputTopic).upper()
            if userInput != 'Y' and userInput != 'N':
                int('a')  # deliberately raise ValueError to reuse the invalid-input branch
return userInput
except ValueError:
print("Invalid input")
#math function
def mathMaker():
number1 = random.randint(1,1000)
number2 = random.randint(1,1000)
correctAnswer = number1 + number2
while True:
userAnswer = inputCheckerInt("What is "+str(number1) + " + "\
+ str(number2) + ":")
if userAnswer == correctAnswer:
print("Well done!")
break
else:
print("Incorrect answer. Please try again.")
global question
question = question + 1
#question loop
repeat = 'Y'
while True:
if repeat == 'Y':
print("Question "+ str(question) + ": ")
mathMaker()
repeat = inputCheckerRepeat("Would you like to continue? y or n: ")
else:
break
|
[
"noreply@github.com"
] |
DustinSmith37.noreply@github.com
|
d96e0cb24eff922f7e3a208ea0bd1836652f6387
|
0bd7132b29ac5cf509e6728116f8d4af9a6561d8
|
/SSC-hcsr5.1.py
|
3196064c5d5f7d25393c27d2c9ed0744329eebac
|
[] |
no_license
|
ppigmans/SSC-Sensor-packet
|
e0ac326c5639a21f9597933562ed544d06ed0cc4
|
cfda67dc3c9bf308e8c1c5f9d6cfe5270a8415ac
|
refs/heads/master
| 2020-03-21T02:39:16.400031
| 2018-06-21T07:30:00
| 2018-06-21T07:30:00
| 138,010,076
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 822
|
py
|
import RPi.GPIO as gpio
import time
import paho.mqtt.client as mqtt
import json
gpio.setmode(gpio.BOARD)
spin1 = 7
spin2 = 11
gpio.setup(spin1, gpio.IN)
gpio.setup(spin2, gpio.IN)
host = '172.16.85.250'
user = 'weather-station'
msensor_1 = 0  # was {0}, a set literal, which json.dumps cannot serialise
msensor_2 = 0
next_reading = time.time()
INTERVAL=2
client = mqtt.Client()
client.username_pw_set(user)
client.connect(host, 1883, 60)
client.loop_start()
try:
while True:
if gpio.input(spin1):
print "Beweging waargenomen, Sensor 1!"
msensor_1 = 1
client.publish('WS/ms1', json.dumps(msensor_1), 1)
time.sleep(2)
next_reading += INTERVAL
sleep_time = next_reading-time.time()
if sleep_time > 0:
time.sleep(sleep_time)
client.loop_stop()
client.disconnect()
except KeyboardInterrupt:
pass
|
[
"noreply@github.com"
] |
ppigmans.noreply@github.com
|
1cf4d45df81860bebd238bedf179f212bac6e245
|
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
|
/python/python_21033.py
|
c56846f87dd0c3bf7107742e70803486ed9f6c15
|
[] |
no_license
|
AK-1121/code_extraction
|
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
|
5297a4a3aab3bb37efa24a89636935da04a1f8b6
|
refs/heads/master
| 2020-05-23T08:04:11.789141
| 2015-10-22T19:19:40
| 2015-10-22T19:19:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 37
|
py
|
# How to find the Pi subscript
match
|
[
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] |
ubuntu@ip-172-31-7-228.us-west-2.compute.internal
|
b0403f8e2d3d12b418734af371a7698e278a3789
|
a6b73b12fe2572dfec3ac0a5a2d9c01361d78e8c
|
/kctl/kctl.py
|
760a1c942b616a5c8ffb6c8ea26bc53be002ff28
|
[
"Apache-2.0"
] |
permissive
|
fdgonthier/tbxsosd
|
c80926beb4ee83f12dcfa15f79ada34632f5ecff
|
e5017139f2d06b962413b1985777f87fd2a45d47
|
refs/heads/master
| 2021-01-15T17:55:21.167620
| 2013-08-14T20:44:31
| 2013-08-14T20:44:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,036
|
py
|
#!/usr/bin/env python
# -*- mode: python; tab-width: 4; indent-tabs-mode: t; py-indent-offset: 4 -*-
#####
##### MAIN
#####
from sys import stdout, stderr
import os, sys, string, shutil, ConfigParser, readline, re, time, random, getopt
from kreadline import *
# kctl-lib
from kctllib.kdatabase import *
from kctllib.kparams import *
from kctllib.kexcept import *
import kctlcmd
# kpython
from kout import *
from kreadline import *
# This function prints the program usage.
def print_usage(where):
s = """Usage: kctl # interactive mode
kctl -h # prints this message
kctl help [command] # get help on one or all commands
kctl [-s] <command> [args] # run command
'-s' makes kctl put appropriate data in a scriptable format on the first line of output
"""
if where == 0:
out_raw(s)
else:
err_raw(s)
def main():
global cmds
kparams_init()
if kparams_get("debug"):
do_debug()
# Parse the options.
try:
long_opts = ["db_port=", "debug"]
(options, cmd_args) = getopt.gnu_getopt(sys.argv[1:], "hsd", long_opts)
except Exception, e:
err("Error: '%s'." % str(e))
print_usage(1)
sys.exit(1)
db_port = None
kparams_set("scriptable", False)
kparams_set("debug", False)
for opt, val in options:
if opt == '-h':
print_usage(0)
sys.exit(0)
if opt == '-s':
kparams_set("scriptable", True)
if opt == '--db_port':
db_port = val
if opt in ("-d", "--debug"):
kparams_set("debug", True)
db_init(db_port = db_port)
intr = CommandInterpreter(kctlcmd.command_classes, "kctl> ", debug_mode = kparams_get("debug"))
# Run a single command from the command line.
if len(cmd_args):
try:
intr.run_command(cmd_args)
except Exception, ex:
if kparams_get("debug"):
raise
else:
sys.stderr.write(str(ex) + "\n")
sys.exit(0)
# if logredirect is active, don't allow interactive mode
if kparams_get("logredirect"):
print_usage(0)
sys.exit(0)
intr.loop()
if __name__ == "__main__":
main()
|
[
"fdgonthier@lostwebsite.net"
] |
fdgonthier@lostwebsite.net
|