text stringlengths 4 1.02M | meta dict |
|---|---|
import os
import shutil
import time
import unittest
from lib.util.mysqlBaseTestCase import mysqlBaseTestCase
# Globals populated/injected by the kewpie test framework before tests run.
server_requirements = [[],[]]  # two servers requested: a master and a slave
servers = []                   # started server objects (index 0 = master)
server_manager = None          # used in tearDown to reset server state
test_executor = None           # provides tool paths and randgen execution
# we explicitly use the --no-timestamp option
# here. We will be using a generic / vanilla backup dir
backup_path = None             # recomputed per-test from the master's vardir
class basicTest(mysqlBaseTestCase):
    """Back up a master with innobackupex --slave-info, restore the backup
    into the slave's datadir, start replication from the recorded binlog
    position, and verify master/slave data consistency."""

    def setUp(self):
        # NOTE(review): this binds a *local* backup_path; the module-level
        # backup_path global stays None. Harmless, since test_basic1
        # recomputes the same path itself -- confirm that was intended.
        master_server = servers[0] # assumption that this is 'master'
        backup_path = os.path.join(master_server.vardir, '_xtrabackup')
        # remove backup path
        if os.path.exists(backup_path):
            shutil.rmtree(backup_path)

    def test_basic1(self):
        # xtrabackup only applies to mysql/percona flavored servers
        if servers[0].type not in ['mysql','percona']:
            return
        else:
            self.servers = servers
            innobackupex = test_executor.system_manager.innobackupex_path
            xtrabackup = test_executor.system_manager.xtrabackup_path
            master_server = servers[0] # assumption that this is 'master'
            slave_server = servers[1]
            backup_path = os.path.join(master_server.vardir, '_xtrabackup')
            output_path = os.path.join(master_server.vardir, 'innobackupex.out')
            exec_path = os.path.dirname(innobackupex)
            orig_dumpfile = os.path.join(master_server.vardir,'orig_dumpfile')
            slave_dumpfile = os.path.join(master_server.vardir, 'slave_dumpfile')

            # populate our server with a test bed
            test_cmd = "./gentest.pl --gendata=conf/percona/percona.zz"
            retcode, output = self.execute_randgen(test_cmd, test_executor, servers)
            #self.assertEqual(retcode, 0, msg=output)

            # take a backup (--slave-info records the replication position)
            cmd = ("%s --defaults-file=%s --user=root --port=%d"
                   " --host=127.0.0.1 --no-timestamp --slave-info"
                   " --ibbackup=%s %s" %( innobackupex
                                        , master_server.cnf_file
                                        , master_server.master_port
                                        , xtrabackup
                                        , backup_path))
            retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
            self.assertEqual(retcode, 0, msg = output)

            # take mysqldump of our current server state
            self.take_mysqldump(master_server,databases=['test'],dump_path=orig_dumpfile)

            # shutdown our server
            # NOTE(review): it is the *slave* that is stopped here, so its
            # datadir can be replaced with the restored backup below
            slave_server.stop()

            # prepare our backup
            cmd = ("%s --apply-log --no-timestamp --use-memory=500M "
                   "--ibbackup=%s %s" %( innobackupex
                                       , xtrabackup
                                       , backup_path))
            retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
            self.assertEqual(retcode, 0, msg = output)

            # remove old datadir
            shutil.rmtree(slave_server.datadir)
            os.mkdir(slave_server.datadir)

            # restore from backup
            cmd = ("%s --defaults-file=%s --copy-back"
                   " --ibbackup=%s %s" %( innobackupex
                                        , slave_server.cnf_file
                                        , xtrabackup
                                        , backup_path))
            retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
            self.assertEqual(retcode,0, msg = output)

            # get binlog info for slave (written by --slave-info above)
            slave_file_path = os.path.join(slave_server.datadir,'xtrabackup_binlog_info')
            slave_file = open(slave_file_path,'r')
            binlog_file, binlog_pos = slave_file.readline().strip().split('\t')
            slave_file.close()

            # restart server (and ensure it doesn't crash)
            slave_server.start()
            self.assertEqual( slave_server.status, 1
                            , msg = 'Server failed restart from restored datadir...')

            # update our slave's master info/ start replication
            retcode, result_set = slave_server.set_master(master_server)
            self.assertEqual(retcode, 0, msg=result_set)

            # check the slave status
            query = "SHOW SLAVE STATUS"
            retcode, result_set = self.execute_query(query, slave_server)
            result_set = result_set[0]
            # positional columns of the SHOW SLAVE STATUS result row
            slave_master_port = result_set[3]
            slave_binlog_file = result_set[5]
            slave_io_running = result_set[10]
            slave_sql_running = result_set[11]
            self.assertEqual(slave_master_port, master_server.master_port)
            self.assertEqual(slave_binlog_file, binlog_file)
            self.assertEqual(slave_io_running, 'Yes')
            self.assertEqual(slave_sql_running, 'Yes')
            self.assertEqual(retcode,0, msg=result_set)

            # take mysqldump of current server state
            self.take_mysqldump(slave_server, databases=['test'],dump_path=slave_dumpfile)

            # diff original vs. current server dump files
            retcode, output = self.diff_dumpfiles(orig_dumpfile, slave_dumpfile)
            self.assertTrue(retcode, output)

            # create a new table on the master
            query = ("CREATE TABLE t1 "
                     "(col1 int NOT NULL AUTO_INCREMENT PRIMARY KEY )"
                    )
            retcode, result_set = self.execute_query(query, master_server)

            # insert some rows
            query = "INSERT INTO t1 VALUES (),(),(),(),()"
            retcode, result_set = self.execute_query(query, master_server)
            self.assertEqual(retcode,0,msg=result_set)

            # wait a bit for the slave
            # TODO: proper poll routine
            time.sleep(5)

            # verify the new table and its rows replicated to the slave
            for query in ["SHOW CREATE TABLE t1"
                         ,"SELECT * FROM t1"]:
                diff = self.check_slaves_by_query(master_server, [slave_server], query)
                self.assertEqual(diff,None,msg=diff)

    def tearDown(self):
        # return all servers to a clean state for the next test
        server_manager.reset_servers(test_executor.name)
| {
"content_hash": "e6118e738f69db666e414aa774685935",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 90,
"avg_line_length": 42.77622377622377,
"alnum_prop": 0.5558280202713749,
"repo_name": "jonzobrist/Percona-Server-5.1",
"id": "0fab9031fa770b9e727c11635493f4e84532a8cb",
"size": "6979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kewpie/percona_tests/xtrabackup_basic/slave_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "63359"
},
{
"name": "C",
"bytes": "21629542"
},
{
"name": "C#",
"bytes": "186518"
},
{
"name": "C++",
"bytes": "26608206"
},
{
"name": "JavaScript",
"bytes": "34135"
},
{
"name": "Objective-C",
"bytes": "73098"
},
{
"name": "Perl",
"bytes": "2396066"
},
{
"name": "Puppet",
"bytes": "447114"
},
{
"name": "Python",
"bytes": "1181762"
},
{
"name": "R",
"bytes": "34454"
},
{
"name": "Racket",
"bytes": "2416"
},
{
"name": "Scilab",
"bytes": "7740"
},
{
"name": "Shell",
"bytes": "1287942"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from bcc import BPF
from ctypes import c_int
from time import sleep, strftime
from sys import argv
def usage():
    """Show the command-line synopsis and terminate the process."""
    prog = argv[0]
    print("USAGE: %s [interval [count]]" % prog)
    exit()
# arguments
interval = 1   # seconds between output rows
count = -1     # -1 means run until interrupted
if len(argv) > 1:
    try:
        interval = int(argv[1])
        if interval == 0:
            # bare raise with no active exception itself raises, which the
            # bare except below catches -- rejects a zero interval
            raise
        if len(argv) > 2:
            count = int(argv[2])
    except: # also catches -h, --help
        usage()
# load BPF program: a per-stat-type array of u64 counters, bumped on entry
# to each traced VFS function
b = BPF(text="""
#include <uapi/linux/ptrace.h>
enum stat_types {
S_READ = 1,
S_WRITE,
S_FSYNC,
S_OPEN,
S_CREATE,
S_MAXSTAT
};
BPF_ARRAY(stats, u64, S_MAXSTAT);
static void stats_increment(int key) {
u64 *leaf = stats.lookup(&key);
if (leaf) (*leaf)++;
}
void do_read(struct pt_regs *ctx) { stats_increment(S_READ); }
void do_write(struct pt_regs *ctx) { stats_increment(S_WRITE); }
void do_fsync(struct pt_regs *ctx) { stats_increment(S_FSYNC); }
void do_open(struct pt_regs *ctx) { stats_increment(S_OPEN); }
void do_create(struct pt_regs *ctx) { stats_increment(S_CREATE); }
""")
# attach one counting probe per VFS entry point
b.attach_kprobe(event="vfs_read", fn_name="do_read")
b.attach_kprobe(event="vfs_write", fn_name="do_write")
b.attach_kprobe(event="vfs_fsync", fn_name="do_fsync")
b.attach_kprobe(event="vfs_open", fn_name="do_open")
b.attach_kprobe(event="vfs_create", fn_name="do_create")
# Stat column labels mapped to their index in the BPF "stats" array; the
# indexes must match the stat_types enum in the embedded C program above.
stat_types = {
    "READ": 1,
    "WRITE": 2,
    "FSYNC": 3,
    "OPEN": 4,
    "CREATE": 5
}

# header: TIME column followed by one per-second column per statistic
# (the original loop also did an unused `idx = stat_types[stype]` lookup;
# that dead assignment is removed here)
print("%-8s " % "TIME", end="")
for stype in stat_types.keys():
    print(" %8s" % (stype + "/s"), end="")
print("")
# output: one row per interval, one column per statistic, until `count`
# rows have been printed or the user hits Ctrl-C
i = 0
while (1):
    if count > 0:
        i += 1
        if i > count:
            exit()
    try:
        sleep(interval)
    except KeyboardInterrupt:
        pass
        exit()   # Ctrl-C during the sleep: exit without printing a row
    print("%-8s: " % strftime("%H:%M:%S"), end="")
    # print each statistic as a column
    for stype in stat_types.keys():
        idx = stat_types[stype]
        try:
            # NOTE(review): under Python 3 this is float division; "%8d"
            # truncates it back to an integer rate -- confirm intended
            val = b["stats"][c_int(idx)].value / interval
            print(" %8d" % val, end="")
        except:
            # missing table entry: report a zero rate
            print(" %8d" % 0, end="")
    # reset counters so each row shows only the last interval's activity
    b["stats"].clear()
    print("")
| {
"content_hash": "a3c9def9e4c5388e5a1c7715975f5d2b",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 66,
"avg_line_length": 23.347368421052632,
"alnum_prop": 0.5703336339044184,
"repo_name": "mcaleavya/bcc",
"id": "1764c6012f383073eb87eafc882ce70bbd23ffe0",
"size": "2634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/vfsstat.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "321768"
},
{
"name": "C++",
"bytes": "920975"
},
{
"name": "CMake",
"bytes": "38841"
},
{
"name": "HTML",
"bytes": "2979"
},
{
"name": "LLVM",
"bytes": "4379"
},
{
"name": "Limbo",
"bytes": "6069"
},
{
"name": "Lua",
"bytes": "298149"
},
{
"name": "Makefile",
"bytes": "1481"
},
{
"name": "P4",
"bytes": "9242"
},
{
"name": "Python",
"bytes": "1206933"
},
{
"name": "Shell",
"bytes": "17023"
},
{
"name": "Yacc",
"bytes": "19817"
}
],
"symlink_target": ""
} |
"""
This file contains all of the configuration values for the application.
Update this file with the values for your specific Google Cloud project.
You can create and manage projects at https://console.developers.google.com
"""
import os
# The secret key is used by Flask to encrypt session cookies.
# SECURITY NOTE: replace this placeholder with a strong random value in
# production (e.g. secrets.token_hex()) and keep it out of source control.
SECRET_KEY = 'secret'

# There are three different ways to store the data in the application.
# You can choose 'datastore', 'cloudsql', or 'mongodb'. Be sure to
# configure the respective settings for the one you choose below.
# You do not have to configure the other data backends. If unsure, choose
# 'datastore' as it does not require any additional configuration.
DATA_BACKEND = 'datastore'

# Google Cloud Project ID. This can be found on the 'Overview' page at
# https://console.developers.google.com
PROJECT_ID = 'your-project-id'

# Cloud Datastore dataset id, this is the same as your project id.
DATASTORE_DATASET_ID = PROJECT_ID

# CloudSQL & SQLAlchemy configuration
# Replace the following values the respective values of your Cloud SQL
# instance.
CLOUDSQL_USER = 'root'
CLOUDSQL_PASSWORD = 'your-cloudsql-password'
CLOUDSQL_DATABASE = 'bookshelf'
# Set this value to the Cloud SQL connection name, e.g.
#   "project:region:cloudsql-instance".
# You must also update the value in app.yaml.
CLOUDSQL_CONNECTION_NAME = 'your-cloudsql-connection-name'

# The CloudSQL proxy is used locally to connect to the cloudsql instance.
# To start the proxy, use:
#
#   $ cloud_sql_proxy -instances=your-connection-name=tcp:3306
#
# Port 3306 is the standard MySQL port. If you need to use a different port,
# change the 3306 to a different port number.
# Alternatively, you could use a local MySQL instance for testing.
LOCAL_SQLALCHEMY_DATABASE_URI = (
    'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{database}').format(
        user=CLOUDSQL_USER, password=CLOUDSQL_PASSWORD,
        database=CLOUDSQL_DATABASE)

# When running on App Engine a unix socket is used to connect to the cloudsql
# instance.
LIVE_SQLALCHEMY_DATABASE_URI = (
    'mysql+pymysql://{user}:{password}@localhost/{database}'
    '?unix_socket=/cloudsql/{connection_name}').format(
        user=CLOUDSQL_USER, password=CLOUDSQL_PASSWORD,
        database=CLOUDSQL_DATABASE, connection_name=CLOUDSQL_CONNECTION_NAME)

# GAE_INSTANCE is only present in the environment when running on App Engine.
if os.environ.get('GAE_INSTANCE'):
    SQLALCHEMY_DATABASE_URI = LIVE_SQLALCHEMY_DATABASE_URI
else:
    SQLALCHEMY_DATABASE_URI = LOCAL_SQLALCHEMY_DATABASE_URI

# Mongo configuration
# If using mongolab, the connection URI is available from the mongolab control
# panel. If self-hosting on compute engine, replace the values below.
MONGO_URI = 'mongodb://user:password@host:27017/database'

# Google Cloud Storage and upload settings.
# Typically, you'll name your bucket the same as your project. To create a
# bucket:
#
#   $ gsutil mb gs://<your-bucket-name>
#
# You also need to make sure that the default ACL is set to public-read,
# otherwise users will not be able to see their upload images:
#
#   $ gsutil defacl set public-read gs://<your-bucket-name>
#
# You can adjust the max content length and allow extensions settings to allow
# larger or more varied file types if desired.
CLOUD_STORAGE_BUCKET = 'your-bucket-name'
MAX_CONTENT_LENGTH = 8 * 1024 * 1024
# set literal instead of set([...]) -- same contents, idiomatic (flake8 C405)
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}

# OAuth2 configuration.
# This can be generated from the Google Developers Console at
# https://console.developers.google.com/project/_/apiui/credential.
# Note that you will need to add all URLs that your application uses as
# authorized redirect URIs. For example, typically you would add the following:
#
#   * http://localhost:8080/oauth2callback
#   * https://<your-app-id>.appspot.com/oauth2callback.
#
# If you receive a invalid redirect URI error review you settings to ensure
# that the current URI is allowed.
GOOGLE_OAUTH2_CLIENT_ID = 'your-client-id'
GOOGLE_OAUTH2_CLIENT_SECRET = 'your-client-secret'
| {
"content_hash": "ec10a5c453a6d3973cdcb87d59c98192",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 39.86868686868687,
"alnum_prop": 0.7494299467950342,
"repo_name": "wilcoant/bookshelf",
"id": "d40ef9bcddec920c7af30f00286a29c4088c930a",
"size": "4523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "6-pubsub/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "52301"
},
{
"name": "Makefile",
"bytes": "1267"
},
{
"name": "Python",
"bytes": "251927"
},
{
"name": "Shell",
"bytes": "9754"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import json
import pytz
from dash.orgs.models import Org
from dash.orgs.views import OrgObjPermsMixin
from dash.utils import random_string
from datetime import datetime, date
from django.conf import settings
from django.contrib.auth.models import User
from django.core.files import File
from django.core.files.storage import default_storage
from django.core.files.temp import NamedTemporaryFile
from django.core.urlresolvers import reverse
from django.db import models
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
from smartmin.views import SmartReadView
from temba_client.utils import parse_iso8601
from xlwt import Workbook, XFStyle
from . import json_encode
from .email import send_email
class BaseExport(models.Model):
    """
    Base class for exports. Renders an Excel workbook, stores it via the
    default storage backend, and emails the creator a download link.
    """
    org = models.ForeignKey(Org, verbose_name=_("Organization"), related_name='%(class)ss')
    filename = models.CharField(max_length=512)
    created_by = models.ForeignKey(User, related_name="%(class)ss")
    created_on = models.DateTimeField(auto_now_add=True)

    # overridden by subclasses
    directory = None       # storage sub-directory for this export type
    download_view = None   # URL name used to build the download link

    # xlwt cell styles for date/datetime columns
    DATE_STYLE = XFStyle()
    DATE_STYLE.num_format_str = 'DD-MM-YYYY'
    DATETIME_STYLE = XFStyle()
    DATETIME_STYLE.num_format_str = 'DD-MM-YYYY HH:MM:SS'

    # .xls sheet row limit
    MAX_SHEET_ROWS = 65535

    def do_export(self):
        """
        Does actual export. Called from a celery task.

        Renders the workbook, saves it under
        <org_root>/<org_id>/<directory>/<random>.xls, records the filename
        on this model, and emails the creator a download link.
        """
        book = Workbook()
        self.render_book(book)
        temp = NamedTemporaryFile(delete=True)
        book.save(temp)
        temp.flush()
        org_root = getattr(settings, 'SITE_ORGS_STORAGE_ROOT', 'orgs')
        filename = '%s/%d/%s/%s.xls' % (org_root, self.org_id, self.directory, random_string(20))
        default_storage.save(filename, File(temp))
        self.filename = filename
        self.save(update_fields=('filename',))
        subject = "Your export is ready"
        download_url = self.org.make_absolute_url(reverse(self.download_view, args=[self.pk]))
        send_email([self.created_by], subject, 'utils/email/export', {'download_url': download_url})
        # force a gc
        import gc
        gc.collect()

    def render_book(self, book):  # pragma: no cover
        """
        Child classes implement this to populate the Excel book
        """
        pass

    def write_row(self, sheet, row, values):
        # write each value across the row, starting at column 0
        for col, value in enumerate(values):
            self.write_value(sheet, row, col, value)

    def write_value(self, sheet, row, col, value):
        # bools render as Yes/No; datetimes are normalized to naive UTC so
        # xlwt can format them; dates/datetimes get the styles defined above
        if isinstance(value, bool):
            sheet.write(row, col, "Yes" if value else "No")
        elif isinstance(value, datetime):
            value = value.astimezone(pytz.UTC).replace(tzinfo=None) if value else None
            sheet.write(row, col, value, self.DATETIME_STYLE)
        elif isinstance(value, date):
            sheet.write(row, col, value, self.DATE_STYLE)
        else:
            sheet.write(row, col, value)

    class Meta:
        abstract = True
class BaseSearchExport(BaseExport):
    """
    Base class for exports based on item searches which may be initiated by partner users
    """
    partner = models.ForeignKey('cases.Partner', related_name='%(class)ss', null=True)
    search = models.TextField()  # JSON-encoded search parameters

    @classmethod
    def create(cls, org, user, search):
        # the search dict is serialized to JSON for storage; see get_search()
        return cls.objects.create(org=org, partner=user.get_partner(org), created_by=user, search=json_encode(search))

    def render_book(self, book):
        search = self.get_search()
        self.render_search(book, search)

    def render_search(self, book, search):  # pragma: no cover
        # child classes implement this to render the search results
        pass

    def get_search(self):
        # decode the stored JSON, reviving ISO-8601 'after'/'before' values
        # back into datetimes
        search = json.loads(self.search)
        if 'after' in search:
            search['after'] = parse_iso8601(search['after'])
        if 'before' in search:
            search['before'] = parse_iso8601(search['before'])
        return search

    class Meta:
        abstract = True
class BaseDownloadView(OrgObjPermsMixin, SmartReadView):
    """
    Download view for exports
    """
    filename = None  # attachment filename, set by subclasses
    template_name = 'download.haml'

    @classmethod
    def derive_url_pattern(cls, path, action):
        return r'%s/download/(?P<pk>\d+)/' % path

    def has_permission(self, request, *args, **kwargs):
        if not super(BaseDownloadView, self).has_permission(request, *args, **kwargs):
            return False
        obj = self.get_object()
        # if users is partner user, check this export is for their partner org
        user_partner = self.request.user.get_partner(obj.org)
        return not user_partner or user_partner == obj.partner

    def derive_title(self):
        # NOTE(review): `title` is not defined on this base class -- assumes
        # subclasses set it; confirm
        return self.title

    def get(self, request, *args, **kwargs):
        # ?download=1 streams the stored file; otherwise render the page
        if 'download' in request.GET:
            export = self.get_object()
            export_file = default_storage.open(export.filename, 'rb')
            response = HttpResponse(export_file, content_type='application/vnd.ms-excel')
            response['Content-Disposition'] = 'attachment; filename=%s' % self.filename
            return response
        else:
            return super(BaseDownloadView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(BaseDownloadView, self).get_context_data(**kwargs)
        current_url_name = self.request.resolver_match.url_name
        context['download_url'] = '%s?download=1' % reverse(current_url_name, args=[self.object.pk])
        return context
| {
"content_hash": "dba819a38e9546db49cfd7bd74e4a401",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 118,
"avg_line_length": 31.45762711864407,
"alnum_prop": 0.6487068965517241,
"repo_name": "xkmato/casepro",
"id": "3e279e8b4a5c9fcab8825165574c1323e25e739a",
"size": "5568",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "casepro/utils/export.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3475"
},
{
"name": "CoffeeScript",
"bytes": "220522"
},
{
"name": "HTML",
"bytes": "104527"
},
{
"name": "PLpgSQL",
"bytes": "6012"
},
{
"name": "Python",
"bytes": "878626"
}
],
"symlink_target": ""
} |
"""Presubmit checks for SafeBrowsing download_file_types.
"""
def CheckVersionUpdatedInDownloadFileTypeList(input_api, output_api):
  """Warn when download_file_types.asciipb changed without a version_id bump."""
  def _is_file_type_list(affected):
    name = input_api.os_path.basename(affected.LocalPath())
    return name == 'download_file_types.asciipb'

  affected_lists = input_api.AffectedFiles(file_filter=_is_file_type_list)
  if not affected_lists:
    return []

  changed = (text for _, text in affected_lists[0].ChangedContents())
  if any(text.strip().startswith('version_id: ') for text in changed):
    return []

  # It's enticing to do something fancy like checking whether the ID was in
  # fact incremented or whether this is a whitespace-only or comment-only
  # change. However, currently deleted lines don't show up in
  # ChangedContents() and attempting to parse the asciipb file any more than
  # we are doing above is likely not worth the trouble.
  #
  # At worst, the submitter can skip the presubmit check on upload if it
  # isn't correct.
  return [output_api.PresubmitError(
      'Increment |version_id| in download_file_types.asciipb if you are '
      'updating the file types proto.')]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point: delegates to the version_id check."""
  # TODO(asanka): Add a PRESUBMIT check for verifying that the
  # download_file_types.asciipb file is valid.
  return CheckVersionUpdatedInDownloadFileTypeList(input_api, output_api)
| {
"content_hash": "ff0a1d09ccab86611d12fa2aa1b405ca",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 80,
"avg_line_length": 41,
"alnum_prop": 0.7331855136733185,
"repo_name": "endlessm/chromium-browser",
"id": "d617bd57210fd9e1de344f414636a26775f4f919",
"size": "1516",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/safe_browsing/core/resources/PRESUBMIT.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import os, sys
'''Utility tools to update file names under a directory'''
def removeNonAscii(s):
    """Return s with every character outside 7-bit ASCII removed."""
    ascii_chars = [ch for ch in s if ord(ch) < 128]
    return "".join(ascii_chars)
def move(directory, sub_str_to_replace, sub_str):
    """Rename every file in *directory*: strip non-ASCII characters and
    replace sub_str_to_replace with sub_str.

    directory may be absolute, or relative to this script's location
    (os.path.join discards the script directory when it is absolute).
    A rename is skipped when the target name already exists.
    Side effect: changes the process working directory.
    """
    current_directory = os.path.dirname(os.path.abspath(__file__))
    # the original prefixed a pointless ur"" literal here, which is a
    # syntax error under Python 3; plain os.path.join is equivalent
    directory_to_update = os.path.join(current_directory, directory)
    os.chdir(directory_to_update)
    for file_name in os.listdir(directory_to_update):
        new_file_name = removeNonAscii(file_name)
        new_file_name = new_file_name.replace(sub_str_to_replace, sub_str)
        if new_file_name != file_name:
            if os.path.isfile(new_file_name):
                # parenthesized print is valid under both Python 2 and 3
                print(new_file_name + ' is existing, skipping change file name')
            else:
                os.rename(file_name, new_file_name)
if __name__ == '__main__':
    # expects: dir_name, substring to replace, replacement string
    if len(sys.argv) < 4:
        # parenthesized print is valid under both Python 2 and 3 (the
        # original bare print statements were Python-3 syntax errors)
        print('Usage:')
        print('python rm.py dir_name key_to_replace string')
        sys.exit()
    directory = sys.argv[1]
    sub_str_to_replace = sys.argv[2]
    sub_str = sys.argv[3]
    move(directory, sub_str_to_replace, sub_str)
"content_hash": "deec5a6b805db39fe864757c769d6a91",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 79,
"avg_line_length": 36.6,
"alnum_prop": 0.6156648451730419,
"repo_name": "vollov/py-lab",
"id": "9027351f8ab338644991270ed590e04a58661524",
"size": "1122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22"
},
{
"name": "JavaScript",
"bytes": "685"
},
{
"name": "PLSQL",
"bytes": "6838"
},
{
"name": "Python",
"bytes": "254226"
},
{
"name": "Shell",
"bytes": "734"
},
{
"name": "Smarty",
"bytes": "1829"
}
],
"symlink_target": ""
} |
import sqlite3
import os, time
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.TableDataModel import TableDataModel
from Table.TableFile import TableFile
from Table.TableFunction import TableFunction
from Table.TablePcd import TablePcd
from Table.TableIdentifier import TableIdentifier
from Table.TableReport import TableReport
from MetaFileWorkspace.MetaFileTable import ModuleTable
from MetaFileWorkspace.MetaFileTable import PackageTable
from MetaFileWorkspace.MetaFileTable import PlatformTable
from Table.TableFdf import TableFdf
##
# Static definitions
#
# Default on-disk filename for the ECC sqlite database.
DATABASE_PATH = "Ecc.db"
## Database
#
# This class defined the ECC databse
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
# @var Conn: Connection of the ECC database
# @var Cur: Cursor of the connection
# @var TblDataModel: Local instance for TableDataModel
#
class Database(object):
    """ECC database wrapper: owns the sqlite3 connection/cursor and one
    table-helper instance per ECC table."""

    def __init__(self, DbPath):
        # DbPath: path of the sqlite database file
        self.DbPath = DbPath
        self.Conn = None   # sqlite3 connection (created in InitDatabase)
        self.Cur = None    # single cursor shared by all table helpers
        self.TblDataModel = None
        self.TblFile = None
        self.TblFunction = None
        self.TblIdentifier = None
        self.TblPcd = None
        self.TblReport = None
        self.TblInf = None
        self.TblDec = None
        self.TblDsc = None
        self.TblFdf = None

    ## Initialize ECC database
    #
    # 1. Delete all old existing tables
    # 2. Create new tables
    # 3. Initialize table DataModel
    #
    def InitDatabase(self, NewDatabase = True):
        EdkLogger.verbose("\nInitialize ECC database started ...")
        #
        # Drop all old existing tables
        #
        if NewDatabase:
            if os.path.exists(self.DbPath):
                os.remove(self.DbPath)
        self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
        # larger pages and no fsync: favors bulk-insert speed over durability
        self.Conn.execute("PRAGMA page_size=4096")
        self.Conn.execute("PRAGMA synchronous=OFF")
        # to avoid non-ascii character conversion error
        self.Conn.text_factory = str

        self.Cur = self.Conn.cursor()
        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblFunction = TableFunction(self.Cur)
        self.TblIdentifier = TableIdentifier(self.Cur)
        self.TblPcd = TablePcd(self.Cur)
        self.TblReport = TableReport(self.Cur)
        self.TblInf = ModuleTable(self.Cur)
        self.TblDec = PackageTable(self.Cur)
        self.TblDsc = PlatformTable(self.Cur)
        self.TblFdf = TableFdf(self.Cur)

        #
        # Create new tables
        #
        if NewDatabase:
            self.TblDataModel.Create()
            self.TblFile.Create()
            self.TblFunction.Create()
            self.TblPcd.Create()
            self.TblReport.Create()
            self.TblInf.Create()
            self.TblDec.Create()
            self.TblDsc.Create()
            self.TblFdf.Create()

        #
        # Init each table's ID
        #
        self.TblDataModel.InitID()
        self.TblFile.InitID()
        self.TblFunction.InitID()
        self.TblPcd.InitID()
        self.TblReport.InitID()
        self.TblInf.InitID()
        self.TblDec.InitID()
        self.TblDsc.InitID()
        self.TblFdf.InitID()

        #
        # Initialize table DataModel
        #
        if NewDatabase:
            self.TblDataModel.InitTable()

        EdkLogger.verbose("Initialize ECC database ... DONE!")

    ## Query a table
    #
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()

    ## Close entire database
    #
    # Commit all first
    # Close the connection and cursor
    #
    def Close(self):
        #
        # Commit to file
        #
        self.Conn.commit()
        #
        # Close connection and cursor
        #
        self.Cur.close()
        self.Conn.close()

    ## Insert one file information
    #
    # Insert one file's information to the database
    # 1. Create a record in TableFile
    # 2. Create functions one by one
    #    2.1 Create variables of function one by one
    #    2.2 Create pcds of function one by one
    # 3. Create variables one by one
    # 4. Create pcds one by one
    #
    def InsertOneFile(self, File):
        #
        # Insert a record for file
        #
        FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
        # only C sources/headers get a per-file Identifier<FileID> table
        if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
            IdTable = TableIdentifier(self.Cur)
            IdTable.Table = "Identifier%s" % FileID
            IdTable.Create()
            #
            # Insert function of file
            #
            for Function in File.FunctionList:
                FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
                        Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
                        Function.BodyStartLine, Function.BodyStartColumn, FileID, \
                        Function.FunNameStartLine, Function.FunNameStartColumn)
                #
                # Insert Identifier of function
                #
                for Identifier in Function.IdentifierList:
                    IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                            FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
                #
                # Insert Pcd of function
                #
                for Pcd in Function.PcdList:
                    PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                            FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
            #
            # Insert Identifier of file
            #
            for Identifier in File.IdentifierList:
                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                        FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
            #
            # Insert Pcd of file
            #
            for Pcd in File.PcdList:
                PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                        FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
        EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)

    ## UpdateIdentifierBelongsToFunction
    #
    # Update the field "BelongsToFunction" for each Identifier
    # (disabled row-by-row version, superseded by the per-function
    # bulk-update variant below)
    #
    def UpdateIdentifierBelongsToFunction_disabled(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
        EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
        self.Cur.execute(SqlCommand)
        Records = self.Cur.fetchall()
        for Record in Records:
            IdentifierID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            Model = Record[4]

            #
            # Check whether an identifier belongs to a function
            #
            EdkLogger.debug(4, "For common identifiers ... ")
            SqlCommand = """select ID from Function
                        where StartLine < %s and EndLine > %s
                        and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
            EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
            self.Cur.execute(SqlCommand)
            IDs = self.Cur.fetchall()
            for ID in IDs:
                SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)

            #
            # Check whether the identifier is a function header
            #
            EdkLogger.debug(4, "For function headers ... ")
            if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
                SqlCommand = """select ID from Function
                        where StartLine = %s + 1
                        and BelongsToFile = %s""" % (EndLine, BelongsToFile)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)
                IDs = self.Cur.fetchall()
                for ID in IDs:
                    SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
                    EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                    self.Cur.execute(SqlCommand)

        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")

    ## UpdateIdentifierBelongsToFunction
    #
    # Update the field "BelongsToFunction" for each Identifier:
    # for every function, bulk-update the identifiers inside its line range,
    # and re-tag the comment immediately above it as its function header
    #
    def UpdateIdentifierBelongsToFunction(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
        Records = self.TblFunction.Exec(SqlCommand)
        # Data1/Data2 are remnants of the commented-out executemany approach
        Data1 = []
        Data2 = []
        for Record in Records:
            FunctionID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            #Data1.append(("'file%s'" % BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine))
            #Data2.append(("'file%s'" % BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1))

            SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
                        (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
            self.TblIdentifier.Exec(SqlCommand)

            SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
                        (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
            self.TblIdentifier.Exec(SqlCommand)
#         #
#         # Check whether an identifier belongs to a function
#         #
#         print Data1
#         SqlCommand = """Update ? set BelongsToFunction = ? where BelongsToFile = ? and StartLine > ? and EndLine < ?"""
#         print SqlCommand
#         EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
#         self.Cur.executemany(SqlCommand, Data1)
#
#         #
#         # Check whether the identifier is a function header
#         #
#         EdkLogger.debug(4, "For function headers ... ")
#         SqlCommand = """Update ? set BelongsToFunction = ?, Model = ? where BelongsToFile = ? and Model = ? and EndLine = ?"""
#         EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
#         self.Cur.executemany(SqlCommand, Data2)
#
#         EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # self-test: build the database, insert one hand-crafted C file record,
    # run the BelongsToFunction update, and dump every table
    EdkLogger.Initialize()
    #EdkLogger.SetLevel(EdkLogger.VERBOSE)
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))

    Db = Database(DATABASE_PATH)
    Db.InitDatabase()
    Db.QueryTable(Db.TblDataModel)

    # four comment identifiers at various line ranges plus one function
    # spanning lines 21-60, so identifier2 falls inside the function and
    # identifier2's trailing comment becomes its function header
    identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
    identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
    identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
    identifier4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
    fun1 = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
    file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
    Db.InsertOneFile(file)
    Db.UpdateIdentifierBelongsToFunction()

    Db.QueryTable(Db.TblFile)
    Db.QueryTable(Db.TblFunction)
    Db.QueryTable(Db.TblPcd)
    Db.QueryTable(Db.TblIdentifier)

    Db.Close()
    EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
| {
"content_hash": "3ef73bdd17176d699906ff831d4808ca",
"timestamp": "",
"source": "github",
"line_count": 329,
"max_line_length": 177,
"avg_line_length": 40.33434650455927,
"alnum_prop": 0.606706857573474,
"repo_name": "egraba/vbox_openbsd",
"id": "2b981585d0006ca650421ff2f75254fe7a56bcbd",
"size": "13855",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "VirtualBox-5.0.0/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Database.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "88714"
},
{
"name": "Assembly",
"bytes": "4303680"
},
{
"name": "AutoIt",
"bytes": "2187"
},
{
"name": "Batchfile",
"bytes": "95534"
},
{
"name": "C",
"bytes": "192632221"
},
{
"name": "C#",
"bytes": "64255"
},
{
"name": "C++",
"bytes": "83842667"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "6041"
},
{
"name": "CSS",
"bytes": "26756"
},
{
"name": "D",
"bytes": "41844"
},
{
"name": "DIGITAL Command Language",
"bytes": "56579"
},
{
"name": "DTrace",
"bytes": "1466646"
},
{
"name": "GAP",
"bytes": "350327"
},
{
"name": "Groff",
"bytes": "298540"
},
{
"name": "HTML",
"bytes": "467691"
},
{
"name": "IDL",
"bytes": "106734"
},
{
"name": "Java",
"bytes": "261605"
},
{
"name": "JavaScript",
"bytes": "80927"
},
{
"name": "Lex",
"bytes": "25122"
},
{
"name": "Logos",
"bytes": "4941"
},
{
"name": "Makefile",
"bytes": "426902"
},
{
"name": "Module Management System",
"bytes": "2707"
},
{
"name": "NSIS",
"bytes": "177212"
},
{
"name": "Objective-C",
"bytes": "5619792"
},
{
"name": "Objective-C++",
"bytes": "81554"
},
{
"name": "PHP",
"bytes": "58585"
},
{
"name": "Pascal",
"bytes": "69941"
},
{
"name": "Perl",
"bytes": "240063"
},
{
"name": "PowerShell",
"bytes": "10664"
},
{
"name": "Python",
"bytes": "9094160"
},
{
"name": "QMake",
"bytes": "3055"
},
{
"name": "R",
"bytes": "21094"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Shell",
"bytes": "1460572"
},
{
"name": "SourcePawn",
"bytes": "4139"
},
{
"name": "TypeScript",
"bytes": "142342"
},
{
"name": "Visual Basic",
"bytes": "7161"
},
{
"name": "XSLT",
"bytes": "1034475"
},
{
"name": "Yacc",
"bytes": "22312"
}
],
"symlink_target": ""
} |
from .fetchers import NUMetadatasFetcher
from .fetchers import NUIKEGatewayConfigsFetcher
from .fetchers import NUIKESubnetsFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUIKEGateway(NURESTObject):
    """VSD ``ikegateway`` entity: an IKE (Internet Key Exchange) gateway."""

    __rest_name__ = "ikegateway"
    __resource_name__ = "ikegateways"

    # Enumeration values accepted by the VSD API.
    CONST_IKE_VERSION_V1 = "V1"
    CONST_IKE_VERSION_V2 = "V2"
    CONST_IK_EV1_MODE_NONE = "NONE"
    CONST_IK_EV1_MODE_MAIN = "MAIN"
    CONST_IK_EV1_MODE_AGGRESSIVE = "AGGRESSIVE"
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    def __init__(self, **kwargs):
        """Build an IKE gateway wrapper.

        Any REST attribute may be given as a keyword argument; the special
        keyword ``data`` loads the object from a plain dictionary instead.

        Examples:
            >>> ikegateway = NUIKEGateway(id=u'xxxx-xxx-xxx-xxx', name=u'IKEGateway')
            >>> ikegateway = NUIKEGateway(data=my_dict)
        """
        super(NUIKEGateway, self).__init__()

        # Backing fields for the exposed read/write REST attributes.
        self._ike_version = None
        self._ik_ev1_mode = None
        self._ip_address = None
        self._name = None
        self._last_updated_by = None
        self._description = None
        self._entity_scope = None
        self._associated_enterprise_id = None
        self._external_id = None

        # Register each attribute with its remote (API-side) name.
        self.expose_attribute(local_name="ike_version", remote_name="IKEVersion", attribute_type=str, is_required=False, is_unique=False, choices=[u'V1', u'V2'])
        self.expose_attribute(local_name="ik_ev1_mode", remote_name="IKEv1Mode", attribute_type=str, is_required=False, is_unique=False, choices=[u'AGGRESSIVE', u'MAIN', u'NONE'])
        self.expose_attribute(local_name="ip_address", remote_name="IPAddress", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="associated_enterprise_id", remote_name="associatedEnterpriseID", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)

        # Fetchers for child/member collections.
        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.ike_gateway_configs = NUIKEGatewayConfigsFetcher.fetcher_with_object(parent_object=self, relationship="member")
        self.ike_subnets = NUIKESubnetsFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)

    # -- Properties ---------------------------------------------------------

    @property
    def ike_version(self):
        """The IKE version (``IKEVersion`` in the VSD API)."""
        return self._ike_version

    @ike_version.setter
    def ike_version(self, new_value):
        """Set the IKE version (``IKEVersion`` in the VSD API)."""
        self._ike_version = new_value

    @property
    def ik_ev1_mode(self):
        """Mode for IKEv1 (``IKEv1Mode`` in the VSD API)."""
        return self._ik_ev1_mode

    @ik_ev1_mode.setter
    def ik_ev1_mode(self, new_value):
        """Set the mode for IKEv1 (``IKEv1Mode`` in the VSD API)."""
        self._ik_ev1_mode = new_value

    @property
    def ip_address(self):
        """IP address of the IKEv2 gateway (``IPAddress`` in the VSD API)."""
        return self._ip_address

    @ip_address.setter
    def ip_address(self, new_value):
        """Set the IP address of the IKEv2 gateway (``IPAddress`` in the VSD API)."""
        self._ip_address = new_value

    @property
    def name(self):
        """Name of the IKEv2 gateway."""
        return self._name

    @name.setter
    def name(self, new_value):
        """Set the name of the IKEv2 gateway."""
        self._name = new_value

    @property
    def last_updated_by(self):
        """ID of the user who last updated the object (``lastUpdatedBy`` in the VSD API)."""
        return self._last_updated_by

    @last_updated_by.setter
    def last_updated_by(self, new_value):
        """Set the last-updater ID (``lastUpdatedBy`` in the VSD API)."""
        self._last_updated_by = new_value

    @property
    def description(self):
        """Description of the IKEv2 gateway."""
        return self._description

    @description.setter
    def description(self, new_value):
        """Set the description of the IKEv2 gateway."""
        self._description = new_value

    @property
    def entity_scope(self):
        """Whether the entity is Data-center or Enterprise scoped (``entityScope`` in the VSD API)."""
        return self._entity_scope

    @entity_scope.setter
    def entity_scope(self, new_value):
        """Set the entity scope (``entityScope`` in the VSD API)."""
        self._entity_scope = new_value

    @property
    def associated_enterprise_id(self):
        """ID of the associated Enterprise (``associatedEnterpriseID`` in the VSD API)."""
        return self._associated_enterprise_id

    @associated_enterprise_id.setter
    def associated_enterprise_id(self, new_value):
        """Set the associated Enterprise ID (``associatedEnterpriseID`` in the VSD API)."""
        self._associated_enterprise_id = new_value

    @property
    def external_id(self):
        """External object ID for third-party integration (``externalID`` in the VSD API)."""
        return self._external_id

    @external_id.setter
    def external_id(self, new_value):
        """Set the external object ID (``externalID`` in the VSD API)."""
        self._external_id = new_value
| {
"content_hash": "8612bd3dcf8a8a3a46e9223cdedf1006",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 179,
"avg_line_length": 26.531157270029674,
"alnum_prop": 0.5452410244939044,
"repo_name": "nuagenetworks/vspk-python",
"id": "2a3dbeee40033d11aa8450c69921fc12eabf86fb",
"size": "10554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vspk/v5_0/nuikegateway.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12909327"
}
],
"symlink_target": ""
} |
"""
PyColourChooser
Copyright (C) 2002 Michael Gilfix
This file is part of PyColourChooser.
You should have received a file COPYING containing license terms
along with this program; if not, write to Michael Gilfix
(mgilfix@eecs.tufts.edu) for a copy.
This version of PyColourChooser is open source; you can redistribute it and/or
modify it under the terms listed in the file COPYING.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
"""
# 12/14/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o 2.5 compatibility update.
#
# 12/21/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o wxPyColorChooser -> PyColorChooser
# o wxPyColourChooser -> PyColourChooser
#
import wx
import canvas
import colorsys
class PyColourSlider(canvas.Canvas):
    """A pure-Python colour slider.

    Paints a vertical strip showing the HSV value transition from 1.0 down
    to 0.0 around a given base colour, letting the user pick a particular
    shade within that spectrum. Best accompanied by a wxSlider so the user
    can select a position.
    """

    HEIGHT = 172
    WIDTH = 12

    def __init__(self, parent, id, colour=None):
        """Create a blank slider; a base colour must be set before the
        slider is filled in."""
        # The base colour must exist before Canvas.__init__ runs, because
        # the base class invokes the buffer-drawing routine during
        # construction.
        self.SetBaseColour(colour)
        canvas.Canvas.__init__(self, parent, id, size=(self.WIDTH, self.HEIGHT))

    def SetBaseColour(self, colour):
        """Set the central colour used when computing colour transitions."""
        self.base_colour = colour

    def GetBaseColour(self):
        """Return the colour currently used to fill out the slider."""
        return self.base_colour

    def GetValue(self, pos):
        """Return the colour value at vertical position *pos*.

        *pos* must lie within the slider's valid height, otherwise the
        result is unpredictable.
        """
        return self.buffer.GetPixelColour(0, pos)

    def DrawBuffer(self):
        """Fill the buffer with the HSV value sweep from 1.0 down to 0.0."""
        if self.base_colour is None:
            return

        # Hue and saturation come from the base colour; only the value
        # component is swept down the strip.
        h, s, v = colorsys.rgb_to_hsv(self.base_colour.Red() / 255.0,
                                      self.base_colour.Green() / 255.0,
                                      self.base_colour.Blue() / 255.0)
        v = 1.0
        step = 1.0 / self.HEIGHT
        for row in range(0, self.HEIGHT):
            r, g, b = [c * 255.0 for c in colorsys.hsv_to_rgb(h, s, v)]
            shade = wx.Colour(int(r), int(g), int(b))
            self.buffer.SetPen(wx.Pen(shade, 1, wx.SOLID))
            self.buffer.DrawRectangle(0, row, 15, 1)
            v = v - step
| {
"content_hash": "2b572b74a77327a5a427fb7a2f1170b6",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 80,
"avg_line_length": 34.19565217391305,
"alnum_prop": 0.638906547997457,
"repo_name": "ktan2020/legacy-automation",
"id": "31a602b9889ab4340457d4a753bed61b184ec742",
"size": "3146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/site-packages/wx-3.0-msw/wx/lib/colourchooser/pycolourslider.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
import rexviewer as r
import naali
import PythonQt
from PythonQt.QtGui import QTreeWidgetItem, QSizePolicy, QIcon, QHBoxLayout, QComboBox, QFileDialog, QMessageBox, QWidget, QTableWidgetItem
from PythonQt.QtUiTools import QUiLoader
from PythonQt.QtCore import QFile, QSize, SIGNAL
import Queue
import sys
import traceback
from xml.dom.minidom import getDOMImplementation
class ToolBarWindow():
    """Base class for tool windows loaded from a Qt .ui file.

    The loaded widget is wrapped in a UiProxyWidget and added to the naali
    UI scene. Other threads talk to the window by putting (title, message)
    tuples on the input queue, which processIncoming() drains on the UI
    thread. The sentinel titles '__end__' and '__unload__' carry control
    messages.
    """

    def __init__(self, uistring, queue, endApplication, controller):
        """Load *uistring* (path to a .ui file) and add it to the UI scene.

        queue          -- Queue.Queue carrying (title, message) tuples
        endApplication -- callback kept for shutdown handling
        controller     -- owning controller; its `isrunning` flag gates the
                          incoming-message loop
        """
        self.controller = controller
        loader = QUiLoader()
        uifile = QFile(uistring)
        ui = loader.load(uifile)
        self.gui = ui
        uism = naali.ui
        self.proxywidget = r.createUiProxyWidget(ui)
        if not uism.AddWidgetToScene(self.proxywidget):
            r.logInfo("Adding the ProxyWidget to the bar failed.")
        # TODO: Due to core UI API refactor AddWidgetToMenu doesn't exist anymore.
        #uism.AddWidgetToMenu(self.proxywidget, "Local Scene", "Server Tools", "./data/ui/images/menus/edbutton_LSCENE_normal.png")
        self.inputQueue = queue
        self.endApplication = endApplication
        self.tableitems = []

    def on_exit(self):
        """Tear the window down: stop the message loop and remove the proxy
        widget from the UI scene. Returns True on success, False on error."""
        try:
            # end incoming loop
            self.controller.isrunning = 0
            # BUGFIX: the (title, msg) pair must be put as a single tuple.
            # The previous put('__end__', '') passed '' as Queue.put's
            # *block* argument, so processIncoming()'s tuple unpacking of
            # the item failed.
            self.inputQueue.put(('__end__', ''))
            self.proxywidget.hide()
            uism = naali.ui
            # TODO: Due to core UI API refactor RemoveWidgetFromMenu doesn't exist anymore.
            #uism.RemoveWidgetFromMenu(self.proxywidget)
            uism.RemoveWidgetFromScene(self.proxywidget)
            return True
        except:
            r.logInfo("LocalSceneWindow failure:")
            traceback.print_exc()
            return False

    def processIncoming(self):
        """Drain the input queue (non-blocking): dispatch the '__end__' and
        '__unload__' control messages, show everything else in a dialog."""
        while(self.inputQueue.qsize()):
            try:
                title, msg = self.inputQueue.get(0)
                if(title=="__end__"):
                    self.controller.isrunning = 0
                    return
                if(title=="__unload__"):
                    self.controller.unloadScene()
                    return
                self.displayMessage(title, msg)
            except Queue.Empty:
                pass
            except:
                r.logInfo("Exception in processIncoming, LocalScene window")

    def displayMessage(self, title, msg):
        """Show *msg* in a modal information box titled *title*."""
        QMessageBox.information(None, title, msg)
class LocalSceneWindow(ToolBarWindow, QWidget):
    """Tool window for loading a local .scene file, transforming it
    (position / scale / rotation), and publishing it to a server, plus
    management (load / unload / delete) of scenes already on the server.

    All widget lookups come from the .ui file named by UIFILE; all edits
    are forwarded to the owning controller.
    """

    UIFILE = "pymodules/localscene/localscene.ui"

    def __init__(self, controller, queue, endApplication):
        self.controller = controller
        ToolBarWindow.__init__(self, "pymodules/localscene/localscene.ui", queue, endApplication, controller)
        self.widget = self.gui
        # --- transform spin boxes -----------------------------------------
        self.xpos = self.gui.findChild("QDoubleSpinBox", "xpos")
        self.ypos = self.gui.findChild("QDoubleSpinBox", "ypos")
        self.zpos = self.gui.findChild("QDoubleSpinBox", "zpos")
        self.step1 = self.gui.findChild("QDoubleSpinBox", "step1")
        self.xscale = self.gui.findChild("QDoubleSpinBox", "xscale")
        self.yscale = self.gui.findChild("QDoubleSpinBox", "yscale")
        self.zscale = self.gui.findChild("QDoubleSpinBox", "zscale")
        self.rotateX = self.gui.findChild("QDoubleSpinBox", "rotateX")
        self.rotateY = self.gui.findChild("QDoubleSpinBox", "rotateY")
        self.rotateZ = self.gui.findChild("QDoubleSpinBox", "rotateZ")
        self.rotateCenterX = self.gui.findChild("QDoubleSpinBox", "rotateCenterX")
        self.rotateCenterY = self.gui.findChild("QDoubleSpinBox", "rotateCenterY")
        self.rotateCenterZ = self.gui.findChild("QDoubleSpinBox", "rotateCenterZ")
        # --- local-scene buttons and check boxes --------------------------
        self.btnLoad = self.gui.findChild("QPushButton", "pushButtonLoad")
        self.btnUnload = self.gui.findChild("QPushButton", "pushButtonUnload")
        self.btnPublish = self.gui.findChild("QPushButton", "pushButtonPublish")
        self.btnSave = self.gui.findChild("QPushButton", "pushButtonSave")
        self.btnSetOffset = self.gui.findChild("QPushButton", "pushButtonSetOffset")
        self.chkBoxFlipZY = self.gui.findChild("QCheckBox", "checkBoxFlipZY")
        self.checkBoxHighlight = self.gui.findChild("QCheckBox", "checkBoxHighlight")
        self.checkBoxLockScale = self.gui.findChild("QCheckBox", "checkBoxLockScale")
        self.checkBoxToCenter = self.gui.findChild("QCheckBox", "checkBoxToCenter")
        self.checkBoxRotationPoint = self.gui.findChild("QCheckBox", "checkBoxRotationPoint")
        # --- server-side scene editing widgets ----------------------------
        self.btnLoadServerSceneList = self.gui.findChild("QPushButton", "pushButtonLoadServerSceneList")
        self.btnLoadServerScene = self.gui.findChild("QPushButton", "pushButtonLoadServerScene")
        self.btnUnloadServerScene = self.gui.findChild("QPushButton", "pushButtonUnloadServerScene")
        self.btnDeleteServerScene = self.gui.findChild("QPushButton", "pushButtonDeleteServerScene")
        self.btnDeleteActiveScene = self.gui.findChild("QPushButton", "pushButtonDeleteActiveScene")
        self.tableWidgetServerScenes = self.gui.findChild("QTableWidget","tableWidgetServerScenes")
        self.listViewScenesRegions = self.gui.findChild("QListWidget","listViewScenesRegions")
        self.lineEditRegionName = self.gui.findChild("QLineEdit", "lineEditRegionName")
        self.lineEditPublishName = self.gui.findChild("QLineEdit", "lineEditPublishName")
        self.lineEditLoadRegion = self.gui.findChild("QLineEdit", "lineEditLoadRegion")
        self.progressBar = self.gui.findChild("QProgressBar", "progressBar")
        # --- signal wiring ------------------------------------------------
        self.btnLoad.connect("clicked(bool)", self.btnLoadClicked)
        self.btnUnload.connect("clicked(bool)", self.btnUnloadClicked)
        self.btnPublish.connect("clicked(bool)", self.btnPublishClicked)
        self.btnSave.connect("clicked(bool)", self.btnSaveClicked)
        self.btnSetOffset.connect("clicked(bool)", self.btnSetOffsetClicked)
        self.xpos.connect("valueChanged(double)", self.spinBoxXPosValueChanged)
        self.ypos.connect("valueChanged(double)", self.spinBoxYPosValueChanged)
        self.zpos.connect("valueChanged(double)", self.spinBoxZPosValueChanged)
        self.step1.connect("valueChanged(double)", self.spinBoxPosStepPosValueChanged)
        self.xscale.connect("valueChanged(double)", self.spinBoxXScaleValueChanged)
        self.yscale.connect("valueChanged(double)", self.spinBoxYScaleValueChanged)
        self.zscale.connect("valueChanged(double)", self.spinBoxZScaleValueChanged)
        self.rotateX.connect("valueChanged(double)", self.spinBoxXRotateValueChanged)
        self.rotateY.connect("valueChanged(double)", self.spinBoxYRotateValueChanged)
        self.rotateZ.connect("valueChanged(double)", self.spinBoxZRotateValueChanged)
        self.rotateCenterX.connect("valueChanged(double)", self.resetPointRotation)
        self.rotateCenterY.connect("valueChanged(double)", self.resetPointRotation)
        self.rotateCenterZ.connect("valueChanged(double)", self.resetPointRotation)
        self.chkBoxFlipZY.connect("toggled(bool)", self.checkBoxZYToggled)
        self.checkBoxHighlight.connect("toggled(bool)", self.checkBoxHighlightToggled)
        self.checkBoxLockScale.connect("toggled(bool)", self.checkBoxLockScaleToggled)
        self.btnLoadServerSceneList.connect("clicked(bool)", self.btnLoadServerSceneListClicked)
        self.btnLoadServerScene.connect("clicked(bool)", self.btnLoadServerSceneClicked)
        self.btnUnloadServerScene.connect("clicked(bool)", self.btnUnloadServerSceneClicked)
        self.btnDeleteServerScene.connect("clicked(bool)", self.btnDeleteServerSceneClicked)
        self.btnDeleteActiveScene.connect("clicked(bool)", self.btnDeleteActiveSceneClicked)
        self.tableWidgetServerScenes.connect("itemSelectionChanged()", self.tableWidgetServerScenesItemSelectionChanged)
        # --- upload progress bar ------------------------------------------
        self.progressBar.setMinimum(0)
        self.progressBar.setMaximum(7)
        self.progressBar.reset()
        self.progressBar.setValue(0)
        self.progressBar.setFormat("Upload progress: inactive %p%")
        # --- state --------------------------------------------------------
        self.sizeLock = True                    # uniform-scale toggle
        self.filename = ""                      # currently loaded .scene path
        self.serverScenes = {}                  # table row -> scene uuid
        self.serverSceneNames = {}              # table row -> scene name
        self.serverSceneRegionLists = {}        # scene uuid -> list of regions
        self.serverSceneRegionCount = {}        # scene uuid -> count of regions
        self.currentSceneSelectionRegions = []

    def threadTest(self):
        """Ask the controller to close its worker thread."""
        self.controller.closeThread()

    def getButton(self, name, iconname, line, action):
        """Build a small fixed-size icon button and append it to *line*.

        NOTE(review): `buttons` is not imported in this module, so calling
        this would raise NameError — it appears to be unused/vestigial.
        Confirm before removing.
        """
        size = QSize(16, 16)
        button = buttons.PyPushButton()
        icon = QIcon(iconname)
        icon.actualSize(size)
        button.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        button.setMaximumSize(size)
        button.setMinimumSize(size)
        button.text = ""
        button.name = name
        button.setIcon(icon)
        button.setFlat(True)
        button.setEnabled(False)
        button.connect('clicked()', action)
        line.buttons.append(button)
        return button

    def btnLoadClicked(self, args):
        """Prompt for a .scene file and load it (one local scene at a time)."""
        if(self.controller.bLocalSceneLoaded==False):
            self.filename=QFileDialog.getOpenFileName(self.widget, "Select scene file", "", ("Scene files (*.scene)"))
            if(self.filename!=""):
                self.controller.loadScene(self.filename)
        else:
            self.displayMessage('LocalScene allready loaded', 'Unload current LocalScene, no support for multiple localscenes yet')

    def btnUnloadClicked(self, args):
        """Unload the current local scene."""
        self.controller.unloadScene()

    def btnPublishClicked(self, args):
        """Validate inputs and start uploading the loaded scene to the server."""
        if(self.controller.bLocalSceneLoaded==False):
            self.displayMessage('no local scene loaded', 'Load LocalScene first')
            return
        if( self.lineEditRegionName.text==""):
            self.displayMessage('no region name', 'Give region name where scene is to be loaded firstd')
            return
        if(self.lineEditPublishName.text==""):
            self.displayMessage('no scene name', 'Give upload scene a name before uploading.')
            return
        regName = self.lineEditRegionName.text
        publishName = self.lineEditPublishName.text
        self.controller.startUpload(self.filename, regName, publishName)

    def btnSaveClicked(self, args):
        """Save the current scene back to its file."""
        self.controller.saveScene(self.filename)

    def spinBoxXPosValueChanged(self, double):
        self.controller.setxpos(double)

    def spinBoxYPosValueChanged(self, double):
        self.controller.setypos(double)

    def spinBoxZPosValueChanged(self, double):
        self.controller.setzpos(double)

    def spinBoxPosStepPosValueChanged(self, double):
        """Apply the chosen step size to all position/rotation spin boxes."""
        self.xpos.singleStep=double
        self.ypos.singleStep=double
        self.zpos.singleStep=double
        self.rotateX.singleStep=double
        self.rotateY.singleStep=double
        self.rotateZ.singleStep=double
        self.rotateCenterX.singleStep=double
        self.rotateCenterY.singleStep=double
        self.rotateCenterZ.singleStep=double

    def resetPointRotation(self, double):
        """Rotation-center coordinate changed: reset accumulated point rotation."""
        self.controller.resetPointRotation()

    def spinBoxXScaleValueChanged(self, double):
        # With the scale lock on, all three axes follow the edited one.
        if(self.sizeLock):
            self.controller.setxscale(double)
            self.controller.setyscale(double)
            self.controller.setzscale(double)
            self.yscale.setValue(double)
            self.zscale.setValue(double)
        else:
            self.controller.setxscale(double)

    def spinBoxYScaleValueChanged(self, double):
        if(self.sizeLock):
            self.controller.setxscale(double)
            self.controller.setyscale(double)
            self.controller.setzscale(double)
            self.xscale.setValue(double)
            self.zscale.setValue(double)
        else:
            self.controller.setyscale(double)

    def spinBoxZScaleValueChanged(self, double):
        if(self.sizeLock):
            self.controller.setxscale(double)
            self.controller.setyscale(double)
            self.controller.setzscale(double)
            self.xscale.setValue(double)
            self.yscale.setValue(double)
        else:
            self.controller.setzscale(double)

    def spinBoxXRotateValueChanged(self, double):
        # Rotate around the custom center point when that mode is checked.
        if(self.checkBoxRotationPoint.isChecked()):
            x,y,z = self.getCenterPoint()
            self.controller.rotateAroundPointX(double, x, y, z)
        else:
            self.controller.rotateX(double)

    def spinBoxYRotateValueChanged(self, double):
        if(self.checkBoxRotationPoint.isChecked()):
            x,y,z = self.getCenterPoint()
            self.controller.rotateAroundPointY(double, x, y, z)
        else:
            self.controller.rotateY(double)

    def spinBoxZRotateValueChanged(self, double):
        if(self.checkBoxRotationPoint.isChecked()):
            x,y,z = self.getCenterPoint()
            self.controller.rotateAroundPointZ(double, x, y, z)
        else:
            self.controller.rotateZ(double)

    def btnSetOffsetClicked(self, args=None):
        """Copy the current position into the rotation-center spin boxes.

        BUGFIX: this slot is connected to "clicked(bool)", so it receives
        the checked flag as an argument; the old zero-argument signature
        raised TypeError when the button was pressed. *args* defaults to
        None so direct no-argument calls keep working.
        """
        self.rotateCenterX.setValue(self.xpos.value)
        self.rotateCenterY.setValue(self.ypos.value)
        self.rotateCenterZ.setValue(self.zpos.value)
        self.controller.resetPointRotation()

    def getCenterPoint(self):
        """Return the (x, y, z) rotation-center coordinates."""
        return self.rotateCenterX.value, self.rotateCenterY.value, self.rotateCenterZ.value

    def checkBoxZYToggled(self, enabled):
        self.controller.checkBoxZYToggled(enabled)

    def checkBoxHighlightToggled(self, enabled):
        self.controller.checkBoxHighlightToggled(enabled)

    def checkBoxLockScaleToggled(self, enabled):
        self.sizeLock = enabled

    # -- Server side scene handlers -----------------------------------------

    def btnLoadServerSceneListClicked(self, args):
        """Request the list of scenes stored on the server."""
        self.controller.startSceneAction("GetUploadSceneList")

    def btnLoadServerSceneClicked(self, args):
        """Load the selected server scene into the region named in the UI."""
        if self.isThereItemSelected(self.tableWidgetServerScenes):
            region = self.lineEditLoadRegion.text
            sceneid = self.serverScenes[self.tableWidgetServerScenes.currentRow()]
            data = self.constructLoadSceneData(region, sceneid)
            self.controller.startSceneAction("LoadServerScene", data)

    def btnUnloadServerSceneClicked(self, args):
        """Unload the selected server scene from the selected region."""
        if self.isThereItemSelected(self.tableWidgetServerScenes):
            tablerow = self.tableWidgetServerScenes.currentRow()
            row = self.listViewScenesRegions.currentRow
            param = (self.serverScenes[tablerow], self.currentSceneSelectionRegions[row])
            self.controller.startSceneAction("UnloadServerScene", param)

    def btnDeleteServerSceneClicked(self, args):
        # Intentionally disabled; the old passive-scene deletion path was
        # removed. Kept as a no-op slot for the wired button.
        pass

    def btnDeleteActiveSceneClicked(self, args):
        """Delete the selected scene from the server."""
        if self.isThereTableItemSelected(self.tableWidgetServerScenes):
            self.controller.startSceneAction("DeleteServerScene", self.serverScenes[self.tableWidgetServerScenes.currentRow()])

    def isThereItemSelected(self, list):
        """Return True when *list* has a current item, else warn the user."""
        if list.currentItem() != None:
            return True
        else:
            self.displayMessage('no scene selected', 'Select scene from list for loading/unloading/deleting it.')
            return False

    def isThereTableItemSelected(self, tablelist):
        """Return True when *tablelist* has a selected row, else warn the user."""
        if tablelist.currentRow() != -1:
            return True
        else:
            self.displayMessage('no scene selected', 'Select scene from table for loading/unloading/deleting it.')
            return False

    def setServerScenes(self, d):
        """Rebuild the server-scene table from *d*.

        *d* maps keys to (sceneName, regionName, uuid) tuples — presumably
        one entry per scene/region pair (TODO confirm against the caller).
        One table row is created per distinct uuid; column 0 holds the
        scene name and column 1 the number of non-empty regions it is
        loaded into.
        """
        self.tableWidgetServerScenes.clearContents()
        self.serverSceneRegionLists = {}
        self.serverSceneRegionCount = {}
        # Qt frees the old items; our tableitems list just keeps Python
        # references alive while they are in the table.
        while(self.tableWidgetServerScenes.rowCount!=0):
            self.tableWidgetServerScenes.removeRow(0)
        uuids = []
        if d!=None:
            i = 0
            for k, vals in d.items():
                sceneName, regionName, uuid = vals
                if uuid not in uuids:
                    # First time we see this scene: add a table row for it.
                    uuids.append(uuid)
                    self.tableWidgetServerScenes.insertRow (self.tableWidgetServerScenes.rowCount)
                    self.serverScenes[i] = uuid
                    self.serverSceneNames[i] = sceneName
                    sceneNameItem = QTableWidgetItem(sceneName)
                    self.tableitems.append(sceneNameItem)
                    self.tableWidgetServerScenes.setItem(self.tableWidgetServerScenes.rowCount-1, 0, sceneNameItem)
                    i+=1
                # Track this scene's regions.
                if(self.serverSceneRegionLists.has_key(uuid)==False):
                    self.serverSceneRegionLists[uuid] = []
                self.serverSceneRegionLists[uuid].append(regionName)
                # Count only non-empty region names.
                if(self.serverSceneRegionCount.has_key(uuid)==False):
                    if regionName != "":
                        self.serverSceneRegionCount[uuid] = 1
                    else:
                        self.serverSceneRegionCount[uuid] = 0
                elif regionName != "":
                    self.serverSceneRegionCount[uuid]+= 1
                else:
                    self.serverSceneRegionCount[uuid] = 0
                # Refresh the region-count cell on the scene's row.
                regionCountItem = QTableWidgetItem(str(self.serverSceneRegionCount[uuid]))
                self.tableitems.append(regionCountItem)
                row = 0
                for r in self.serverScenes.keys():
                    if self.serverScenes[r]==uuid:
                        row = r
                        break
                self.tableWidgetServerScenes.setItem(row, 1, regionCountItem)

    def constructLoadSceneData(self, regionTxt, sceneuuidTxt):
        """Return a <loadscenedata> XML document naming the target region
        and scene uuid."""
        impl = getDOMImplementation()
        doc = impl.createDocument(None, "loadscenedata", None)
        top_element = doc.documentElement
        region = doc.createElement('region')
        sceneuuid = doc.createElement('sceneuuid')
        top_element.appendChild(region)
        top_element.appendChild(sceneuuid)
        regionName = doc.createTextNode(regionTxt)
        sceneUuid = doc.createTextNode(sceneuuidTxt)
        region.appendChild(regionName)
        sceneuuid.appendChild(sceneUuid)
        return doc.toxml()

    def tableWidgetServerScenesItemSelectionChanged(self):
        """Refresh the region list when the selected scene row changes."""
        self.currentSceneSelectionRegions = []
        self.listViewScenesRegions.clear()
        row = self.tableWidgetServerScenes.currentRow()
        if(row!=-1):
            uuid = self.serverScenes[row]
            regions = self.serverSceneRegionLists[uuid]
            for region in regions:
                if(region!=""):
                    self.listViewScenesRegions.addItem(region)
                    self.currentSceneSelectionRegions.append(region)
| {
"content_hash": "9537ae0c930969c972e7eb955aa3be85",
"timestamp": "",
"source": "github",
"line_count": 498,
"max_line_length": 139,
"avg_line_length": 41.321285140562246,
"alnum_prop": 0.6561376227038584,
"repo_name": "antont/tundra",
"id": "b463a37ab9b248ba695fc4ed34221c2e3dfd0696",
"size": "20578",
"binary": false,
"copies": "1",
"ref": "refs/heads/tundra2",
"path": "src/Application/PythonScriptModule/pymodules_old/localscene/window.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "110345"
},
{
"name": "C#",
"bytes": "76173"
},
{
"name": "C++",
"bytes": "4959154"
},
{
"name": "CoffeeScript",
"bytes": "2229"
},
{
"name": "JavaScript",
"bytes": "316308"
},
{
"name": "Objective-C",
"bytes": "222359"
},
{
"name": "Python",
"bytes": "999850"
},
{
"name": "Shell",
"bytes": "8224"
},
{
"name": "TypeScript",
"bytes": "230019"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from scipy.ndimage import map_coordinates
def reproject_image_into_polar(data, origin=None, Jacobian=False,
                               dr=1, dt=None):
    """
    Reprojects a 2D numpy array (**data**) into a polar coordinate system,
    with the pole placed at **origin** and the angle measured clockwise from
    the upward direction. The resulting array has rows corresponding to the
    radial grid, and columns corresponding to the angular grid.

    Parameters
    ----------
    data : 2D np.array
        the image array
    origin : tuple or None
        (row, column) coordinates of the image origin. If ``None``, the
        geometric center of the image is used.
    Jacobian : bool
        Include `r` intensity scaling in the coordinate transform.
        This should be included to account for the changing pixel size that
        occurs during the transform.
    dr : float
        radial coordinate spacing for the grid interpolation.
        Tests show that there is not much point in going below 0.5.
    dt : float or None
        angular coordinate spacing (in radians).
        If ``None``, the number of angular grid points will be set to the
        largest dimension (the height or the width) of the image.

    Returns
    -------
    output : 2D np.array
        the polar image (r, theta)
    r_grid : 2D np.array
        meshgrid of radial coordinates
    theta_grid : 2D np.array
        meshgrid of angular coordinates

    Notes
    -----
    Adapted from:
    https://stackoverflow.com/questions/3798333/image-information-along-a-polar-coordinate-system
    """
    ny, nx = data.shape[:2]
    if origin is None:
        origin = (ny // 2, nx // 2)
    else:
        origin = list(origin)
        # wrap negative coordinates
        if origin[0] < 0:
            origin[0] += ny
        if origin[1] < 0:
            origin[1] += nx
    # Determine what the min and max r and theta coords will be...
    x, y = index_coords(data, origin=origin)  # (x,y) coordinates of each pixel
    r, theta = cart2polar(x, y)  # convert (x,y) -> (r,θ), note θ=0 is vertical
    # Number of radial bins: one per `dr` units of radius.
    nr = int(np.ceil((r.max() - r.min()) / dr))
    if dt is None:
        nt = max(nx, ny)
    else:
        # dt in radians
        nt = int(np.ceil((theta.max() - theta.min()) / dt))
    # Make a regular (in polar space) grid based on the min and max r & theta
    # (endpoint=False keeps the bins half-open so the last bin is not doubled)
    r_i = np.linspace(r.min(), r.max(), nr, endpoint=False)
    theta_i = np.linspace(theta.min(), theta.max(), nt, endpoint=False)
    theta_grid, r_grid = np.meshgrid(theta_i, r_i)
    # Convert the r and theta grids to Cartesian coordinates
    X, Y = polar2cart(r_grid, theta_grid)
    # then to a 2×n array of row and column indices for np.map_coordinates()
    # (row index decreases with increasing y because image rows grow downward)
    rowi = (origin[0] - Y).flatten()
    coli = (X + origin[1]).flatten()
    coords = np.vstack((rowi, coli))
    # Remap with interpolation
    # (making an array of floats even if the data has an integer type)
    zi = map_coordinates(data, coords, output=float)
    output = zi.reshape((nr, nt))
    if Jacobian:
        # scale each radial row by its radius (area element r·dr·dθ)
        output *= r_i[:, np.newaxis]
    return output, r_grid, theta_grid
def index_coords(data, origin=None):
    """
    Create per-pixel `x` and `y` coordinate grids for a 2D array, measured
    relative to **origin**, with `x` increasing to the right and `y`
    increasing upward (i.e. opposite to the row index).

    Parameters
    ----------
    data : numpy array
        2D data. Only the array shape is used.
    origin : tuple or None
        (row, column). Defaults to the geometric center of the image.
        Negative components count from the far edge.

    Returns
    -------
    x, y : 2D numpy arrays
    """
    ny, nx = data.shape[:2]
    if origin is None:
        origin_y, origin_x = ny // 2, nx // 2
    else:
        origin_y, origin_x = origin
    # wrap negative coordinates
    if origin_y < 0:
        origin_y += ny
    if origin_x < 0:
        origin_x += nx
    xs = np.arange(float(nx)) - origin_x
    ys = origin_y - np.arange(float(ny))
    grid_x, grid_y = np.meshgrid(xs, ys)
    return grid_x, grid_y
def cart2polar(x, y):
    """
    Convert Cartesian coordinates to polar ones.

    The angle is measured from the positive `y` axis (vertical), as
    everywhere else in this module.

    Parameters
    ----------
    x, y : floats or arrays
        Cartesian coordinates

    Returns
    -------
    r, theta : floats or arrays
        Polar coordinates
    """
    radius = np.sqrt(x**2 + y**2)
    angle = np.arctan2(x, y)  # zero angle points "up", not along +x
    return radius, angle
def polar2cart(r, theta):
    """
    Convert polar coordinates to Cartesian ones.

    The angle is measured from the positive `y` axis (vertical), so the
    usual roles of sin and cos are swapped.

    Parameters
    ----------
    r, theta : floats or arrays
        Polar coordinates

    Returns
    -------
    x, y : floats or arrays
        Cartesian coordinates
    """
    return r * np.sin(theta), r * np.cos(theta)
| {
"content_hash": "01d3c7beffc60231b07c1cc19678e329",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 97,
"avg_line_length": 29.4251497005988,
"alnum_prop": 0.5970695970695971,
"repo_name": "PyAbel/PyAbel",
"id": "acb0839b02a41e3229ca266f52903831c52d342c",
"size": "4944",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "abel/tools/polar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "3187"
},
{
"name": "Python",
"bytes": "497584"
}
],
"symlink_target": ""
} |
"""Tests for grr.lib.bigquery."""
import io
import os
import time
from unittest import mock
from absl import app
from googleapiclient import errors
from grr_response_core import config
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.util import temp
from grr_response_core.lib.util.compat import json
from grr_response_server import bigquery
from grr.test_lib import test_lib
class BigQueryClientTest(test_lib.GRRBaseTest):
  """Tests BigQuery client."""
  # Fake project/credentials; the API itself is mocked out below.
  PROJECT_ID = "grr-dummy"
  SERVICE_ACCOUNT_JSON = """{"type": "service_account"}"""
  # NOTE: mock.patch decorators apply bottom-up, so the parameter order is
  # mock_http <- httplib2.Http, mock_build <- discovery.build,
  # mock_creds <- ServiceAccountCredentials.
  @mock.patch.object(bigquery, "ServiceAccountCredentials")
  @mock.patch.object(bigquery.discovery, "build")
  @mock.patch.object(bigquery.httplib2, "Http")
  def testInsertData(self, mock_http, mock_build, mock_creds):
    """InsertData issues exactly one insert job carrying the given job id."""
    bq_client = bigquery.GetBigQueryClient(
        service_account_json=self.SERVICE_ACCOUNT_JSON,
        project_id=self.PROJECT_ID)
    schema_path = os.path.join(config.CONFIG["Test.data_dir"], "bigquery",
                               "ExportedFile.schema")
    schema_data = json.ReadFromPath(schema_path)
    # NOTE(review): data_fd is never closed in this test.
    data_fd = open(
        os.path.join(config.CONFIG["Test.data_dir"], "bigquery",
                     "ExportedFile.json.gz"), "rb")
    now = rdfvalue.RDFDatetime.Now().AsSecondsSinceEpoch()
    job_id = "hunts_HFFE1D044_Results_%s" % now
    bq_client.InsertData("ExportedFile", data_fd, schema_data, job_id)
    # We should have called insert once
    insert = mock_build.return_value.jobs.return_value.insert
    self.assertEqual(insert.call_count, 1)
    self.assertEqual(
        job_id, insert.call_args_list[0][1]["body"]["jobReference"]["jobId"])
  def testRetryUpload(self):
    """RetryUpload retries with backoff and ultimately raises on 503s."""
    bq_client = bigquery.BigQueryClient()
    # Simulate a retryable HTTP 503 error from the BigQuery API.
    resp = mock.Mock()
    resp.status = 503
    error = mock.Mock()
    error.resp = resp
    job = mock.Mock()
    # Always raise errors.HttpError on job.execute()
    job.configure_mock(
        **{"execute.side_effect": errors.HttpError(resp, b"nocontent")})
    job_id = "hunts_HFFE1D044_Results_1446056474"
    with temp.AutoTempFilePath() as filepath:
      with io.open(filepath, "w", encoding="utf-8") as filedesc:
        filedesc.write("{data}")
      # time.sleep is patched so the retry backoff doesn't slow the test.
      with mock.patch.object(time, "sleep") as mock_sleep:
        with self.assertRaises(bigquery.BigQueryJobUploadError):
          bq_client.RetryUpload(job, job_id, error)
    # Make sure retry sleeps are correct.
    max_calls = config.CONFIG["BigQuery.retry_max_attempts"]
    retry_interval = config.CONFIG["BigQuery.retry_interval"]
    multiplier = config.CONFIG["BigQuery.retry_multiplier"]
    self.assertEqual(job.execute.call_count, max_calls)
    mock_sleep.assert_has_calls([
        mock.call(retry_interval.ToFractional(rdfvalue.SECONDS)),
        mock.call(retry_interval.ToFractional(rdfvalue.SECONDS) * multiplier)
    ])
def main(argv):
  """Absl entry point: discard CLI args and run the GRR test runner."""
  del argv  # Unused.
  test_lib.main()
if __name__ == "__main__":
  app.run(main)
| {
"content_hash": "07160002e04245ca5b7ce1e4baaeee15",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 77,
"avg_line_length": 33.47727272727273,
"alnum_prop": 0.6860149355057705,
"repo_name": "google/grr",
"id": "b10d825fdc5c995da10e92bc90d6d9236d5b2e6f",
"size": "2968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grr/server/grr_response_server/bigquery_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "12697"
},
{
"name": "C++",
"bytes": "54814"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "366783"
},
{
"name": "JavaScript",
"bytes": "13088"
},
{
"name": "Jupyter Notebook",
"bytes": "199216"
},
{
"name": "Makefile",
"bytes": "3244"
},
{
"name": "PowerShell",
"bytes": "531"
},
{
"name": "Python",
"bytes": "8844725"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "SCSS",
"bytes": "105120"
},
{
"name": "Shell",
"bytes": "48663"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TypeScript",
"bytes": "2139377"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
# General imports
import re
from django.shortcuts import render, redirect
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.views import generic, View
from django.contrib import messages
# Import Models
from .forms import TripInformationForm, CompareOrFavoriteForm
from resorts.models import Resort, SkiPass
# Import Views
from resorts.views import resorts_table
# Import GeoIP2 for location guessing
from django.contrib.gis.geoip2 import GeoIP2
# Useful constants
DEFAULT_ADDRESS_FILL_IN = 'Let\'s go!'
def index(request):
    """ Home page for Skidom.

    On a GET request: Displays form for getting user trip details.
    If user logged in and has favorite resorts, displays information
    about the five of them with the most 24h snow.
    Else, displays 5 resorts with the most 24h snow.
    On a POST request: Processes user trip detail form.
    Renders matching resorts and information to comparison template.

    Args:
        request (request): Page request.

    Returns:
        render: Renders valid resorts and info to comparison template
        if successful. Else, reloads the page.
    """
    resorts = Resort.objects.all()
    if request.method == 'POST':
        form = TripInformationForm(request.POST, pass_id=request.POST['pass_id'], starting_from=request.POST['user_address'])
        if form.is_valid():
            # Consistency fix: compare against the module-level constant
            # instead of re-spelling the placeholder literal in-line.
            if form.cleaned_data['user_address'] not in ["", DEFAULT_ADDRESS_FILL_IN]:
                resorts = process_form(form)
                if resorts:
                    return render(request, 'resorthub/compare.html', {'resorts_list': resorts})
                else:
                    messages.warning(request, "No resorts matching criteria found. Please try again!")
                    return redirect('/')
            else:
                messages.warning(request, "Please enter a valid address!")
                return redirect('/')
        # BUG FIX: an invalid form previously fell through and returned None,
        # which Django reports as a 500. Redirect home with a warning instead.
        messages.warning(request, "Please enter a valid address!")
        return redirect('/')
    else:
        header_message = "Where we\'d ski this weekend:"
        if request.user.is_authenticated():
            address = request.user.address
            pass_id = request.user.pass_id
            #if len(request.user.favorite_resorts.objects()) > 0:
            #    header_message = "What\'s up with your favorite resorts:"
            #    resorts = request.user.favorite_resorts.all()
        else:
            # Consistency fix: reuse the shared placeholder constant.
            address = DEFAULT_ADDRESS_FILL_IN
            pass_id = None
        table = resorts_table(resorts, order_on='new_snow_24_hr')
        form = TripInformationForm(pass_id=pass_id, starting_from=address)
        return render(request, 'resorthub/index.html', {'form': form, 'header_message': header_message, 'resorts_list': table})
def process_form(form):
    """ Processes trip information form to get resort list for display.

    Args:
        form (Form): Posted (validated) trip information form.

    Returns:
        list: Resort dictionaries filtered by the pass option the user
        selected and sorted for display. Empty if no resorts with that
        pass are available.
    """
    address = form.cleaned_data['user_address']
    pass_id = form.cleaned_data['pass_id']
    if pass_id:
        resorts = SkiPass.objects.get(name=pass_id).resorts.all()
    else:
        # BUG FIX: was `Resorts.objects.all()` — `Resorts` is undefined
        # (NameError at runtime); the imported model is named `Resort`.
        resorts = Resort.objects.all()
    return resorts_table(resorts, user_address=address, number_to_display=len(resorts))
def resort_listing(request):
    """ Form page for either comparing selected resorts or adding them to the user's favorites.

    On a GET request, displays all available resorts and their relevant information. There are two
    buttons a user can use to make a POST request: either 'favorite' or 'compare.' As the information
    displayed is static, uses raw Resort objects.
    On a POST request, if 'favorite' selected: if user not authenticated, the user is redirected to
    the login page; otherwise selected resorts are added to user favorites and the user is redirected
    to their profile. Elif 'compare' selected: selected resort information is rendered to the
    comparison template.

    Args:
        request (request): Page request

    Returns:
        Redirect or render based on criteria above.
    """
    if request.method == 'POST':
        selected_resort_ids = request.POST.getlist('choices[]')
        selected_resorts = Resort.objects.filter(pk__in=selected_resort_ids)
        if ("compare" in request.POST.keys()):
            if request.user.is_authenticated() and request.user.address is not None:
                starting_address = request.user.address.formatted
            else:
                # We use GeoIP2 here to guess the starting address based on the user's IP.
                g = GeoIP2()
                ip = request.META['REMOTE_ADDR']
                try:
                    starting_address = g.city(ip)['city']
                except Exception:
                    # Narrowed from a bare `except`; keep the best-effort default.
                    starting_address = "Boston MA"
            # BUG FIX: previously called `get_resort_list`, which is neither
            # defined nor imported in this module (NameError). `resorts_table`
            # (imported at top, used by index/process_form) takes exactly
            # these keyword arguments.
            resorts_list = resorts_table(selected_resorts, user_address=starting_address, number_to_display=len(selected_resorts), order_on='distance')
            return render(request, 'resorthub/compare.html', {'resorts_list': resorts_list})
        elif ("favorite" in request.POST.keys()):
            if not request.user.is_authenticated():
                return redirect("/accounts/login/")
            else:
                request.user.favorite_resorts.add(*selected_resorts)
                request.user.save()
                messages.success(request, "Resorts added to favorites.")
                return redirect("/users/profile/")
        # NOTE(review): a POST with neither button still returns None here
        # (Django 500) — confirm whether that request shape can occur.
    else:
        resorts_objects_list = Resort.objects.order_by('name')
        return render(request, 'resorthub/resorts.html', {'resorts_list': resorts_objects_list})
def compare_listing(request, resorts_list=None):
    """ View for comparison page.

    Args:
        request (request): Page request
        resorts_list (list): List of Resort dictionaries (defaults to empty)

    Returns:
        render: Renders resorts_list to comparison template
    """
    # BUG FIX: the default was a shared mutable list (`resorts_list=[]`),
    # the classic mutable-default-argument pitfall. Use None as sentinel.
    if resorts_list is None:
        resorts_list = []
    return render(request, 'resorthub/compare.html', {'resorts_list': resorts_list})
| {
"content_hash": "8174488f8420c152b48fc114e7816e87",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 161,
"avg_line_length": 37.32544378698225,
"alnum_prop": 0.6353836398224477,
"repo_name": "racmariano/skidom",
"id": "dd96b1c1388c64297c2b58b2e59a80b579e96570",
"size": "6332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/resorthub/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4094"
},
{
"name": "HTML",
"bytes": "10934"
},
{
"name": "JavaScript",
"bytes": "6583"
},
{
"name": "Python",
"bytes": "71579"
}
],
"symlink_target": ""
} |
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.html import format_html, mark_safe, strip_tags
from django.utils.text import capfirst, Truncator
from blogoland.confs import DEFAULT_DATE_FORMAT
from blogoland.models import Post, Category
register = template.Library()
DATE_FORMAT = getattr(settings, 'BLOGOLAND_DATE_FORMAT', DEFAULT_DATE_FORMAT)
# Helper function
def build_img_tag(img):
    """
    Build a HTML Img TAG with the given img object.

    ``img`` must expose ``image.url`` and ``title`` attributes
    (presumably a blogoland image model instance — confirm).

    NOTE(review): the markup is passed through ``mark_safe`` without
    escaping ``img.title`` or the URL; if titles can contain
    user-supplied markup this is an XSS vector — consider
    ``format_html`` instead of ``format`` + ``mark_safe``.
    """
    img_tag = '<img src="{0}" alt="{1}" />'.format(img.image.url, img.title)
    return mark_safe(img_tag)
@register.simple_tag(takes_context=True)
def post_title(context):
    """
    Return the post title with its first letter capitalized; titles of
    non-public posts are prefixed with '[DRAFT]'.
    """
    post = context['object']
    if post.is_public():
        return capfirst(post.title)
    return '[DRAFT] %s' % capfirst(post.title)
@register.simple_tag(takes_context=True)
def post_date(context):
    """
    Return the post's publication date formatted with
    BLOGOLAND_DATE_FORMAT (default '%d-%m-%Y').
    """
    return context['object'].publication_date.strftime(DATE_FORMAT)
@register.simple_tag(takes_context=True)
def post_content(context):
    """
    Render the Post instance wrapped in the context as HTML.

    Returns '' when the context carries no 'object', the object is not a
    Post, or rendering fails.
    """
    try:
        post = context['object']
        if isinstance(post, Post):
            return format_html(post.content)
    except Exception:
        # Narrowed from a bare `except`; keeps the best-effort behaviour.
        pass
    # BUG FIX: non-Post objects previously fell off the end and returned
    # None, which templates render as the literal string "None".
    return ''
@register.simple_tag(takes_context=True)
def post_excerpt(context, word_limit=10):
    """
    Return a plain-text excerpt of the post: HTML tags stripped and the
    remaining text truncated to *word_limit* words.
    """
    plain_text = strip_tags(context['object'].content)
    return Truncator(plain_text).words(word_limit)
@register.simple_tag(takes_context=True)
def post_detail_image(context):
    """
    Render an <img> tag for the last 'detail' image of the Post, or ''
    when the post has no such image (or the lookup fails).
    """
    try:
        post = context['object']
        img = post.image_set.get_last_img_type('detail')
        return build_img_tag(img)
    except Exception:
        # Narrowed from a bare `except`; keeps the best-effort '' result.
        return ''
@register.simple_tag(takes_context=True)
def post_thumbnail_image(context):
    """
    Render an <img> tag for the last 'thumbnail' image of the Post, or ''
    when the post has no such image (or the lookup fails).
    """
    try:
        post = context['object']
        img = post.image_set.get_last_img_type('thumbnail')
        return build_img_tag(img)
    except Exception:
        # Narrowed from a bare `except`; keeps the best-effort '' result.
        return ''
@register.simple_tag(takes_context=True)
def get_post_gallery_images(context):
    """
    Return the post's related image objects flagged as 'gallery', or an
    empty list when the context carries no post (or the query fails).
    """
    try:
        return context['object'].image_set.filter(img_type='gallery')
    except Exception:
        # Narrowed from a bare `except`; keeps the best-effort [] result.
        return []
@register.simple_tag
def get_latest_posts(post_limit=None):
    """
    Return the latest public posts, optionally limited to *post_limit*.
    """
    public_posts = Post.objects.get_public_posts()
    return public_posts[:post_limit]
@register.simple_tag
def get_category_list(cat_limit=None):
    """
    Return all Category objects, optionally limited to *cat_limit*.
    """
    categories = Category.objects.all()
    return categories[:cat_limit]
@register.simple_tag(takes_context=True)
def social_media_image_url(context):
    """
    Return the absolute URL of the post's thumbnail for Open Graph tags,
    or None when the post has no thumbnail (or the lookup fails).

    NOTE(review): the scheme is hard-coded to http — confirm the site is
    not served over https only.
    """
    site = Site.objects.get_current()
    try:
        post = context['object']
        img = post.image_set.get_last_img_type('thumbnail')
        return 'http://{0}{1}'.format(site.domain, img.image.url)
    except Exception:
        # Narrowed from a bare `except`; None return made explicit.
        return None
@register.simple_tag(takes_context=True)
def social_media_post_url(context):
    """
    Return the post's absolute URL for Open Graph / Twitter Card tags,
    or None when the context carries no post (or the lookup fails).

    NOTE(review): the scheme is hard-coded to http — confirm the site is
    not served over https only.
    """
    site = Site.objects.get_current()
    try:
        post = context['object']
        return 'http://{0}{1}'.format(site.domain, post.get_absolute_url())
    except Exception:
        # Narrowed from a bare `except`; None return made explicit.
        return None
@register.inclusion_tag('blogoland/snippets/paginator.html', takes_context=True)
def paginator(context):
    """
    Render the pagination snippet by passing the current template context
    straight through to 'blogoland/snippets/paginator.html'.
    """
    return context
| {
"content_hash": "66f3b9f997319cda09f57fa2f046b9b8",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 80,
"avg_line_length": 26.0920245398773,
"alnum_prop": 0.6536562426522454,
"repo_name": "marsxn/blogoland",
"id": "41bca3741f6a13b8a73dd71f88ebaa3a53f5bb40",
"size": "4276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blogoland/templatetags/blogoland_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "691"
},
{
"name": "Python",
"bytes": "20769"
}
],
"symlink_target": ""
} |
import sys, binascii, random
def fromhex(h):
    """Decode a hex string (or anything str() can render) into bytes."""
    encoded = str(h).encode("ascii")
    return binascii.unhexlify(encoded)
if sys.version_info[0] >= 3:
    def flip_bit_in_byte(byte, whichbit):
        """Return a 1-byte bytes object: *byte* (an int) with bit *whichbit* inverted."""
        return bytes([byte ^ (1 << whichbit)])
else:
    def flip_bit_in_byte(byte, whichbit):
        """Python 2 variant: *byte* is a 1-char str; returns a 1-char str."""
        return chr(ord(byte) ^ (1 << whichbit))
def flip_bit(orig):
    """Return a copy of *orig* with one uniformly-random bit inverted."""
    pos = random.randrange(0, len(orig))
    bit = random.randrange(0, 8)
    return orig[:pos] + flip_bit_in_byte(orig[pos], bit) + orig[pos + 1:]
| {
"content_hash": "f090b14da5d512d6afbb05e29445611e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 59,
"avg_line_length": 29.789473684210527,
"alnum_prop": 0.6060070671378092,
"repo_name": "warner/python-tweetnacl",
"id": "9d05f9419606874960f35fe0938fc6ad6a3f2b62",
"size": "567",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "78437"
},
{
"name": "Python",
"bytes": "49020"
}
],
"symlink_target": ""
} |
"""
Description : Simple Python implementation of the Apriori Algorithm
Usage:
$python apriori.py -f DATASET.csv -s minSupport -c minConfidence
$python apriori.py -f DATASET.csv -s 0.15 -c 0.6
"""
import sys
from itertools import chain, combinations
from collections import defaultdict
from optparse import OptionParser
def subsets(arr):
    """ Returns non empty subsets of arr"""
    sizes = range(1, len(arr) + 1)
    return chain.from_iterable(combinations(arr, size) for size in sizes)
def returnItemsWithMinSupport(itemSetVar, transactionList, minSupport, freqSet):
    """Calculate the support of each candidate itemset and partition them.

    BUG FIX: the previous body was unfinished work-in-progress — it ended
    in a dangling `for` (a SyntaxError) and referenced undefined names
    (`numVars`, `tCiclo`, `item`). Restored the standard Apriori support
    count, which is the contract the callers in runApriori() rely on.

    Args:
        itemSetVar: iterable of candidate itemsets (frozensets).
        transactionList: list of transactions (iterables of items).
        minSupport: minimum relative support threshold in [0, 1].
        freqSet: defaultdict(int) accumulating absolute counts (mutated).

    Returns:
        (_itemSet, _itemSetRemoved): sets of itemsets whose support meets /
        fails minSupport. Candidates appearing in no transaction land in
        neither set.
    """
    _itemSet = set()
    _itemSetRemoved = set()
    localSet = defaultdict(int)
    # Count how many transactions contain each candidate itemset.
    for item in itemSetVar:
        for transaction in transactionList:
            if item.issubset(transaction):
                freqSet[item] += 1
                localSet[item] += 1
    # Partition candidates by relative support.
    for item, count in localSet.items():
        support = float(count) / len(transactionList)
        if support >= minSupport:
            _itemSet.add(item)
        else:
            _itemSetRemoved.add(item)
    return _itemSet, _itemSetRemoved
def joinSet(itemSet, removedSet, length):
    """Join itemSet with itself and return the pruned n-element itemsets.

    Candidates containing any previously removed (infrequent) itemset are
    pruned, per the Apriori downward-closure property.

    BUG FIX: `flagAddItem` was initialised once outside the loop and never
    reset, so after the first pruned candidate every later candidate was
    rejected regardless of its contents. The flag is now per-candidate.
    The `newremovedSet` accumulator was dropped: it was never returned.

    Args:
        itemSet: frequent itemsets from the previous pass (frozensets).
        removedSet: itemsets already known to be infrequent.
        length: target candidate size.

    Returns:
        set of frozensets of size `length` surviving the prune.
    """
    composed_set = set([i.union(j) for i in itemSet for j in itemSet
                        if len(i.union(j)) == length])
    filteredSet = set()
    for item in composed_set:
        keep = True  # reset for EVERY candidate (this was the bug)
        for itemR in removedSet:
            if itemR.issubset(item):
                keep = False
                break
        if keep:
            filteredSet.add(item)
    return filteredSet
def getItemSetTransactionList(data_iterator, numVars=4):
    """Collect all transactions and the per-column 1-item candidate sets.

    Generalized: the number of variables (leading columns read from each
    record) was hard-coded to 4; it is now a keyword parameter defaulting
    to 4, so existing callers are unaffected.

    Args:
        data_iterator: iterable of records (indexable, len >= numVars).
        numVars: how many leading columns to collect item values from.

    Returns:
        (itemSetVar, transactionList): a list of numVars sets holding the
        distinct values seen per column, and the list of all records.
    """
    transactionList = list()
    itemSetVar = [set() for _ in range(numVars)]
    for transaction in data_iterator:
        transactionList.append(transaction)
        for i in range(numVars):
            itemSetVar[i].add(transaction[i])  # Generate 1-itemSets
    return itemSetVar, transactionList
def runApriori(data_iter, minSupport, minConfidence):
    """
    run the apriori algorithm. data_iter is a record iterator
    Return both:
    - items (tuple, support)
    - rules ((pretuple, posttuple), confidence)

    NOTE(review): getItemSetTransactionList() returns a LIST of per-column
    sets, but this function passes it straight to
    returnItemsWithMinSupport() as if it were one flat candidate set —
    this file appears to be mid-refactor (it is named *_temp); confirm the
    intended candidate-set shape before relying on the output.
    """
    itemSet, transactionList = getItemSetTransactionList(data_iter)
    freqSet = defaultdict(int)
    largeSet = dict()
    # Global dictionary which stores (key=n-itemSets,value=support)
    # which satisfy minSupport
    assocRules = dict()
    # Dictionary which stores Association Rules
    # NOTE(review): assocRules is never written or read after this point.
    oneCSet, removedCSet = returnItemsWithMinSupport(itemSet,
                                                     transactionList,
                                                     minSupport,
                                                     freqSet)
    currentLSet = oneCSet
    removedLSet = removedCSet
    k = 2
    # Grow candidate itemsets one element at a time until none survive.
    while(currentLSet != set([])):
        largeSet[k-1] = currentLSet
        currentLSet = joinSet(currentLSet, removedLSet, k)
        currentCSet, removedCSet = returnItemsWithMinSupport(currentLSet,
                                                             transactionList,
                                                             minSupport,
                                                             freqSet)
        currentLSet = currentCSet
        removedLSet = removedCSet
        k = k + 1
    def getSupport(item):
        """local function which Returns the support of an item"""
        return float(freqSet[item])/len(transactionList)
    # Flatten all frequent itemsets into (tuple, support) pairs.
    toRetItems = []
    for key, value in largeSet.items():
        toRetItems.extend([(tuple(item), getSupport(item))
                           for item in value])
    # Derive association rules element -> remain from each frequent itemset.
    toRetRules = []
    for key, value in largeSet.items():
        for item in value:
            _subsets = map(frozenset, [x for x in subsets(item)])
            for element in _subsets:
                remain = item.difference(element)
                if len(remain) > 0:
                    # confidence(A -> B) = support(A ∪ B) / support(A)
                    confidence = getSupport(item)/getSupport(element)
                    if confidence >= minConfidence:
                        toRetRules.append(((tuple(element), tuple(remain)),
                                           confidence))
    return toRetItems, toRetRules
def printResults(items, rules):
    """prints the generated itemsets sorted by support and the confidence rules sorted by confidence"""
    for itemset, support in sorted(items, key=lambda pair: pair[1]):
        print("item: %s , %.3f" % (str(itemset), support))
    print("\n------------------------ RULES:")
    for rule, confidence in sorted(rules, key=lambda pair: pair[1]):
        antecedent, consequent = rule
        print("Rule: %s ==> %s , %.3f" % (str(antecedent), str(consequent), confidence))
def dataFromFile(fname):
    """Read a CSV-ish file and yield one record (list of fields) per line.

    Fixes:
      - mode 'rU' was deprecated and removed in Python 3.11 (raises
        ValueError); universal newlines are the default in text mode, so
        plain 'r' is equivalent.
      - the file handle was never closed; `with` now closes it when the
        generator is exhausted or garbage-collected.
    """
    with open(fname, 'r') as file_iter:
        for line in file_iter:
            line = line.strip().rstrip(',')  # Remove trailing comma
            yield line.split(',')
if __name__ == "__main__":
    # Command-line entry point: parse options, pick the input source, run
    # Apriori, then print the resulting itemsets and rules.
    optparser = OptionParser()
    optparser.add_option('-f', '--inputFile',
                         dest='input',
                         help='filename containing csv',
                         default=None)
    optparser.add_option('-s', '--minSupport',
                         dest='minS',
                         help='minimum support value',
                         default=0.15,
                         type='float')
    optparser.add_option('-c', '--minConfidence',
                         dest='minC',
                         help='minimum confidence value',
                         default=0.6,
                         type='float')
    (options, args) = optparser.parse_args()
    inFile = None
    # No -f flag: read records from stdin instead of a file.
    if options.input is None:
        inFile = sys.stdin
    elif options.input is not None:
        inFile = dataFromFile(options.input)
    else:
        # NOTE(review): unreachable — the two branches above exhaust every
        # case (`is None` / `is not None`), so this error path (with its
        # "system with exit" typo) can never run. Dead code.
        print('No dataset filename specified, system with exit')
        sys.exit('System will exit')
    minSupport = options.minS
    minConfidence = options.minC
    items, rules = runApriori(inFile, minSupport, minConfidence)
    printResults(items, rules)
| {
"content_hash": "ac8c4b442d7bfc9dd660170d9bd21f19",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 103,
"avg_line_length": 35.17857142857143,
"alnum_prop": 0.5524292965917331,
"repo_name": "sergiofsilva/apriori-basico",
"id": "8aa1fd354c9541c964bacb8e829aec990d0a311c",
"size": "6895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apriori_temp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26977"
}
],
"symlink_target": ""
} |
from ..gitpub import gitpub
def sort_repos(repo_list):
    """
    Sort repositories by ascending stargazers_count.

    Replaces the previous recursive first-pivot quicksort, which was
    worst-case O(n^2) and could hit the recursion limit on large,
    already-ordered inputs. Ordering is unchanged: the old quicksort was
    stable (equal keys kept input order, pivot first), and sorted() is
    stable too.

    Parameters
    ----------------------------------
    repo_list : [gitpub.Repository()]
        Repositories to sort (any objects with a stargazers_count attribute)
    =================================================
    Returns:
    -----------------------------------
    repo_list : [gitpub.Repository()]
        New list of repositories sorted by number of stars (ascending)
    """
    return sorted(repo_list, key=lambda repo: repo.stargazers_count)
def main(username='defunkt'):
    """
    Main module to put it all together
    Loading Profile > Fetch public repos > Generating sorted list of repos

    Parameters
    ----------------------------------------------------------------------
    username : str
        GitHub username whose public repositories are ranked
        (doc fix: the previous docstring documented a nonexistent
        ``filename`` parameter)
    ==================================
    Returns
    ---------------------------------------------------
    sorted_repos : [gitpub.Repository()]
        Array of repositories sorted by number of stars
    """
    # Network-bound: fetches the profile and its public repos from GitHub.
    profile = gitpub.Profile()
    profile.load_gh_profile(username)
    profile.get_public_repos()
    # reversed(): sort_repos() sorts ascending, display wants most stars first.
    sorted_repos = reversed(sort_repos(profile.public_repos))
    print ("%s(%s)'s most popular repositories by stargazers count are:" % (profile.name, profile.username))
    for repo in sorted_repos:
        print ("%s (%d stars)" % (repo.name, repo.stargazers_count))
    # NOTE(review): sorted_repos is a reversed() iterator and the loop above
    # has exhausted it, so callers receive an empty iterator — confirm
    # whether a list was intended here.
    return sorted_repos
if __name__ == '__main__':
    sorted_repos = main()
| {
"content_hash": "3a62ad228c37908368a25a81104b1045",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 113,
"avg_line_length": 33.98076923076923,
"alnum_prop": 0.5359366157328805,
"repo_name": "Demfier/GitPub",
"id": "b6bcaef02644d5289bd7c8160bd00abcab3431f2",
"size": "1767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/lib/samples/most_popular_repo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19783"
}
],
"symlink_target": ""
} |
from webiopi.utils.types import toint
from webiopi.utils.types import M_JSON
from webiopi.devices.instance import deviceInstance
from webiopi.decorators.rest import request, response
class Pressure():
    """Abstract pressure-sensor mixin exposing REST endpoints in Pa/hPa.

    Subclasses implement __getPascal__ (and optionally __getPascalAtSea__).
    Idiom fix throughout: `!= None` comparisons replaced by `is not None`.
    """
    def __init__(self, altitude=0, external=None):
        """
        altitude: sensor altitude in meters, used for sea-level reduction.
        external: optional Temperature sensor (instance, or a registered
            device name resolved via deviceInstance) used for
            temperature-compensated sea-level reduction.
        """
        self.altitude = toint(altitude)
        if isinstance(external, str):
            self.external = deviceInstance(external)
        else:
            self.external = external
        if self.external is not None and not isinstance(self.external, Temperature):
            raise Exception("external must be a Temperature sensor")
    def __family__(self):
        return "Pressure"
    def __getPascal__(self):
        raise NotImplementedError
    def __getPascalAtSea__(self):
        raise NotImplementedError
    @request("GET", "sensor/pressure/pa")
    @response("%d")
    def getPascal(self):
        """Raw station pressure in pascals."""
        return self.__getPascal__()
    @request("GET", "sensor/pressure/hpa")
    @response("%.2f")
    def getHectoPascal(self):
        """Station pressure in hectopascals."""
        return float(self.__getPascal__()) / 100.0
    @request("GET", "sensor/pressure/sea/pa")
    @response("%d")
    def getPascalAtSea(self):
        """Pressure reduced to sea level via the barometric formula.

        Uses the external temperature sensor when one is configured (and
        reports a non-zero Kelvin reading, avoiding a division by zero);
        otherwise falls back to the standard-atmosphere approximation.
        """
        pressure = self.__getPascal__()
        if self.external is not None:
            k = self.external.getKelvin()
            if k != 0:
                return float(pressure) / (1.0 / (1.0 + 0.0065 / k * self.altitude)**5.255)
        return float(pressure) / (1.0 - self.altitude / 44330.0)**5.255
    @request("GET", "sensor/pressure/sea/hpa")
    @response("%.2f")
    def getHectoPascalAtSea(self):
        """Sea-level pressure in hectopascals."""
        return self.getPascalAtSea() / 100.0
class Temperature():
    """Abstract temperature-sensor mixin exposing REST endpoints in K/C/F.

    Subclasses implement __getKelvin__/__getCelsius__/__getFahrenheit__.
    The conversion helpers convert the current reading when called without
    an explicit value. Idiom fix throughout: `== None` -> `is None`.
    """
    def __family__(self):
        return "Temperature"
    def __getKelvin__(self):
        raise NotImplementedError
    def __getCelsius__(self):
        raise NotImplementedError
    def __getFahrenheit__(self):
        raise NotImplementedError
    def Kelvin2Celsius(self, value=None):
        if value is None:
            value = self.getKelvin()
        return value - 273.15
    def Kelvin2Fahrenheit(self, value=None):
        if value is None:
            value = self.getKelvin()
        return value * 1.8 - 459.67
    def Celsius2Kelvin(self, value=None):
        if value is None:
            value = self.getCelsius()
        return value + 273.15
    def Celsius2Fahrenheit(self, value=None):
        if value is None:
            value = self.getCelsius()
        return value * 1.8 + 32
    def Fahrenheit2Kelvin(self, value=None):
        if value is None:
            value = self.getFahrenheit()
        return (value - 459.67) / 1.8
    def Fahrenheit2Celsius(self, value=None):
        if value is None:
            value = self.getFahrenheit()
        return (value - 32) / 1.8
    @request("GET", "sensor/temperature/k")
    @response("%.02f")
    def getKelvin(self):
        """Current temperature in kelvins."""
        return self.__getKelvin__()
    @request("GET", "sensor/temperature/c")
    @response("%.02f")
    def getCelsius(self):
        """Current temperature in degrees Celsius."""
        return self.__getCelsius__()
    @request("GET", "sensor/temperature/f")
    @response("%.02f")
    def getFahrenheit(self):
        """Current temperature in degrees Fahrenheit."""
        return self.__getFahrenheit__()
class Luminosity():
    """Abstract luminosity-sensor mixin exposing a REST endpoint in lux."""
    def __family__(self):
        return "Luminosity"
    def __getLux__(self):
        # Subclasses return the illuminance reading in lux.
        raise NotImplementedError
    @request("GET", "sensor/luminosity/lux")
    @response("%.02f")
    def getLux(self):
        """Current illuminance in lux."""
        return self.__getLux__()
class Distance():
    """Abstract distance-sensor mixin exposing REST endpoints in several units.

    Subclasses implement __getMillimeter__; all other units derive from it.
    """
    def __family__(self):
        return "Distance"
    def __getMillimeter__(self):
        raise NotImplementedError
    @request("GET", "sensor/distance/mm")
    @response("%.02f")
    def getMillimeter(self):
        """Distance in millimeters (the sensor's native unit)."""
        return self.__getMillimeter__()
    @request("GET", "sensor/distance/cm")
    @response("%.02f")
    def getCentimeter(self):
        return self.getMillimeter() / 10
    @request("GET", "sensor/distance/m")
    @response("%.02f")
    def getMeter(self):
        return self.getMillimeter() / 1000
    @request("GET", "sensor/distance/in")
    @response("%.02f")
    def getInch(self):
        # BUG FIX: was `/ 0.254`, which over-reports inches by a factor of
        # 100 (1 inch = 25.4 mm). getFoot/getYard below inherit the fix.
        return self.getMillimeter() / 25.4
    @request("GET", "sensor/distance/ft")
    @response("%.02f")
    def getFoot(self):
        return self.getInch() / 12
    @request("GET", "sensor/distance/yd")
    @response("%.02f")
    def getYard(self):
        return self.getInch() / 36
class Humidity():
    """Abstract REST interface for humidity sensors."""

    def __family__(self):
        """Report the WebIOPi sensor family name."""
        return "Humidity"

    def __getHumidity__(self):
        # Abstract hook: drivers return the relative humidity; presumably a
        # 0.0-1.0 fraction, since getHumidityPercent scales it by 100.
        raise NotImplementedError

    @request("GET", "sensor/humidity/float")
    @response("%f")
    def getHumidity(self):
        """Return the relative humidity as a float."""
        reading = self.__getHumidity__()
        return reading

    @request("GET", "sensor/humidity/percent")
    @response("%d")
    def getHumidityPercent(self):
        """Return the relative humidity as a percentage."""
        return self.__getHumidity__() * 100
# Map of driver module name -> device model names handled by that driver.
DRIVERS = {
    "bmp085": ["BMP085", "BMP180"],
    "onewiretemp": ["DS1822", "DS1825", "DS18B20", "DS18S20", "DS28EA00"],
    "tmpXXX": ["TMP75", "TMP102", "TMP275"],
    "tslXXXX": ["TSL2561", "TSL2561CS", "TSL2561T", "TSL4531", "TSL45311", "TSL45313", "TSL45315", "TSL45317"],
    "vcnl4000": ["VCNL4000"],
    "hytXXX": ["HYT221"],
    "bme280": ["BME280"],
    "mcptmp": ["MCP9808"],
    "htu21d": ["HTU21D"],
}
| {
"content_hash": "8a6c20beb7568c0a6a0466a11c3d711d",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 116,
"avg_line_length": 28.07936507936508,
"alnum_prop": 0.5830035801771245,
"repo_name": "thortex/rpi3-webiopi",
"id": "c60cabf2fda5a00bc8e52a241028587b4c9614d5",
"size": "5919",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webiopi_0.7.1/python/webiopi/devices/sensor/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "90026"
},
{
"name": "C++",
"bytes": "3839"
},
{
"name": "CSS",
"bytes": "1090"
},
{
"name": "HTML",
"bytes": "25670"
},
{
"name": "Java",
"bytes": "113423"
},
{
"name": "JavaScript",
"bytes": "44793"
},
{
"name": "Makefile",
"bytes": "1222"
},
{
"name": "Python",
"bytes": "312974"
},
{
"name": "Shell",
"bytes": "35452"
}
],
"symlink_target": ""
} |
"""Test v2 APIs for build resources."""
from __future__ import annotations
from typing import TYPE_CHECKING
import pydantic
import pytest
from mock import MagicMock
from werkzeug.exceptions import NotFound
from keeper.exceptions import ValidationError
from keeper.mediatypes import v2_json_type
from keeper.testutils import MockTaskQueue
if TYPE_CHECKING:
from unittest.mock import Mock
from keeper.testutils import TestClient
def test_builds_v2(client: TestClient, mocker: Mock) -> None:
    """Exercise the v2 build API end to end.

    Walks through product/edition setup, build creation with the v2 media
    type, upload registration, deprecation, auto-slugged builds, validation
    failures, and automatic edition creation for ticket branches.

    Fix: the first mocker.patch result and the prefix-presign mock were
    previously assigned to names that were immediately shadowed by later
    assignments, hiding the fact that the prefix-presign mock is never
    asserted. Names are now distinct; behavior is unchanged.
    """
    # Neutralize task-queue inspection; the patch handle itself is unused.
    mocker.patch(
        "keeper.taskrunner.inspect_task_queue", return_value=None
    )
    task_queue = MockTaskQueue(mocker)
    mock_presigned_url = {
        "url": "https://example.com",
        "fields": {"key": "a/b/$(unknown)"},
    }
    # NOTE(review): only the directory-presign mock is asserted below; the
    # prefix-presign mock is patched but never checked.
    presign_prefix_mock = mocker.patch(
        "keeper.services.createbuild.presign_post_url_for_prefix",
        new=MagicMock(return_value=mock_presigned_url),
    )
    presign_dir_mock = mocker.patch(
        "keeper.services.createbuild.presign_post_url_for_directory_object",
        new=MagicMock(return_value=mock_presigned_url),
    )
    s3_resource_mock = mocker.patch(
        "keeper.services.createbuild.open_s3_resource"
    )
    # Create default organization
    from keeper.models import Organization, db
    org = Organization(
        slug="test",
        title="Test",
        root_domain="lsst.io",
        fastly_domain="global.ssl.fastly.net",
        bucket_name="bucket-name",
    )
    db.session.add(org)
    db.session.commit()
    # ========================================================================
    # Add product /products/pipelines
    mocker.resetall()
    p = {
        "slug": "pipelines",
        "doc_repo": "https://github.com/lsst/pipelines_docs.git",
        "title": "LSST Science Pipelines",
        "root_domain": "lsst.io",
        "root_fastly_domain": "global.ssl.fastly.net",
        "bucket_name": "bucket-name",
    }
    r = client.post("/products/", p)
    task_queue.apply_task_side_effects()
    product_url = r.headers["Location"]
    assert r.status == 201
    # ========================================================================
    # Add a sample edition
    mocker.resetall()
    e = {
        "tracked_refs": ["main"],
        "slug": "latest",
        "title": "Latest",
        "published_url": "pipelines.lsst.io",
    }
    r = client.post(product_url + "/editions/", e)
    task_queue.apply_task_side_effects()
    # Initially no builds
    r = client.get("/products/pipelines/builds/")
    assert r.status == 200
    assert len(r.json["builds"]) == 0
    # ========================================================================
    # Add a build (using v2 api)
    mocker.resetall()
    b1 = {
        "slug": "b1",
        "github_requester": "jonathansick",
        "git_refs": ["main"],
    }
    r = client.post(
        "/products/pipelines/builds/", b1, headers={"Accept": v2_json_type}
    )
    task_queue.apply_task_side_effects()
    s3_resource_mock.assert_called_once()
    presign_dir_mock.assert_called_once()
    assert r.status == 201
    assert r.json["product_url"] == product_url
    assert r.json["slug"] == b1["slug"]
    assert r.json["date_created"] is not None
    assert r.json["date_ended"] is None
    assert r.json["uploaded"] is False
    assert r.json["published_url"] == "https://pipelines.lsst.io/builds/b1"
    assert "post_prefix_urls" in r.json
    assert "post_dir_urls" in r.json
    assert len(r.json["surrogate_key"]) == 32  # should be a uuid4 -> hex
    build_url = r.headers["Location"]
    # ========================================================================
    # Re-add build with same slug; should fail
    mocker.resetall()
    with pytest.raises(ValidationError):
        r = client.post(
            "/products/pipelines/builds/", b1, headers={"Accept": v2_json_type}
        )
    # ========================================================================
    # List builds
    mocker.resetall()
    r = client.get("/products/pipelines/builds/")
    assert r.status == 200
    assert len(r.json["builds"]) == 1
    # ========================================================================
    # Get build
    mocker.resetall()
    r = client.get(build_url)
    assert r.status == 200
    assert r.json["bucket_name"] == "bucket-name"
    assert r.json["bucket_root_dir"] == "pipelines/builds/b1"
    # ========================================================================
    # Register upload
    mocker.resetall()
    r = client.patch(build_url, {"uploaded": True})
    task_queue.apply_task_side_effects()
    assert r.status == 200
    task_queue.assert_launched_once()
    task_queue.assert_edition_build_v1(
        "http://example.test/editions/1",
        build_url,
    )
    task_queue.assert_edition_build_v1(
        "http://example.test/editions/2",
        build_url,
    )
    task_queue.assert_dashboard_build_v1(product_url)
    r = client.get(build_url)
    assert r.json["uploaded"] is True
    # ========================================================================
    # Check that the edition was rebuilt
    mocker.resetall()
    edition_data = client.get("http://example.test/editions/2")
    assert edition_data.json["build_url"] == build_url
    # ========================================================================
    # Deprecate build
    mocker.resetall()
    r = client.delete("/builds/1")
    task_queue.apply_task_side_effects()
    assert r.status == 200
    mocker.resetall()
    r = client.get("/builds/1")
    assert r.json["product_url"] == product_url
    assert r.json["slug"] == b1["slug"]
    assert r.json["date_created"] is not None
    assert r.json["date_ended"] is not None
    # Build no longer in listing
    r = client.get("/products/pipelines/builds/")
    assert r.status == 200
    assert len(r.json["builds"]) == 0
    # ========================================================================
    # Add an auto-slugged build
    mocker.resetall()
    b2 = {"git_refs": ["main"]}
    r = client.post("/products/pipelines/builds/", b2)
    task_queue.apply_task_side_effects()
    assert r.status == 201
    assert r.json["slug"] == "1"
    # ========================================================================
    # Add an auto-slugged build
    mocker.resetall()
    b3 = {"git_refs": ["main"]}
    r = client.post(
        "/products/pipelines/builds/", b3, headers={"Accept": v2_json_type}
    )
    task_queue.apply_task_side_effects()
    assert r.status == 201
    assert r.json["slug"] == "2"
    # ========================================================================
    # Add a build missing 'git_refs'
    mocker.resetall()
    b4 = {"slug": "bad-build"}
    with pytest.raises(pydantic.ValidationError):
        r = client.post("/products/pipelines/builds/", b4)
    # ========================================================================
    # Add a build with a badly formatted git_refs
    mocker.resetall()
    b5 = {"slug": "another-bad-build", "git_refs": "main"}
    with pytest.raises(pydantic.ValidationError):
        r = client.post(
            "/products/pipelines/builds/", b5, headers={"Accept": v2_json_type}
        )
    # ========================================================================
    # Add a build and see if an edition was automatically created
    mocker.resetall()
    b6 = {"git_refs": ["tickets/DM-1234"]}
    r = client.post(
        "/products/pipelines/builds/", b6, headers={"Accept": v2_json_type}
    )
    task_queue.apply_task_side_effects()
    assert r.status == 201
    r = client.get("/products/pipelines/editions/")
    assert len(r.json["editions"]) == 3
    editions = sorted(r.json["editions"])  # postgres and sqlite differ orders
    auto_edition_url = editions[-1]
    r = client.get(auto_edition_url)
    assert r.json["slug"] == "DM-1234"
# Authorization tests: POST /products/<slug>/builds/ (v2) ====================
# Only the build-upload auth'd client should get in
def test_post_build_auth_anon(anon_client: TestClient) -> None:
    """An anonymous client must get 401 from the v2 build-creation endpoint."""
    resp = anon_client.post(
        "/products/test/builds/",
        {"foo": "bar"},
        headers={"Accept": v2_json_type},
    )
    assert resp.status == 401
def test_post_build_auth_product_client(product_client: TestClient) -> None:
    """A product-scoped client must get 403 from the v2 build endpoint."""
    resp = product_client.post(
        "/products/test/builds/",
        {"foo": "bar"},
        headers={"Accept": v2_json_type},
    )
    assert resp.status == 403
def test_post_build_auth_edition_client(edition_client: TestClient) -> None:
    """An edition-scoped client must get 403 from the v2 build endpoint."""
    resp = edition_client.post(
        "/products/test/builds/",
        {"foo": "bar"},
        headers={"Accept": v2_json_type},
    )
    assert resp.status == 403
def test_post_build_auth_builduploader_client(
    upload_build_client: TestClient,
) -> None:
    """The build-upload client passes auth; the unknown product then 404s."""
    with pytest.raises(NotFound):
        upload_build_client.post(
            "/products/test/builds/",
            {"foo": "bar"},
            headers={"Accept": v2_json_type},
        )
def test_post_build_auth_builddeprecator_client(
    deprecate_build_client: TestClient,
) -> None:
    """A deprecate-only client must get 403 from the v2 build endpoint."""
    resp = deprecate_build_client.post(
        "/products/test/builds/",
        {"foo": "bar"},
        headers={"Accept": v2_json_type},
    )
    assert resp.status == 403
| {
"content_hash": "07c795198aad61343ddd0ab4b9b459bf",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 79,
"avg_line_length": 30.49346405228758,
"alnum_prop": 0.5423855963990998,
"repo_name": "lsst-sqre/ltd-keeper",
"id": "459a3778512cc66f683a9663237f44f4e3b61997",
"size": "9331",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_builds_v2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1137"
},
{
"name": "Dockerfile",
"bytes": "1764"
},
{
"name": "Jinja",
"bytes": "2878"
},
{
"name": "Makefile",
"bytes": "3607"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "436185"
},
{
"name": "Shell",
"bytes": "7523"
}
],
"symlink_target": ""
} |
from oslo_log import log as logging
from neutron.objects import router
from neutron.services.logapi.common import constants as log_const
from neutron.services.logapi.common import exceptions as log_exc
from neutron.services.logapi.common import validators
LOG = logging.getLogger(__name__)
# Event types that are rejected for SNAT logging by validate_snat_request.
EVENTS_DISABLE = [log_const.DROP_EVENT, log_const.ACCEPT_EVENT]
def _get_router(context, router_id):
    """Fetch the Router object for router_id or raise ResourceNotFound."""
    router_obj = router.Router.get_object(context, id=router_id)
    if router_obj:
        return router_obj
    raise log_exc.ResourceNotFound(resource_id=router_id)
@validators.ResourceValidateRequest.register(log_const.SNAT)
def validate_snat_request(context, log_data):
    """Validate an incoming SNAT log request.

    Raises ResourceIdNotSpecified when no resource_id is supplied,
    EventsDisabled for DROP/ACCEPT events, ResourceNotFound when the id
    does not belong to a Router, RouterNotEnabledSnat when the indicated
    router has SNAT disabled, and RouterGatewayNotSet when the router has
    no gateway port configured.
    """
    resource_id = log_data.get('resource_id')
    requested_event = log_data.get('event')

    if not resource_id:
        raise log_exc.ResourceIdNotSpecified(resource_type=log_const.SNAT)
    if requested_event in EVENTS_DISABLE:
        raise log_exc.EventsDisabled(events=EVENTS_DISABLE,
                                     resource_type=log_const.SNAT)

    target_router = _get_router(context, resource_id)
    # SNAT logging requires SNAT to be enabled on the router...
    if not target_router.enable_snat:
        raise log_exc.RouterNotEnabledSnat(resource_id=resource_id)
    # ...and an external gateway port to be set.
    if not target_router.gw_port_id:
        raise log_exc.RouterGatewayNotSet(resource_id=resource_id)
| {
"content_hash": "18612800d4ac547d1593381ca7df2abd",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 77,
"avg_line_length": 38.48888888888889,
"alnum_prop": 0.7263279445727483,
"repo_name": "noironetworks/neutron",
"id": "ee7f91155211ef6c9472b8c8497b598955aff161",
"size": "2367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/services/logapi/common/snat_validate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "11420614"
},
{
"name": "Shell",
"bytes": "38791"
}
],
"symlink_target": ""
} |
"""Handles authentication and authorization with Munki clients.
Classes:
AuthBase: base class
Auth1: implementation of Munki/Simian auth, server side
Auth1Client: implementation of Munki/Simian, client side
AuthSessionBase: base class to store session details
AuthSessionDict: session storage in a dict
Auth1ServerSession: session storage for Auth1 server
Auth1ClientSession: session storage for Auth1 client
"""
import warnings
# Silence the deprecated ``sha`` module warning before the imports below
# trigger it (see the note on the ``array`` import).
warnings.filterwarnings(
    'ignore', '.* sha module .*', DeprecationWarning, '.*', 0)
import array # (Mute warnings before cause) pylint: disable=g-bad-import-order,g-import-not-at-top
import base64
import calendar
import datetime
import logging
import os
import struct
from simian.auth import tlslite_bridge
from simian.auth import x509
# Message separator
MSG_SEP = ' '
# Valid age of authentication tokens, in seconds
AGE_TOKEN_SECONDS = 6 * 60 * 60
# Valid age of Cn / Sn pair data, in seconds
AGE_CN_SECONDS = 5 * 60
# Valid age of any other default session data
AGE_DEFAULT_SECONDS = 6 * 60 * 60
# Valid age of Apple SUS tokens, in seconds (21 days)
AGE_APPLESUS_TOKEN_SECONDS = 21 * 24 * 60 * 60
# Minimum value for Cn (client nonce) value
MIN_VALUE_CN = 2**100
# Level values supplied to DoMunkiAuth() and used in session data
LEVEL_APPLESUS = -5
LEVEL_BASE = 0
LEVEL_ADMIN = 5
class Error(Exception):
  """Base error type for this module."""


class NotAuthenticated(Error):
  """Raised when a request is not authenticated."""

  def __init__(self, reason='Unknown'):
    # Record the human-readable failure reason before base-class init.
    self.reason = reason
    super(NotAuthenticated, self).__init__()


class AuthSessionError(Error):
  """There is a problem with the auth session."""


class KeyNotLoaded(Error):
  """A key is needed to complete this operation, but none has been loaded."""


class CertificateParseError(Error):
  """Error in parsing an X509 certificate."""


class CertificateError(Error):
  """Error in accepting the details the X509 certificate presents."""


class MessageError(Error):
  """Error in the format of a message."""


class SessionDataError(Error):
  """Session data is malformed."""


class CryptoError(Error):
  """Low level error in crypto libraries."""
class State(object):
  """I/O states of a BaseAuth class."""

  NONE = 'NONE'      # not waiting for anything
  INPUT = 'INPUT'    # waiting for input via Input()
  OUTPUT = 'OUTPUT'  # waiting to emit output via Output()
class AuthState(object):
  """Authentication states of a BaseAuth class."""

  UNKNOWN = 'UNKNOWN'  # authentication not yet determined
  OK = 'OK'            # authentication passed; nothing further needed
  FAIL = 'FAIL'        # authentication failed
class AuthSessionBase(object):
  """Base class for AuthSession session storage objects.

  This class has no underlying storage mechanism and so raises
  NotImplementedError for all actions.

  _Create, _Get, _Put, DeleteById, Delete should be overriden in subclasses.
  """
  def _Create(self, sid):
    """Create a session instance and return it.

    Args:
      sid: str, session id
    Returns:
      new session instance
    """
    raise NotImplementedError
  def _Get(self, sid):
    """Get a session instance from storage given its session id.

    Args:
      sid: str, session id
    Returns:
      session instance
    """
    raise NotImplementedError
  def _Put(self, session):
    """Put a session instance into storage.

    Args:
      session: obj, session instance
    """
    raise NotImplementedError
  def Set(self, sid, data=None, **kwargs):
    """Set session data.

    Args:
      sid: str, session id
      data: str, optional, data to set
      kwargs: dict, optional, other properties to set on session
    """
    session = self._Create(sid)
    session.data = data
    if kwargs:
      for k in kwargs:
        # 'sid' and 'mtime' are reserved (set by _Create) and are
        # silently skipped if passed as kwargs.
        if k not in ['sid', 'mtime']:
          setattr(session, k, kwargs[k])
    self._Put(session)
  def Get(self, sid):
    """Get session data.

    Args:
      sid: str, session id
    Returns:
      None if no session with that session id exists, OR
      value of 'data' property if session contains it and it is not None, OR
      entire session property if session contains multiple values
    """
    session = self._Get(sid)
    if session:
      # ExpireOne() deletes stale sessions and returns True; an expired
      # session therefore looks the same as a missing one (returns None).
      if not self.ExpireOne(session):
        if session.data is not None:
          return session.data
        else:
          return session
  def DeleteById(self, sid):
    """Delete session data for a session id.

    Args:
      sid: str, session id
    """
    raise NotImplementedError
  def _Now(self):
    """Return the current time in UTC.

    Returns:
      datetime.datetime in UTC timezone
    """
    now = datetime.datetime.utcnow()
    return now
  def _Mtime(self, session):
    """Return the mtime, last modified time, of a session object.

    Args:
      session: obj, session instance
    Returns:
      datetime.datetime, mtime in UTC
    """
    return session.mtime
  def Delete(self, session):
    """Delete one session.

    Args:
      session: obj, session instance
    """
    raise NotImplementedError
  def All(self, min_age_seconds=None):
    """Iterate through all session entities, yielding each.

    Args:
      min_age_seconds: int seconds of minimum age sessions to return.
    """
    raise NotImplementedError
  def ExpireOne(self, session, now=None):
    """Expire old session data.

    Args:
      session: type unknown, one entire entity for one session record
      now: datetime.datetime, optional, current time
    Returns:
      True, if the session is too old to use and was deleted
      False, if the session is new enough to use
    """
    if self.IsExpired(session, now=now):
      self.Delete(session)
      return True
    else:
      return False
  def IsExpired(self, session, now=None):
    """Check whether session is expired."""
    if now is None:
      now = self._Now()
    # NOTE(review): always uses AGE_DEFAULT_SECONDS as the horizon, even
    # for Cn/token records that have their own AGE_* constants defined.
    age = datetime.timedelta(seconds=AGE_DEFAULT_SECONDS)
    session_mtime = self._Mtime(session)
    if session_mtime:
      d = now - session_mtime
    else:
      d = age + age  # undefined mtime, forcefully make it too old
    if d > age:
      return True
    else:
      return False
  @classmethod
  def DefineSessionType(cls, name, prefix):
    """Define a session type.

    This autogenerates 3 helper methods:
      SetName()
      GetName()
      DelName()
    And a constant SESSION_TYPE_PREFIX_NAME containing '{prefix}_'
    The Set/Get/Del methods use '{prefix}_' with all keys.

    Note: the helpers are attached to whichever class this classmethod is
    invoked on (cls); calling AuthSessionBase.DefineSessionType from a
    subclass body attaches them to AuthSessionBase itself.

    Args:
      name: str, like 'token' (case will be adjusted)
      prefix: str, like 't' but not 't_'
    """
    sane_name = name[0].upper() + name[1:].lower()
    setattr(
        cls, 'SESSION_TYPE_PREFIX_%s' % name.upper(), '%s_' % prefix)
    setattr(
        cls, 'Set%s' % sane_name,
        lambda self, k, data=None, **kwargs: self.Set(
            '%s_%s' % (prefix, k), data, **kwargs))
    setattr(
        cls, 'Get%s' % sane_name,
        lambda self, k: self.Get('%s_%s' % (prefix, k)))
    setattr(
        cls, 'Del%s' % sane_name,
        lambda self, k: self.DeleteById('%s_%s' % (prefix, k)))
class AuthSessionData(object):
  """Class to hold session data as properties.

  This class is used to provide a consistent output interface
  between different AuthSession* objects. This avoids returning
  dicts from some, objects with properties from others, etc.
  """

  def __init__(self, **kwargs):
    if kwargs:
      self.__dict__ = dict(kwargs)

  def __contains__(self, item):
    return item in self.__dict__

  def __eq__(self, other):
    # Accept dicts (and dict subclasses; was the stricter type(other) == dict)
    # for convenient comparison against plain mappings.
    if isinstance(other, dict):
      return self.__dict__ == other
    # NOTE(review): only self's keys are compared, so the comparison is
    # asymmetric when other carries extra attributes -- preserved as-is.
    for k in self.__dict__:
      if not hasattr(other, k) or self.__dict__[k] != getattr(other, k):
        return False
    return True

  def __ne__(self, other):
    return not self.__eq__(other)
class AuthSessionDict(AuthSessionBase):
  """AuthSession storage using an in-memory dict.

  Uses a dict for indexing and AuthSessionData for session value data.
  """

  def __init__(self):
    self._sessions = {}

  def _Create(self, sid):
    """Return a fresh AuthSessionData record for session id sid."""
    return AuthSessionData(sid=sid, mtime=self._Now(), data=None)

  def _Get(self, sid):
    """Return the stored session for sid, or None if unknown."""
    return self._sessions.get(sid)

  def _Put(self, session):
    """Store session, keyed by its sid."""
    self._sessions[session.sid] = session

  def DeleteById(self, sid):
    """Remove the session stored under sid; silently no-op if absent."""
    self._sessions.pop(sid, None)

  def Delete(self, session):
    """Remove one session instance from storage."""
    self.DeleteById(session.sid)

  def All(self, unused_min_age_seconds=None):
    """Yield every stored session entity (min-age filter is unused here)."""
    for stored in self._sessions.values():
      yield stored
class Auth1ServerSession(AuthSessionDict):
  """AuthSession storage for Auth1 server."""
  # Generates Set/Get/Del helpers for 'cn_*' and 't_*' keyed records.
  # NOTE(review): DefineSessionType setattrs on its cls argument, which here
  # is AuthSessionBase -- the helpers land on the base class, not just this
  # subclass.
  AuthSessionBase.DefineSessionType('cn', 'cn')
  AuthSessionBase.DefineSessionType('token', 't')
class Auth1ClientSession(AuthSessionDict):
  """AuthSession storage for the Auth1 client (in-memory dict backend)."""
class AuthBase(object):
  """Base authentication class.

  Implements a small state machine: _state (one of the State class values)
  tracks whether the instance is waiting for Input() or has Output()
  pending, while _auth_state (one of the AuthState class values) tracks the
  authentication outcome.  Note the State()/AuthState() methods
  intentionally share names with those module-level classes; inside method
  bodies the names still resolve to the module-level classes.
  """
  def __init__(self):
    """Init."""
    self._output = None
    self._error_output = []
    self._session_class = self.GetSessionClass()
    self._session = self._session_class()
    self._default_state = self.DefaultState()
    self.ResetState()
  def GetSessionClass(self):
    # Subclasses override this to choose their session storage backend.
    return AuthSessionBase
  def DefaultState(self):
    """Return the default state of this auth class. Override in subclasses."""
    return State.NONE
  def ResetState(self):
    """Reset state and auth state to defaults."""
    self._state = self._default_state
    self._auth_state = AuthState.UNKNOWN
  def AuthFail(self):
    """Set state to failed auth."""
    self.ResetState()
    self._auth_state = AuthState.FAIL
  def _AddOutput(self, output):
    """Add output to the internal output buffer.

    Note: Also sets state to Output.

    Args:
      output: str, to concatenate to the output buffer
        or dict, to add to a dictionary output buffer
    """
    if self._output is not None:
      # Merge into the existing buffer: dicts update, strings concatenate.
      if type(output) is dict:
        self._output.update(output)
      else:
        self._output += output
    else:
      self._output = output
    self._state = State.OUTPUT
  def _AddError(self, errstr):
    """Add error output to the error output buffer.

    Args:
      errstr: str, to concatenate to the error output buffer
    """
    if self._error_output is None:
      self._error_output = [errstr]
    else:
      self._error_output.append(errstr)
  def ErrorOutput(self):
    """Return the errors output.

    Returns:
      list of strings, errors
    """
    # Returning drains the buffer.
    err_output = self._error_output
    self._error_output = []
    return err_output
  def State(self):
    """Return the current state of the auth class.

    Returns:
      one of State.*
    """
    return self._state
  def Input(self, *unused_args, **unused_kwargs):
    """Accept input to auth methods."""
    if self._state == State.INPUT:
      # base class, we never do anything but accept input and
      # make no decisions.
      # subclasses, we return so that further action can occur.
      return
    raise ValueError('not waiting for input')
  def Output(self):
    """Output from auth methods.

    If the current state is not State.OUTPUT, nothing is returned.

    Returns:
      any data outputted from auth methods
    """
    if self._state == State.OUTPUT:
      output = self._output
      self._output = None
      # carefully return state to not waiting to output, but
      # don't change the auth_state.
      self._state = self._default_state
      return output
  def AuthState(self):
    """Return auth state.

    If state is OK or FAIL, only returns this value once before
    resetting to auth state UNKNOWN. This is a paranoia feature.
    Consumers of this method will likely want to access this
    method once for the state, then if/branch off of the saved
    value.

    Returns:
      auth state, one of AuthState.*
    """
    auth_state = self._auth_state
    if self._auth_state in [AuthState.OK, AuthState.FAIL]:
      self.ResetState()
    return auth_state
  def AuthStateOK(self):
    """Returns True if auth state is OK, else False."""
    # Note: reads via AuthState(), so a terminal state is consumed here too.
    auth_state = self.AuthState()
    return auth_state == AuthState.OK
  def _SplitMessage(self, m, expect_len):
    """Load a message and return its items.

    Args:
      m: str, multiple items separated by MSG_SEP
      expect_len: int, number of items to expect and require in m
    Returns:
      list of items
    Raises:
      MessageError: if the message is malformed or has too few items
    """
    a = m.split(MSG_SEP)
    if len(a) != expect_len:
      # NOTE(review): the %-style args are passed to MessageError
      # unformatted; the exception carries them as a tuple.
      raise MessageError('wrong number of message items %d %s', len(a), a)
    return a
  def _AssembleMessage(self, *args):
    """Export a message given items.

    Args:
      list of str items to include in the message
    Returns:
      string
    """
    return MSG_SEP.join(args)
class Auth1(AuthBase):
"""Auth class involving key exchange and signed messages.
TODO(user): describe here or link to design doc
"""
TOKEN = 'Auth1Token'
def __init__(self, *args, **kwargs):
  """Initialize key/cert holders on top of the AuthBase state machine."""
  super(Auth1, self).__init__(*args, **kwargs)
  # Key material starts out unloaded; LoadSelfKey()/LoadSelfCert() and
  # callers configuring the CA/server PEMs populate these later.
  self._key = None
  self._cert = None
  self._ca_pem = ''
  self._server_cert_pem = ''
  self._required_issuer = None
def GetSessionClass(self):
  """Use the server-side session store for Auth1."""
  return Auth1ServerSession
def DefaultState(self):
  """Auth1 idles waiting for client input."""
  return State.INPUT
def Nonce(self):
  """Return a random 128-bit nonce.

  Returns:
    int, 128-bit nonce
  """
  raw = os.urandom(16)
  # 'QQ' = two 8-byte unsigned ints; combine them into one 128-bit value.
  hi, lo = struct.unpack('QQ', raw)
  return (hi << 64) + lo
def NonceBase64(self):
  """Return a new 128-bit nonce encoded in urlsafe base64.

  Note: the nonce int is stringified first, so this encodes the decimal
  digits of the nonce.
  """
  nonce_digits = str(self.Nonce())
  return base64.urlsafe_b64encode(nonce_digits)
def GetCurrentEpochTimeUTC(self):
  """Return the current time, in epoch seconds, UTC.

  Returns:
    int, epoch seconds, UTC
  """
  # Fix: strftime('%s') is a non-portable glibc extension and formats the
  # naive utcnow() value using the *local* timezone, skewing the result by
  # the machine's UTC offset.  calendar.timegm() does the correct UTC math.
  return calendar.timegm(datetime.datetime.utcnow().utctimetuple())
def _AuthToken(self):
  """Return an auth token for this instance, or None.

  Only returns a token if AuthState() is AuthState.OK; can't be used in
  advance for this instance.  It is unlikely one would call this
  externally because of the auth state clearing nature of AuthState().

  Returns:
    str token (a large random int, base64'd), or None if auth is not OK
  """
  if self._auth_state != AuthState.OK:
    return None
  return self.NonceBase64()
def _LoadCert(self, certstr):
"""Load a certificate and return a cert object.
Args:
certstr: str, cert in PEM format
Returns:
x509.X509Certificate instance
Raises:
ValueError: if the cert is malformed
"""
try:
cert = x509.LoadCertificateFromPEM(certstr)
except x509.Error, e:
raise ValueError(str(e))
return cert
def _LoadKey(self, keystr):
"""Load a key and return a key object.
Args:
keystr: str, key in PEM format
Returns:
tlslite.utils.RSAKey instance
Raises:
ValueError: keystr is improperly formed
"""
try:
key = tlslite_bridge.parsePEMKey(keystr)
except (SyntaxError, AttributeError), e:
raise ValueError('invalid PEM key format: %s' % str(e))
return key
def Sign(self, datastr):
  """Sign data with our loaded key.

  Args:
    datastr: str, to sign
  Returns:
    str output of signed data
  Raises:
    KeyNotLoaded: if no key has been loaded with LoadSelfKey()
  """
  if not self._key:
    raise KeyNotLoaded
  # tlslite expects a byte array rather than a str.
  data_bytes = array.array('B')
  data_bytes.fromstring(datastr)
  sig_bytes = self._key.hashAndSign(data_bytes)
  # The signature's container type differs between tlslite versions;
  # normalize to str either way.
  if isinstance(sig_bytes, bytearray):
    # tlslite 0.4.9
    return str(sig_bytes)
  else:
    # tlslite 0.3.8 array.array
    return sig_bytes.tostring()
def LoadSelfKey(self, keystr):
  """Parse keystr (PEM) and adopt it as this instance's signing key."""
  self._key = self._LoadKey(keystr)
def LoadOtherCert(self, certstr):
  """Parse certstr (X509 PEM) and return the certificate object."""
  return self._LoadCert(certstr)
def LoadSelfCert(self, certstr):
  """Parse certstr (X509 PEM) and adopt it as this instance's cert."""
  self._cert = self._LoadCert(certstr)
  # Keep the raw PEM text as well for callers that need the original bytes.
  self._cert_str = certstr
def VerifyCertSignedByCA(self, cert):
  """Verify that a client cert was signed by the required CA cert.

  Args:
    cert: certificate object, client cert to verify
  Returns:
    True or False
  Raises:
    CryptoError: if the underlying x509 signature check fails internally
  """
  # self._ca_pem is expected to have been configured by the caller/subclass.
  ca_cert = self.LoadOtherCert(self._ca_pem)
  try:
    return cert.IsSignedBy(ca_cert)
  except (x509.Error, AssertionError), e:
    logging.exception(str(e))
    raise CryptoError(
        'VerifyCertSignedByCA: IsSignedBy: %s' % str(e))
def VerifyDataSignedWithCert(self, data, signature, cert=None):
  """Verify that this cert signed this data.

  Args:
    data: str, data to verify signing
    signature: str, signature data
    cert: certificate object, or None for this instance's cert
  Returns:
    True or False
  Raises:
    CryptoError: if the underlying crypto APIs raise an assertion due to
      malformed data
  """
  if cert is None:
    cert = self._cert
  # tlslite operates on byte arrays, not str.
  signature_b = array.array('B')
  signature_b.fromstring(str(signature))
  data_b = array.array('B')
  data_b.fromstring(str(data))
  try:
    pk = cert.GetPublicKey()
    return pk.hashAndVerify(signature_b, data_b)
  except AssertionError, e:
    logging.exception(str(e))
    raise CryptoError(
        'VerifyDataSignedWithCert: hashAndVerify: %s' % str(e))
def SessionSetCnSn(self, cn, sn):
  """Record a client-nonce / server-nonce pair in the session store."""
  cn_key = str(cn)
  self._session.SetCn(cn_key, str(sn))
def SessionVerifyKnownCnSn(self, cn, sn):
  """Verify that a Cn, Sn pair is known.

  Args:
    cn: str, client nonce
    sn: str, server nonce
  Returns:
    bool, True if the pair is known (exists in session db)
  """
  # Hoisted: the debug prefix was previously rebuilt in each branch.
  h = 'SessionVerifyKnownCnSn(%s,%s)' % (str(cn), str(sn))
  orig_sn = self._session.GetCn(str(cn))
  # Get() returns None if the session is not found.
  # carefully check for None here so that a cn lookup with None
  # for sn value does not create a false positive.
  if orig_sn is None:
    logging.debug('%s: orig_sn is None', h)
    return False
  if orig_sn == AuthState.OK:
    # Presumably guards against a token-style record (whose value is the
    # literal AuthState.OK) masquerading as a server nonce -- TODO confirm.
    # Fix: the previous log text said "!=", contradicting the condition
    # actually tested here.
    logging.debug('%s: orig_sn == AuthState.OK', h)
    return False
  if orig_sn != sn:
    logging.debug('%s: orig_sn (%s) != sn (%s)', h, orig_sn, sn)
    return False
  return True
def GetSessionIfAuthOK(self, token, require_level=None):
  """Return the session for token if it is known, state OK, and level OK.

  Args:
    token: str, token string from SessionCreateAuthToken
    require_level: int, optional, require this token to have at least
        level require_level access
  Returns:
    session object if the auth token is known, state OK, level OK.
  Raises:
    AuthSessionError: auth token is unknown, state is not OK, or level
        not OK.
  """
  sess = self._session.GetToken(token)
  if not sess:
    raise AuthSessionError('GetSessionIfAuthOK: session is None')
  if sess.state != AuthState.OK:
    raise AuthSessionError(
        'GetSessionIfAuthOK: state (%s) != OK', sess.state)
  if require_level is not None and require_level > sess.level:
    raise AuthSessionError(
        'GetSessionIfAuthOK: require_level (%s) session level (%s)',
        require_level, sess.level)
  return sess
def SessionGetUuid(self, token):
  """Return the uuid recorded for token, or None if unknown.

  Args:
    token: str, token string from SessionCreateAuthToken
  """
  sess = self._session.GetToken(token)
  # getattr default also covers a None (missing) session.
  return getattr(sess, 'uuid', None)
def SessionCreateAuthToken(self, uuid, level=LEVEL_BASE):
  """Mint an auth token, record it in the session db, and return it.

  Args:
    uuid: str, uuid for client which is receiving token
    level: int, optional, default LEVEL_BASE, level for session
  Returns:
    str, token
  """
  new_token = self._AuthToken()
  self._session.SetToken(
      new_token, state=AuthState.OK, uuid=uuid, level=level)
  return new_token
def SessionCreateUserAuthToken(self, user, level=LEVEL_BASE):
  """Create a fully authenticated token for an already-authenticated user.

  Only call this when some other authentication system (for example SSO)
  has already validated this user; the returned token is immediately
  usable by the receiver.

  Args:
    user: str, some user id like 'foo' or 'foo@example.com', etc
    level: int, optional, default LEVEL_BASE, level for session
  Returns:
    str, token
  """
  self.ResetState()
  user = str(user)
  # Temporarily mark auth OK so _AuthToken() (via SessionCreateAuthToken)
  # will mint a token, then clear the state again.
  self._auth_state = AuthState.OK
  token = self.SessionCreateAuthToken(uuid=user, level=level)
  self.ResetState()
  return token
def SessionDelCn(self, cn):
  """Delete all stored session data for a client nonce.

  Args:
    cn: str, client nonce
  """
  nonce_key = str(cn)
  self._session.DelCn(nonce_key)
def SessionDelToken(self, token):
  """Remove one token from the session store.

  Args:
    token: str, token string from SessionCreateAuthToken
  """
  store = self._session
  store.DelToken(token)
def Input(self, n=None, m=None, s=None):  # pylint: disable=arguments-differ
  """Input parameters to the auth function.

  Callers should provide n, OR m and s.

  Args:
    n: str, nonce from client, an integer in str form e.g. '12345'
    m: str, message from client
    s: str, b64 signature from client
  Raises:
    ValueError: if invalid combination of arguments supplied
  """
  super(Auth1, self).Input()
  self.ResetState()  # paranoia clear auth_state, tests run OK without
  if n is not None and m is None and s is None:
    # Auth step 1: the client sent its nonce Cn.  Reply with the message
    # "Cn Sn sig" and remember the Cn:Sn pairing for step 2.
    #logging.debug('Auth step 1')
    try:
      cn = int(n)
    except ValueError:
      logging.error('Non-integer Cn was supplied: %s', str(n))
      self.AuthFail()
      return
    if cn < MIN_VALUE_CN:
      # A small nonce weakens the exchange; reject it outright.
      logging.error('Cn value is too small: %d', cn)
      self.AuthFail()
      return
    sn = self.Nonce()
    m = self._AssembleMessage(str(cn), str(sn))
    sig = self.Sign(m)
    sig = base64.urlsafe_b64encode(sig)
    m = self._AssembleMessage(m, sig)
    self._AddOutput(m)
    self.SessionSetCnSn(cn, sn)
    #logging.debug('Server supplied Sn %s for Cn %s', sn, cn)
  elif m is not None and s is not None and n is None:
    # Auth step 2: the client replies with a signed message containing its
    # certificate; verify the cert chain, the signature, and the Cn:Sn pair
    # before issuing a token.
    #logging.debug('Auth step 2')

    class _Error(Exception):
      """Temporary exception used here."""

    log_prefix = ''
    cn = None
    try:
      # open up the message to get the client cert 'c'.
      try:
        (c, cn, sn) = self._SplitMessage(m, 3)
      except MessageError, e:
        raise _Error('SplitMessage MessageError (%s)', str(e))
      log_prefix = ''
      # signature 's' and client cert 'c' are urlsafe_base64
      try:
        s = base64.urlsafe_b64decode(str(s))
        c = base64.urlsafe_b64decode(str(c))
      except TypeError, e:
        raise _Error('Invalid c or s parameter b64 format(%s)', str(e))
      # load X509 client cert 'c' into object
      try:
        client_cert = self.LoadOtherCert(c)
      except ValueError, e:
        raise _Error('Invalid cert supplied %s' % str(e))
      # sanity check
      if not client_cert.GetPublicKey():
        raise _Error('Malformed X509 cert with no public key')
      client_cert.SetRequiredIssuer(self._required_issuer)
      try:
        client_cert.CheckAll()
      except x509.Error, e:
        raise _Error('X509 certificate error: %s' % str(e))
      # obtain uuid from cert
      uuid = client_cert.GetSubject()
      log_prefix = uuid
      # client_cert is loaded
      #logging.debug('%s Client cert loaded', log_prefix)
      #logging.debug('%s Message = %s', log_prefix, m)
      # verify that the client cert is legitimate
      if not self.VerifyCertSignedByCA(client_cert):
        raise _Error('Client cert is not signed by the required CA')
      # verify that the message was signed by the client cert
      if not self.VerifyDataSignedWithCert(m, s, client_cert):
        raise _Error('Signed message does not verify')
      # verify that the Sn was the one offered to this Cn previously
      if not self.SessionVerifyKnownCnSn(cn, sn):
        raise _Error('Client offered unknown Sn %s', sn)
      # success!
      #logging.debug('%s Client auth successful', log_prefix)
      # careful here, switching the state setting and AddOutput
      # lines causes a hard to test bug (because Input() test mocks out
      # AuthToken())
      self._auth_state = AuthState.OK
      token = self.SessionCreateAuthToken(uuid)
      self._AddOutput(token)
    except (_Error, CryptoError), e:
      logging.warning('%s Auth error: %s', log_prefix, e.args)
      logging.debug('%s Auth message: %s', log_prefix, m)
      logging.debug(
          '%s Auth sig: %s', log_prefix, base64.urlsafe_b64encode(s))
      self.AuthFail()
    # no matter what, delete the current cn:sn pair if an attempt
    # was made to auth against it, success or not.
    if cn is not None:
      self.SessionDelCn(cn)
  else:
    # Neither a valid step-1 nor step-2 argument combination.
    #logging.debug('Auth step unknown')
    raise ValueError('invalid input')
class Auth1Client(Auth1):
  """Client class for Auth1 style auth."""

  def __init__(self, *args, **kwargs):
    super(Auth1Client, self).__init__(*args, **kwargs)
    self._key = None  # this client's private key

  def GetSessionClass(self):
    # Clients persist their state in the client-specific session class.
    return Auth1ClientSession

  def Input(self, m=None, t=None):  # pylint: disable=arguments-differ
    """Accept input to auth methods.

    Callers should provide either m OR t, or neither, but not both.

    Args:
      m: str, message from server (cn, sn, signature)
      t: str, token reply from server
    Raises:
      ValueError: if invalid combination of arguments supplied
    """
    self.ResetState()
    # no input - step 0, producing step 1 input
    if m is None and t is None:
      cn = str(self.Nonce())
      self._AddOutput(cn)
      # Remember our nonce so the step-1 reply can be validated against it.
      self._session.Set('cn', cn)
    # message input - step 1 output, produce step 2 input
    elif m is not None and t is None:

      class _Error(Exception):
        """Temporary exception used here."""

      cn = None
      try:
        # open up the message to get the client nonce (cn),
        # server nonce (sn) and signature (s)
        try:
          (cn, sn, s) = self._SplitMessage(m, 3)
        except MessageError, e:
          raise _Error('SplitMessage MessageError (%s)' % str(e))
        try:
          s = base64.urlsafe_b64decode(str(s))
        except TypeError, e:
          raise _Error('Invalid s parameter b64 format (%s)' % str(e))
        # verify cert is a server cert
        try:
          server_cert = self.LoadOtherCert(self._server_cert_pem)
        except ValueError, e:
          raise _Error('Server cert load error: %s' % str(e))
        if not self.VerifyCertSignedByCA(server_cert):
          raise _Error('Server cert is not signed by known CA')
        # load the Cn value that this client used previously
        orig_cn = self._session.Get('cn')
        if cn != orig_cn:
          raise _Error('Server supplied Cn does not match our Cn')
        # verify signature on message "Cn Sn"
        tmp_m = self._AssembleMessage(cn, sn)
        if not self.VerifyDataSignedWithCert(tmp_m, s, server_cert):
          raise _Error('Sn is not signed by server cert')
        # create return message: base64_client_cert cn sn
        c = base64.urlsafe_b64encode(self._cert_str)
        out_m = self._AssembleMessage(c, cn, sn)
        sig = self.Sign(out_m)
        sig = base64.urlsafe_b64encode(str(sig))
        #logging.debug('M= %s', out_m)
        #logging.debug('S= %s', sig)
        self._AddOutput({'m': out_m, 's': sig})
      except _Error, e:
        # Any failure invalidates our nonce; drop it and fail the auth.
        self._session.DeleteById('cn')
        self._AddError(str(e))
        self.AuthFail()
    # token input - step 3 input
    elif t is not None and m is None:
      if t == Auth1.TOKEN:
        self._session.DeleteById('cn')
        self.ResetState()
        self._auth_state = AuthState.OK
      else:
        self.AuthFail()
    # unknown input
    else:
      raise ValueError('Invalid input')
| {
"content_hash": "84611e7de68e9f116814c4dda3f93998",
"timestamp": "",
"source": "github",
"line_count": 1086,
"max_line_length": 99,
"avg_line_length": 27.211786372007367,
"alnum_prop": 0.6333581483486735,
"repo_name": "googlearchive/simian",
"id": "af23f5117978a7dce7db38fac7f18a8395ebcfcc",
"size": "30174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/simian/auth/base.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "38259"
},
{
"name": "HTML",
"bytes": "97532"
},
{
"name": "JavaScript",
"bytes": "34498"
},
{
"name": "Makefile",
"bytes": "8173"
},
{
"name": "Python",
"bytes": "1422429"
},
{
"name": "Shell",
"bytes": "13277"
}
],
"symlink_target": ""
} |
from abc import ABCMeta, abstractmethod
from enum import Enum
from utils.binary import *
# Lifecycle states a Simulator instance moves through, built with the Enum
# functional API; names and values mirror the original declaration order.
SimulatorState = Enum(
    'SimulatorState',
    [
        ('UNINITIALIZED', 0),
        ('INITIALIZED', 1),
        ('LOADED', 2),
        ('RUNNING', 3),
        ('PAUSED', 4),
        ('TERMINATED', 5),
    ],
)
class Simulator(metaclass=ABCMeta):
    """Abstract base class for a processor simulator implementation

    Besides a number of necessary methods, it must also contain a state variable,
    a memory array and a dictionary of named registers.
    Configuration object must contain all the properties listed below, modified
    to fit the processor which is being simulated"""

    # NOTE(review): these are class-level mutable attributes, shared between
    # all instances; concrete subclasses presumably replace them with sized
    # instance attributes in init() -- confirm before creating >1 instance.
    state = SimulatorState.UNINITIALIZED
    memory = []
    # NOTE(review): load() assigns annotations by byte address although this
    # starts as an empty list -- init() must size it (or use a dict).
    annotations = []
    breakpoints = set()
    registers = {}

    config = {  # Default values, modify to model different processors
        'ENDIANNESS': 'little',
        'WORD_SIZE_BYTES': 4,
        'WORD_SIZE_BITS': 32,
        'HALFWORD_SIZE_BYTES': 2,
        'HALFWORD_SIZE_BITS': 16,
        'ADDRESS_SIZE_BYTES': 4,
        'ADDRESS_SIZE_BITS': 32,
        'MEMORY_SIZE_BYTES': 65536,
        'MEMORY_SIZE_WORDS': 65536 // 4
    }

    # Processor state procedures

    @abstractmethod
    def init(self):
        """Prepare the simulator for loading a program (subclass hook)."""
        pass

    # TODO:: Standardize .p file format
    def load(self, p_file_name):
        """Load a .p program file into simulator memory.

        Each line contains an optional hex address, the instruction bytes
        (2 hex digits per byte, separated by one character), and a trailing
        source annotation.  Lines without an address continue at the word
        following the previous line's address.

        Args:
            p_file_name: path of the .p file to load.
        Raises:
            RuntimeError: the simulator is not in the INITIALIZED state.
            NotImplementedError: the simulator is configured big-endian.
        """
        if self.state != SimulatorState.INITIALIZED:
            raise RuntimeError('Cannot load a program, processor in invalid state')
        with open(p_file_name, "r") as p_file:
            address_end_pos = 2 * self.config['ADDRESS_SIZE_BYTES']
            annotation_start_pos = address_end_pos + 3 * self.config['WORD_SIZE_BYTES'] + 1
            # Keep only lines that actually contain code before the
            # annotation column; pair each code span with its annotation.
            lines = [(line[:annotation_start_pos], line[annotation_start_pos:])
                     for line in p_file if line[:annotation_start_pos].rstrip()]
            last_line_number = 0
            for (code, annotation) in lines:
                # Explicit address if present, otherwise one word past the
                # previously loaded line.
                current_line_number = (int(code[:address_end_pos], 16)
                                       if code[:address_end_pos].strip()
                                       else last_line_number + self.config['WORD_SIZE_BYTES'])
                self.annotations[current_line_number] = annotation
                if self.config['ENDIANNESS'] == 'little':
                    for i in range(0, self.config['WORD_SIZE_BYTES']):
                        self.memory[current_line_number + i] = Binary8.from_hex(
                            code[address_end_pos + 2 + 3 * i: address_end_pos + 5 + 3 * i])
                else:
                    raise NotImplementedError('Big endian unsupported yet')
                last_line_number = current_line_number
        self.state = SimulatorState.LOADED

    def run(self):
        """Execute instructions until the state leaves RUNNING."""
        if self.state not in (SimulatorState.LOADED, SimulatorState.PAUSED):
            raise RuntimeError('Cannot run, processor in invalid state')
        self.state = SimulatorState.RUNNING
        while self.state == SimulatorState.RUNNING:
            self.execute_single()

    def run_step(self):
        """Execute exactly one instruction, then pause unless terminated."""
        if self.state not in (SimulatorState.LOADED, SimulatorState.PAUSED):
            raise RuntimeError('Cannot run a step, processor in invalid state')
        self.execute_single()
        if self.state != SimulatorState.TERMINATED:
            self.state = SimulatorState.PAUSED

    def pause(self):
        """Suspend execution; run()/run_step() may resume it."""
        self.state = SimulatorState.PAUSED

    def stop(self):
        """Terminate execution permanently."""
        self.state = SimulatorState.TERMINATED

    # Execution procedures

    @abstractmethod
    def execute_single(self):
        """Fetch and execute a single instruction (subclass hook)."""
        pass

    @abstractmethod
    def execute_instruction(self, instruction):
        """Execute an already-decoded instruction (subclass hook)."""
        pass

    # Processor memory functions

    def is_valid_address(self, address):
        """Tests whether a given address is valid for a given processor

        Modify if neccessary for a specific processor, say if addressing in words,
        not bytes, or if address space is different"""
        # BUGFIX: the upper bound must be exclusive.  Memory holds
        # MEMORY_SIZE_BYTES cells indexed 0 .. MEMORY_SIZE_BYTES - 1; the
        # previous '<=' comparison accepted an address one past the end,
        # which made the memory accessors raise IndexError instead of the
        # intended ValueError.
        return 0 <= int(address) < self.config['MEMORY_SIZE_BYTES']

    def get_word_from_memory(self, address):
        """Return a word from memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot load word from this memory location')
        if self.config['ENDIANNESS'] == 'little':
            # NOTE(review): '//' appears to be an operator overloaded by
            # Binary8 (utils.binary) to join bytes into a wider value --
            # confirm; this is not integer division.
            word = self.memory[address]
            for i in range(1, self.config['WORD_SIZE_BYTES']):
                word = self.memory[address + i] // word
            return word
        else:
            raise NotImplementedError('Big endian not supported yet')

    def get_halfword_from_memory(self, address):
        """Return a halfword from memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot load byte from this memory location')
        if self.config['ENDIANNESS'] == 'little':
            word = self.memory[address]
            for i in range(1, self.config['HALFWORD_SIZE_BYTES']):
                word = self.memory[address + i] // word
            return word
        else:
            raise NotImplementedError('Big endian not supported yet')

    def get_byte_from_memory(self, address):
        """Return a byte from memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot load byte from this memory location')
        return self.memory[address]

    def set_word_in_memory(self, address, word):
        """Place a word into memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot store word to this memory location')
        if self.config['ENDIANNESS'] == 'little':
            # Store bytes in reverse order (little-endian layout).
            for i in range(0, self.config['WORD_SIZE_BYTES']):
                self.memory[address + self.config['WORD_SIZE_BYTES'] - i - 1] = Binary8.from_digits(word[8 * i: 8 * (i + 1)])
        else:
            raise NotImplementedError('Big endian not supported yet')

    def set_halfword_in_memory(self, address, halfword):
        """Place a halfword into memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot store halfword to this memory location')
        if self.config['ENDIANNESS'] == 'little':
            for i in range(0, self.config['HALFWORD_SIZE_BYTES']):
                self.memory[address + self.config['HALFWORD_SIZE_BYTES'] - i - 1] = Binary8.from_digits(halfword[8 * i: 8 * (i + 1)])
        else:
            raise NotImplementedError('Big endian not supported yet')

    def set_byte_in_memory(self, address, byte):
        """Place a byte into memory at a given address"""
        if not self.is_valid_address(address):
            raise ValueError('Invalid address provided, cannot store byte from to memory location.')
        self.memory[address] = byte

    # Processor breakpoints functions

    def toggle_breakpoint(self, line_number):
        """Toggle a breakpoint at the given address on or off."""
        if not self.is_valid_address(line_number):
            raise ValueError('Invalid address for a breakpoint')
        if line_number in self.breakpoints:
            self.breakpoints.remove(line_number)
        else:
            self.breakpoints.add(line_number)

    def is_breakpoint_at(self, line_number):
        """Return True when a breakpoint is set at the given address."""
        return line_number in self.breakpoints
| {
"content_hash": "df131625f903dd14728df6d6871872cb",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 160,
"avg_line_length": 37.630208333333336,
"alnum_prop": 0.6264359861591695,
"repo_name": "zjurelinac/PEAS",
"id": "33920f9b461b4083cba8ea69cf47dc67ade4056e",
"size": "7225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "v1/simulators/simulator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "711"
},
{
"name": "Python",
"bytes": "59011"
}
],
"symlink_target": ""
} |
"""
Rebuild the Ui files
Note: If the system complains about not finding PyQt4 from pyuic, and you are using
anaconda, change the script in ~/anaconda/bin/pyuic4 to start with python2.7, not pythonw2.7
"""
import os, sys
uic = 'pyuic4'
sys.argv.pop(0)
if len(sys.argv) > 1 and sys.argv[1] == '--pyside':
sys.argv.pop(0)
uic = 'pyside-uic'
paths = sys.argv
if len(paths) == 0:
paths = ['.']
uifiles = []
for root in paths:
if os.path.isdir(root):
for path, sd, files in os.walk(root):
if os.path.join('acq4', 'pyqtgraph') in path:
continue
for f in files:
if f.endswith('.ui'):
uifiles.append(os.path.join(path, f))
elif os.path.isfile(root):
if root.endswith('.ui'):
uifiles.append(root)
else:
raise Exception('Not a .ui file: %s' % root)
else:
raise Exception('Not a file or directory: %s' % root)
for ui in uifiles:
py = os.path.splitext(ui)[0] + '.py'
if not os.path.exists(py) or os.stat(py).st_mtime < os.stat(ui).st_mtime:
os.system('%s %s > %s' % (uic, ui, py))
print py
| {
"content_hash": "65d6a2d930cdee51fa3033071a579b83",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 92,
"avg_line_length": 30.657894736842106,
"alnum_prop": 0.5690987124463519,
"repo_name": "mgraupe/acq4",
"id": "e8ae9da9ad7336d370a460d40bd8e9295476b9cc",
"size": "1183",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tools/rebuildUi.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "3037"
},
{
"name": "Batchfile",
"bytes": "247"
},
{
"name": "C",
"bytes": "757367"
},
{
"name": "C++",
"bytes": "1222891"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "Inno Setup",
"bytes": "1606"
},
{
"name": "MATLAB",
"bytes": "1752"
},
{
"name": "Makefile",
"bytes": "30"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "6110588"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
} |
"""Interface for accessing Cargo settings (stored in the sublime-project
file).
These are used by the build system to determine how to run Cargo.
Cargo Info
==========
When the `cargo_exec` Sublime command is run, you pass in a named command to
run. There is a default set of commands defined here in CARGO_COMMANDS (users
can create custom commands and pass them in with `command_info`). See
`docs/build.md` for a description of the different `command_info` values.
Project Settings
================
Settings can be stored (under the "cargo_build" key) to alter how cargo is
run. See `docs/build.md` for a description.
"""
import sublime
import os
import shlex
from . import util, target_detect
# Built-in Cargo command definitions.  Keys are the command names passed to
# the `cargo_exec` Sublime command; each value describes how the command line
# may be constructed (see docs/build.md for the meaning of each flag).
CARGO_COMMANDS = {
    'auto': {
        'name': 'Automatic',
        'command': 'auto',
        'allows_target': True,
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
    },
    'build': {
        'name': 'Build',
        'command': 'build',
        'allows_target': True,
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
    },
    'run': {
        'name': 'Run',
        'command': 'run',
        'allows_target': ('bin', 'example'),
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
        # Raw string: '\s' is not a valid Python string escape, which raises
        # a DeprecationWarning/SyntaxWarning on modern interpreters.  The
        # value is unchanged.
        'json_stop_pattern': r'^\s*Running ',
    },
    'check': {
        'name': 'Check',
        'command': 'check',
        'allows_target': True,
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
    },
    'test': {
        'name': 'Test',
        'command': 'test',
        'allows_target': True,
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
    },
    'bench': {
        'name': 'Bench',
        'command': 'bench',
        'allows_target': True,
        'allows_target_triple': True,
        'allows_release': False,
        'allows_features': True,
        'allows_json': True,
    },
    'clean': {
        'name': 'Clean',
        'command': 'clean',
    },
    'doc': {
        'name': 'Doc',
        'command': 'doc',
        'allows_target': ['lib', 'bin'],
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': False,
    },
    'clippy': {
        'name': 'Clippy',
        'command': 'clippy',
        'allows_target': False,
        'allows_target_triple': True,
        'allows_release': True,
        'allows_features': True,
        'allows_json': True,
    },
    'script': {
        'name': 'Script',
        'command': 'script',
        'allows_target': False,
        'allows_target_triple': False,
        'allows_release': False,
        'allows_features': False,
        'allows_json': False,
        'requires_view_path': True,
        'requires_manifest': False,
    },
}
# Built-in fallback settings consulted by CargoSettings when neither the
# user's global preferences nor the project file provide a value.  The
# clippy variant defaults to the nightly toolchain.
CARGO_BUILD_DEFAULTS = {
    'variants': {
        'clippy': {
            'toolchain': 'nightly',
        }
    }
}
class CargoSettings(object):

    """Interface to Cargo project settings stored in `sublime-project`
    file."""

    # Sublime window.
    window = None
    # Data in the sublime project file. Empty dictionary if nothing is set.
    project_data = None

    def __init__(self, window):
        self.window = window

    def load(self):
        """Load project data and the global Rust Enhanced preferences."""
        self.project_data = self.window.project_data()
        if self.project_data is None:
            # Window does not have a Sublime project.
            self.project_data = {}
        self.re_settings = sublime.load_settings('RustEnhanced.sublime-settings')

    def get_global_default(self, key, default=None):
        """Get a default value from the global Sublime settings file,
        falling back to the built-in CARGO_BUILD_DEFAULTS."""
        internal_default = CARGO_BUILD_DEFAULTS.get('defaults', {})\
            .get(key, default)
        return self.re_settings.get('cargo_build', {})\
            .get('defaults', {})\
            .get(key, internal_default)

    def set_global_default(self, key, value):
        """Set a default value in the global Sublime settings file."""
        cb = self.re_settings.get('cargo_build', {})
        cb.setdefault('defaults', {})[key] = value
        self.re_settings.set('cargo_build', cb)
        sublime.save_settings('RustEnhanced.sublime-settings')

    def get_project_default(self, key, default=None):
        """Get a default value from the project's settings."""
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get('defaults', {})\
            .get(key, default)

    def set_project_default(self, key, value):
        """Set a default value in the project's settings."""
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})\
            .setdefault('defaults', {})[key] = value
        self._set_project_data()

    def get_global_variant(self, variant, key, default=None):
        """Get a per-variant value from the global Sublime settings file,
        falling back to the built-in CARGO_BUILD_DEFAULTS."""
        internal_default = CARGO_BUILD_DEFAULTS.get('variants', {})\
            .get(variant, {})\
            .get(key, default)
        return self.re_settings.get('cargo_build', {})\
            .get('variants', {})\
            .get(variant, {})\
            .get(key, internal_default)

    def set_global_variant(self, variant, key, value):
        """Set a per-variant value in the global Sublime settings file."""
        cb = self.re_settings.get('cargo_build', {})
        cb.setdefault('variants', {})\
            .setdefault(variant, {})[key] = value
        self.re_settings.set('cargo_build', cb)
        sublime.save_settings('RustEnhanced.sublime-settings')

    def get_project_variant(self, variant, key, default=None):
        """Get a per-variant value from the project's settings."""
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get('variants', {})\
            .get(variant, {})\
            .get(key, default)

    def set_project_variant(self, variant, key, value):
        """Set a per-variant value in the project's settings."""
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})\
            .setdefault('variants', {})\
            .setdefault(variant, {})[key] = value
        self._set_project_data()

    def get_project_package_default(self, path, key, default=None):
        """Get a per-package default value from the project's settings."""
        path = os.path.normpath(path)
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get('paths', {})\
            .get(path, {})\
            .get('defaults', {})\
            .get(key, default)

    def set_project_package_default(self, path, key, value):
        """Set a per-package default value in the project's settings."""
        path = os.path.normpath(path)
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})\
            .setdefault('paths', {})\
            .setdefault(path, {})\
            .setdefault('defaults', {})[key] = value
        self._set_project_data()

    def get_project_package_variant(self, path, variant, key, default=None):
        """Get a per-package, per-variant value from the project settings."""
        path = os.path.normpath(path)
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get('paths', {})\
            .get(path, {})\
            .get('variants', {})\
            .get(variant, {})\
            .get(key, default)

    def set_project_package_variant(self, path, variant, key, value):
        """Set a per-package, per-variant value in the project settings."""
        path = os.path.normpath(path)
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})\
            .setdefault('paths', {})\
            .setdefault(path, {})\
            .setdefault('variants', {})\
            .setdefault(variant, {})[key] = value
        self._set_project_data()

    def get_project_package_target(self, path, target, key, default=None):
        """Get a per-package, per-target value from the project settings."""
        path = os.path.normpath(path)
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get('paths', {})\
            .get(path, {})\
            .get('targets', {})\
            .get(target, {})\
            .get(key, default)

    def set_project_package_target(self, path, target, key, value):
        """Set a per-package, per-target value in the project settings."""
        path = os.path.normpath(path)
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})\
            .setdefault('paths', {})\
            .setdefault(path, {})\
            .setdefault('targets', {})\
            .setdefault(target, {})[key] = value
        self._set_project_data()

    def get_project_base(self, key, default=None):
        """Get a top-level value from the project's cargo_build settings."""
        return self.project_data.get('settings', {})\
            .get('cargo_build', {})\
            .get(key, default)

    def set_project_base(self, key, value):
        """Set a top-level value in the project's cargo_build settings."""
        self.project_data.setdefault('settings', {})\
            .setdefault('cargo_build', {})[key] = value
        self._set_project_data()

    def _set_project_data(self):
        """Write self.project_data back to the window's project file."""
        if self.window.project_file_name() is None:
            # XXX: Better way to display a warning? Is
            # sublime.error_message() reasonable?
            print(util.multiline_fix("""
            Rust Enhanced Warning: This window does not have an associated sublime-project file.
            Any changes to the Cargo build settings will be lost if you close the window."""))
        self.window.set_project_data(self.project_data)

    def determine_target(self, cmd_name, settings_path,
                         cmd_info=None, override=None):
        """Determine the Cargo target (e.g. '--bin foo') for a command.

        Returns None when the command does not take a target or when it
        cannot be determined.
        """
        if cmd_info is None:
            cmd_info = CARGO_COMMANDS[cmd_name]
        target = None
        if cmd_info.get('allows_target', False):
            if override:
                tcfg = override
            else:
                tcfg = self.get_project_package_variant(settings_path, cmd_name, 'target')
            if tcfg == 'auto':
                # If this fails, leave target as None and let Cargo sort it
                # out (it may display an error).
                if util.active_view_is_rust():
                    td = target_detect.TargetDetector(self.window)
                    view = self.window.active_view()
                    targets = td.determine_targets(view.file_name())
                    if len(targets) == 1:
                        src_path, cmd_line = targets[0]
                        target = ' '.join(cmd_line)
            else:
                target = tcfg
        return target

    def get_computed(self, settings_path, variant, target, key,
                     default=None, initial_settings={}):
        """Get the configuration value for the given key."""
        # Resolution cascade, most specific first: explicit initial
        # settings, project package target/variant/default, project
        # variant, global variant, project default, then global default.
        # NOTE: the mutable default for initial_settings is only ever read
        # here, never mutated.
        v = initial_settings.get(key)
        if v is None:
            v = self.get_project_package_target(settings_path, target, key)
        if v is None:
            v = self.get_project_package_variant(settings_path, variant, key)
        if v is None:
            v = self.get_project_package_default(settings_path, key)
        if v is None:
            v = self.get_project_variant(variant, key)
        if v is None:
            v = self.get_global_variant(variant, key)
        if v is None:
            v = self.get_project_default(key)
        if v is None:
            v = self.get_global_default(key, default)
        return v

    def get_merged(self, settings_path, variant, target, key,
                   initial_settings={}):
        """Get the configuration value for the given key.

        This assumes the value is a dictionary, and will merge all values from
        each level. This is primarily used for the `env` environment
        variables.
        """
        # Later update() calls win, so the most specific layer takes
        # precedence over the more general ones.
        result = self.get_global_default(key, {}).copy()

        proj_def = self.get_project_default(key, {})
        result.update(proj_def)

        glbl_var = self.get_global_variant(variant, key, {})
        result.update(glbl_var)

        proj_var = self.get_project_variant(variant, key, {})
        result.update(proj_var)

        pp_def = self.get_project_package_default(settings_path, key, {})
        result.update(pp_def)

        pp_var = self.get_project_package_variant(settings_path, variant, key, {})
        result.update(pp_var)

        pp_tar = self.get_project_package_target(settings_path, target, key, {})
        result.update(pp_tar)

        initial = initial_settings.get(key, {})
        result.update(initial)
        return result

    def get_command(self, cmd_name, cmd_info,
                    settings_path, working_dir,
                    initial_settings={}, force_json=False):
        """Generates the command arguments for running Cargo.

        :param cmd_name: The name of the command, the key used to select a
            "variant".
        :param cmd_info: Dictionary from `CARGO_COMMANDS` with rules on how to
            construct the command.
        :param settings_path: The absolute path to the Cargo project root
            directory or script.
        :param working_dir: The directory where Cargo is to be run (typically
            the project root).
        :keyword initial_settings: Initial settings to inject which override
            all other settings.
        :keyword force_json: If True, will force JSON output.

        :Returns: A dictionary with the keys:
            - `command`: The command to run as a list of strings.
            - `env`: Dictionary of environment variables (or None).
            - `msg_rel_path`: The root path to use for relative paths in
              messages.
            - `rustc_version`: The version of rustc being used as a string,
              such as '1.25.0-nightly'.

            Returns None if the command cannot be constructed.
        """
        target = self.determine_target(cmd_name, settings_path,
            cmd_info=cmd_info, override=initial_settings.get('target'))

        def get_computed(key, default=None):
            # Shorthand: resolve `key` through the full settings cascade for
            # this command/target.
            return self.get_computed(settings_path, cmd_name, target, key,
                default=default, initial_settings=initial_settings)

        result = ['cargo']
        toolchain = get_computed('toolchain', None)
        if toolchain:
            result.append('+' + toolchain)

        # Command to run.
        result.append(cmd_info['command'])

        # Default target.
        if target:
            result.extend(target.split())

        # target_triple
        if cmd_info.get('allows_target_triple', False):
            v = get_computed('target_triple', None)
            if v:
                result.extend(['--target', v])

        # release (profile)
        if cmd_info.get('allows_release', False):
            v = get_computed('release', False)
            if v:
                result.append('--release')

        if force_json or (cmd_info.get('allows_json', False) and
                          util.get_setting('show_errors_inline', True)):
            result.append('--message-format=json')

        # features
        if cmd_info.get('allows_features', False):
            v = get_computed('no_default_features', False)
            if v:
                result.append('--no-default-features')
            v = get_computed('features', None)
            if v:
                if v.upper() == 'ALL':
                    result.append('--all-features')
                else:
                    result.append('--features')
                    result.append(v)

        # Add path from current active view (mainly for "cargo script").
        if cmd_info.get('requires_view_path', False):
            script_path = get_computed('script_path')
            if not script_path:
                if not util.active_view_is_rust():
                    sublime.error_message(util.multiline_fix("""
                    Cargo build command %r requires the current view to be a Rust source file.""" % cmd_info['name']))
                    return None
                script_path = self.window.active_view().file_name()
            result.append(script_path)

        def expand(s):
            # Expand ${var} placeholders using the window's variables.
            return sublime.expand_variables(s,
                self.window.extract_variables())

        # Extra args.
        extra_cargo_args = get_computed('extra_cargo_args')
        if extra_cargo_args:
            extra_cargo_args = expand(extra_cargo_args)
            result.extend(shlex.split(extra_cargo_args))

        extra_run_args = get_computed('extra_run_args')
        if extra_run_args:
            extra_run_args = expand(extra_run_args)
            result.append('--')
            result.extend(shlex.split(extra_run_args))

        # Compute the environment.
        env = self.get_merged(settings_path, cmd_name, target, 'env',
            initial_settings=initial_settings)
        for k, v in env.items():
            env[k] = os.path.expandvars(v)
        if not env:
            env = None

        # Determine the base path for paths in messages.
        #
        # Starting in Rust 1.24, all messages and symbols are relative to the
        # workspace root instead of the package root.
        metadata = util.get_cargo_metadata(self.window, working_dir, toolchain)
        if metadata and 'workspace_root' in metadata:
            # 'workspace_root' key added in 1.24.
            msg_rel_path = metadata['workspace_root']
        else:
            msg_rel_path = working_dir
        rustc_version = util.get_rustc_version(self.window, working_dir, toolchain=toolchain)

        return {
            'command': result,
            'env': env,
            'msg_rel_path': msg_rel_path,
            'rustc_version': rustc_version,
        }
| {
"content_hash": "40a3d36e266250d51d1b6133c21a7a88",
"timestamp": "",
"source": "github",
"line_count": 483,
"max_line_length": 122,
"avg_line_length": 37.88198757763975,
"alnum_prop": 0.5151117669563317,
"repo_name": "dten/sublime-rust",
"id": "d92ff0d998c0f9456e16ce1e0602cfd0b2d6bb89",
"size": "18297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rust/cargo_settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "195010"
},
{
"name": "Rust",
"bytes": "57774"
}
],
"symlink_target": ""
} |
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
# local test models
from models import (Holder, Inner, InnerInline, Holder2, Inner2, Holder3,
Inner3, Person, OutfitItem, Fashionista, Teacher, Parent, Child)
class TestInline(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
self.change_url = '/test_admin/admin/admin_inlines/holder/%i/' % holder.id
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def tearDown(self):
self.client.logout()
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
response = self.client.get(self.change_url)
inner_formset = response.context[-1]['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
inner = Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get('/test_admin/admin/admin_inlines/holder/%i/'
% holder.id)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get('/test_admin/admin/admin_inlines/author/add/')
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author-Book Relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't cary her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': u'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post('/test_admin/admin/admin_inlines/fashionista/add/', data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_non_field_errors(self):
"""
Ensure that non_field_errors are displayed correctly, including the
right value for colspan. Refs #13510.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': u'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post('/test_admin/admin/admin_inlines/titlecollection/add/', data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbock.
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>The two titles must be the same</li></ul></td></tr>')
def test_no_parent_callable_lookup(self):
    """Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
    # No identically named callable exists on the parent ModelAdmin, so
    # rendering the add view must not explode.
    add_page = self.client.get('/test_admin/admin/admin_inlines/novel/add/')
    self.assertEqual(add_page.status_code, 200)
    # The child inline section is still rendered.
    self.assertContains(add_page, '<div class="inline-group" id="chapter_set-group">')
def test_callable_lookup(self):
    """Admin inline should invoke local callable when its name is listed in readonly_fields"""
    add_page = self.client.get('/test_admin/admin/admin_inlines/poll/add/')
    self.assertEqual(add_page.status_code, 200)
    # The add view for the parent object contains the child inline section.
    self.assertContains(add_page, '<div class="inline-group" id="question_set-group">')
    # The right callable is used for the inline readonly_fields column cells.
    self.assertContains(add_page, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
    """
    The inlines' model field help texts are shown for both the stacked
    and the tabular layout. Ref #8190.
    """
    add_page = self.client.get('/test_admin/admin/admin_inlines/holder4/add/')
    # Stacked layout: help text rendered as a paragraph, once per form.
    self.assertContains(add_page, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
    # Tabular layout: help text rendered once as an icon tooltip.
    self.assertContains(add_page, '<img src="/static/admin/img/icon-unknown.gif" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)
class TestInlineMedia(TestCase):
    """JS media declared on the ModelAdmin and/or its inlines is included."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)

    def tearDown(self):
        self.client.logout()

    def test_inline_media_only_base(self):
        # Media declared only on the base ModelAdmin.
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        page = self.client.get(
            '/test_admin/admin/admin_inlines/holder/%i/' % holder.id)
        self.assertContains(page, 'my_awesome_admin_scripts.js')

    def test_inline_media_only_inline(self):
        # Media declared only on the inline.
        holder = Holder3(dummy=13)
        holder.save()
        Inner3(dummy=42, holder=holder).save()
        page = self.client.get(
            '/test_admin/admin/admin_inlines/holder3/%i/' % holder.id)
        self.assertContains(page, 'my_awesome_inline_scripts.js')

    def test_all_inline_media(self):
        # Media declared on both the base ModelAdmin and the inline.
        holder = Holder2(dummy=13)
        holder.save()
        Inner2(dummy=42, holder=holder).save()
        page = self.client.get(
            '/test_admin/admin/admin_inlines/holder2/%i/' % holder.id)
        self.assertContains(page, 'my_awesome_admin_scripts.js')
        self.assertContains(page, 'my_awesome_inline_scripts.js')
class TestInlineAdminForm(TestCase):
    """Regression tests exercising InlineAdminForm directly."""

    def test_immutable_content_type(self):
        """Regression for #9362
        The problem depends only on InlineAdminForm and its "original"
        argument, so we can safely set the other arguments to None/{}. We just
        need to check that the content_type argument of Child isn't altered by
        the internals of the inline form."""
        sally = Teacher.objects.create(name='Sally')
        john = Parent.objects.create(name='John')
        joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
        inline_form = InlineAdminForm(None, None, {}, {}, joe)
        expected_ct = ContentType.objects.get_for_model(Parent)
        self.assertEqual(inline_form.original.content_type, expected_ct)
| {
"content_hash": "8a3e1c7e65756ce7b76560d741fa0bf9",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 184,
"avg_line_length": 45.51497005988024,
"alnum_prop": 0.6492566767530588,
"repo_name": "disqus/django-old",
"id": "61d4634f8bf5d071f82e0358ff549dc2c3a1edb3",
"size": "7601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regressiontests/admin_inlines/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "85749"
},
{
"name": "Python",
"bytes": "7413553"
},
{
"name": "Shell",
"bytes": "9076"
}
],
"symlink_target": ""
} |
"""
Django settings for lowfat project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
from collections import OrderedDict
# Project source repository and release version; exposed to templates via
# SETTINGS_EXPORT (django-settings-export).
URL_SRC = "https://github.com/softwaresaved/lowfat"
VERSION = "1.18.2"

# Settings made available inside templates by django-settings-export.
SETTINGS_EXPORT = [
    'URL_SRC',
    'VERSION',
]

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control - rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '_iy7)5@ids_q5m(b4!q$-)ie)&-943zx37$+9-9b#988^*f-+4'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Must list the served hostnames when DEBUG is False.
ALLOWED_HOSTS = []
# Application definition
# Application definition

# Django contrib apps, third-party apps, then the project app last so its
# templates/statics can be overridden by the earlier entries.
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'django.contrib.flatpages',
    'django_countries',
    'tagulous',
    'autofixture',
    'crispy_forms',
    'social_django',
    'dbbackup',
    'constance',
    'constance.backends.database',
    'django_extensions',
    'datetimewidget',
    'simple_history',
    'lowfat',
]

# Request/response middleware, applied in order.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'simple_history.middleware.HistoryRequestMiddleware',
]
ROOT_URLCONF = 'lowfat.urls'

# Template engine: app-directory template loading plus the context
# processors required by the admin, social auth, constance, and the
# project's own context helpers.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django_settings_export.settings_export',
                'social_django.context_processors.backends',
                'constance.context_processors.config',
                'lowfat.context.site',
                'lowfat.context.maintenance',
                'lowfat.context.organisation',
            ],
        },
    },
]

WSGI_APPLICATION = 'lowfat.wsgi.application'

# Serializers overridden with the tagulous variants so tag fields survive
# dumpdata/loaddata round trips.
SERIALIZATION_MODULES = {
    'xml': 'tagulous.serializers.xml_serializer',
    'json': 'tagulous.serializers.json',
    'python': 'tagulous.serializers.python',
    'yaml': 'tagulous.serializers.pyyaml',
}
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases

# SQLite database stored next to the project code (development setup).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Logging
# https://docs.djangoproject.com/en/1.11/ref/settings/#logging

# Everything at INFO and above goes to a weekly-rotated file ('when': 'W6'
# rotates on Sunday), keeping four rotated backups.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'file': {
            'level': 'INFO',
            'class': 'logging.handlers.TimedRotatingFileHandler',
            'filename': os.path.join(BASE_DIR, 'lowfat.log'),
            'when': 'W6',
            'backupCount': 4,
            'formatter': 'timestamped',
        },
    },
    'loggers': {
        # Root logger: catch all modules.
        '': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': True,
        },
    },
    'formatters': {
        'timestamped': {
            'format': '[{asctime} {levelname} {module}.{funcName}:{lineno}] {message}',
            'style': '{',
        }
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# GitHub OAuth2 login first, falling back to Django's own user database.
AUTHENTICATION_BACKENDS = (
    'social_core.backends.github.GithubOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)

# python-social-auth pipeline: the defaults plus a final project step that
# links the social account to a lowfat profile.
SOCIAL_AUTH_PIPELINE = (
    'social_core.pipeline.social_auth.social_details',  # default pipeline
    'social_core.pipeline.social_auth.social_uid',  # default pipeline
    'social_core.pipeline.social_auth.auth_allowed',  # default pipeline
    'social_core.pipeline.social_auth.social_user',  # default pipeline
    'social_core.pipeline.user.get_username',  # default pipeline
    'social_core.pipeline.user.create_user',  # default pipeline
    'social_core.pipeline.social_auth.associate_user',  # default pipeline
    'social_core.pipeline.social_auth.load_extra_data',  # default pipeline
    'social_core.pipeline.user.user_details',  # default pipeline
    'lowfat.auth.wire_profile',
)

# OAuth application credentials - empty here; presumably overridden per
# deployment (TODO confirm how these are injected in production).
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
# Localized formatting disabled so DATE_FORMAT/DATETIME_FORMAT below apply.
USE_L10N = False
# Naive datetimes are used throughout (no timezone conversion).
USE_TZ = False
DATE_FORMAT = "l, d F Y"  # British English style
DATETIME_FORMAT = "l, d F Y"  # British English style

CRISPY_TEMPLATE_PACK = 'bootstrap3'

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'

# Stored files
# https://docs.djangoproject.com/en/1.9/ref/settings/#media-url
MEDIA_URL = '/upload/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'upload')

# Authentication system
# https://docs.djangoproject.com/en/1.9/topics/auth/default/
LOGIN_URL = '/login/'  # The URL where requests are redirected for login, especially when using the login_required() decorator.
LOGIN_REDIRECT_URL = '/dashboard/'

# Email
# Email backend for development (print on console)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Email backend for development (save on file)
# EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
# EMAIL_FILE_PATH = '/tmp/lowfat-emails'

# Default email address to use for various automated correspondence from the site manager(s).
DEFAULT_FROM_EMAIL = 'no-reply@software.ac.uk'
# The email address that error messages come from.
SERVER_EMAIL = 'no-reply@software.ac.uk'
# A list of all the people who get code error notifications.
ADMINS = [
    ('admin', 'admin@software.ac.uk'),
]
# Subject-line prefix for email messages sent
EMAIL_SUBJECT_PREFIX = ""

# Backup (django-dbbackup): backups written to a local directory.
DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
DBBACKUP_STORAGE_OPTIONS = {
    'location': os.path.join(BASE_DIR, 'backups'),
}
DBBACKUP_GPG_ALWAYS_TRUST = True
DBBACKUP_GPG_RECIPIENT = ""  # XXX This variable need to be filled for --encrypt or --decrypt work properly.
# Run time variables
# Powered by Constance (django-constance): each key below is editable at
# run time in the admin and persisted by the database backend.
CONSTANCE_BACKEND = 'constance.backends.database.DatabaseBackend'
CONSTANCE_IGNORE_ADMIN_VERSION_CHECK = True

# Maps a config key to a (default value, admin help text) pair.
# BUGFIX: the original listed "STAFFS_EMAIL" twice; in an OrderedDict the
# second entry silently overwrote the first, so the duplicate is removed.
# Typos in several help texts are also corrected.
CONSTANCE_CONFIG = OrderedDict([
    ("ORGANISATION_NAME", (
        "Software Sustainability Institute",
        "Default organisation name.",
    )),
    ("ORGANISATION_WEBSITE", (
        "https://www.software.ac.uk/",
        "Default organisation website.",
    )),
    ("FUNDS_FROM_DEFAULT", (
        "F",
        "Default funds used for expense.",
    )),
    ("GRANTS_DEFAULT", (
        "SSI3",
        "Default grant for expenses.",
    )),
    ("FELLOWS_MANAGEMENT_EMAIL", (
        "fellows-management@software.ac.uk",
        "Contact address to fellows management staffs.",
    )),
    ("ONETIME_APPROVAL_EMAIL", (
        "fellows-management@software.ac.uk",
        "Address for approval of one-time requests.",
    )),
    ("WEBSITE_GATEKEEPER", (
        "Gatekeeper Name",
        "Name of website gatekeeper, e.g. 'John'.",
    )),
    ("WEBSITE_GATEKEEPER_EMAIL", (
        "gatekeeper@software.ac.uk",
        "Email of website gatekeeper, e.g. 'john@software.ac.uk'.",
    )),
    ("STAFFS_EMAIL", (
        "['Software Sustainability Institute <fellows-management@software.ac.uk>']",
        "Contact address of staffs, e.g. ['John <john@example.com>', 'Mary <mary@example.com>'].",
    )),
    ("STAFF_EMAIL_NOTIFICATION", (
        False,
        "Notification to staffs by email.",
    )),
    ("STAFF_EMAIL_REMINDER", (
        False,
        "Reminder staffs of pending tasks by email.",
    )),
    ("DAYS_TO_ANSWER_BACK", (
        3,
        "Days to answer back before receiving an email reminder.",
    )),
    ("STAFF_EMAIL_FOLLOW_UP", (
        False,
        "Reminder staff of tasks to follow up with by email.",
    )),
    ("FOLLOW_UP_DAY", (
        0,
        "Weekday to send follow up email to staff. 0 for Monday.",
    )),
    ("CLAIMANT_EMAIL_NOTIFICATION", (
        False,
        "Notification to claimant by email.",
    )),
    ("MAINTENANCE_DAY", (
        4,
        "Day when maintenance normally takes place.",
    )),
    ("MAINTENANCE_HOUR", (
        9,
        "Hour when maintenance normally takes place.",
    )),
    ("FELLOWSHIP_EXPENSES_END_DAY", (
        31,
        "Day deadline that expenses must be submitted.",
    )),
    ("FELLOWSHIP_EXPENSES_END_MONTH", (
        3,
        "Month deadline that expenses must be submitted.",
    )),
    ("PRE_APPROVED_FUNDING_REQUEST_BUDGET", (
        250,
        "Maximum budget for pre approved funding requests, e.g. 250.",
    )),
    # SECURITY WARNING: this default token is committed to source control;
    # rotate it and override the value in the deployed database.
    ("CALENDAR_ACCESS_TOKEN", (
        "lKI7BSE7JCWaIw54xywVZy8zRTKLqOM3",
        "Token to access the calendar.",
    )),
])
# Flatpages
# django.contrib.sites identifier used by the flatpages/sites framework.
SITE_ID = 1
| {
"content_hash": "6b4e38882a9966b0a3a7169481b9c53d",
"timestamp": "",
"source": "github",
"line_count": 360,
"max_line_length": 127,
"avg_line_length": 28.988888888888887,
"alnum_prop": 0.6422958988118053,
"repo_name": "softwaresaved/fat",
"id": "51a5f19fd9383bbef6818825ed6bbd542ef06aa2",
"size": "10436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lowfat/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3202"
},
{
"name": "HTML",
"bytes": "38552"
},
{
"name": "JavaScript",
"bytes": "653"
},
{
"name": "Python",
"bytes": "235043"
},
{
"name": "Shell",
"bytes": "1346"
}
],
"symlink_target": ""
} |
'''
location
----------
* /asyncread
feature
----------
* Process-level global variable
* Asynchronous Read
* Decorator Demo
'''
import os
import time
import tornado.web
from tornado.web import HTTPError
from tornado.httpclient import AsyncHTTPClient
from tornado import gen
from util import g_logger
from util import HttpUtil
from util import decorator as util_decorator
from base import BaseHandler
from domain.object.error import ErrorCode as ECODE
from domain.object.error import BaseError
class AsyncReadHandler(BaseHandler):
@util_decorator.validate_ip(_logger=g_logger)
@tornado.web.asynchronous
@gen.engine
def get(self):
self.name = self._check_argument('name', expect_types=(str, unicode))
urlist = ['http://www.example.com', 'http://www.google.com']
try:
for url in urlist:
request = tornado.httpclient.HTTPRequest(url,
method='GET',
connect_timeout=1,
request_timeout=5,
user_agent='TNS-SPIDER',
)
http_client = AsyncHTTPClient()
response = yield gen.Task(http_client.fetch, request)
result = self.__handle_async_request(response)
self.finish(result)
break
else:
self.finish({'e_code':ECODE.HTTP, 'e_msg': 'SUCCESS'})
pass
except HTTPError, e:
g_logger.error(e, exc_info=True)
self.api_response({'e_code':ECODE.HTTP, 'e_msg': '%s' % e})
raise StopIteration
except BaseError, e:
g_logger.error(e, exc_info=True)
self.api_response({'e_code':e.e_code, 'e_msg': '%s' % e})
raise StopIteration
except Exception, e:
g_logger.error(e, exc_info=True)
self.api_response({'e_code':e.e_code, 'e_msg': '%s' % e})
raise StopIteration
def __handle_async_request(self, response):
if response is None:
return None
#g_logger.debug('STACK_CONTEXT\tself.name=%s' % self.name)
g_logger.debug('RESPONSE_ERROR\t%s' % response.error)
g_logger.debug('RESPONSE\t%s' % response)
return response.body
| {
"content_hash": "a45fdcd8a3ebad26e581afd1ebbd116e",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 81,
"avg_line_length": 32.39473684210526,
"alnum_prop": 0.5398050365556458,
"repo_name": "bufferx/tns",
"id": "7ca23ad959ce950d7e2bdd787387eca21d1e86b9",
"size": "3112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tns/web/action/async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "36934"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from azure.mgmt.compute.models import StorageAccountTypes
from mock import Mock, MagicMock
from cloudshell.cp.azure.domain.common.vm_details_provider import VmDetailsProvider
class TestVmDetailsProvider(TestCase):
    """Unit tests for VmDetailsProvider.create() and _get_vm_network_data().

    IMPROVEMENT: assertTrue(a == b) replaced with assertEqual(a, b)
    throughout - on failure, assertEqual reports both values instead of
    just "False is not true".
    """

    def setUp(self):
        self.resource_id_parser = MagicMock()
        self.network_service = MagicMock()
        self.vm_details_provider = VmDetailsProvider(self.network_service, self.resource_id_parser)
        self.logger = MagicMock()
        self.network_client = MagicMock()

    def test_prepare_vm_details_from_market(self):
        # A marketplace VM carries a populated image reference.
        instance = Mock()
        instance.storage_profile.image_reference.publisher = 'Param 1'
        instance.storage_profile.image_reference.offer = 'Param 2'
        instance.storage_profile.image_reference.sku = 'Param 3'
        instance.hardware_profile.vm_size = 'Param 4'
        instance.storage_profile.os_disk.os_type.name = 'Param 5'
        instance.storage_profile.os_disk.managed_disk.storage_account_type = StorageAccountTypes.premium_lrs
        instance.network_profile = Mock()
        instance.network_profile.network_interfaces = MagicMock()

        vm_instance_data = self.vm_details_provider.create(instance, True, self.logger, self.network_client,
                                                           '').vmInstanceData

        self.assertEqual(self._get_value(vm_instance_data, 'Image Publisher'),
                         instance.storage_profile.image_reference.publisher)
        self.assertEqual(self._get_value(vm_instance_data, 'Image Offer'),
                         instance.storage_profile.image_reference.offer)
        self.assertEqual(self._get_value(vm_instance_data, 'Image SKU'),
                         instance.storage_profile.image_reference.sku)
        self.assertEqual(self._get_value(vm_instance_data, 'VM Size'),
                         instance.hardware_profile.vm_size)
        self.assertEqual(self._get_value(vm_instance_data, 'Operating System'),
                         instance.storage_profile.os_disk.os_type.name)
        # premium_lrs storage is reported as 'SSD'.
        self.assertEqual(self._get_value(vm_instance_data, 'Disk Type'), 'SSD')

    def test_prepare_vm_details_from_image(self):
        # A custom-image VM resolves image name/group via the id parser.
        instance = Mock()
        instance.network_profile = Mock()
        instance.network_profile.network_interfaces = MagicMock()
        resource_group = 'Group 1'
        self.resource_id_parser.get_image_name = Mock(return_value='Image Name')
        self.resource_id_parser.get_resource_group_name = Mock(return_value=resource_group)
        instance.hardware_profile.vm_size = 'Param 4'
        instance.storage_profile.os_disk.os_type.name = 'Param 5'
        instance.storage_profile.os_disk.managed_disk.storage_account_type = StorageAccountTypes.premium_lrs

        vm_instance_data = self.vm_details_provider.create(instance, False, self.logger, self.network_client,
                                                           resource_group).vmInstanceData

        self.assertEqual(self._get_value(vm_instance_data, 'Image'), 'Image Name')
        self.assertEqual(self._get_value(vm_instance_data, 'Image Resource Group'), resource_group)
        self.assertEqual(self._get_value(vm_instance_data, 'VM Size'),
                         instance.hardware_profile.vm_size)
        self.assertEqual(self._get_value(vm_instance_data, 'Operating System'),
                         instance.storage_profile.os_disk.os_type.name)
        self.assertEqual(self._get_value(vm_instance_data, 'Disk Type'), 'SSD')

    def test_prepare_vm_network_data_single_nic(self):
        # Build one NIC with a single private/public IP configuration.
        network_interface = Mock()
        network_interface.resource_guid = 'Param Guid'
        network_interface.name = 'Param Name'
        network_interface.primary = True
        network_interface.mac_address = 'Mac Param'
        ip_configuration = Mock()
        ip_configuration.private_ip_address = 'Param Ip Address'
        ip_configuration.public_ip_address = Mock()
        ip_configuration.subnet.id = 'a/a'
        network_interface.ip_configurations = [ip_configuration]
        resource_group = 'Group 1'
        self.network_client.network_interfaces.get = Mock(return_value=network_interface)
        nic = Mock()
        nic.id = "/azure_resource_id/nic_name"
        instance = Mock()
        instance.network_profile = Mock()
        instance.network_profile.network_interfaces = [nic]
        public_ip = Mock()
        public_ip.ip_address = 'Public Address Param'
        public_ip.public_ip_allocation_method = 'Public Method Param'
        self.network_service.get_public_ip = Mock(return_value=public_ip)

        network_interface_objects = self.vm_details_provider._get_vm_network_data(instance, self.network_client,
                                                                                  resource_group, self.logger)

        nic = network_interface_objects[0]
        self.assertEqual(nic.interfaceId, network_interface.resource_guid)
        # networkId is the last path segment of the subnet id.
        self.assertEqual(nic.networkId, ip_configuration.subnet.id.split('/')[-1])
        network_data = nic.networkData
        self.assertEqual(self._get_value(network_data, 'IP'), ip_configuration.private_ip_address)
        self.assertEqual(self._get_value(network_data, 'MAC Address'), network_interface.mac_address)
        self.assertEqual(self._get_value(network_data, 'Public IP'), public_ip.ip_address)
        self.assertEqual(self._get_value(network_data, "Public IP Type"), public_ip.public_ip_allocation_method)

    def _get_value(self, data, key):
        """Return the value of the first item in *data* whose key matches, or None."""
        for item in data:
            if item.key == key:
                return item.value
        return None
| {
"content_hash": "432cc869bd5e953178e0b25736c90553",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 119,
"avg_line_length": 52.18867924528302,
"alnum_prop": 0.6576283441793204,
"repo_name": "QualiSystems/Azure-Shell",
"id": "8b6023d63fc57ff748e6c10a70a65742176b27ef",
"size": "5532",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "package/tests/test_cp/test_azure/test_common/test_vm_details_provider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "906"
},
{
"name": "Python",
"bytes": "654779"
},
{
"name": "Shell",
"bytes": "616"
}
],
"symlink_target": ""
} |
__author__ = 'Riley Flynn (nint8835)'
class PluginNotFoundException(Exception):
    """Raised when something tries to access a plugin that doesn't exist."""
    pass
class MultiplePluginsFoundException(Exception):
    """Raised when multiple plugins match a given name, so a single plugin cannot be returned."""
    pass
class CommandNotFoundException(Exception):
    """Raised when something attempts to get info for a non-existent command from the CommandRegistry."""
    pass
# NOTE(review): class name is misspelled ("Multple") but cannot be renamed
# without breaking callers that catch it by this name.
class MultpleCommandsFoundException(Exception):
    """Raised when something tries to get info for one command, but multiple commands match."""
    pass
| {
"content_hash": "79e6ec185240428a9058cabeed945af9",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 122,
"avg_line_length": 33.714285714285715,
"alnum_prop": 0.7556497175141242,
"repo_name": "nint8835/NintbotForDiscord",
"id": "1a5ec0dd549b421b328885c50a0dddde8522edec",
"size": "708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NintbotForDiscord/Exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103908"
}
],
"symlink_target": ""
} |
from lacore.source.chunked import ChunkedFile as BaseChunkedFile
class ChunkedFile(BaseChunkedFile):
    """Chunked source file with the maximum chunk size capped at 20 MiB."""

    # 20 * 1024 * 1024 == 20971520 bytes (same value, spelled readably).
    maxchunk = 20 * 1024 * 1024
| {
"content_hash": "a86313fa0c0e9383d0f521895d23b11e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 64,
"avg_line_length": 25.4,
"alnum_prop": 0.8188976377952756,
"repo_name": "longaccess/longaccess-client",
"id": "3cbefa8ddbd2f704a105dcae47a9caede1c81207",
"size": "127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lacli/source/chunked.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "15176"
},
{
"name": "PowerShell",
"bytes": "1624"
},
{
"name": "Python",
"bytes": "342473"
},
{
"name": "Shell",
"bytes": "4450"
}
],
"symlink_target": ""
} |
"""
Utility functions for access to OS level info and URI parsing
"""
import collections
import getpass
import logging
import os
import platform
# All systems do not support pwd module
try:
import pwd
except ImportError:
pwd = None
# Python 2 & 3 compatibility
try:
from urllib import parse as _urlparse
except ImportError:
import urlparse as _urlparse
try:
from urllib.parse import unquote
except ImportError:
from urllib import unquote
# Module-level logger.
LOGGER = logging.getLogger(__name__)

# Result type returned by urlparse(): the standard parse fields plus the
# username/password/hostname/port accessors.
PARSED = collections.namedtuple('Parsed',
                                'scheme,netloc,path,params,query,fragment,'
                                'username,password,hostname,port')

# True when running under the PyPy interpreter.
PYPY = platform.python_implementation().lower() == 'pypy'

# Connection keywords that may be passed through a URI query string and
# forwarded as connection kwargs (libpq-style parameter names).
KEYWORDS = ['connect_timeout',
            'client_encoding',
            'options',
            'application_name',
            'fallback_application_name',
            'keepalives',
            'keepalives_idle',
            'keepalives_interval',
            'keepalives_count',
            'sslmode',
            'requiressl',
            'sslcompression',
            'sslcert',
            'sslkey',
            'sslrootcert',
            'sslcrl',
            'requirepeer',
            'krbsrvname',
            'gsslib',
            'service']
def get_current_user():
    """Return the current username for the logged in user.

    Uses the ``pwd`` database when available, falling back to
    :func:`getpass.getuser` on platforms without ``pwd``. Returns ``None``
    if the uid cannot be resolved.

    :rtype: str

    """
    if pwd is None:
        return getpass.getuser()
    try:
        return pwd.getpwuid(os.getuid())[0]
    except KeyError as error:
        LOGGER.error('Could not get logged-in user: %s', error)
def parse_qs(query_string):
    """Parse a URI query string into a dict, python2/3 agnostic.

    :param str query_string: The URI query string
    :rtype: dict

    """
    parsed = _urlparse.parse_qs(query_string)
    return parsed
def uri(host='localhost', port=5432, dbname='postgres', user='postgres',
        password=None):
    """Build a PostgreSQL connection URI from the given components.

    :param str host: Host to connect to
    :param int port: Port to connect on (omitted from the URI when falsy)
    :param str dbname: The database name
    :param str user: User to connect as
    :param str password: The password to use, None for no password
    :return str: The PostgreSQL connection URI

    """
    netloc = '{0}:{1}'.format(host, port) if port else host
    auth = '{0}:{1}'.format(user, password) if password else user
    return 'postgresql://{0}@{1}/{2}'.format(auth, netloc, dbname)
def uri_to_kwargs(uri):
    """Return a URI as kwargs for connecting to PostgreSQL with psycopg2,
    applying default values for non-specified areas of the URI.

    :param str uri: The connection URI
    :rtype: dict

    """
    parsed = urlparse(uri)
    default_user = get_current_user()
    kwargs = {
        'host': parsed.hostname,
        'port': parsed.port,
        'dbname': parsed.path[1:] or default_user,
        'user': parsed.username or default_user,
        'password': unquote(parsed.password) if parsed.password else None,
    }
    query_values = parse_qs(parsed.query)
    # An explicit host in the query string overrides the URI netloc host.
    if 'host' in query_values:
        kwargs['host'] = query_values['host'][0]
    for keyword in (k for k in query_values if k in KEYWORDS):
        values = query_values[keyword]
        kwargs[keyword] = values[0] if len(values) == 1 else values
        # Convert purely numeric strings to ints; lists have no isdigit.
        try:
            if kwargs[keyword].isdigit():
                kwargs[keyword] = int(kwargs[keyword])
        except AttributeError:
            pass
    return kwargs
def urlparse(url):
    """Parse the URL in a Python2/3 independent fashion.

    The 'postgresql' scheme is temporarily swapped for 'http' so the
    stdlib applies its netloc-splitting rules, then swapped back in the
    returned tuple.

    BUGFIX: the original guard ``url[:5] == 'postgresql'`` compared a
    5-character slice against a 10-character literal, so it was always
    False and the scheme swap never ran. Under Python 3 the results are
    unchanged; under old Python 2 urlparse the netloc was not split.

    :param str url: The URL to parse
    :rtype: Parsed

    """
    value = 'http%s' % url[10:] if url.startswith('postgresql') else url
    parsed = _urlparse.urlparse(value)
    path, query = parsed.path, parsed.query
    hostname = parsed.hostname if parsed.hostname else ''
    return PARSED(parsed.scheme.replace('http', 'postgresql'),
                  parsed.netloc,
                  path,
                  parsed.params,
                  query,
                  parsed.fragment,
                  parsed.username,
                  parsed.password,
                  # libpq encodes '/' in socket-dir hosts as %2F; undo it.
                  hostname.replace('%2F', '/').replace('%2f', '/'),
                  parsed.port)
| {
"content_hash": "d1cf3ad82cd0ca31c5e127a6007d95f3",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 75,
"avg_line_length": 28.113333333333333,
"alnum_prop": 0.5816931467868153,
"repo_name": "gmr/queries",
"id": "b729c9c59b94ee5bd5a3ca2a9a5e77f177353fd2",
"size": "4217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "queries/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "121484"
},
{
"name": "Shell",
"bytes": "753"
}
],
"symlink_target": ""
} |
import torch.nn as nn
from utils import *
class linknet(nn.Module):
    """LinkNet-style encoder/decoder segmentation network.

    Encoder is a ResNet-18-shaped stack of residual blocks; the decoder
    up-blocks add skip connections from the matching encoder stages.

    BUGFIXES vs. the original:
    * all four decoder up-blocks were assigned to ``self.decoder4``, so
      ``decoder1``..``decoder3`` never existed and ``forward`` crashed
      with AttributeError;
    * channel counts used true division (``/``), which yields floats on
      Python 3 - replaced with integer division (``//``).
    """

    def __init__(self, feature_scale=4, n_classes=21, is_deconv=True,
                 in_channels=3, is_batchnorm=True):
        super(linknet, self).__init__()
        self.is_deconv = is_deconv
        # NOTE(review): in_channels is stored but the first conv hardcodes
        # 3 input channels - confirm intended behaviour before relying on it.
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        self.layers = [2, 2, 2, 2]  # Currently hardcoded for ResNet-18

        filters = [64, 128, 256, 512]
        # Integer division: layer widths must be ints.
        filters = [x // self.feature_scale for x in filters]
        self.inplanes = filters[0]

        # Encoder
        self.convbnrelu1 = conv2DBatchNormRelu(in_channels=3, k_size=7, n_filters=64,
                                               padding=3, stride=2, bias=False)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        block = residualBlock
        self.encoder1 = self._make_layer(block, filters[0], self.layers[0])
        self.encoder2 = self._make_layer(block, filters[1], self.layers[1], stride=2)
        self.encoder3 = self._make_layer(block, filters[2], self.layers[2], stride=2)
        self.encoder4 = self._make_layer(block, filters[3], self.layers[3], stride=2)
        self.avgpool = nn.AvgPool2d(7)

        # Decoder (one up-block per encoder stage; names now distinct)
        self.decoder4 = linknetUp(filters[3], filters[2])
        self.decoder3 = linknetUp(filters[2], filters[1])
        self.decoder2 = linknetUp(filters[1], filters[0])
        self.decoder1 = linknetUp(filters[0], filters[0])

        # Final Classifier
        final_filters = 32 // feature_scale
        self.finaldeconvbnrelu1 = nn.Sequential(
            nn.ConvTranspose2d(filters[0], final_filters, 3, 2, 1),
            nn.BatchNorm2d(final_filters),
            nn.ReLU(inplace=True),
        )
        self.finalconvbnrelu2 = conv2DBatchNormRelu(in_channels=final_filters, k_size=3,
                                                    n_filters=final_filters,
                                                    padding=1, stride=1)
        self.finalconv3 = nn.Conv2d(final_filters, n_classes, 2, 2, 0)

    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks, downsampling when shape changes."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(nn.Conv2d(self.inplanes, planes * block.expansion,
                                                 kernel_size=1, stride=stride, bias=False),
                                       nn.BatchNorm2d(planes * block.expansion),)
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward(self, x):
        # Encoder
        x = self.convbnrelu1(x)
        x = self.maxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)

        # Decoder with skip connections from the matching encoder stage
        d4 = self.decoder4(e4)
        d4 += e3
        d3 = self.decoder3(d4)
        d3 += e2
        d2 = self.decoder2(d3)
        d2 += e1
        d1 = self.decoder1(d2)

        # Final classification head
        f1 = self.finaldeconvbnrelu1(d1)
        f2 = self.finalconvbnrelu2(f1)
        f3 = self.finalconv3(f2)
        return f3
| {
"content_hash": "d77fdcd6771c82d406e092fd64e47b2a",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 140,
"avg_line_length": 38.13953488372093,
"alnum_prop": 0.5801829268292683,
"repo_name": "ibadami/pytorch-semseg",
"id": "26faa6c40d388bf9dcb2998e0796edc64bc9d47f",
"size": "3280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ptsemseg/models/linknet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62160"
}
],
"symlink_target": ""
} |
from office365.directory.object import DirectoryObject
class ResourceSpecificPermissionGrant(DirectoryObject):
    """
    Declares the permission that has been granted to a specific Azure AD app for an instance of a resource
    in Microsoft Graph.
    """
    # Marker entity: all behaviour is inherited from DirectoryObject.
    pass
| {
"content_hash": "249a71450ad4d1c36cde7ede172f5ad4",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 106,
"avg_line_length": 29.88888888888889,
"alnum_prop": 0.758364312267658,
"repo_name": "vgrem/Office365-REST-Python-Client",
"id": "067a513c354cfc5507759931c23e59dd618b89a6",
"size": "269",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "office365/directory/resource_specific_permission_grant.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1659292"
}
],
"symlink_target": ""
} |
"""
MiniNExT
See README for details
"""
| {
"content_hash": "835b1669d56f59911bbcc47aa71ce5fd",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 22,
"avg_line_length": 10,
"alnum_prop": 0.675,
"repo_name": "USC-NSL/miniNExT",
"id": "77e329ea4074657ffeb6f8278d12a64283b8f835",
"size": "40",
"binary": false,
"copies": "1",
"ref": "refs/heads/1.4.0",
"path": "__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9873"
},
{
"name": "Makefile",
"bytes": "1966"
},
{
"name": "Python",
"bytes": "50058"
},
{
"name": "Shell",
"bytes": "899"
}
],
"symlink_target": ""
} |
"""
WSGI config for pums project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pums.settings")
application = get_wsgi_application()
| {
"content_hash": "41d6b0271298d04a1c8c45fd86438b60",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.0625,
"alnum_prop": 0.7662337662337663,
"repo_name": "buaawp/pums",
"id": "1baee86a4b99a12928d2ded89a1e52985a8a08d6",
"size": "385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pums/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18014"
},
{
"name": "HTML",
"bytes": "13186"
},
{
"name": "JavaScript",
"bytes": "6154"
},
{
"name": "Python",
"bytes": "22365"
}
],
"symlink_target": ""
} |
import logging
import numpy as np
from pybar.scans.scan_digital import DigitalScan
from pybar.fei4.register_utils import invert_pixel_mask
from pybar.analysis.analyze_raw_data import AnalyzeRawData
from pybar.run_manager import RunManager
from pybar.analysis.plotting.plotting import plot_occupancy
class StuckPixelTuning(DigitalScan):
    '''Stuck pixel scan to detect and disable stuck pixels (Hitbus/HitOR always high).
    '''
    _default_run_conf = DigitalScan._default_run_conf.copy()
    # BUG FIX: the original rebound _default_run_conf to a brand-new dict on
    # the very next statement, silently discarding every default inherited
    # from DigitalScan via the copy() above. Updating the copy in place keeps
    # the base-class defaults and only overrides/adds the keys below.
    _default_run_conf.update({
        "broadcast_commands": True,
        "threaded_scan": True,
        "mask_steps": 3,  # mask steps
        "n_injections": 100,  # number of injections
        "use_enable_mask": False,  # if True, use Enable mask during scan, if False, all pixels will be enabled
        "disable_for_mask": ['Enable'],  # list of masks for which noisy pixels will be disabled
        "enable_for_mask": ['Imon'],  # list of masks for which noisy pixels will be enabled
        "overwrite_mask": False  # if True, overwrite existing masks
    })

    def configure(self):
        '''Put the front-end into ConfMode and zero both injection capacitor
        registers (C_High, C_Low) so no charge is injected; only pixels whose
        Hitbus/HitOR is stuck high will then produce hits.'''
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        pixel_reg = "C_High"
        self.register.set_pixel_register_value(pixel_reg, 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name=pixel_reg))
        pixel_reg = "C_Low"
        self.register.set_pixel_register_value(pixel_reg, 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name=pixel_reg))
        self.register_utils.send_commands(commands)

    def analyze(self):
        '''Interpret the raw data, derive the stuck-pixel mask from the
        occupancy histogram and fold it into the configured pixel registers.'''
        with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
            analyze_raw_data.create_source_scan_hist = True
            analyze_raw_data.interpreter.set_warning_output(False)
            analyze_raw_data.create_tot_hist = False
            analyze_raw_data.interpret_word_table()
            analyze_raw_data.plot_histograms()
            analyze_raw_data.interpreter.print_summary()
            occ_hist = analyze_raw_data.out_file_h5.root.HistOcc[:, :, 0].T
            occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # Flag pixels that registered fewer hits than injections
            # (original comment called these "noisy"; here they are the
            # stuck-pixel candidates this scan selects).
            occ_mask[occ_hist < self.n_injections] = 1
            # make inverse
            inv_occ_mask = invert_pixel_mask(occ_mask)
            if self.overwrite_mask:
                for mask in self.disable_for_mask:
                    self.register.set_pixel_register_value(mask, inv_occ_mask)
            else:
                for mask in self.disable_for_mask:
                    # AND with the existing mask so previously disabled
                    # pixels stay disabled.
                    enable_mask = np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, enable_mask)
            if self.overwrite_mask:
                for mask in self.enable_for_mask:
                    self.register.set_pixel_register_value(mask, occ_mask)
            else:
                for mask in self.enable_for_mask:
                    # OR with the existing mask so previously enabled
                    # pixels stay enabled.
                    disable_mask = np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, disable_mask)
            plot_occupancy(occ_mask.T, title='Stuck Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
if __name__ == "__main__":
with RunManager('configuration.yaml') as runmngr:
runmngr.run_run(StuckPixelTuning)
| {
"content_hash": "6c9d081f63ba19843c5609a957b0b678",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 154,
"avg_line_length": 51.3625,
"alnum_prop": 0.625699683621319,
"repo_name": "SiLab-Bonn/pyBAR",
"id": "44d83ff444788e5ebf3b7bc694b1efb1f33325e6",
"size": "4109",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pybar/scans/tune_stuck_pixel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "4666"
},
{
"name": "Python",
"bytes": "1117928"
},
{
"name": "SystemVerilog",
"bytes": "23473"
},
{
"name": "Tcl",
"bytes": "5086"
},
{
"name": "Verilog",
"bytes": "246285"
}
],
"symlink_target": ""
} |
import functools
import uuid
import django.utils.timezone
from django.db import migrations, models
import eventstore.validators
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds the ``Covid19TriageStart`` event
    model (UUID primary key, validated msisdn, source, indexed timestamp,
    created_by). Do not hand-edit field definitions; Django's migration
    autodetector compares them against model state."""
    dependencies = [("eventstore", "0040_momconnectimport_last_uploaded_row")]
    operations = [
        migrations.CreateModel(
            name="Covid19TriageStart",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                (
                    "msisdn",
                    models.CharField(
                        max_length=255,
                        validators=[
                            # functools.partial pins the phone-number
                            # validator to South African ('ZA') rules.
                            functools.partial(
                                eventstore.validators._phone_number,
                                *(),
                                **{"country": "ZA"}
                            )
                        ],
                    ),
                ),
                ("source", models.CharField(max_length=255)),
                (
                    "timestamp",
                    models.DateTimeField(
                        db_index=True, default=django.utils.timezone.now
                    ),
                ),
                (
                    "created_by",
                    models.CharField(blank=True, default="", max_length=255),
                ),
            ],
        )
    ]
| {
"content_hash": "b7643875cf22ad319073364372e2fb37",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 29.641509433962263,
"alnum_prop": 0.3723742838956079,
"repo_name": "praekeltfoundation/ndoh-hub",
"id": "2828ddacd4dc7dc8ad43e982e33e9771f534ff08",
"size": "1621",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "eventstore/migrations/0041_covid19triagestart.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "450"
},
{
"name": "HTML",
"bytes": "2200"
},
{
"name": "Python",
"bytes": "957306"
},
{
"name": "Shell",
"bytes": "2796"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import boto3
from moto import mock_secretsmanager
from botocore.exceptions import ClientError
import sure # noqa
import string
import unittest
from nose.tools import assert_raises
@mock_secretsmanager
def test_get_secret_value():
    """A stored secret can be read back by its name."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    client.create_secret(Name='java-util-test-password',
                         SecretString="foosecret")
    fetched = client.get_secret_value(SecretId='java-util-test-password')
    assert fetched['SecretString'] == 'foosecret'
@mock_secretsmanager
def test_get_secret_that_does_not_exist():
    """Fetching an unknown secret id raises ClientError."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    with assert_raises(ClientError):
        client.get_secret_value(SecretId='i-dont-exist')
@mock_secretsmanager
def test_create_secret():
    """create_secret returns the moto-synthesized ARN and the name, and the
    secret is retrievable afterwards."""
    client = boto3.client('secretsmanager', region_name='us-east-1')
    created = client.create_secret(Name='test-secret', SecretString="foosecret")
    assert created['ARN'] == (
        'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
    assert created['Name'] == 'test-secret'
    fetched = client.get_secret_value(SecretId='test-secret')
    assert fetched['SecretString'] == 'foosecret'
@mock_secretsmanager
def test_get_random_password_default_length():
    """With no arguments the generated password is 32 characters long."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    response = client.get_random_password()
    assert len(response['RandomPassword']) == 32
@mock_secretsmanager
def test_get_random_password_default_requirements():
    # When require_each_included_type, default true
    client = boto3.client('secretsmanager', region_name='us-west-2')
    password = client.get_random_password()['RandomPassword']
    # Should contain lowercase, upppercase, digit, special character
    assert any(c.islower() for c in password)
    assert any(c.isupper() for c in password)
    assert any(c.isdigit() for c in password)
    assert any(c in string.punctuation for c in password)
@mock_secretsmanager
def test_get_random_password_custom_length():
    """PasswordLength controls the generated password's length."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    response = client.get_random_password(PasswordLength=50)
    assert len(response['RandomPassword']) == 50
@mock_secretsmanager
def test_get_random_exclude_lowercase():
    """ExcludeLowercase=True yields a password without lowercase letters."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=55,
                                               ExcludeLowercase=True)
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c.islower() for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_exclude_uppercase():
    """ExcludeUppercase=True yields a password without uppercase letters."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=55,
                                               ExcludeUppercase=True)
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c.isupper() for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_exclude_characters_and_symbols():
    """ExcludeCharacters removes every listed character from the output."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=20,
                                               ExcludeCharacters='xyzDje@?!.')
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c in 'xyzDje@?!.' for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_exclude_numbers():
    """ExcludeNumbers=True yields a password without digits."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=100,
                                               ExcludeNumbers=True)
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c.isdigit() for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_exclude_punctuation():
    """ExcludePunctuation=True yields a password without punctuation."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=100,
                                               ExcludePunctuation=True)
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c in string.punctuation
                   for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_include_space_false():
    """By default (IncludeSpace unset) the password contains no spaces."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=300)
    # PEP 8 (E712): assert the negation directly instead of '== False'.
    assert not any(c.isspace() for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_include_space_true():
    """IncludeSpace=True produces at least one space character."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=4,
                                               IncludeSpace=True)
    # PEP 8 (E712): assert the truthy value directly instead of '== True'.
    assert any(c.isspace() for c in random_password['RandomPassword'])
@mock_secretsmanager
def test_get_random_require_each_included_type():
    """RequireEachIncludedType=True guarantees one character of each class
    (punctuation, lowercase, uppercase, digit)."""
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    random_password = conn.get_random_password(PasswordLength=4,
                                               RequireEachIncludedType=True)
    password = random_password['RandomPassword']
    # PEP 8 (E712): assert the truthy value directly instead of '== True'.
    assert any(c in string.punctuation for c in password)
    assert any(c in string.ascii_lowercase for c in password)
    assert any(c in string.ascii_uppercase for c in password)
    assert any(c in string.digits for c in password)
@mock_secretsmanager
def test_get_random_too_short_password():
    """A password length below the service minimum raises ClientError."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    with assert_raises(ClientError):
        client.get_random_password(PasswordLength=3)
@mock_secretsmanager
def test_get_random_too_long_password():
    """An excessively long password length raises."""
    client = boto3.client('secretsmanager', region_name='us-west-2')
    with assert_raises(Exception):
        client.get_random_password(PasswordLength=5555)
| {
"content_hash": "86b2a4751329a0488746305f710e99dd",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 94,
"avg_line_length": 41.03448275862069,
"alnum_prop": 0.6858823529411765,
"repo_name": "Brett55/moto",
"id": "6fefeb56f0de089cb86283d927babe9248e4c48b",
"size": "5950",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_secretsmanager/test_secretsmanager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1143"
},
{
"name": "Python",
"bytes": "4485007"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
} |
"""Array printing function
$Id: arrayprint.py,v 1.9 2005/09/13 13:58:44 teoliphant Exp $
"""
from __future__ import division, absolute_import, print_function
__all__ = ["array2string", "set_printoptions", "get_printoptions"]
__docformat__ = 'restructuredtext'
#
# Written by Konrad Hinsen <hinsenk@ere.umontreal.ca>
# last revision: 1996-3-13
# modified by Jim Hugunin 1997-3-3 for repr's and str's (and other details)
# and by Perry Greenfield 2000-4-1 for numarray
# and by Travis Oliphant 2005-8-22 for numpy
import sys
from functools import reduce
from . import numerictypes as _nt
from .umath import maximum, minimum, absolute, not_equal, isnan, isinf
from .multiarray import (array, format_longfloat, datetime_as_string,
datetime_data)
from .fromnumeric import ravel
from .numeric import asarray
# Python 3 removed sys.maxint; use sys.maxsize as the practical integer bound
# so IntegerFormat can test whether a value fits the '%d' fast path.
if sys.version_info[0] >= 3:
    _MAXINT = sys.maxsize
    _MININT = -sys.maxsize - 1
else:
    _MAXINT = sys.maxint
    _MININT = -sys.maxint - 1
def product(x, y):
    """Multiply two numbers; reduce() helper used to compute array sizes."""
    return x * y
# Module-level printing defaults; mutated in place by set_printoptions()
# and read back by get_printoptions().
_summaryEdgeItems = 3     # repr N leading and trailing items of each dimension
_summaryThreshold = 1000 # total items > triggers array summarization
_float_output_precision = 8
_float_output_suppress_small = False
_line_width = 75
_nan_str = 'nan'
_inf_str = 'inf'
_formatter = None  # formatting function for array elements
def set_printoptions(precision=None, threshold=None, edgeitems=None,
                     linewidth=None, suppress=None,
                     nanstr=None, infstr=None,
                     formatter=None):
    """
    Set printing options.
    These options determine the way floating point numbers, arrays and
    other NumPy objects are displayed.
    Parameters
    ----------
    precision : int, optional
        Number of digits of precision for floating point output (default 8).
    threshold : int, optional
        Total number of array elements which trigger summarization
        rather than full repr (default 1000).
    edgeitems : int, optional
        Number of array items in summary at beginning and end of
        each dimension (default 3).
    linewidth : int, optional
        The number of characters per line for the purpose of inserting
        line breaks (default 75).
    suppress : bool, optional
        Whether or not suppress printing of small floating point values
        using scientific notation (default False).
    nanstr : str, optional
        String representation of floating point not-a-number (default nan).
    infstr : str, optional
        String representation of floating point infinity (default inf).
    formatter : dict of callables, optional
        If not None, the keys should indicate the type(s) that the respective
        formatting function applies to. Callables should return a string.
        Types that are not specified (by their corresponding keys) are handled
        by the default formatters. Individual types for which a formatter
        can be set are::
            - 'bool'
            - 'int'
            - 'timedelta' : a `numpy.timedelta64`
            - 'datetime' : a `numpy.datetime64`
            - 'float'
            - 'longfloat' : 128-bit floats
            - 'complexfloat'
            - 'longcomplexfloat' : composed of two 128-bit floats
            - 'numpy_str' : types `numpy.string_` and `numpy.unicode_`
            - 'str' : all other strings
        Other keys that can be used to set a group of types at once are::
            - 'all' : sets all types
            - 'int_kind' : sets 'int'
            - 'float_kind' : sets 'float' and 'longfloat'
            - 'complex_kind' : sets 'complexfloat' and 'longcomplexfloat'
            - 'str_kind' : sets 'str' and 'numpystr'
    See Also
    --------
    get_printoptions, set_string_function, array2string
    Notes
    -----
    `formatter` is always reset with a call to `set_printoptions`.
    Examples
    --------
    Floating point precision can be set:
    >>> np.set_printoptions(precision=4)
    >>> print np.array([1.123456789])
    [ 1.1235]
    Long arrays can be summarised:
    >>> np.set_printoptions(threshold=5)
    >>> print np.arange(10)
    [0 1 2 ..., 7 8 9]
    Small results can be suppressed:
    >>> eps = np.finfo(float).eps
    >>> x = np.arange(4.)
    >>> x**2 - (x + eps)**2
    array([ -4.9304e-32, -4.4409e-16, 0.0000e+00, 0.0000e+00])
    >>> np.set_printoptions(suppress=True)
    >>> x**2 - (x + eps)**2
    array([-0., -0., 0., 0.])
    A custom formatter can be used to display array elements as desired:
    >>> np.set_printoptions(formatter={'all':lambda x: 'int: '+str(-x)})
    >>> x = np.arange(3)
    >>> x
    array([int: 0, int: -1, int: -2])
    >>> np.set_printoptions() # formatter gets reset
    >>> x
    array([0, 1, 2])
    To put back the default options, you can use:
    >>> np.set_printoptions(edgeitems=3,infstr='inf',
    ... linewidth=75, nanstr='nan', precision=8,
    ... suppress=False, threshold=1000, formatter=None)
    """
    global _summaryThreshold, _summaryEdgeItems, _float_output_precision, \
        _line_width, _float_output_suppress_small, _nan_str, _inf_str, \
        _formatter
    if linewidth is not None:
        _line_width = linewidth
    if threshold is not None:
        _summaryThreshold = threshold
    if edgeitems is not None:
        _summaryEdgeItems = edgeitems
    if precision is not None:
        _float_output_precision = precision
    if suppress is not None:
        # 'not not' coerces any truthy/falsy value to a real bool.
        _float_output_suppress_small = not not suppress
    if nanstr is not None:
        _nan_str = nanstr
    if infstr is not None:
        _inf_str = infstr
    # Unlike the other options, formatter is assigned unconditionally, so
    # omitting it resets any previously installed formatter to None.
    _formatter = formatter
def get_printoptions():
    """
    Return the current print options.
    Returns
    -------
    print_opts : dict
        Dictionary of current print options with keys
        - precision : int
        - threshold : int
        - edgeitems : int
        - linewidth : int
        - suppress : bool
        - nanstr : str
        - infstr : str
        - formatter : dict of callables
        For a full description of these options, see `set_printoptions`.
    See Also
    --------
    set_printoptions, set_string_function
    """
    # Snapshot the module-level option globals into a plain dict.
    return {'precision': _float_output_precision,
            'threshold': _summaryThreshold,
            'edgeitems': _summaryEdgeItems,
            'linewidth': _line_width,
            'suppress': _float_output_suppress_small,
            'nanstr': _nan_str,
            'infstr': _inf_str,
            'formatter': _formatter}
def _leading_trailing(a):
    # Reduce *a* to only its leading and trailing _summaryEdgeItems entries
    # along every dimension; used to build the data sample for summarized
    # ('...') printing without touching the middle of a large array.
    from . import numeric as _nc
    if a.ndim == 1:
        if len(a) > 2*_summaryEdgeItems:
            b = _nc.concatenate((a[:_summaryEdgeItems],
                                 a[-_summaryEdgeItems:]))
        else:
            b = a
    else:
        if len(a) > 2*_summaryEdgeItems:
            # Recurse into the first and last edge items along this axis;
            # the second range walks indices -edgeitems .. -1 in order.
            l = [_leading_trailing(a[i]) for i in range(
                min(len(a), _summaryEdgeItems))]
            l.extend([_leading_trailing(a[-i]) for i in range(
                min(len(a), _summaryEdgeItems), 0, -1)])
        else:
            l = [_leading_trailing(a[i]) for i in range(0, len(a))]
        b = _nc.concatenate(tuple(l))
    return b
def _boolFormatter(x):
if x:
return ' True'
else:
return 'False'
def repr_format(x):
    """Format *x* using its repr(); default formatter for string dtypes."""
    return repr(x)
def _array2string(a, max_line_width, precision, suppress_small, separator=' ',
                  prefix="", formatter=None):
    # Worker for array2string(): resolves option defaults from the module
    # globals, picks a per-dtype element formatter, and delegates the actual
    # layout to _formatArray().
    if max_line_width is None:
        max_line_width = _line_width
    if precision is None:
        precision = _float_output_precision
    if suppress_small is None:
        suppress_small = _float_output_suppress_small
    if formatter is None:
        formatter = _formatter
    if a.size > _summaryThreshold:
        # Large array: format only the edge items with an ellipsis between.
        summary_insert = "..., "
        data = _leading_trailing(a)
    else:
        summary_insert = ""
        data = ravel(asarray(a))
    # Default element formatters, keyed by type kind; several are sized
    # from the sampled data so columns line up.
    formatdict = {'bool' : _boolFormatter,
                  'int' : IntegerFormat(data),
                  'float' : FloatFormat(data, precision, suppress_small),
                  'longfloat' : LongFloatFormat(precision),
                  'complexfloat' : ComplexFormat(data, precision,
                                                 suppress_small),
                  'longcomplexfloat' : LongComplexFormat(precision),
                  'datetime' : DatetimeFormat(data),
                  'timedelta' : TimedeltaFormat(data),
                  'numpystr' : repr_format,
                  'str' : str}
    if formatter is not None:
        # User formatters override the defaults: group keys ('all',
        # '*_kind') are expanded first; specific keys win in the final loop.
        fkeys = [k for k in formatter.keys() if formatter[k] is not None]
        if 'all' in fkeys:
            for key in formatdict.keys():
                formatdict[key] = formatter['all']
        if 'int_kind' in fkeys:
            for key in ['int']:
                formatdict[key] = formatter['int_kind']
        if 'float_kind' in fkeys:
            for key in ['float', 'longfloat']:
                formatdict[key] = formatter['float_kind']
        if 'complex_kind' in fkeys:
            for key in ['complexfloat', 'longcomplexfloat']:
                formatdict[key] = formatter['complex_kind']
        if 'str_kind' in fkeys:
            for key in ['numpystr', 'str']:
                formatdict[key] = formatter['str_kind']
        for key in formatdict.keys():
            if key in fkeys:
                formatdict[key] = formatter[key]
    # find the right formatting function for the array
    dtypeobj = a.dtype.type
    if issubclass(dtypeobj, _nt.bool_):
        format_function = formatdict['bool']
    elif issubclass(dtypeobj, _nt.integer):
        # timedelta64 subclasses integer, so it must be checked first.
        if issubclass(dtypeobj, _nt.timedelta64):
            format_function = formatdict['timedelta']
        else:
            format_function = formatdict['int']
    elif issubclass(dtypeobj, _nt.floating):
        if issubclass(dtypeobj, _nt.longfloat):
            format_function = formatdict['longfloat']
        else:
            format_function = formatdict['float']
    elif issubclass(dtypeobj, _nt.complexfloating):
        if issubclass(dtypeobj, _nt.clongfloat):
            format_function = formatdict['longcomplexfloat']
        else:
            format_function = formatdict['complexfloat']
    elif issubclass(dtypeobj, (_nt.unicode_, _nt.string_)):
        format_function = formatdict['numpystr']
    elif issubclass(dtypeobj, _nt.datetime64):
        format_function = formatdict['datetime']
    else:
        # Object and other unrecognized dtypes fall back to repr.
        format_function = formatdict['numpystr']
    # skip over "["
    next_line_prefix = " "
    # skip over array(
    next_line_prefix += " "*len(prefix)
    # _formatArray appends a trailing newline; strip it with [:-1].
    lst = _formatArray(a, format_function, len(a.shape), max_line_width,
                       next_line_prefix, separator,
                       _summaryEdgeItems, summary_insert)[:-1]
    return lst
def _convert_arrays(obj):
    """Recursively convert ndarrays inside the tuple *obj* to nested lists,
    preserving tuple structure; other items pass through unchanged."""
    from . import numeric as _nc
    converted = []
    for item in obj:
        if isinstance(item, _nc.ndarray):
            converted.append(item.tolist())
        elif isinstance(item, tuple):
            converted.append(_convert_arrays(item))
        else:
            converted.append(item)
    return tuple(converted)
def array2string(a, max_line_width=None, precision=None,
                 suppress_small=None, separator=' ', prefix="",
                 style=repr, formatter=None):
    """
    Return a string representation of an array.
    Parameters
    ----------
    a : ndarray
        Input array.
    max_line_width : int, optional
        The maximum number of columns the string should span. Newline
        characters splits the string appropriately after array elements.
    precision : int, optional
        Floating point precision. Default is the current printing
        precision (usually 8), which can be altered using `set_printoptions`.
    suppress_small : bool, optional
        Represent very small numbers as zero. A number is "very small" if it
        is smaller than the current printing precision.
    separator : str, optional
        Inserted between elements.
    prefix : str, optional
        An array is typically printed as::
            'prefix(' + array2string(a) + ')'
        The length of the prefix string is used to align the
        output correctly.
    style : function, optional
        A function that accepts an ndarray and returns a string. Used only
        when the shape of `a` is equal to ``()``, i.e. for 0-D arrays.
    formatter : dict of callables, optional
        If not None, the keys should indicate the type(s) that the respective
        formatting function applies to. Callables should return a string.
        Types that are not specified (by their corresponding keys) are handled
        by the default formatters. Individual types for which a formatter
        can be set are::
            - 'bool'
            - 'int'
            - 'timedelta' : a `numpy.timedelta64`
            - 'datetime' : a `numpy.datetime64`
            - 'float'
            - 'longfloat' : 128-bit floats
            - 'complexfloat'
            - 'longcomplexfloat' : composed of two 128-bit floats
            - 'numpy_str' : types `numpy.string_` and `numpy.unicode_`
            - 'str' : all other strings
        Other keys that can be used to set a group of types at once are::
            - 'all' : sets all types
            - 'int_kind' : sets 'int'
            - 'float_kind' : sets 'float' and 'longfloat'
            - 'complex_kind' : sets 'complexfloat' and 'longcomplexfloat'
            - 'str_kind' : sets 'str' and 'numpystr'
    Returns
    -------
    array_str : str
        String representation of the array.
    Raises
    ------
    TypeError
        if a callable in `formatter` does not return a string.
    See Also
    --------
    array_str, array_repr, set_printoptions, get_printoptions
    Notes
    -----
    If a formatter is specified for a certain type, the `precision` keyword is
    ignored for that type.
    This is a very flexible function; `array_repr` and `array_str` are using
    `array2string` internally so keywords with the same name should work
    identically in all three functions.
    Examples
    --------
    >>> x = np.array([1e-16,1,2,3])
    >>> print np.array2string(x, precision=2, separator=',',
    ...                       suppress_small=True)
    [ 0., 1., 2., 3.]
    >>> x = np.arange(3.)
    >>> np.array2string(x, formatter={'float_kind':lambda x: "%.2f" % x})
    '[0.00 1.00 2.00]'
    >>> x = np.arange(3)
    >>> np.array2string(x, formatter={'int':lambda x: hex(x)})
    '[0x0L 0x1L 0x2L]'
    """
    if a.shape == ():
        # 0-D array: format the scalar item with the given style function.
        x = a.item()
        if isinstance(x, tuple):
            x = _convert_arrays(x)
        lst = style(x)
    elif reduce(product, a.shape) == 0:
        # treat as a null array if any of shape elements == 0
        lst = "[]"
    else:
        lst = _array2string(a, max_line_width, precision, suppress_small,
                            separator, prefix, formatter=formatter)
    return lst
def _extendLine(s, line, word, max_line_len, next_line_prefix):
if len(line.rstrip()) + len(word.rstrip()) >= max_line_len:
s += line.rstrip() + "\n"
line = next_line_prefix
line += word
return s, line
def _formatArray(a, format_function, rank, max_line_len,
                 next_line_prefix, separator, edge_items, summary_insert):
    """formatArray is designed for two modes of operation:
    1. Full output
    2. Summarized output
    """
    if rank == 0:
        # Scalar: no brackets, just the item's string form.
        obj = a.item()
        if isinstance(obj, tuple):
            obj = _convert_arrays(obj)
        return str(obj)
    if summary_insert and 2*edge_items < len(a):
        # Summarize this axis: edge_items at each end, '...' in between.
        leading_items, trailing_items, summary_insert1 = \
            edge_items, edge_items, summary_insert
    else:
        leading_items, trailing_items, summary_insert1 = 0, len(a), ""
    if rank == 1:
        # Innermost axis: lay elements out on wrapped lines via _extendLine.
        s = ""
        line = next_line_prefix
        for i in range(leading_items):
            word = format_function(a[i]) + separator
            s, line = _extendLine(s, line, word, max_line_len, next_line_prefix)
        if summary_insert1:
            s, line = _extendLine(s, line, summary_insert1, max_line_len, next_line_prefix)
        # The loop stops at index -2; the last element is emitted separately
        # below so it carries no trailing separator.
        for i in range(trailing_items, 1, -1):
            word = format_function(a[-i]) + separator
            s, line = _extendLine(s, line, word, max_line_len, next_line_prefix)
        word = format_function(a[-1])
        s, line = _extendLine(s, line, word, max_line_len, next_line_prefix)
        s += line + "]\n"
        s = '[' + s[len(next_line_prefix):]
    else:
        # Outer axis: recurse per sub-array; '\n'*max(rank-1, 1) leaves
        # blank lines between higher-dimensional sub-blocks.
        s = '['
        sep = separator.rstrip()
        for i in range(leading_items):
            if i > 0:
                s += next_line_prefix
            s += _formatArray(a[i], format_function, rank-1, max_line_len,
                              " " + next_line_prefix, separator, edge_items,
                              summary_insert)
            s = s.rstrip() + sep.rstrip() + '\n'*max(rank-1, 1)
        if summary_insert1:
            s += next_line_prefix + summary_insert1 + "\n"
        for i in range(trailing_items, 1, -1):
            if leading_items or i != trailing_items:
                s += next_line_prefix
            s += _formatArray(a[-i], format_function, rank-1, max_line_len,
                              " " + next_line_prefix, separator, edge_items,
                              summary_insert)
            s = s.rstrip() + sep.rstrip() + '\n'*max(rank-1, 1)
        if leading_items or trailing_items > 1:
            s += next_line_prefix
        s += _formatArray(a[-1], format_function, rank-1, max_line_len,
                          " " + next_line_prefix, separator, edge_items,
                          summary_insert).rstrip()+']\n'
    return s
class FloatFormat(object):
    """Fixed-width formatter for float arrays.

    Scans the sampled data once (fillFormat) to decide between fixed-point
    and exponential notation and to compute a common column width, then
    formats individual elements via __call__.
    """
    def __init__(self, data, precision, suppress_small, sign=False):
        self.precision = precision
        self.suppress_small = suppress_small
        self.sign = sign
        self.exp_format = False
        self.large_exponent = False
        self.max_str_len = 0
        try:
            self.fillFormat(data)
        except (TypeError, NotImplementedError):
            # if reduce(data) fails, this instance will not be called, just
            # instantiated in formatdict.
            pass
    def fillFormat(self, data):
        # Derive self.format / self.special_fmt from the data's magnitude
        # range; exponential notation is chosen for very large or (unless
        # suppressed) very small / wide-ranged values.
        from . import numeric as _nc
        with _nc.errstate(all='ignore'):
            special = isnan(data) | isinf(data)
            valid = not_equal(data, 0) & ~special
            non_zero = absolute(data.compress(valid))
            if len(non_zero) == 0:
                max_val = 0.
                min_val = 0.
            else:
                max_val = maximum.reduce(non_zero)
                min_val = minimum.reduce(non_zero)
            if max_val >= 1.e8:
                self.exp_format = True
            if not self.suppress_small and (min_val < 0.0001
                                            or max_val/min_val > 1000.):
                self.exp_format = True
        if self.exp_format:
            # large_exponent: values needing a 3-digit exponent field.
            self.large_exponent = 0 < min_val < 1e-99 or max_val >= 1e100
            self.max_str_len = 8 + self.precision
            if self.large_exponent:
                self.max_str_len += 1
            if self.sign:
                format = '%+'
            else:
                format = '%'
            format = format + '%d.%de' % (self.max_str_len, self.precision)
        else:
            format = '%%.%df' % (self.precision,)
            if len(non_zero):
                # Trim precision to the digits actually needed by the data.
                precision = max([_digits(x, self.precision, format)
                                 for x in non_zero])
            else:
                precision = 0
            precision = min(self.precision, precision)
            self.max_str_len = len(str(int(max_val))) + precision + 2
            if _nc.any(special):
                # Leave room for the nan/inf strings (inf may carry a sign).
                self.max_str_len = max(self.max_str_len,
                                       len(_nan_str),
                                       len(_inf_str)+1)
            if self.sign:
                format = '%#+'
            else:
                format = '%#'
            format = format + '%d.%df' % (self.max_str_len, precision)
        self.special_fmt = '%%%ds' % (self.max_str_len,)
        self.format = format
    def __call__(self, x, strip_zeros=True):
        # Format one element; nan/inf use special_fmt, finite values use
        # the precomputed self.format with optional exponent fix-ups.
        from . import numeric as _nc
        with _nc.errstate(invalid='ignore'):
            if isnan(x):
                if self.sign:
                    return self.special_fmt % ('+' + _nan_str,)
                else:
                    return self.special_fmt % (_nan_str,)
            elif isinf(x):
                if x > 0:
                    if self.sign:
                        return self.special_fmt % ('+' + _inf_str,)
                    else:
                        return self.special_fmt % (_inf_str,)
                else:
                    return self.special_fmt % ('-' + _inf_str,)
        s = self.format % x
        if self.large_exponent:
            # 3-digit exponent
            expsign = s[-3]
            if expsign == '+' or expsign == '-':
                s = s[1:-2] + '0' + s[-2:]
        elif self.exp_format:
            # 2-digit exponent
            if s[-3] == '0':
                s = ' ' + s[:-3] + s[-2:]
        elif strip_zeros:
            z = s.rstrip('0')
            s = z + ' '*(len(s)-len(z))
        return s
def _digits(x, precision, format):
s = format % x
z = s.rstrip('0')
return precision - len(s) + len(z)
class IntegerFormat(object):
    """Right-justifying formatter for integer arrays; the column width is
    the wider of the extreme values' string forms."""
    def __init__(self, data):
        try:
            largest = maximum.reduce(data)
            smallest = minimum.reduce(data)
            width = max(len(str(largest)), len(str(smallest)))
            self.format = '%' + str(width) + 'd'
        except (TypeError, NotImplementedError):
            # reduce() failed: this instance is only instantiated in
            # formatdict and will never be called.
            pass
        except ValueError:
            # this occurs when everything is NA
            pass
    def __call__(self, x):
        # Values outside the platform int range can't go through '%d'.
        if _MININT < x < _MAXINT:
            return self.format % x
        return "%s" % x
class LongFloatFormat(object):
    # XXX Have to add something to determine the width to use a la FloatFormat
    # Right now, things won't line up properly
    def __init__(self, precision, sign=False):
        self.precision = precision
        self.sign = sign
    def __call__(self, x):
        """Format a long double: nan/inf get the configured strings, finite
        non-negative values a '+' or leading-space pad, negatives pass
        through format_longfloat unchanged (they carry their own '-')."""
        pad = '+' if self.sign else ' '
        if isnan(x):
            return pad + _nan_str
        if isinf(x):
            if x > 0:
                return pad + _inf_str
            return '-' + _inf_str
        if x >= 0:
            return pad + format_longfloat(x, self.precision)
        return format_longfloat(x, self.precision)
class LongComplexFormat(object):
    """Format a long-double complex as '<real><signed imag>j' using a pair
    of LongFloatFormat instances."""
    def __init__(self, precision):
        self.real_format = LongFloatFormat(precision)
        self.imag_format = LongFloatFormat(precision, sign=True)
    def __call__(self, x):
        real_part = self.real_format(x.real)
        imag_part = self.imag_format(x.imag)
        return '%s%sj' % (real_part, imag_part)
class ComplexFormat(object):
    """Format a complex element with aligned real and imaginary columns;
    the imaginary part is always signed and suffixed with 'j'."""
    def __init__(self, x, precision, suppress_small):
        self.real_format = FloatFormat(x.real, precision, suppress_small)
        self.imag_format = FloatFormat(x.imag, precision, suppress_small,
                                       sign=True)
    def __call__(self, x):
        real_s = self.real_format(x.real, strip_zeros=False)
        imag_s = self.imag_format(x.imag, strip_zeros=False)
        if self.imag_format.exp_format:
            imag_s = imag_s + 'j'
        else:
            # Move 'j' before the padding created by stripped zeros so the
            # columns stay aligned.
            stripped = imag_s.rstrip('0')
            imag_s = stripped + 'j' + ' ' * (len(imag_s) - len(stripped))
        return real_s + imag_s
class DatetimeFormat(object):
    """Format datetime64 elements as quoted ISO strings via
    datetime_as_string, with unit/timezone defaults derived from the data."""
    def __init__(self, x, unit=None,
                 timezone=None, casting='same_kind'):
        # Default the unit from the array's dtype ('s' for non-datetime).
        if unit is None:
            unit = datetime_data(x.dtype)[0] if x.dtype.kind == 'M' else 's'
        # Default timezone: date units render in UTC, time units locally.
        if timezone is None:
            timezone = 'UTC' if unit in ('Y', 'M', 'W', 'D') else 'local'
        self.timezone = timezone
        self.unit = unit
        self.casting = casting
    def __call__(self, x):
        rendered = datetime_as_string(x,
                                      unit=self.unit,
                                      timezone=self.timezone,
                                      casting=self.casting)
        return "'%s'" % rendered
class TimedeltaFormat(object):
    # Right-justifying formatter for timedelta64 arrays; NaT entries render
    # as "'NaT'" padded to the common column width.
    def __init__(self, data):
        if data.dtype.kind == 'm':
            nat_value = array(['NaT'], dtype=data.dtype)[0]
            # View the non-NaT values as int64 to size the column.
            v = data[not_equal(data, nat_value)].view('i8')
            if len(v) > 0:
                # Max str length of non-NaT elements
                max_str_len = max(len(str(maximum.reduce(v))),
                                  len(str(minimum.reduce(v))))
            else:
                max_str_len = 0
            if len(v) < len(data):
                # data contains a NaT
                max_str_len = max(max_str_len, 5)
            self.format = '%' + str(max_str_len) + 'd'
            self._nat = "'NaT'".rjust(max_str_len)
    def __call__(self, x):
        # NaT detection trick: NaT propagates through arithmetic, so
        # x + 1 == x only for NaT (finite timedeltas change).
        if x + 1 == x:
            return self._nat
        else:
            return self.format % x.astype('i8')
| {
"content_hash": "6b898560b89828e0374c211e4b9b0376",
"timestamp": "",
"source": "github",
"line_count": 755,
"max_line_length": 91,
"avg_line_length": 34.264900662251655,
"alnum_prop": 0.5422110552763819,
"repo_name": "jankoslavic/numpy",
"id": "b8acaee9771256b8087bf8746f87a9e8f9df730d",
"size": "25870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numpy/core/arrayprint.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7554223"
},
{
"name": "C++",
"bytes": "28696"
},
{
"name": "FORTRAN",
"bytes": "6310"
},
{
"name": "Makefile",
"bytes": "2574"
},
{
"name": "Python",
"bytes": "5930485"
},
{
"name": "Shell",
"bytes": "2241"
}
],
"symlink_target": ""
} |
from flask import Blueprint, Response, render_template, request, session, redirect, jsonify,\
url_for, make_response, current_app
from flask.ext.security import current_user
import os
import urllib
# Blueprint for the "scale" feature; templates resolve against the local
# 'templates' folder. Registered by the application factory elsewhere
# (presumably under a URL prefix -- confirm against the app setup).
app = Blueprint('scale', __name__, template_folder='templates')
| {
"content_hash": "968e0cb4a091e6307c86d165aecad76c",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 93,
"avg_line_length": 29.444444444444443,
"alnum_prop": 0.769811320754717,
"repo_name": "wigginslab/lean-workbench",
"id": "c393971322395406e2248328373fe4ea4ea81a6e",
"size": "265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lean_workbench/scale/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "203"
},
{
"name": "CSS",
"bytes": "8523037"
},
{
"name": "HTML",
"bytes": "1204783"
},
{
"name": "JavaScript",
"bytes": "1385939"
},
{
"name": "Makefile",
"bytes": "1307"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "353032"
},
{
"name": "Shell",
"bytes": "2058"
}
],
"symlink_target": ""
} |
"""blackberry.py: Module which contains functionality enabling push notification
messages to be sent to the Blackberry Push Service. """
from datetime import datetime, timedelta
import base64
import uuid
from StringIO import StringIO
from twisted.internet.protocol import Protocol
from twisted.python import log
from twisted.internet import reactor
from twisted.web.client import Agent
from twisted.internet.ssl import ClientContextFactory
from twisted.web.client import FileBodyProducer
from twisted.web.http_headers import Headers
# Messages are held by the push service for at most this many hours
# (used to compute the deliver-before timestamp).
MAX_DELAY = 2
# MIME boundary marker separating the parts of the multipart request.
BOUNDARY = "boundary-marker"
# PAP result code indicating the push was accepted for processing.
SUCCESS_CODE = "1001"
# Multipart/related PAP request body. Per RFC 2046, each part is introduced
# by "--boundary" and only the final closing delimiter carries the trailing
# "--"; a blank line separates each part's headers from its body. The
# original template wrote every delimiter in the closing "--boundary--"
# form, which malformed the multipart message.
MESSAGE_TEMPLATE = """
--{boundary}
Content-Type: application/xml

<?xml version="1.0"?>
<!DOCTYPE pap PUBLIC "-//WAPFORUM//DTD PAP 2.1//EN"
"http://www.openmobilealliance.org/tech/DTD/pap_2.1.dtd">
<pap>
<push-message push-id="{unique_push_id}"
source-reference="{application_id}"
deliver-before-timestamp="{timestamp}">
{device_segment}
<quality-of-service delivery-method="confirmed"/>
</push-message>
</pap>
--{boundary}
Content-Encoding: binary
Content-Type: text/html

{content}
--{boundary}--
"""
class WebClientContextFactory(ClientContextFactory):
    """SSL context factory for connections to the Blackberry push service."""

    def getContext(self, hostname, port):
        """Return an SSL context for the connection.

        twisted's Agent supplies (hostname, port); the base factory's
        getContext takes neither, so both are intentionally ignored.
        """
        return ClientContextFactory.getContext(self)
class BlackberryResponse(Protocol):
    """ Protocol used to read the response from the request that is sent
        to the Blackberry Push Service. """
    def __init__(self):
        # Accumulates the raw response body as it arrives.
        self.data = ""
    # NOTE(review): the parameter name shadows the builtin 'bytes'; kept
    # as-is because twisted's Protocol defines this callback signature.
    def dataReceived(self, bytes):
        """ Append the bytes received to the current block of data. """
        self.data += bytes
    def connectionLost(self, reason):
        """ Called when the connection to the push service has been lost,
            indicating the full response body has been received. Logs
            whether the accumulated body contains the PAP success code. """
        if SUCCESS_CODE not in self.data:
            log.err("Blackberry Push Message was not accepted for " \
                "processing: {0}".format(reason.getErrorMessage()))
        else:
            log.msg("Blackberry Push Message was accepted")
class BlackberryService(object):
    """ Sets up and controls the instances of the Blackberry client
        factory.

        Builds PAP push messages and submits them over SSL to the
        Blackberry push service using twisted's Agent. """
    def __init__(self, hostname, application_id, application_password):
        # hostname: full URL of the push service endpoint (passed verbatim
        # to Agent.request). application_id / application_password form the
        # HTTP Basic auth credentials.
        contextFactory = WebClientContextFactory()
        self.blackberry_hostname = hostname
        self.application_id = application_id
        self.application_password = application_password
        self.agent = Agent(reactor, contextFactory)
    def errorReceived(self, error_detail):
        """ Callback which is invoked when an error is detected when
            attempting to send a request to the push service. """
        log.err("Error thrown when executing request: {0}".format(error_detail))
    def responseReceived(self, response):
        """ Callback which is invoked when a response is received from the
            push service. Invokes the response protocol to read the contents
            of the body contained in the response. """
        if response.code == 200:
            response.deliverBody(BlackberryResponse())
        else:
            log.err("Did not receive 200 response: {0}".
                    format(str(response.code)))
    def construct_message(self, device_list, message_text):
        """ Creates a new message with the recipients as specified in
            the device list, with the payload provided.

            A fresh UUID is used as the push id and the deliver-before
            timestamp is set MAX_DELAY hours into the future (UTC). """
        device_segment = ""
        message_id = str(uuid.uuid4())
        timestamp = (datetime.utcnow() +
                     timedelta(hours=MAX_DELAY)). \
                     strftime("%Y-%m-%dT%H:%M:%SZ")
        # One <address> element per target device PIN/token.
        for device in device_list:
            device_segment += "<address address-value=\"{0}\"/>".format(device)
        payload = MESSAGE_TEMPLATE.format(timestamp=timestamp,
                                          content=message_text,
                                          unique_push_id = message_id,
                                          boundary=BOUNDARY,
                                          device_segment = device_segment,
                                          application_id=self.application_id)
        return payload
    def send_message(self, device_list, message_text):
        """ Constructs a message from the device list and payload provided
            and submits it to the push service. """
        payload = self.construct_message(device_list, message_text)
        self._submit_request(payload)
    def _submit_request(self, payload):
        """ Private method which wraps the payload in a HTTP request and
            submits it as a POST method to the Blackberry push service.
            Request is made using a Deferred object to ensure that it
            is a non blocking event when waiting for the response. """
        body = FileBodyProducer(StringIO(payload))
        # NOTE(review): the Content-type value is a line-continuation string
        # literal, so the indentation whitespace below is embedded in the
        # header value itself -- do not re-indent these lines.
        deferred_request = self.agent.request('POST', self.blackberry_hostname,
            Headers({'User-Agent': ['pushpy'],
                     'Authorization': ['Basic %s' % base64.b64encode("%s:%s" %
                                       (self.application_id,
                                        self.application_password))],
                     'Content-type': ['multipart/related; \
                                       boundary={boundary}; \
                                       type=application/xml; \
                                       charset=us-ascii'.format(
                                           boundary=BOUNDARY)]}),
                                       bodyProducer=body)
        deferred_request.addCallback(self.responseReceived)
        deferred_request.addErrback(self.errorReceived)
        return deferred_request
| {
"content_hash": "20b9119bb720f6eb90fb2e707c894918",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 80,
"avg_line_length": 36.961783439490446,
"alnum_prop": 0.6129588144063416,
"repo_name": "trulabs/pushpy",
"id": "e80f4f103a8f59a4c95c99a6b43a5869501bfe5d",
"size": "5803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pushpy/blackberry.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "39925"
}
],
"symlink_target": ""
} |
from .remove_chartchunk import remove_borders
from .scatter import category_scatter
from .stacked_barplot import stacked_barplot
from .enrichment_plot import enrichment_plot
# Public plotting helpers re-exported at package level.
__all__ = ["remove_borders", "category_scatter",
           "stacked_barplot", "enrichment_plot"]
"content_hash": "ca01455c39d3e588d6ac3388688376ec",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 48,
"avg_line_length": 44.666666666666664,
"alnum_prop": 0.7686567164179104,
"repo_name": "YoungKwonJo/mlxtend",
"id": "47f94c548f484e2828239c3df6336549541101aa",
"size": "385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mlxtend/plotting/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "9319018"
},
{
"name": "Shell",
"bytes": "4151"
}
],
"symlink_target": ""
} |
import datetime
import fnmatch
import json
import logging
import os
import pathlib
import typing
from zipfile import ZipFile
import pandas as pd
import requests
from google.cloud import storage
# Pipeline names whose source archives are zipped inpatient-charge CSVs;
# their rows must carry drg_definition and provider_id (see filter_null_rows).
PIPELINES_NAME_INPATIENT = [
    "inpatient_charges_2011",
    "inpatient_charges_2012",
    "inpatient_charges_2013",
    "inpatient_charges_2014",
    "inpatient_charges_2015",
]
# Pipeline names whose source archives are zipped outpatient-charge CSVs;
# their rows must carry apc and provider_id (see filter_null_rows).
PIPELINES_NAME_OUTPATIENT = [
    "outpatient_charges_2011",
    "outpatient_charges_2012",
    "outpatient_charges_2013",
    "outpatient_charges_2014",
]
def main(
    source_url: str,
    source_file: pathlib.Path,
    target_file: pathlib.Path,
    target_gcs_bucket: str,
    target_gcs_path: str,
    headers: typing.List[str],
    rename_mappings: dict,
    pipeline_name: str,
) -> None:
    """Download, transform and upload one CMS Medicare extract.

    Fetches the source file, renames and filters its columns according to
    the pipeline, writes the result as CSV and uploads it to GCS.
    """
    logging.info("Creating 'files' folder")
    pathlib.Path("./files").mkdir(parents=True, exist_ok=True)
    logging.info(f"Downloading file {source_url}")
    download_file(source_url, source_file)
    logging.info(f"Opening file {source_file}")
    if pipeline_name in (PIPELINES_NAME_INPATIENT + PIPELINES_NAME_OUTPATIENT):
        # Zipped sources: load the CSV member(s) found inside the archive.
        with ZipFile(source_file) as archive:
            csv_members = fnmatch.filter(archive.namelist(), "*.csv")
            frame = pd.read_csv(archive.open(*csv_members))
    else:
        frame = pd.read_csv(str(source_file))
    logging.info(f"Transformation Process Starting.. {source_file}")
    rename_headers(frame, rename_mappings)
    filter_null_rows(
        frame, PIPELINES_NAME_INPATIENT, PIPELINES_NAME_OUTPATIENT, pipeline_name
    )
    # Keep only (and order by) the configured output columns.
    frame = frame[headers]
    logging.info(f"Transformation Process complete .. {source_file}")
    logging.info(f"Saving to output file.. {target_file}")
    try:
        save_to_new_file(frame, file_path=str(target_file))
    except Exception as e:
        # Best-effort: log and continue so the failure is visible in logs.
        logging.error(f"Error saving output file: {e}.")
    logging.info(
        f"Uploading output file to.. gs://{target_gcs_bucket}/{target_gcs_path}"
    )
    upload_file_to_gcs(target_file, target_gcs_bucket, target_gcs_path)
    logging.info(
        "CMS Medicare process completed at "
        + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    )
def rename_headers(df: pd.DataFrame, rename_mappings: dict) -> None:
    """Rename the columns of *df* in place using *rename_mappings*.

    Columns absent from the mapping keep their original names.
    """
    df.columns = [rename_mappings.get(col, col) for col in df.columns]
def filter_null_rows(
    df: pd.DataFrame,
    PIPELINES_NAME_INPATIENT: typing.List[str],
    PIPELINES_NAME_OUTPATIENT: typing.List[str],
    pipeline_name: str,
) -> pd.DataFrame:
    """Drop rows missing key identifier columns, mutating *df* in place.

    Inpatient pipelines require ``drg_definition`` and ``provider_id``;
    outpatient pipelines require ``apc`` and ``provider_id``; any other
    pipeline leaves the frame untouched.

    Returns:
        The mutated *df* in every branch. (The original returned ``None``
        from the dropping branches because ``dropna(..., inplace=True)``
        returns ``None``, contradicting the declared return type; callers
        that ignored the return value are unaffected.)
    """
    if pipeline_name in PIPELINES_NAME_INPATIENT:
        df.dropna(subset=["drg_definition", "provider_id"], inplace=True)
    elif pipeline_name in PIPELINES_NAME_OUTPATIENT:
        df.dropna(subset=["apc", "provider_id"], inplace=True)
    return df
def save_to_new_file(df: pd.DataFrame, file_path: str) -> None:
    """Write *df* to *file_path* as CSV without the index.

    Floats are rendered with no decimal places (``%.0f``), matching the
    integer-like columns of the source data.
    """
    df.to_csv(file_path, index=False, float_format="%.0f")
def download_file(source_url: str, source_file: pathlib.Path) -> None:
    """Stream *source_url* into *source_file*.

    A non-200 status is logged as an error and no file is written.
    """
    logging.info(f"Downloading {source_url} into {source_file}")
    response = requests.get(source_url, stream=True)
    if response.status_code != 200:
        # Guard clause: nothing to write on failure.
        logging.error(f"Couldn't download {source_url}: {response.text}")
        return
    with open(source_file, "wb") as out:
        for chunk in response:
            out.write(chunk)
def upload_file_to_gcs(file_path: pathlib.Path, gcs_bucket: str, gcs_path: str) -> None:
    """Upload *file_path* to ``gs://gcs_bucket/gcs_path``."""
    client = storage.Client()
    blob = client.bucket(gcs_bucket).blob(gcs_path)
    blob.upload_from_filename(file_path)
if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)
    # All runtime configuration is injected through environment variables
    # (set by the orchestrator that launches this container image).
    main(
        source_url=os.environ["SOURCE_URL"],
        source_file=pathlib.Path(os.environ["SOURCE_FILE"]).expanduser(),
        target_file=pathlib.Path(os.environ["TARGET_FILE"]).expanduser(),
        target_gcs_bucket=os.environ["TARGET_GCS_BUCKET"],
        target_gcs_path=os.environ["TARGET_GCS_PATH"],
        headers=json.loads(os.environ["CSV_HEADERS"]),
        rename_mappings=json.loads(os.environ["RENAME_MAPPINGS"]),
        pipeline_name=os.environ["PIPELINE_NAME"],
    )
| {
"content_hash": "430afba63e52f773756ebb3f7be67280",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 88,
"avg_line_length": 30.02158273381295,
"alnum_prop": 0.6573208722741433,
"repo_name": "llooker/public-datasets-pipelines",
"id": "c794db79e1c409f9740ddcdae5e84fd9e88510ef",
"size": "4749",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "datasets/cms_medicare/pipelines/_images/run_csv_transform_kub/csv_transform.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "58993"
},
{
"name": "HCL",
"bytes": "394340"
},
{
"name": "Jinja",
"bytes": "11245"
},
{
"name": "Jupyter Notebook",
"bytes": "15325"
},
{
"name": "Python",
"bytes": "2616241"
}
],
"symlink_target": ""
} |
import pygame
import pygame.camera
from pygame.locals import *
class VideoCapturePlayer(object):
    """Pygame camera demo: shows a live camera feed in a window.

    Python 2 code (uses print statements). Keys 0-9 switch between the
    detected cameras; Escape or closing the window quits.
    """
    # Window and capture resolution (width, height).
    size = ( 640, 480 )
    def __init__(self, **argd):
        self.__dict__.update(**argd)
        super(VideoCapturePlayer, self).__init__(**argd)
        # create a display surface. standard pygame stuff
        self.display = pygame.display.set_mode( self.size, 0 )
        self.init_cams(0)
    def init_cams(self, which_cam_idx):
        """Open camera number *which_cam_idx*, falling back to camera 0."""
        # gets a list of available cameras.
        self.clist = pygame.camera.list_cameras()
        print self.clist
        if not self.clist:
            raise ValueError("Sorry, no cameras detected.")
        try:
            cam_id = self.clist[which_cam_idx]
        except IndexError:
            # Requested index out of range: fall back to the first camera.
            cam_id = self.clist[0]
        # creates the camera of the specified size and in RGB colorspace
        self.camera = pygame.camera.Camera(cam_id, self.size, "RGB")
        # starts the camera
        self.camera.start()
        self.clock = pygame.time.Clock()
        # create a surface to capture to. for performance purposes, you want the
        # bit depth to be the same as that of the display surface.
        self.snapshot = pygame.surface.Surface(self.size, 0, self.display)
    def get_and_flip(self):
        """Grab one frame and show it.

        The `if 0` branches are deliberately disabled demo variants (poll
        with query_image, or capture to self.snapshot then blit); the live
        path captures straight into the display surface.
        """
        # if you don't want to tie the framerate to the camera, you can check and
        # see if the camera has an image ready. note that while this works
        # on most cameras, some will never return true.
        if 0 and self.camera.query_image():
            # capture an image
            self.snapshot = self.camera.get_image(self.snapshot)
        if 0:
            self.snapshot = self.camera.get_image(self.snapshot)
            #self.snapshot = self.camera.get_image()
            # blit it to the display surface. simple!
            self.display.blit(self.snapshot, (0,0))
        else:
            self.snapshot = self.camera.get_image(self.display)
            #self.display.blit(self.snapshot, (0,0))
        pygame.display.flip()
    def main(self):
        """Event loop: draw frames until quit; digits switch cameras."""
        going = True
        while going:
            events = pygame.event.get()
            for e in events:
                if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE):
                    going = False
                if e.type == KEYDOWN:
                    if e.key in range(K_0, K_0+10) :
                        self.init_cams(e.key - K_0)
            self.get_and_flip()
            self.clock.tick()
            print (self.clock.get_fps())
def main():
    """Initialise pygame and its camera module, run the demo, clean up."""
    pygame.init()
    pygame.camera.init()
    VideoCapturePlayer().main()
    pygame.quit()
if __name__ == '__main__':
    main()
| {
"content_hash": "fa0967838ef738d953d36cf34d2e393b",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 81,
"avg_line_length": 30.272727272727273,
"alnum_prop": 0.5701951951951952,
"repo_name": "jingzhehu/udacity_mlnd",
"id": "aa29718ec16db5a86666290b15f083c825100e64",
"size": "2754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "P4_Training_a_Smartcab_to_Drive/smartcab/pygame/examples/camera.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1364822"
},
{
"name": "Java",
"bytes": "5793"
},
{
"name": "JavaScript",
"bytes": "3616"
},
{
"name": "Jupyter Notebook",
"bytes": "2543744"
},
{
"name": "Python",
"bytes": "1241314"
}
],
"symlink_target": ""
} |
"""Contains the loss scaling optimizer class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import smart_cond
from tensorflow.python.keras import backend
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.mixed_precision.experimental import loss_scale as keras_loss_scale_module
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training.experimental import loss_scale as loss_scale_module
from tensorflow.python.util.tf_export import keras_export
class _UnwrapPreventer(object):
"""Wrapper that DistributionStrategy will not unwrap.
Typically, DistributionStrategy will unwrap values when going from a cross-
replica context to a replica context via `call_for_each_replica`. This class
is a wrapper that DistributionStrategy will not unwrap, so it can be used to
prevent it from unwrapping a value.
TODO(reedwm): Find/implement a better way of preventing values from being
unwrapped by DistributionStrategy
"""
def __init__(self, value):
self.value = value
@keras_export('keras.mixed_precision.experimental.LossScaleOptimizer')
class LossScaleOptimizer(optimizer_v2.OptimizerV2):
  """An optimizer that applies loss scaling.

  Loss scaling is a process that multiplies the loss by a multiplier called the
  loss scale, and divides each gradient by the same multiplier. The pseudocode
  for this process is:

  ```
  loss = ...
  loss *= loss_scale
  grads = gradients(loss, vars)
  grads /= loss_scale
  ```

  Mathematically, loss scaling has no effect, but can help avoid numerical
  underflow in intermediate gradients when float16 tensors are used. By
  multiplying the loss, each intermediate gradient will have the same multiplier
  applied.

  The loss scale can either be a fixed constant, chosen by the user, or be
  dynamically determined. Dynamically determining the loss scale is convenient
  as a loss scale does not have to be explicitly chosen. However it reduces
  performance.

  This optimizer wraps another optimizer and applies loss scaling to it via a
  `LossScale`. Loss scaling is applied whenever gradients are
  computed, either through `minimize()` or `get_gradients()`. The loss scale is
  updated via `LossScale.update()` whenever gradients are applied, either
  through `minimize()` or `apply_gradients()`. For example:

  ```python
  opt = tf.keras.optimizers.SGD(0.1)
  opt = tf.keras.mixed_precision.experimental.LossScaleOptimizer(opt, "dynamic")
  # 'minimize' applies loss scaling to the loss and updates the loss sale.
  opt.minimize(loss_fn)
  ```

  If a `tf.GradientTape` is used to compute gradients instead of
  `LossScaleOptimizer.minimize` or `LossScaleOptimizer.get_gradients`, the loss
  and gradients must be scaled manually. This can be done by calling
  `LossScaleOptimizer.get_scaled_loss` before passing the loss to
  `tf.GradientTape`, and `LossScaleOptimizer.get_unscaled_gradients` after
  computing the gradients with `tf.GradientTape`. For example:

  ```python
  opt = tf.keras.mixed_precision.experimental.LossScaleOptimizer(...)
  vars = ...
  with tf.GradientTape() as tape:
    loss = ...
    scaled_loss = opt.get_scaled_loss(loss)
  scaled_grads = tape.gradient(scaled_loss, vars)
  grads = opt.get_unscaled_gradients(scaled_grads)
  opt.apply_gradients(zip(grads, vars))  # Loss scale will be updated here
  ```
  """

  def __init__(self, optimizer, loss_scale):
    """Initializes this loss scale optimizer.

    Args:
      optimizer: The Optimizer instance to wrap.
      loss_scale: The loss scale to scale the loss and gradients. This can
        either be an int/float to use a fixed loss scale, the string "dynamic"
        to use dynamic loss scaling, or an instance of a LossScale. The string
        "dynamic" equivalent to passing `DynamicLossScale()`, and passing an
        int/float is equivalent to passing a FixedLossScale with the given loss
        scale.

    Raises:
      ValueError: If `optimizer` is not an `OptimizerV2`, uses
        clipnorm/clipvalue (unsupported when wrapped), or `loss_scale`
        resolves to None.
    """
    if not isinstance(optimizer, optimizer_v2.OptimizerV2):
      raise ValueError('"optimizer" must be an instance of OptimizerV2, but '
                       'got: %s' % optimizer)
    if hasattr(optimizer, 'clipnorm'):
      raise ValueError('LossScaleOptimizer does not support wrapping '
                       'optimizers with a clipnorm. Optimizer %s has clipnorm '
                       '%s' % (optimizer, optimizer.clipnorm))

    if hasattr(optimizer, 'clipvalue'):
      raise ValueError('LossScaleOptimizer does not support wrapping '
                       'optimizers with a clipvalue. Optimizer %s has '
                       'clipvalue %s' % (optimizer, optimizer.clipvalue))

    self._optimizer = optimizer
    self._loss_scale = keras_loss_scale_module.get(loss_scale)
    if self._loss_scale is None:
      raise ValueError('loss_scale cannot be None.')
    for weight in loss_scale_module.get_loss_scale_weights(self._loss_scale):
      # We cannot call `track_variable` in the LossScale class itself, because a
      # file outside of Keras cannot depend on a Keras file. Calling it here
      # instead is OK, because a variable only needs to be tracked if used with
      # a Keras class, and the only way to use LossScale with a Keras class is
      # through the LossScaleOptimizer.
      backend.track_variable(weight)
    self._track_trackable(self._optimizer, 'base_optimizer')
    self._track_trackable(self._loss_scale, 'loss_scale')

    # Needed because the superclass's __getattribute__ checks this.
    self._hyper = {}

  @property
  def loss_scale(self):
    """The `LossScale` instance associated with this optimizer."""
    return self._loss_scale

  def get_scaled_loss(self, loss):
    """Scales the loss by the loss scale.

    This method is only needed if you compute gradients manually, e.g. with
    `tf.GradientTape`. In that case, call this method to scale the loss before
    passing the loss to `tf.GradientTape`. If you use
    `LossScaleOptimizer.minimize` or `LossScaleOptimizer.get_gradients`, loss
    scaling is automatically applied and this method is unneeded.

    If this method is called, `get_unscaled_gradients` should also be called.
    See the `tf.keras.mixed_precision.experimental.LossScaleOptimizer` doc for
    an example.

    Args:
      loss: The loss, which will be multiplied by the loss scale. Can either be
        a tensor or a callable returning a tensor.

    Returns:
      `loss` multiplied by `LossScaleOptimizer.loss_scale()`.
    """
    loss_scale = self._loss_scale()
    if callable(loss):
      # Defer evaluation so the multiplication happens when the loss is
      # actually computed (e.g. under the caller's tape).
      def new_loss():
        loss_val = loss()
        return loss_val * math_ops.cast(loss_scale, loss_val.dtype)
      return new_loss
    else:
      return loss * math_ops.cast(loss_scale, loss.dtype)

  def get_unscaled_gradients(self, grads):
    """Unscales the gradients by the loss scale.

    This method is only needed if you compute gradients manually, e.g. with
    `tf.GradientTape`. In that case, call this method to unscale the gradients
    after computing them with `tf.GradientTape`. If you use
    `LossScaleOptimizer.minimize` or `LossScaleOptimizer.get_gradients`, loss
    scaling is automatically applied and this method is unneeded.

    If this method is called, `get_scaled_loss` should also be called. See
    the `tf.keras.mixed_precision.experimental.LossScaleOptimizer` doc for an
    example.

    Args:
      grads: A list of tensors, each which will be divided by the loss scale.
        Can have None values, which are ignored.

    Returns:
      A new list the same size as `grads`, where every non-None value in `grads`
      is divided by `LossScaleOptimizer.loss_scale()`.
    """
    loss_scale = self._loss_scale()
    loss_scale_reciprocal = 1. / loss_scale
    return [g * math_ops.cast(loss_scale_reciprocal, g.dtype) if g is not None
            else None for g in grads]

  def _compute_gradients(self, loss, var_list, grad_loss=None):
    """Scales the loss, delegates to the wrapped optimizer, unscales grads."""
    loss = self.get_scaled_loss(loss)
    grads_and_vars = self._optimizer._compute_gradients(loss, var_list,  # pylint: disable=protected-access
                                                        grad_loss)
    grads = [g for g, _ in grads_and_vars]
    variables = [v for _, v in grads_and_vars]
    unscaled_grads = self.get_unscaled_gradients(grads)
    return list(zip(unscaled_grads, variables))

  def get_gradients(self, loss, params):
    """Like `OptimizerV2.get_gradients`, with loss scaling applied."""
    loss = self.get_scaled_loss(loss)
    grads = self._optimizer.get_gradients(loss, params)
    return self.get_unscaled_gradients(grads)

  def apply_gradients(self, grads_and_vars, name=None):
    """Applies gradients and updates the loss scale in a cross-replica merge."""
    if distribution_strategy_context.in_cross_replica_context():
      raise ValueError('apply_gradients() must be called in a replica context.')
    grads_and_vars = tuple(grads_and_vars)
    return distribution_strategy_context.get_replica_context().merge_call(
        self._apply_gradients_cross_replica, args=(grads_and_vars, name))

  def _apply_gradients_cross_replica(self, distribution, grads_and_vars, name):
    """Updates the loss scale, then conditionally applies the gradients.

    When dynamic loss scaling reports non-finite gradients, the apply step
    is skipped for this iteration.
    """
    grads = [g for g, _ in grads_and_vars]
    loss_scale_update_op, should_apply_grads = self._loss_scale.update(grads)

    def apply_fn():
      # We do not want DistributionStrategy to unwrap any MirroredVariables in
      # grads_and_vars, because even in a replica context, the wrapped optimizer
      # expects mirrored variables. So we wrap the variables with an
      # _UnwrapPreventer, preventing DistributionStrategy from unwrapping the
      # MirroredVariables.
      wrapped_vars = _UnwrapPreventer([v for _, v in grads_and_vars])
      return distribution.extended.call_for_each_replica(
          self._apply_gradients, args=(grads, wrapped_vars, name))

    # Note: We must call this cond() in a cross-replica context.
    # DistributionStrategy does not support having a cond in a replica context
    # with a branch that calls `merge_call`, and self._optimizer.apply_gradients
    # calls `merge_call`.
    maybe_apply_op = smart_cond.smart_cond(should_apply_grads,
                                           apply_fn,
                                           control_flow_ops.no_op)
    return control_flow_ops.group(maybe_apply_op, loss_scale_update_op)

  def _apply_gradients(self, grads, wrapped_vars, name):
    """Per-replica apply step; unwraps the _UnwrapPreventer-protected vars."""
    return self._optimizer.apply_gradients(list(zip(grads, wrapped_vars.value)),
                                           name)

  def get_config(self):
    """Serializes both the wrapped optimizer and the loss scale."""
    serialized_optimizer = optimizers.serialize(self._optimizer)
    serialized_loss_scale = keras_loss_scale_module.serialize(self._loss_scale)
    return {
        'optimizer': serialized_optimizer,
        'loss_scale': serialized_loss_scale,
    }

  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Re-creates a LossScaleOptimizer from `get_config` output."""
    config = config.copy()  # Make a copy, since we mutate config
    config['optimizer'] = optimizers.deserialize(
        config['optimizer'], custom_objects=custom_objects)
    config['loss_scale'] = keras_loss_scale_module.deserialize(
        config['loss_scale'], custom_objects=custom_objects)
    return cls(**config)

  # Delegations: We delegate most OptimizerV2 methods to the wrapped optimizer
  # below.

  @property
  def iterations(self):
    return self._optimizer.iterations

  @iterations.setter
  def iterations(self, variable):
    self._optimizer.iterations = variable

  def get_slot_names(self):
    return self._optimizer.get_slot_names()

  def variables(self):
    return self._optimizer.variables()

  @property
  def weights(self):
    return self._optimizer.weights

  def get_weights(self):
    return self._optimizer.get_weights()

  def set_weights(self, weights):
    return self._optimizer.set_weights(weights)

  # For the most part, we only expose methods in the base OptimizerV2, not
  # individual subclasses like Adam. However, although "learning_rate" and "lr"
  # properties are not part of the base OptimizerV2 class, they are part of most
  # subclasses, so we expose them here for convenience.

  @property
  def learning_rate(self):
    return self._optimizer.learning_rate

  @learning_rate.setter
  def learning_rate(self, lr):
    self._optimizer.learning_rate = lr

  @property
  def lr(self):
    return self._optimizer.lr

  @lr.setter
  def lr(self, lr):
    self._optimizer.lr = lr

  def get_slot(self, var, slot_name):
    # We cannot implement get_slot for the following reason: When saving a
    # checkpoint, two optimizers cannot share slot variables. Since both the
    # LossScaleOptimizer and the wrapped optimizer (self and self._optimizer
    # respectively) are checkpointed, we cannot expose the wrapped optimizer's
    # slots in the LossScaleOptimizer. Otherwise, a checkpoint would believe
    # both optimizers share slot variables.
    raise AttributeError(
        'You cannot call get_slot on a LossScaleOptimizer. This limitation '
        'will be removed in the future.')

  def add_slot(self, var, slot_name, initializer='zeros'):
    # We disallow adding a slot for consistency with `get_slot`.
    raise AttributeError(
        'You cannot call add_slot on a LossScaleOptimizer. This limitation '
        'will be removed in the future.')

  # We do not override some OptimizerV2 methods. For each, we describe why we do
  # not delegate them to self._optimizer:
  # * get_updates: get_updates() calls get_gradients(). Since we override
  #   get_gradients(), we cannot delegate get_updates() to self._optimizer,
  #   otherwise the overridden get_gradients() method would not be called.
  #   Luckily, get_updates() does not access any OptimizerV2 fields, so
  #   inheriting the OptimizerV2 version works fine.
  # * minimize: We don't delegate for a similar as get_updates(): it calls
  #   both self._compute_gradients() and self.apply_gradients(), and both need
  #   to have the LossScaleOptimizer version called.

  # TODO(reedwm): Maybe merge this class's functionality into OptimizerV2.

  # TODO(reedwm): Maybe throw an error if mixed precision is used without this
  # optimizer being used.
| {
"content_hash": "7d780e0ad2d0f754e023a20a6007f93e",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 107,
"avg_line_length": 42.210682492581604,
"alnum_prop": 0.7069947275922671,
"repo_name": "jhseu/tensorflow",
"id": "1dcf4a7f248a0a246b8044346f6f3429c926b9c6",
"size": "14914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/mixed_precision/experimental/loss_scale_optimizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "27480"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "875455"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "80051513"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "112748"
},
{
"name": "Go",
"bytes": "1853641"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1729057"
},
{
"name": "Makefile",
"bytes": "62498"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "304661"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "19515"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "36791185"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "56741"
},
{
"name": "Shell",
"bytes": "685877"
},
{
"name": "Smarty",
"bytes": "35147"
},
{
"name": "Starlark",
"bytes": "3504187"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from . import views
# URL routes for the competitions app. Patterns are matched in order; the
# commented-out entries below are kept as a record of planned/retired
# endpoints and are not active routes.
urlpatterns = [
    url(r'^$', views.manage_competition),
    url(r'^(\d+)/$', views.get_competition),
    url(r'^edit/$', views.update_competition),
    #url(r'^add/', views.add_competition, name='add_competition'),
    #url(r'^competition/create/', login_required(views.create_competition), name='createCompetition'),
    #url(r'^crud/competition/?$', CompetitionCrudView.as_view(), name='crud_competition'),
    #url(r'^competitions/', login_required(WebIndexView.as_view()), name='competitions'),
    #url(r'^home/(?P<company_name>\w+)', HomeView.as_view(), name='home'),
    #url(r'^competition/(?P<company_name>\w+)', CompetitionView.as_view(), name='competition'),
    #url(r'^video/add', AddVideoView.as_view(), name='addvideo'),
]
| {
"content_hash": "6aece2068fc91a50ccc455a4cc231f02",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 102,
"avg_line_length": 50.1764705882353,
"alnum_prop": 0.6787807737397421,
"repo_name": "acuestap/smarttools_test",
"id": "06316652f227a05ae66b480513d82c4dcf6f697a",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "competitions/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "417972"
},
{
"name": "HTML",
"bytes": "29941"
},
{
"name": "JavaScript",
"bytes": "175384"
},
{
"name": "Python",
"bytes": "42869"
}
],
"symlink_target": ""
} |
import json
import csv
# Merge municipality statistics from the 2015 CSV into the TopoJSON
# municipality geometries and write the enriched result to output.json.
# Python 2 style: the CSV file is opened in binary ('rb') mode.
with open('../DataVisualization2016/Data/gem2015.csv', 'rb') as mycsvfile:
    thedata = csv.reader(mycsvfile)
    # for row in thedata:
    # print(row[0]+"\t \t"+row[1]+"\t \t"+row[2]+"\t \t"+row[3]+"\t \t"+row[4])
    with open('nlgemeenten2009.json') as json_data:
        d = json.load(json_data)
        for row in thedata:
            for gemeente in d['objects']['gemeenten']['geometries']:
                # print gemeente['properties']['gemeente']
                # Join CSV rows to geometries on the municipality name
                # (CSV column 1).
                if row[1] == gemeente['properties']['gemeente']:
                    # Column indices below are assumed from the gem2015.csv
                    # layout -- TODO confirm against the source spreadsheet.
                    gemeente['properties']['inwoners'] = row[5]
                    gemeente['properties']['man'] = row[6]
                    gemeente['properties']['vrouw'] = row[7]
                    gemeente['properties']['bev dichtheid'] = row[21]
                    gemeente['properties']['aantal hectare'] = row[22]
                    gemeente['properties']['aantal woningen'] = row[42]
        # print d['objects']['gemeenten']
        # print(d['objects']['gemeenten']['geometries'][6])
        with open('output.json', 'w') as outfile:
            json.dump(d, outfile)
| {
"content_hash": "5f0fd85817e57c3d86e86152651ac69a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 83,
"avg_line_length": 40.074074074074076,
"alnum_prop": 0.5628465804066544,
"repo_name": "MatthijsBon/MatthijsBon.github.io",
"id": "5bd73c0736a77fdbc19c7bcccd61f25b5af8562a",
"size": "1082",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "json_extend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7149"
},
{
"name": "CoffeeScript",
"bytes": "14590"
},
{
"name": "HTML",
"bytes": "79833"
},
{
"name": "JavaScript",
"bytes": "97848"
},
{
"name": "PHP",
"bytes": "2282"
},
{
"name": "Python",
"bytes": "1082"
}
],
"symlink_target": ""
} |
import time
import hashlib
import matplotlib.pyplot as plot
# Variables
string = "The brown fox jumps over the lazy dog"
algo = ['MD5', 'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384', 'SHA-512']
colors = ['b', 'c', 'y', 'm', 'r', 'k']
results = {}
# Fetch iterations and step from user
iterations = int(raw_input("Iterations: "))
while iterations < 1 or iterations > 1000000:
iterations = int(raw_input("Please enter a valid value for the number of iterations (1-1000000): "))
step = int(raw_input("Step: "))
while step < 1 or step > 1000000:
step = int(raw_input("Please enter a valid value for the step (1-1000000): "))
print "\nBenchmarking in progress..\n"
# MD5
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.md5(string)
results[0,(i+1)*step] = (time.time() - Start)
print "\nCompleted MD5 benchmarking.\n"
# SHA-1
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.sha1(string)
results[1, (i+1)*step] = (time.time() - Start)
print "\nCompleted SHA-1 benchmarking.\n"
# SHA-224
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.sha224(string)
results[2, (i+1)*step] = (time.time() - Start)
print "\nCompleted SHA-224 benchmarking.\n"
# SHA-256
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.sha256(string)
results[3, (i+1)*step] = (time.time() - Start)
print "\nCompleted SHA-256 benchmarking.\n"
# SHA-384
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.sha384(string)
results[4, (i+1)*step] = (time.time() - Start)
print "\nCompleted SHA-384 benchmarking.\n"
# SHA-512
Start = time.time()
for i in range (iterations):
for j in range ((i+1)*step):
hashlib.sha512(string)
results[5, (i+1)*step] = (time.time() - Start)
print "\nCompleted SHA-512 benchmarking.\n"
# Generate plot and print results
print "\n---------- Report ----------\n"
for i in range(6):
print algo[i]
for j in range (iterations):
print (j+1)*step, 'iterations in', results[i,(j+1)*step]*pow(10,3), 'ms'
plot.plot((j+1)*step, results[i,(j+1)*step]*pow(10,3),colors[i]+'o', label=str(algo[i]) if j == 0 else "")
print '\n'
plot.xlabel('Iterations')
plot.ylabel('Execution time in milliseconds')
plot.title('HashMark', fontsize=40, color='white')
plot.legend(loc=2)
plot.grid(True)
plot.show() | {
"content_hash": "4d40c5e1c3bf447fe2507c7b3053349e",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 114,
"avg_line_length": 29.987951807228917,
"alnum_prop": 0.6343913218159903,
"repo_name": "SubNader/HashMark",
"id": "5410244105d8be9cb78acb6f0c8e005e7b08bd46",
"size": "2489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmark.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2489"
}
],
"symlink_target": ""
} |
import copy
import os
import sys
import UserDict
import uuid
import fixtures
import six
from glance.cmd import replicator as glance_replicator
from glance.openstack.common import jsonutils
from glance.tests import utils as test_utils
# Canned glance v1 image response for an image in 'active' state.  Keys mirror
# the HTTP headers a real glance-api returns (v1 exposes image metadata as
# headers), so tests can use this both as expected metadata and as a primed
# response-header set.
IMG_RESPONSE_ACTIVE = {
    'content-length': '0',
    'property-image_state': 'available',
    'min_ram': '0',
    'disk_format': 'aki',
    'updated_at': '2012-06-25T02:10:36',
    'date': 'Thu, 28 Jun 2012 07:20:05 GMT',
    'owner': '8aef75b5c0074a59aa99188fdb4b9e90',
    'id': '6d55dd55-053a-4765-b7bc-b30df0ea3861',
    'size': '4660272',
    'property-image_location': 'ubuntu-bucket/oneiric-server-cloudimg-amd64-'
                               'vmlinuz-generic.manifest.xml',
    'property-architecture': 'x86_64',
    'etag': 'f46cfe7fb3acaff49a3567031b9b53bb',
    'location': 'http://127.0.0.1:9292/v1/images/'
                '6d55dd55-053a-4765-b7bc-b30df0ea3861',
    'container_format': 'aki',
    'status': 'active',
    'deleted': 'False',
    'min_disk': '0',
    'is_public': 'False',
    'name': 'ubuntu-bucket/oneiric-server-cloudimg-amd64-vmlinuz-generic',
    'checksum': 'f46cfe7fb3acaff49a3567031b9b53bb',
    'created_at': '2012-06-25T02:10:32',
    'protected': 'False',
    'content-type': 'text/html; charset=UTF-8'
}
# The same image mutated into 'queued' state, with its own id and location.
IMG_RESPONSE_QUEUED = copy.copy(IMG_RESPONSE_ACTIVE)
IMG_RESPONSE_QUEUED['status'] = 'queued'
IMG_RESPONSE_QUEUED['id'] = '49b2c782-ee10-4692-84f8-3942e9432c4b'
IMG_RESPONSE_QUEUED['location'] = ('http://127.0.0.1:9292/v1/images/'
                                   + IMG_RESPONSE_QUEUED['id'])
class FakeHTTPConnection(object):
    """Scripted stand-in for an httplib HTTP connection.

    Tests register expected request/response pairs with prime_request();
    request() then matches incoming calls against those and getresponse()
    replays the canned response.  Any request that was not primed raises,
    so unexpected HTTP traffic fails the test immediately.
    """
    def __init__(self):
        self.count = 0        # number of request() calls made so far
        self.reqs = {}        # (method, url, body, header-names) -> response
        self.last_req = None  # key of the most recently matched request
        self.host = 'localhost'
        self.port = 9292
    def prime_request(self, method, url, in_body, in_headers,
                      out_code, out_body, out_headers):
        """Register the canned (code, body, headers) reply for one request."""
        if not url.startswith('/'):
            url = '/' + url
        # Only the *sorted header names* are part of the match key; header
        # values are deliberately ignored.
        hkeys = in_headers.keys()
        hkeys.sort()
        hashable = (method, url, in_body, ' '.join(hkeys))
        flat_headers = []
        for key in out_headers:
            flat_headers.append((key, out_headers[key]))
        self.reqs[hashable] = (out_code, out_body, flat_headers)
    def request(self, method, url, body, headers):
        """Match a request against the primed set; raise with diagnostics."""
        self.count += 1
        hkeys = headers.keys()
        hkeys.sort()
        hashable = (method, url, body, ' '.join(hkeys))
        if hashable not in self.reqs:
            options = []
            for h in self.reqs:
                options.append(repr(h))
            raise Exception('No such primed request: %s "%s"\n'
                            '%s\n\n'
                            'Available:\n'
                            '%s'
                            % (method, url, hashable, '\n\n'.join(options)))
        self.last_req = hashable
    def getresponse(self):
        """Return a file-like response object for the last matched request."""
        class FakeResponse(object):
            # NOTE: Python 2 tuple-parameter unpacking in the signature.
            def __init__(self, (code, body, headers)):
                self.body = six.StringIO(body)
                self.headers = headers
                self.status = code
            def read(self, count=1000000):
                return self.body.read(count)
            def getheaders(self):
                return self.headers
        return FakeResponse(self.reqs[self.last_req])
class ImageServiceTestCase(test_utils.BaseTestCase):
    """Tests for glance_replicator.ImageService driven through a primed
    FakeHTTPConnection (no real network traffic)."""
    def test_rest_errors(self):
        """Each HTTP error status maps to the expected exception type."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        for code, exc in [(400, glance_replicator.ServerErrorException),
                          (401, glance_replicator.AuthenticationException),
                          (403, glance_replicator.AuthenticationException),
                          (409,
                           glance_replicator.ImageAlreadyPresentException),
                          (500, glance_replicator.ServerErrorException)]:
            c.conn.prime_request('GET',
                                 ('v1/images/'
                                  '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'), '',
                                 {'x-auth-token': 'noauth'}, code, '', {})
            self.assertRaises(exc, c.get_image,
                              '5dcddce0-cba5-4f18-9cf4-9853c7b207a6')
    def test_rest_get_images(self):
        """get_images() pages with a marker until an empty page comes back."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        # Two images, one of which is queued
        resp = {'images': [IMG_RESPONSE_ACTIVE, IMG_RESPONSE_QUEUED]}
        c.conn.prime_request('GET', 'v1/images/detail?is_public=None', '',
                             {'x-auth-token': 'noauth'},
                             200, jsonutils.dumps(resp), {})
        c.conn.prime_request('GET',
                             ('v1/images/detail?marker=%s&is_public=None'
                              % IMG_RESPONSE_QUEUED['id']),
                             '', {'x-auth-token': 'noauth'},
                             200, jsonutils.dumps({'images': []}), {})
        imgs = list(c.get_images())
        self.assertEqual(len(imgs), 2)
        self.assertEqual(c.conn.count, 2)
    def test_rest_get_image(self):
        """get_image() returns a readable body with the image contents."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        image_contents = 'THISISTHEIMAGEBODY'
        c.conn.prime_request('GET',
                             'v1/images/%s' % IMG_RESPONSE_ACTIVE['id'],
                             '', {'x-auth-token': 'noauth'},
                             200, image_contents, IMG_RESPONSE_ACTIVE)
        body = c.get_image(IMG_RESPONSE_ACTIVE['id'])
        self.assertEqual(body.read(), image_contents)
    def test_rest_header_list_to_dict(self):
        """x-image-meta-* headers fold into a dict; properties nest."""
        i = [('x-image-meta-banana', 42),
             ('gerkin', 12),
             ('x-image-meta-property-frog', 11),
             ('x-image-meta-property-duck', 12)]
        o = glance_replicator.ImageService._header_list_to_dict(i)
        self.assertIn('banana', o)
        self.assertIn('gerkin', o)
        self.assertIn('properties', o)
        self.assertIn('frog', o['properties'])
        self.assertIn('duck', o['properties'])
        self.assertNotIn('x-image-meta-banana', o)
    def test_rest_get_image_meta(self):
        """get_image_meta() surfaces metadata from the HEAD response."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        c.conn.prime_request('HEAD',
                             'v1/images/%s' % IMG_RESPONSE_ACTIVE['id'],
                             '', {'x-auth-token': 'noauth'},
                             200, '', IMG_RESPONSE_ACTIVE)
        header = c.get_image_meta(IMG_RESPONSE_ACTIVE['id'])
        self.assertIn('id', header)
    def test_rest_dict_to_headers(self):
        """Inverse of _header_list_to_dict; None property values become ''."""
        i = {'banana': 42,
             'gerkin': 12,
             'properties': {'frog': 1,
                            'kernel_id': None}
             }
        o = glance_replicator.ImageService._dict_to_headers(i)
        self.assertIn('x-image-meta-banana', o)
        self.assertIn('x-image-meta-gerkin', o)
        self.assertIn('x-image-meta-property-frog', o)
        self.assertIn('x-image-meta-property-kernel_id', o)
        self.assertEqual(o['x-image-meta-property-kernel_id'], '')
        self.assertNotIn('properties', o)
    def test_rest_add_image(self):
        """add_image() POSTs body plus metadata-as-headers in one request."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        image_body = 'THISISANIMAGEBODYFORSURE!'
        image_meta_with_proto = {}
        image_meta_with_proto['x-auth-token'] = 'noauth'
        image_meta_with_proto['Content-Type'] = 'application/octet-stream'
        image_meta_with_proto['Content-Length'] = len(image_body)
        for key in IMG_RESPONSE_ACTIVE:
            image_meta_with_proto['x-image-meta-%s' % key] = \
                IMG_RESPONSE_ACTIVE[key]
        c.conn.prime_request('POST', 'v1/images',
                             image_body, image_meta_with_proto,
                             200, '', IMG_RESPONSE_ACTIVE)
        headers, body = c.add_image(IMG_RESPONSE_ACTIVE, image_body)
        self.assertEqual(headers, IMG_RESPONSE_ACTIVE)
        self.assertEqual(c.conn.count, 1)
    def test_rest_add_image_meta(self):
        """add_image_meta() PUTs metadata headers with an empty body."""
        c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')
        image_meta = {'id': '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'}
        image_meta_headers = \
            glance_replicator.ImageService._dict_to_headers(image_meta)
        image_meta_headers['x-auth-token'] = 'noauth'
        image_meta_headers['Content-Type'] = 'application/octet-stream'
        c.conn.prime_request('PUT', 'v1/images/%s' % image_meta['id'],
                             '', image_meta_headers, 200, '', '')
        headers, body = c.add_image_meta(image_meta)
class FakeHttpResponse(object):
    """Minimal file-like stand-in for an HTTP response object.

    Exposes just the two members the replicator code touches: getheaders()
    and read().
    """
    def __init__(self, headers, data):
        self.data = six.StringIO(data)
        self.headers = headers
    def getheaders(self):
        """Return the header collection given at construction time."""
        return self.headers
    def read(self, amt=None):
        """Read up to *amt* characters of the canned body (all by default)."""
        return self.data.read(amt)
# Canned image listing served by FakeImageService for the "slave" side.
# 'dontrepl' exists to exercise the -dontreplicate option; statuses cover
# both active and deleted images.
FAKEIMAGES = [{'status': 'active', 'size': 100, 'dontrepl': 'banana',
               'id': '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'},
              {'status': 'deleted', 'size': 200, 'dontrepl': 'banana',
               'id': 'f4da1d2a-40e8-4710-b3aa-0222a4cc887b'},
              {'status': 'active', 'size': 300, 'dontrepl': 'banana',
               'id': '37ff82db-afca-48c7-ae0b-ddc7cf83e3db'}]
# The same catalogue as seen on a live master: the 37ff82db image has been
# deleted there, and one extra image exists only on the master.
FAKEIMAGES_LIVEMASTER = [{'status': 'active', 'size': 100,
                          'dontrepl': 'banana',
                          'id': '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'},
                         {'status': 'deleted', 'size': 200,
                          'dontrepl': 'banana',
                          'id': 'f4da1d2a-40e8-4710-b3aa-0222a4cc887b'},
                         {'status': 'deleted', 'size': 300,
                          'dontrepl': 'banana',
                          'id': '37ff82db-afca-48c7-ae0b-ddc7cf83e3db'},
                         {'status': 'active', 'size': 100,
                          'dontrepl': 'banana',
                          'id': '15648dd7-8dd0-401c-bd51-550e1ba9a088'}]
class FakeImageService(object):
    """In-memory double for glance_replicator.ImageService.

    The auth token selects which canned catalogue get_images() serves:
    'livemastertoken' yields the live-master view, anything else the
    slave view.
    """
    def __init__(self, http_conn, authtoken):
        self.authtoken = authtoken
    def get_images(self):
        """Return the canned image list for this token's point of view."""
        if self.authtoken != 'livemastertoken':
            return FAKEIMAGES
        return FAKEIMAGES_LIVEMASTER
    def get_image(self, id):
        """Return a fake response whose body is always 'data'."""
        return FakeHttpResponse({}, 'data')
    def get_image_meta(self, id):
        """Return the slave-side metadata for *id*, or {} when unknown."""
        return next((img for img in FAKEIMAGES if img['id'] == id), {})
    def add_image_meta(self, meta):
        """Pretend the metadata update succeeded."""
        return {'status': 200}, None
    def add_image(self, meta, data):
        """Pretend the image upload succeeded."""
        return {'status': 200}, None
def get_image_service():
    # Test seam: tests monkeypatch glance_replicator.get_image_service with
    # this so the commands construct FakeImageService instead of the real
    # HTTP-backed client.
    return FakeImageService
def check_no_args(command, args):
    """Run *command* with the fake image service and report whether it
    rejected the (usually empty) argument list with a TypeError."""
    options = UserDict.UserDict()
    saved_service = glance_replicator.get_image_service
    glance_replicator.get_image_service = get_image_service
    try:
        command(options, args)
    except TypeError:
        # Commands signal "not enough arguments" by raising TypeError.
        return True
    finally:
        # Always restore the real factory, whatever the command did.
        glance_replicator.get_image_service = saved_service
    return False
def check_bad_args(command, args):
    """Run *command* with the fake image service and report whether it
    rejected the malformed argument list with a ValueError."""
    options = UserDict.UserDict()
    saved_service = glance_replicator.get_image_service
    glance_replicator.get_image_service = get_image_service
    try:
        command(options, args)
    except ValueError:
        # Commands signal "arguments present but invalid" via ValueError.
        return True
    finally:
        # Always restore the real factory, whatever the command did.
        glance_replicator.get_image_service = saved_service
    return False
class ReplicationCommandsTestCase(test_utils.BaseTestCase):
    """End-to-end tests of the glance-replicator subcommands, run against
    FakeImageService via the get_image_service monkeypatch."""
    def test_replication_size(self):
        """replication_size sums only active images (100 + 300 bytes)."""
        options = UserDict.UserDict()
        options.slavetoken = 'slavetoken'
        args = ['localhost:9292']
        stdout = sys.stdout
        orig_img_service = glance_replicator.get_image_service
        # Capture the command's printed report.
        sys.stdout = six.StringIO()
        try:
            glance_replicator.get_image_service = get_image_service
            glance_replicator.replication_size(options, args)
            sys.stdout.seek(0)
            output = sys.stdout.read()
        finally:
            sys.stdout = stdout
            glance_replicator.get_image_service = orig_img_service
        output = output.rstrip()
        self.assertEqual(output, 'Total size is 400 bytes across 2 images')
    def test_replication_size_with_no_args(self):
        """Missing arguments raise TypeError."""
        args = []
        command = glance_replicator.replication_size
        self.assertTrue(check_no_args(command, args))
    def test_replication_size_with_bad_args(self):
        """Malformed server argument raises ValueError."""
        args = ['aaa']
        command = glance_replicator.replication_size
        self.assertTrue(check_bad_args(command, args))
    def test_replication_dump(self):
        """replication_dump writes metadata for every image but image data
        only for active ones."""
        tempdir = self.useFixture(fixtures.TempDir()).path
        options = UserDict.UserDict()
        options.chunksize = 4096
        options.mastertoken = 'mastertoken'
        options.metaonly = False
        args = ['localhost:9292', tempdir]
        orig_img_service = glance_replicator.get_image_service
        self.addCleanup(setattr, glance_replicator,
                        'get_image_service', orig_img_service)
        glance_replicator.get_image_service = get_image_service
        glance_replicator.replication_dump(options, args)
        # Active images: metadata file plus a .img data file.
        for active in ['5dcddce0-cba5-4f18-9cf4-9853c7b207a6',
                       '37ff82db-afca-48c7-ae0b-ddc7cf83e3db']:
            imgfile = os.path.join(tempdir, active)
            self.assertTrue(os.path.exists(imgfile))
            self.assertTrue(os.path.exists('%s.img' % imgfile))
            with open(imgfile) as f:
                d = jsonutils.loads(f.read())
                self.assertIn('status', d)
                self.assertIn('id', d)
                self.assertIn('size', d)
        # Deleted images: metadata only, no .img file.
        for inactive in ['f4da1d2a-40e8-4710-b3aa-0222a4cc887b']:
            imgfile = os.path.join(tempdir, inactive)
            self.assertTrue(os.path.exists(imgfile))
            self.assertFalse(os.path.exists('%s.img' % imgfile))
            with open(imgfile) as f:
                d = jsonutils.loads(f.read())
                self.assertIn('status', d)
                self.assertIn('id', d)
                self.assertIn('size', d)
    def test_replication_dump_with_no_args(self):
        """Missing arguments raise TypeError."""
        args = []
        command = glance_replicator.replication_dump
        self.assertTrue(check_no_args(command, args))
    def test_replication_dump_with_bad_args(self):
        """Malformed arguments raise ValueError."""
        args = ['aaa', 'bbb']
        command = glance_replicator.replication_dump
        self.assertTrue(check_bad_args(command, args))
    def test_replication_load(self):
        """replication_load uploads new/changed images from a dump dir,
        honouring -dontreplicate and skipping images with no data file."""
        tempdir = self.useFixture(fixtures.TempDir()).path
        def write_image(img, data):
            # Helper: stash metadata (and optionally data) like a dump would.
            imgfile = os.path.join(tempdir, img['id'])
            with open(imgfile, 'w') as f:
                f.write(jsonutils.dumps(img))
            if data:
                with open('%s.img' % imgfile, 'w') as f:
                    f.write(data)
        for img in FAKEIMAGES:
            cimg = copy.copy(img)
            # We need at least one image where the stashed metadata on disk
            # is newer than what the fake has
            if cimg['id'] == '5dcddce0-cba5-4f18-9cf4-9853c7b207a6':
                cimg['extra'] = 'thisissomeextra'
            # This is an image where the metadata change should be ignored
            if cimg['id'] == 'f4da1d2a-40e8-4710-b3aa-0222a4cc887b':
                cimg['dontrepl'] = 'thisisyetmoreextra'
            write_image(cimg, 'kjdhfkjshdfkjhsdkfd')
        # And an image which isn't on the destination at all
        new_id = str(uuid.uuid4())
        cimg['id'] = new_id
        write_image(cimg, 'dskjfhskjhfkfdhksjdhf')
        # And an image which isn't on the destination, but lacks image
        # data
        new_id_missing_data = str(uuid.uuid4())
        cimg['id'] = new_id_missing_data
        write_image(cimg, None)
        # A file which should be ignored
        badfile = os.path.join(tempdir, 'kjdfhf')
        with open(badfile, 'w') as f:
            f.write(jsonutils.dumps([1, 2, 3, 4, 5]))
        # Finally, we're ready to test
        options = UserDict.UserDict()
        options.dontreplicate = 'dontrepl dontreplabsent'
        options.slavetoken = 'slavetoken'
        args = ['localhost:9292', tempdir]
        orig_img_service = glance_replicator.get_image_service
        try:
            glance_replicator.get_image_service = get_image_service
            updated = glance_replicator.replication_load(options, args)
        finally:
            glance_replicator.get_image_service = orig_img_service
        self.assertIn('5dcddce0-cba5-4f18-9cf4-9853c7b207a6', updated)
        self.assertNotIn('f4da1d2a-40e8-4710-b3aa-0222a4cc887b', updated)
        self.assertIn(new_id, updated)
        self.assertNotIn(new_id_missing_data, updated)
    def test_replication_load_with_no_args(self):
        """Missing arguments raise TypeError."""
        args = []
        command = glance_replicator.replication_load
        self.assertTrue(check_no_args(command, args))
    def test_replication_load_with_bad_args(self):
        """Malformed arguments raise ValueError."""
        args = ['aaa', 'bbb']
        command = glance_replicator.replication_load
        self.assertTrue(check_bad_args(command, args))
    def test_replication_livecopy(self):
        """replication_livecopy syncs the two images that differ between
        master and slave catalogues."""
        options = UserDict.UserDict()
        options.chunksize = 4096
        options.dontreplicate = 'dontrepl dontreplabsent'
        options.mastertoken = 'livemastertoken'
        options.slavetoken = 'liveslavetoken'
        options.metaonly = False
        args = ['localhost:9292', 'localhost:9393']
        orig_img_service = glance_replicator.get_image_service
        try:
            glance_replicator.get_image_service = get_image_service
            updated = glance_replicator.replication_livecopy(options, args)
        finally:
            glance_replicator.get_image_service = orig_img_service
        self.assertEqual(len(updated), 2)
    def test_replication_livecopy_with_no_args(self):
        """Missing arguments raise TypeError."""
        args = []
        command = glance_replicator.replication_livecopy
        self.assertTrue(check_no_args(command, args))
    def test_replication_livecopy_with_bad_args(self):
        """Malformed arguments raise ValueError."""
        args = ['aaa', 'bbb']
        command = glance_replicator.replication_livecopy
        self.assertTrue(check_bad_args(command, args))
    def test_replication_compare(self):
        """replication_compare reports the master-only image as 'missing'
        and the divergent one as 'diff'."""
        options = UserDict.UserDict()
        options.chunksize = 4096
        options.dontreplicate = 'dontrepl dontreplabsent'
        options.mastertoken = 'livemastertoken'
        options.slavetoken = 'liveslavetoken'
        options.metaonly = False
        args = ['localhost:9292', 'localhost:9393']
        orig_img_service = glance_replicator.get_image_service
        try:
            glance_replicator.get_image_service = get_image_service
            differences = glance_replicator.replication_compare(options, args)
        finally:
            glance_replicator.get_image_service = orig_img_service
        self.assertIn('15648dd7-8dd0-401c-bd51-550e1ba9a088', differences)
        self.assertEqual(differences['15648dd7-8dd0-401c-bd51-550e1ba9a088'],
                         'missing')
        self.assertIn('37ff82db-afca-48c7-ae0b-ddc7cf83e3db', differences)
        self.assertEqual(differences['37ff82db-afca-48c7-ae0b-ddc7cf83e3db'],
                         'diff')
    def test_replication_compare_with_no_args(self):
        """Missing arguments raise TypeError."""
        args = []
        command = glance_replicator.replication_compare
        self.assertTrue(check_no_args(command, args))
    def test_replication_compare_with_bad_args(self):
        """Malformed arguments raise ValueError."""
        args = ['aaa', 'bbb']
        command = glance_replicator.replication_compare
        self.assertTrue(check_bad_args(command, args))
class ReplicationUtilitiesTestCase(test_utils.BaseTestCase):
    """Tests for the small helper functions in glance_replicator."""
    def test_check_upload_response_headers(self):
        """Accepts a status header or a JSON body with image status; raises
        UploadException when neither is present."""
        glance_replicator._check_upload_response_headers({'status': 'active'},
                                                         None)
        d = {'image': {'status': 'active'}}
        glance_replicator._check_upload_response_headers({},
                                                         jsonutils.dumps(d))
        self.assertRaises(
            glance_replicator.UploadException,
            glance_replicator._check_upload_response_headers, {}, None)
    def test_image_present(self):
        """_image_present is True for a known id, False for a random one."""
        client = FakeImageService(None, 'noauth')
        self.assertTrue(glance_replicator._image_present(
            client, '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'))
        self.assertFalse(glance_replicator._image_present(
            client, uuid.uuid4()))
    def test_dict_diff(self):
        """_dict_diff is asymmetric: it only reports keys/values the first
        dict has that the second lacks or disagrees on."""
        a = {'a': 1, 'b': 2, 'c': 3}
        b = {'a': 1, 'b': 2}
        c = {'a': 1, 'b': 1, 'c': 3}
        d = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
        # Only things that the first dict has which the second dict doesn't
        # matter here.
        self.assertFalse(glance_replicator._dict_diff(a, a))
        self.assertTrue(glance_replicator._dict_diff(a, b))
        self.assertTrue(glance_replicator._dict_diff(a, c))
        self.assertFalse(glance_replicator._dict_diff(a, d))
| {
"content_hash": "3cb7c62ded83f27b25def68c193ae48f",
"timestamp": "",
"source": "github",
"line_count": 561,
"max_line_length": 78,
"avg_line_length": 37.65597147950089,
"alnum_prop": 0.5774674556213018,
"repo_name": "tanglei528/glance",
"id": "e0c501ada83ca90946188af31958a0cda77741a9",
"size": "21752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glance/tests/unit/test_glance_replicator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3193082"
},
{
"name": "Shell",
"bytes": "7168"
}
],
"symlink_target": ""
} |
import re
import sys
from datetime import datetime
from threading import Lock
from xml.etree import ElementTree as ET
from com.ericsson.xn.server.parser.SgsnmmeParser import SgsnmmeNodeInfo
# Module-wide lock intended to guard writes to the node XML file.
lock = Lock()
# Running interpreter version as 'major.minor' (e.g. '2.7'); used to choose
# the ElementTree.write() call signature.
versionTuple = sys.version_info[:2]
version = '.'.join(repr(v) for v in versionTuple)
class SgsnmmeCmdHandler():
    """Parse and execute one simulated SGSN-MME 'gsh' command line.

    Handlers for modify/create/delete commands mutate the node's backing XML
    file (self.xml_path); query commands fill self.return_str with the text a
    real node would print.  self.return_str starts as ['ERROR'] and is
    cleared or replaced by a handler on success.
    """
    # Maps the short CLI option names (without the leading '-') to the
    # element names used in the node XML / parsed node info.
    _key_map = {
        'nid': 'NeId',
        'lid': 'LicID',
        'kix1': 'KiX1',
        'pwx1': 'PwX1',
        'sqx1': 'NumSqnGroupsX1',
        'ipx1': 'LicIpX1',
        'px1': 'LicPortX1',
        'tx1n': 'Tx1NoMsg',
        'kix2': 'KiX2',
        'pwx2': 'PwX2',
        'sqx2': 'NumSqnGroupsX2',
        'ipx2': 'LicIpX2',
        'px2': 'LicPortX2',
        'tx2n': 'Tx2Normal',
        'tx2c': 'Tx2Checkstate',
        'ipx3': 'LicIpX3',
        'px3': 'LicPortX3',
        'tx3n': 'Tx3Normal'
    }
    # Canned usage examples returned verbatim by get_help_cmds().
    _help_cmds = [
        'gsh modify_li2_sgsn -name sgsnli2 -lid lic888 -nid sgsnx01 -kix1 1234567812345678 -pwx1 1234567812345678 '
        '-ipx1 10.140.0.8 -px1 51001 -sqx1 42 -tx1n 300 -kix2 1234567812345678 -pwx2 1234567812345678 -ipx2 '
        '10.140.0.81 -px2 12307 -sqx2 12 -tx2c 300 -tx2n 30 -ipx3 10.140.0.81 -px3 12308 -tx3n 30',
        'gsh modify_li2_sgsn -name sgsnli2 -tx1n 300',
        'gsh modify_li2_sgsn -name sgsnli2 -kix2 1234567812345678',
        'gsh list_li2_sgsn',
        'gsh get_li2_sgsn -name sgsnli2',
        'gsh modify_li2_mme -name mmeli2s -nid sgsnmmex01',
        'gsh list_li2_mme',
        'gsh get_li2_mme -name mmeli2',
        'gsh list_ip_service_address',
        'gsh list_ip_service_address -sn LI-C',
        'gsh get_ip_network_subnet -nw Gom -ip 10.20.24.1',
        'gsh delete_ip_service_address -sn LI-[CU] -ip 10.20.24.1',
        'gsh create_ip_network_subnet -nw Gom -ip 10.20.24.1 -mask 255.255.255.255',
        'gsh create_ip_service_address -sn LI-[CU] -ip 10.20.24.1',
        'gsh check_config',
        'gsh activate_config_pending',
        'gsh modify_feature -name lawful_interception_var2 -state [on|off]',
        'gsh get_feature -name lawful_interception_var2',
        'gsh modify_li2_alarm_level -an nodeRestart -al general',
        'gsh list_li2_alarm_level'
    ]
def __init__(self, logger, xml_path, line):
self.logger = logger
self.xml_path = xml_path
self.line = line
self.argvs = line.split()
self.return_str = ['ERROR']
self.perform_action()
    def perform_action(self):
        """Dispatch self.line to the matching command handler.

        Only lines starting with 'gsh' and a known subcommand are handled;
        anything else leaves self.return_str at its default ['ERROR'].
        NOTE: the elif ordering matters -- several checks use startswith(),
        so more specific prefixes must be tested before shorter ones.
        """
        if self.line and '' != self.line:
            if 'gsh' == self.argvs[0]:
                if 1 < len(self.argvs):
                    if self.argvs[1].startswith('list_li2'):
                        self.get_node_name()
                    elif self.argvs[1].startswith('get_li2'):
                        self.get_all_conf_info()
                    elif self.argvs[1].startswith('list_ip_service_address'):
                        self.list_ip_service_address()
                    # elif self.argvs[1].startswith('modify_li2_'):
                    elif 'modify_li2_mme' == self.argvs[1] or 'modify_li2_sgsn' == self.argvs[1]:
                        self.set_sgsn_mme_normal_parameters()
                    elif self.argvs[1].startswith('get_ip_network_subnet'):
                        self.get_network_subnet()
                    elif self.argvs[1].startswith('create_ip_network_subnet'):
                        self.create_ip_network()
                    elif self.argvs[1].startswith('delete_ip_service_address'):
                        self.delete_li_c_u_ip()
                    elif self.argvs[1].startswith('create_ip_service_address'):
                        self.create_ip_service_address()
                    elif self.argvs[1].startswith('check_config'):
                        self.check_config()
                    elif self.argvs[1].startswith('activate_config_pending'):
                        self.activate_config_pending()
                    elif self.argvs[1].startswith('modify_feature'):
                        self.modify_feature()
                    elif self.argvs[1].startswith('get_feature'):
                        self.get_feature()
                    elif 'modify_li2_alarm_level' == self.argvs[1]:
                        self.modify_li2_alarm_level()
    def modify_li2_alarm_level(self):
        """Handle 'gsh modify_li2_alarm_level -an <name> -al general|severe'.

        Updates the matching alarm element in the node XML; on success the
        'ERROR' default reply is cleared.
        """
        if 6 != len(self.argvs):
            return
        if '-an' != self.argvs[2] or '-al' != self.argvs[4]:
            return
        if 'general' != self.argvs[5] and 'severe' != self.argvs[5]:
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        alarms = node['alarm']
        k = self.argvs[3]
        # Alarm names are stored with a capitalised first letter in the XML.
        if k[0].islower():
            k = k[0].upper() + k[1:]
        if not alarms.has_key(k):
            return
        et = ET.parse(self.xml_path)
        et.find('.//%s' % k).text = self.argvs[5]
        self._write_back_to_file(et)
        del self.return_str[:]
    def list_li2_alarm_level(self):
        """Handle 'gsh list_li2_alarm_level': tabulate all alarm levels."""
        if 2 != len(self.argvs):
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        del self.return_str[:]
        alarms = node['alarm']
        # Column layout: name padded to 43 characters, then the level.
        self.return_str.append('Parameter' + ' ' * (43 - len('Parameter')) + 'Active Data')
        self.return_str.append('-' * 54)
        for k, v in alarms.iteritems():
            self.return_str.append(k + ' ' * (43 - len(k)) + v)
    def get_feature(self):
        """Handle 'gsh get_feature -name lawful_interception_var2'.

        Emits a fixed-width report of the feature's state from the node XML.
        """
        str_re = '^gsh\s+get_feature\s+\-name\s+lawful_interception_var2$'
        if not re.compile(str_re).match(self.line):
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        del self.return_str[:]
        self.return_str.append('Parameter Active Data Planned Data ')
        self.return_str.append('-----------------------------------------------------')
        self.return_str.append('timestamp ' + node['node_paras']['StateTime'] + ' _ ')
        self.return_str.append('planState _ _ ')
        self.return_str.append('keyId (LicenseKeyId) ' + node['node_paras']['KeyId'] + ' _ ')
        self.return_str.append('lic (LicenseState) granted _ ')
        self.return_str.append('state (FeatureState) ' + node['node_paras']['State'] + ' _ ')
    def modify_feature(self):
        """Handle 'gsh modify_feature -name lawful_interception_var2 -state on|off'.

        Writes the new state and a fresh timestamp into the node XML.
        """
        re_str = '^gsh\s+modify_feature\s+\-name\s+lawful_interception_var2\s+\-state\s+o(?:n|ff)$'
        if not re.compile(re_str).match(self.line):
            return
        # will not check the default on/off state
        et = ET.parse(self.xml_path)
        time_now = datetime.now().strftime('%Y%m%d%H%M%S')
        et.find(".//State").text = self.argvs[5]
        et.find(".//StateTime").text = time_now
        self._write_back_to_file(et)
        del self.return_str[:]
    def activate_config_pending(self):
        """Handle 'gsh activate_config_pending': stamp the activation time."""
        if 2 != len(self.argvs) or 'activate_config_pending' != self.argvs[1]:
            return
        t_now = datetime.now().strftime('%Y%m%d%H%M%S')
        et = ET.parse(self.xml_path)
        et.find(".//ActiveTimeIP").text = t_now
        self._write_back_to_file(et)
        del self.return_str[:]
        self.return_str.append('Wait!')
    def create_ip_service_address(self):
        """Handle 'gsh create_ip_service_address -sn LI-C|LI-U -ip <addr>'.

        Succeeds only when the service does not yet exist and the address
        matches the previously bound subnet IP (BindIP); the new element is
        appended to the XML and BindIP is consumed (cleared).
        """
        str_re = '^\s*gsh\s+create_ip_service_address\s+\-sn\s+LI\-[C|U]\s+\-ip\s+(?:(?:25[0-5]|2[0-4][0-9]|' \
                 '[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
        if not re.compile(str_re).match(self.line):
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        if node['node_paras'].has_key(self.argvs[3]):
            return
        if node['node_paras']['BindIP'] != self.argvs[5]:
            return
        et = ET.parse(self.xml_path)
        li_et = ET.Element(self.argvs[3])
        li_et.text = self.argvs[5]
        # Tail keeps the XML's existing indentation style intact.
        li_et.tail = '\n\t\t'
        et.getroot().append(li_et)
        et.find(".//BindIP").text = ''
        self._write_back_to_file(et)
        del self.return_str[:]
    def delete_li_c_u_ip(self):
        """Handle 'gsh delete_ip_service_address -sn LI-C|LI-U -ip <addr>'.

        Removes the service element from the XML when the given address
        matches the currently configured one.
        """
        str_re = '^\s*gsh\s+delete_ip_service_address\s+\-sn\s+LI\-[C|U]\s+\-ip\s+(?:(?:25[0-5]|2[0-4][0-9]|' \
                 '[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
        pattern = re.compile(str_re)
        if not pattern.match(self.line):
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        if node['node_paras'][self.argvs[3]] != self.argvs[5]:
            return
        et = ET.parse(self.xml_path)
        et.getroot().remove(et.find(".//" + self.argvs[3]))
        self._write_back_to_file(et)
        del self.return_str[:]
    def create_ip_network(self):
        """Handle 'gsh create_ip_network_subnet -nw Gom -ip <addr> -mask <m>'.

        Validates the mask against the configured one and the IP's syntax,
        then records the address as the pending BindIP in the XML.
        """
        if 8 != len(self.argvs):
            return
        if 'create_ip_network_subnet' != self.argvs[1] or '-nw' != self.argvs[2] or 'Gom' != self.argvs[3] or \
                '-ip' != self.argvs[4] or '-mask' != self.argvs[6]:
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        if node['node_paras']['Mask'] != self.argvs[7]:
            return
        str_re = '^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
        pattern = re.compile(str_re)
        ip_compile = pattern.match(self.argvs[5])
        if not ip_compile:
            return
        et = ET.parse(self.xml_path)
        et.find(".//BindIP").text = self.argvs[5]
        self._write_back_to_file(et)
        del self.return_str[:]
    def get_network_subnet(self):
        """Handle 'gsh get_ip_network_subnet -nw Gom -ip <addr>'.

        Prints the configured subnet mask when the address matches the
        configured LI service addresses.
        """
        # validation of the CMD
        if 6 != len(self.argvs):
            return
        if 'get_ip_network_subnet' != self.argvs[1] or '-nw' != self.argvs[2] or \
                'Gom' != self.argvs[3] or '-ip' != self.argvs[4]:
            return
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        # NOTE(review): with `or` this only succeeds when the address equals
        # BOTH LI-C and LI-U; matching either one would need `and`.  Looks
        # suspicious -- confirm the intended semantics before changing.
        if node['node_paras']['LI-C'] != self.argvs[5] or node['node_paras']['LI-U'] != self.argvs[5]:
            return
        # return the correct net-mask
        del self.return_str[:]
        self.return_str.append('Parameter Active Data Planned Data ')
        self.return_str.append('----------------------------------------------------')
        self.return_str.append('timestamp ' + node['node_paras']['ActiveTime'] + ' _ ')
        self.return_str.append('planState _ _ ')
        self.return_str.append('mask (IpSubnetMask) ' + node['node_paras']['Mask'] + ' _ ')
def _write_back_to_file(self, et):
global lock, version
if '2.7' == version:
et.write(self.xml_path, encoding='utf-8', xml_declaration=True, method='xml')
else:
et.write(self.xml_path, encoding='utf-8')
    def set_sgsn_mme_normal_parameters(self):
        """Handle 'gsh modify_li2_sgsn|modify_li2_mme -name <n> [-opt v]...'.

        Translates each short CLI option via _key_map to its XML element
        name and writes the new values under the matching <sgsn>/<mme>
        subtree.  Any unknown option or name mismatch leaves the 'ERROR'
        reply in place.
        """
        # check if the command is correct
        if '-name' != self.argvs[2]:
            return
        if 4 > len(self.argvs):
            return
        if len(self.argvs) & 1:
            # this means the number of key, value is not even number
            return
        # input_values = {self.argvs[i][1:]: self.argvs[i + 1] for i in range(4, len(self.argvs), 2)}
        # fit the python version 2.6.6 since redhat 6.x with python 2.6.6
        input_values = {}
        for i in range(4, len(self.argvs), 2):
            input_values[self.argvs[i][1:]] = self.argvs[i + 1]
        setting_values = {}
        et = ET.parse(self.xml_path)
        node_root = None
        for k in input_values.keys():
            if not self._key_map.has_key(k):
                # Un-recognized keys, will treat as illegal input
                return
            else:
                setting_values[self._key_map[k]] = input_values[k]
        node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
        if 'modify_li2_sgsn' == self.argvs[1] and self.argvs[3] == node['sgsn']['Name']:
            node_root = et.find(".//sgsn")
        elif 'modify_li2_mme' == self.argvs[1] and self.argvs[3] == node['mme']['Name']:
            node_root = et.find(".//mme")
        else:
            return
        # will update the values
        for k, v in setting_values.iteritems():
            node_root.find("./" + k).text = v
        # write values to files
        self._write_back_to_file(et)
        # remove ERROR message
        del self.return_str[:]
    def list_ip_service_address(self):
        """Handle 'gsh list_ip_service_address [-sn LI-C|LI-U]'.

        With no filter, lists every configured service address; with -sn,
        lists just that service.
        """
        if 2 == len(self.argvs):
            if 'list_ip_service_address' == self.argvs[1]:
                node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
                del self.return_str[:]
                self.return_str.append('ps Class Identifiers | ')
                self.return_str.append('---------------------------------------------------------------')
                for k, v in node['node_paras'].iteritems():
                    self.return_str.append(('A\tip_service_address\t-sn\t' + k + ' ' * (11 - len(k)) + '-ip ' +
                                            v))
        elif 4 == len(self.argvs):
            if 'list_ip_service_address' == self.argvs[1] and '-sn' == self.argvs[2] and 'LI-C' == self.argvs[3]:
                node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
                del self.return_str[:]
                self.return_str.append('ps Class Identifiers | ')
                self.return_str.append('------------------------------------------------------- ')
                self.return_str.append(('A ip_service_address -sn LI-C -ip ' + node['node_paras']['LI-C']))
            elif 'list_ip_service_address' == self.argvs[1] and '-sn' == self.argvs[2] and 'LI-U' == self.argvs[3]:
                node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
                del self.return_str[:]
                self.return_str.append('ps Class Identifiers | ')
                self.return_str.append('------------------------------------------------------- ')
                self.return_str.append(('A ip_service_address -sn LI-U -ip ' + node['node_paras']['LI-U']))
    def get_all_conf_info(self):
        """Emulate ``get_li2_mme -name <Name>`` / ``get_li2_sgsn -name <Name>``.

        When the supplied name matches the configured node, every key/value
        pair of that node section is written to ``self.return_str``;
        otherwise the buffer is left untouched.
        """
        if 4 == len(self.argvs):
            if '-name' == self.argvs[2]:
                if 'get_li2_mme' == self.argvs[1]:
                    node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
                    if node['mme']['Name'] != self.argvs[3]:
                        return
                    else:
                        del self.return_str[:]
                        self.return_str.append('Parameter' + ' ' * 19 +'Active Data')
                        self.return_str.append('-----------------------------------------------------')
                        # Left-align each key in a 28-character column
                        for k, v in node['mme'].iteritems():
                            self.return_str.append((k + ' ' * (28 - len(k)) + v))
                elif 'get_li2_sgsn' == self.argvs[1]:
                    node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
                    if node['sgsn']['Name'] != self.argvs[3]:
                        return
                    else:
                        del self.return_str[:]
                        self.return_str.append('Parameter' + ' ' * 19 + 'Active Data')
                        self.return_str.append('-----------------------------------------------------')
                        for k, v in node['sgsn'].iteritems():
                            self.return_str.append((k + ' ' * (28 - len(k)) + v))
    def get_node_name(self):
        """Emulate ``list_li2_mme`` / ``list_li2_sgsn`` / ``list_li2_alarm_level``.

        The first two report only the configured node name; the third is
        delegated to ``list_li2_alarm_level``.
        """
        # NOTE(review): assumes len(self.argvs) >= 2 -- verify at call sites.
        if 'list_li2_mme' == self.argvs[1]:
            node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
            del self.return_str[:]
            self.return_str.append('Parameter' + ' ' * 19 + 'Active Data')
            self.return_str.append('-----------------------------------------------------')
            self.return_str.append(('Name' + ' ' * 24 + node['mme']['Name']))
        elif 'list_li2_sgsn' == self.argvs[1]:
            node = SgsnmmeNodeInfo(self.logger, self.xml_path).get_node_info()
            del self.return_str[:]
            self.return_str.append('Parameter' + ' ' * 19 + 'Active Data')
            self.return_str.append('-----------------------------------------------------')
            self.return_str.append(('Name' + ' ' * 24 + node['sgsn']['Name']))
        elif 'list_li2_alarm_level' == self.argvs[1]:
            self.list_li2_alarm_level()
def get_return_str(self):
return [line.encode('utf-8') for line in self.return_str]
def get_help_cmds(self):
return [line.encode('utf-8') for line in self._help_cmds]
    def check_config(self):
        """Emulate the ``check_config`` CLI command.

        Replaces ``self.return_str`` with a canned configuration report:
        incomplete ``ip_network`` configurations followed by unused
        ``outbound_pf_policy`` entries.
        """
        if 2 != len(self.argvs) or 'check_config' != self.argvs[1]:
            return
        del self.return_str[:]
        self.return_str.append('Wait! Estimated processing time is 15 seconds')
        # ip_network section
        self.return_str.append('ip_network')
        self.return_str.append('Warning: ip_network -nw S1_MME_2; ip_service_address -sn S1-MME-2; Incomplete '
                               'Configuration; ip_interface -ifn IpInterfaceName -ip InterfaceIpAddress -nw '
                               'IpNetworkName; IP interface(s) is not configured for the IP Service in the IP network.')
        self.return_str.append('Warning: ip_network -nw S6a_2; ip_service_address -sn S6a-2; Incomplete Configuration; '
                               'ip_interface -ifn IpInterfaceName -ip InterfaceIpAddress -nw IpNetworkName; '
                               'IP interface(s) is not configured for the IP Service in the IP network.')
        self.return_str.append('Warning: ip_network -nw SGs-MME-2; ip_service_address -sn SGs-2; Incomplete '
                               'Configuration; ip_interface -ifn IpInterfaceName -ip InterfaceIpAddress -nw '
                               'IpNetworkName; IP interface(s) is not configured for the IP Service in the IP network.')
        self.return_str.append('Warning: ip_network -nw SS7-CN-2; ip_service_address -sn CN-SS7-2; Incomplete '
                               'Configuration; ip_interface -ifn IpInterfaceName -ip InterfaceIpAddress -nw '
                               'IpNetworkName; IP interface(s) is not configured for the IP Service in the IP network.')
        self.return_str.append('Warning: ip_network -nw SS7-Iu-2; ip_service_address -sn Iu-SS7-2; Incomplete '
                               'Configuration; ip_interface -ifn IpInterfaceName -ip InterfaceIpAddress -nw '
                               'IpNetworkName; IP interface(s) is not configured for the IP Service in the IP network.')
        self.return_str.append('')
        # outbound_pf_policy section
        self.return_str.append('outbound_pf_policy')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_293 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_261 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_251 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_241 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_231 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_221 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_217 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_215 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_213 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
        self.return_str.append('Warning: outbound_pf_policy -ofp ETH_1_25_1_211 ; Conflicting Values; ; '
                               'There is no user (an IP interface) of this packet filter policy.')
"content_hash": "a4384f393cada25db92b07c4a5babcd5",
"timestamp": "",
"source": "github",
"line_count": 395,
"max_line_length": 120,
"avg_line_length": 52.93417721518988,
"alnum_prop": 0.5187239944521498,
"repo_name": "lowitty/zacademy",
"id": "c58e96e064c1304702eb555637fef58b9912e0f4",
"size": "20933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "com/ericsson/xn/server/handler/SgsnmmeHandler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Logos",
"bytes": "2170"
},
{
"name": "Python",
"bytes": "429221"
},
{
"name": "Shell",
"bytes": "73304"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function, absolute_import
import struct
import tempfile
import warnings
import zlib
import numpy as np
from numpy.compat import asstr
# Define the different data types that can be found in an IDL save file.
# Maps an IDL typecode to the corresponding big-endian numpy dtype string.
# Entries mapped to '|O' (object) are variable-size and handled specially
# by _read_data / _read_array: 7 = string, 8 = structure, 10 = pointer,
# 11 = object reference.
DTYPE_DICT = {1: '>u1',
              2: '>i2',
              3: '>i4',
              4: '>f4',
              5: '>f8',
              6: '>c8',
              7: '|O',
              8: '|O',
              9: '>c16',
              10: '|O',
              11: '|O',
              12: '>u2',
              13: '>u4',
              14: '>i8',
              15: '>u8'}
# Define the different record types that can be found in an IDL save file.
# NOTE(review): not every code listed here is handled by _read_record
# (e.g. 12 COMPILED reaches the "not implemented" raise) -- confirm against
# files actually produced by IDL before relying on those types.
RECTYPE_DICT = {0: "START_MARKER",
                1: "COMMON_VARIABLE",
                2: "VARIABLE",
                3: "SYSTEM_VARIABLE",
                6: "END_MARKER",
                10: "TIMESTAMP",
                12: "COMPILED",
                13: "IDENTIFICATION",
                14: "VERSION",
                15: "HEAP_HEADER",
                16: "HEAP_DATA",
                17: "PROMOTE64",
                19: "NOTICE",
                20: "DESCRIPTION"}
# Define a dictionary to contain structure definitions.  Populated by
# _read_structdesc and consulted again when a PREDEF descriptor refers back
# to an earlier definition; module-level, so it persists across reads.
STRUCT_DICT = {}
def _align_32(f):
'''Align to the next 32-bit position in a file'''
pos = f.tell()
if pos % 4 != 0:
f.seek(pos + 4 - pos % 4)
return
def _skip_bytes(f, n):
'''Skip `n` bytes'''
f.read(n)
return
def _read_bytes(f, n):
'''Read the next `n` bytes'''
return f.read(n)
def _read_byte(f):
'''Read a single byte'''
return np.uint8(struct.unpack('>B', f.read(4)[:1])[0])
def _read_long(f):
'''Read a signed 32-bit integer'''
return np.int32(struct.unpack('>l', f.read(4))[0])
def _read_int16(f):
'''Read a signed 16-bit integer'''
return np.int16(struct.unpack('>h', f.read(4)[2:4])[0])
def _read_int32(f):
'''Read a signed 32-bit integer'''
return np.int32(struct.unpack('>i', f.read(4))[0])
def _read_int64(f):
'''Read a signed 64-bit integer'''
return np.int64(struct.unpack('>q', f.read(8))[0])
def _read_uint16(f):
'''Read an unsigned 16-bit integer'''
return np.uint16(struct.unpack('>H', f.read(4)[2:4])[0])
def _read_uint32(f):
'''Read an unsigned 32-bit integer'''
return np.uint32(struct.unpack('>I', f.read(4))[0])
def _read_uint64(f):
'''Read an unsigned 64-bit integer'''
return np.uint64(struct.unpack('>Q', f.read(8))[0])
def _read_float32(f):
'''Read a 32-bit float'''
return np.float32(struct.unpack('>f', f.read(4))[0])
def _read_float64(f):
'''Read a 64-bit float'''
return np.float64(struct.unpack('>d', f.read(8))[0])
class Pointer(object):
    '''Placeholder holding the heap index of an IDL pointer until it is
    resolved against the heap by _replace_heap.'''
    def __init__(self, index):
        self.index = index
class ObjectPointer(Pointer):
    '''Class used to define object pointers'''
    # Distinguished from Pointer only by type; resolved against the same heap.
    pass
def _read_string(f):
    '''Read a length-prefixed string, 32-bit aligning the stream afterwards.

    A non-positive length yields the empty string.
    '''
    length = _read_long(f)
    if length <= 0:
        return ''
    raw = _read_bytes(f, length)
    _align_32(f)
    return asstr(raw)
def _read_string_data(f):
    '''Read a data string whose length is stored twice in the stream.

    Returns raw bytes (no decoding); a non-positive first length yields ''.
    '''
    length = _read_long(f)
    if length <= 0:
        return ''
    # The length is repeated immediately before the payload itself.
    length = _read_long(f)
    payload = _read_bytes(f, length)
    _align_32(f)
    return payload
def _read_data(f, dtype):
    '''Read a single scalar value of IDL type code `dtype` from `f`.

    Raises Exception for typecode 8 (structures are handled elsewhere)
    and for any typecode not known to this reader.
    '''
    if dtype == 1:
        # Byte data is preceded by a confirmation int32 that must be 1
        if _read_int32(f) != 1:
            raise Exception("Error occurred while reading byte variable")
        return _read_byte(f)
    if dtype == 6:
        # Single-precision complex: two consecutive float32 values
        re_part = _read_float32(f)
        im_part = _read_float32(f)
        return np.complex64(re_part + im_part * 1j)
    if dtype == 8:
        raise Exception("Should not be here - please report this")
    if dtype == 9:
        # Double-precision complex: two consecutive float64 values
        re_part = _read_float64(f)
        im_part = _read_float64(f)
        return np.complex128(re_part + im_part * 1j)
    if dtype == 10:
        return Pointer(_read_int32(f))
    if dtype == 11:
        return ObjectPointer(_read_int32(f))
    # Remaining typecodes map one-to-one onto simple scalar readers
    simple_readers = {2: _read_int16,
                      3: _read_int32,
                      4: _read_float32,
                      5: _read_float64,
                      7: _read_string_data,
                      12: _read_uint16,
                      13: _read_uint32,
                      14: _read_int64,
                      15: _read_uint64}
    if dtype not in simple_readers:
        raise Exception("Unknown IDL type: %i - please report this" % dtype)
    return simple_readers[dtype](f)
def _read_structure(f, array_desc, struct_desc):
    '''
    Read a structure, with the array and structure descriptors given as
    `array_desc` and `structure_desc` respectively.

    Builds a numpy recarray with one field per tag; fields that are
    themselves arrays or structures are stored as objects and filled by
    recursive reads.  Reshaped to the descriptor's dimensions when the
    structure is multi-dimensional.
    '''
    nrows = array_desc['nelements']
    columns = struct_desc['tagtable']
    dtype = []
    for col in columns:
        if col['structure'] or col['array']:
            # Nested arrays/structures are kept as Python objects
            dtype.append(((col['name'].lower(), col['name']), np.object_))
        else:
            if col['typecode'] in DTYPE_DICT:
                # (title, name) field pairs expose both lowercase and
                # original-case field names on the recarray
                dtype.append(((col['name'].lower(), col['name']),
                              DTYPE_DICT[col['typecode']]))
            else:
                raise Exception("Variable type %i not implemented" %
                                col['typecode'])
    structure = np.recarray((nrows,), dtype=dtype)
    for i in range(nrows):
        for col in columns:
            # NOTE(review): `dtype` is reused here for the scalar typecode,
            # shadowing the dtype list built above (it is no longer needed).
            dtype = col['typecode']
            if col['structure']:
                structure[col['name']][i] = _read_structure(f,
                    struct_desc['arrtable'][col['name']],
                    struct_desc['structtable'][col['name']])
            elif col['array']:
                structure[col['name']][i] = _read_array(f, dtype,
                    struct_desc['arrtable'][col['name']])
            else:
                structure[col['name']][i] = _read_data(f, dtype)
    # Reshape structure if needed (dims stored slowest-varying last)
    if array_desc['ndims'] > 1:
        dims = array_desc['dims'][:int(array_desc['ndims'])]
        dims.reverse()
        structure = structure.reshape(dims)
    return structure
def _read_array(f, typecode, array_desc):
    '''
    Read an array of type `typecode`, with the array descriptor given as
    `array_desc`.

    Fixed-size numeric types are read in one shot from the raw byte stream;
    string/pointer/complex-object types fall back to element-by-element
    reads via `_read_data`.
    '''
    if typecode in [1, 3, 4, 5, 6, 9, 13, 14, 15]:
        if typecode == 1:
            # Byte arrays carry their own byte count; cross-check the header
            nbytes = _read_int32(f)
            if nbytes != array_desc['nbytes']:
                warnings.warn("Not able to verify number of bytes from header")
        # Reinterpret the raw bytes as the target dtype.  np.frombuffer
        # replaces the deprecated np.fromstring; .copy() keeps the result
        # writable, matching fromstring's copy semantics.
        array = np.frombuffer(f.read(array_desc['nbytes']),
                              dtype=DTYPE_DICT[typecode]).copy()
    elif typecode in [2, 12]:
        # These are 2 byte types, need to skip every two as they are not packed
        array = np.frombuffer(f.read(array_desc['nbytes'] * 2),
                              dtype=DTYPE_DICT[typecode])[1::2].copy()
    else:
        # Variable-size elements: read one at a time into an object array
        array = []
        for i in range(array_desc['nelements']):
            dtype = typecode
            data = _read_data(f, dtype)
            array.append(data)
        array = np.array(array, dtype=np.object_)
    # Reshape array if needed (dims stored slowest-varying last)
    if array_desc['ndims'] > 1:
        dims = array_desc['dims'][:int(array_desc['ndims'])]
        dims.reverse()
        array = array.reshape(dims)
    # Go to next alignment position
    _align_32(f)
    return array
def _read_record(f):
    '''Read a full record from the save file.

    Reads the record header (type code plus the 64-bit offset of the next
    record), dispatches on the record type, and always leaves the file
    pointer at the start of the following record.  Returns a dict whose
    'rectype' key holds the record-type name from RECTYPE_DICT; the other
    keys depend on the record type.
    '''
    record = {'rectype': _read_long(f)}
    nextrec = _read_uint32(f)
    nextrec += _read_uint32(f) * 2 ** 32
    _skip_bytes(f, 4)
    if not record['rectype'] in RECTYPE_DICT:
        raise Exception("Unknown RECTYPE: %i" % record['rectype'])
    record['rectype'] = RECTYPE_DICT[record['rectype']]
    if record['rectype'] in ["VARIABLE", "HEAP_DATA"]:
        if record['rectype'] == "VARIABLE":
            record['varname'] = _read_string(f)
        else:
            record['heap_index'] = _read_long(f)
            _skip_bytes(f, 4)
        rectypedesc = _read_typedesc(f)
        if rectypedesc['typecode'] == 0:
            if nextrec == f.tell():
                record['data'] = None  # Indicates NULL value
            else:
                raise ValueError("Unexpected type code: 0")
        else:
            varstart = _read_long(f)
            if varstart != 7:
                raise Exception("VARSTART is not 7")
            if rectypedesc['structure']:
                record['data'] = _read_structure(f, rectypedesc['array_desc'],
                                                 rectypedesc['struct_desc'])
            elif rectypedesc['array']:
                record['data'] = _read_array(f, rectypedesc['typecode'],
                                             rectypedesc['array_desc'])
            else:
                dtype = rectypedesc['typecode']
                record['data'] = _read_data(f, dtype)
    elif record['rectype'] == "TIMESTAMP":
        _skip_bytes(f, 4 * 256)
        record['date'] = _read_string(f)
        record['user'] = _read_string(f)
        record['host'] = _read_string(f)
    elif record['rectype'] == "VERSION":
        record['format'] = _read_long(f)
        record['arch'] = _read_string(f)
        record['os'] = _read_string(f)
        record['release'] = _read_string(f)
    elif record['rectype'] == "IDENTIFICATION":
        # BUG FIX: previously compared against "IDENTIFICATON" (missing 'I'),
        # which is not a RECTYPE_DICT value, so identification records always
        # fell through to the final raise below.
        record['author'] = _read_string(f)
        record['title'] = _read_string(f)
        record['idcode'] = _read_string(f)
    elif record['rectype'] == "NOTICE":
        record['notice'] = _read_string(f)
    elif record['rectype'] == "DESCRIPTION":
        record['description'] = _read_string_data(f)
    elif record['rectype'] == "HEAP_HEADER":
        record['nvalues'] = _read_long(f)
        record['indices'] = []
        for i in range(record['nvalues']):
            record['indices'].append(_read_long(f))
    elif record['rectype'] == "COMMON_VARIABLE":
        # BUG FIX: previously compared against "COMMONBLOCK", but code 1 maps
        # to "COMMON_VARIABLE" in RECTYPE_DICT, so common-block records always
        # raised instead of being parsed.
        record['nvars'] = _read_long(f)
        record['name'] = _read_string(f)
        record['varnames'] = []
        for i in range(record['nvars']):
            record['varnames'].append(_read_string(f))
    elif record['rectype'] == "END_MARKER":
        record['end'] = True
    elif record['rectype'] == "UNKNOWN":
        # NOTE(review): "UNKNOWN" is not a RECTYPE_DICT value, so this branch
        # is unreachable; kept for parity with the original code.
        warnings.warn("Skipping UNKNOWN record")
    elif record['rectype'] == "SYSTEM_VARIABLE":
        warnings.warn("Skipping SYSTEM_VARIABLE record")
    else:
        raise Exception("record['rectype']=%s not implemented" %
                        record['rectype'])
    # Regardless of how much of the record was consumed, jump to the next one
    f.seek(nextrec)
    return record
def _read_typedesc(f):
    '''Read a type descriptor: typecode plus array/structure flag bits.'''
    typecode = _read_long(f)
    varflags = _read_long(f)
    typedesc = {'typecode': typecode, 'varflags': varflags}
    if varflags & 2 == 2:
        raise Exception("System variables not implemented")
    typedesc['array'] = varflags & 4 == 4
    typedesc['structure'] = varflags & 32 == 32
    # A structure carries both an array and a structure descriptor; a plain
    # array carries only the array descriptor.
    if typedesc['structure']:
        typedesc['array_desc'] = _read_arraydesc(f)
        typedesc['struct_desc'] = _read_structdesc(f)
    elif typedesc['array']:
        typedesc['array_desc'] = _read_arraydesc(f)
    return typedesc
def _read_arraydesc(f):
    '''Function to read in an array descriptor.

    ARRSTART 8 is the standard 32-bit layout; ARRSTART 18 is the 64-bit
    variant (experimental support).  Returns a dict with nbytes, nelements,
    ndims, nmax and the dims list.
    '''
    arraydesc = {'arrstart': _read_long(f)}
    if arraydesc['arrstart'] == 8:
        _skip_bytes(f, 4)
        arraydesc['nbytes'] = _read_long(f)
        arraydesc['nelements'] = _read_long(f)
        arraydesc['ndims'] = _read_long(f)
        _skip_bytes(f, 8)
        arraydesc['nmax'] = _read_long(f)
        # All nmax dimension slots are stored, even beyond ndims
        arraydesc['dims'] = []
        for d in range(arraydesc['nmax']):
            arraydesc['dims'].append(_read_long(f))
    elif arraydesc['arrstart'] == 18:
        warnings.warn("Using experimental 64-bit array read")
        _skip_bytes(f, 8)
        arraydesc['nbytes'] = _read_uint64(f)
        arraydesc['nelements'] = _read_uint64(f)
        arraydesc['ndims'] = _read_long(f)
        _skip_bytes(f, 8)
        arraydesc['nmax'] = 8
        arraydesc['dims'] = []
        for d in range(arraydesc['nmax']):
            # Each dimension is stored as a (zero, value) pair of longs
            v = _read_long(f)
            if v != 0:
                raise Exception("Expected a zero in ARRAY_DESC")
            arraydesc['dims'].append(_read_long(f))
    else:
        raise Exception("Unknown ARRSTART: %i" % arraydesc['arrstart'])
    return arraydesc
def _read_structdesc(f):
    '''Function to read in a structure descriptor.

    Fresh definitions are parsed in full and cached in STRUCT_DICT; a
    descriptor with the PREDEF bit set instead refers back to a previously
    cached definition by name.
    '''
    structdesc = {}
    structstart = _read_long(f)
    if structstart != 9:
        raise Exception("STRUCTSTART should be 9")
    structdesc['name'] = _read_string(f)
    predef = _read_long(f)
    structdesc['ntags'] = _read_long(f)
    structdesc['nbytes'] = _read_long(f)
    # predef is a bitmask: 1 = previously defined, 2 = inherits, 4 = is_super
    structdesc['predef'] = predef & 1
    structdesc['inherits'] = predef & 2
    structdesc['is_super'] = predef & 4
    if not structdesc['predef']:
        # Tag descriptors come first, then all tag names
        structdesc['tagtable'] = []
        for t in range(structdesc['ntags']):
            structdesc['tagtable'].append(_read_tagdesc(f))
        for tag in structdesc['tagtable']:
            tag['name'] = _read_string(f)
        # Array descriptors for array-typed tags, in tag order
        structdesc['arrtable'] = {}
        for tag in structdesc['tagtable']:
            if tag['array']:
                structdesc['arrtable'][tag['name']] = _read_arraydesc(f)
        # Nested structure descriptors for structure-typed tags
        structdesc['structtable'] = {}
        for tag in structdesc['tagtable']:
            if tag['structure']:
                structdesc['structtable'][tag['name']] = _read_structdesc(f)
        if structdesc['inherits'] or structdesc['is_super']:
            structdesc['classname'] = _read_string(f)
            structdesc['nsupclasses'] = _read_long(f)
            structdesc['supclassnames'] = []
            for s in range(structdesc['nsupclasses']):
                structdesc['supclassnames'].append(_read_string(f))
            structdesc['supclasstable'] = []
            for s in range(structdesc['nsupclasses']):
                structdesc['supclasstable'].append(_read_structdesc(f))
        # Cache so later PREDEF references can resolve this definition
        STRUCT_DICT[structdesc['name']] = structdesc
    else:
        if not structdesc['name'] in STRUCT_DICT:
            raise Exception("PREDEF=1 but can't find definition")
        structdesc = STRUCT_DICT[structdesc['name']]
    return structdesc
def _read_tagdesc(f):
    '''Read a tag descriptor for one field of a structure.'''
    offset = _read_long(f)
    if offset == -1:
        # A -1 sentinel means the real offset follows as a 64-bit value
        offset = _read_uint64(f)
    tagdesc = {'offset': offset,
               'typecode': _read_long(f)}
    tagflags = _read_long(f)
    tagdesc['array'] = tagflags & 4 == 4
    tagdesc['structure'] = tagflags & 32 == 32
    tagdesc['scalar'] = tagdesc['typecode'] in DTYPE_DICT
    # Assume '10'x is scalar
    return tagdesc
def _replace_heap(variable, heap):
    '''
    Recursively resolve Pointer placeholders in `variable` against `heap`.

    Returns a ``(replace, value)`` tuple: `replace` tells the caller whether
    it must substitute `value` back into the containing object (needed for
    scalars, which cannot be updated in place).
    '''
    if isinstance(variable, Pointer):
        while isinstance(variable, Pointer):
            if variable.index == 0:
                # A null pointer dereferences to None
                variable = None
            else:
                if variable.index in heap:
                    variable = heap[variable.index]
                else:
                    warnings.warn("Variable referenced by pointer not found "
                                  "in heap: variable will be set to None")
                    variable = None
        replace, new = _replace_heap(variable, heap)
        if replace:
            variable = new
        return True, variable
    # CONSISTENCY FIX: use the public np.recarray / np.record aliases (as
    # _read_structure already does) instead of np.core.records.*, which is
    # deprecated as a public import path in modern numpy.  Same classes.
    elif isinstance(variable, np.recarray):
        # Loop over records
        for ir, record in enumerate(variable):
            replace, new = _replace_heap(record, heap)
            if replace:
                variable[ir] = new
        return False, variable
    elif isinstance(variable, np.record):
        # Loop over values
        for iv, value in enumerate(variable):
            replace, new = _replace_heap(value, heap)
            if replace:
                variable[iv] = new
        return False, variable
    elif isinstance(variable, np.ndarray):
        # Loop over values if type is np.object_
        if variable.dtype.type is np.object_:
            for iv in range(variable.size):
                replace, new = _replace_heap(variable.item(iv), heap)
                if replace:
                    variable.itemset(iv, new)
        return False, variable
    else:
        return False, variable
class AttrDict(dict):
    '''
    A case-insensitive dictionary with access via item, attribute, and call
    notations:

        >>> d = AttrDict()
        >>> d['Variable'] = 123
        >>> d['Variable']
        123
        >>> d.Variable
        123
        >>> d.variable
        123
        >>> d('VARIABLE')
        123
    '''
    def __init__(self, init=None):
        # BUG FIX: initial items are now routed through __setitem__ so that
        # keys supplied to the constructor are lowercased too; previously
        # they were stored verbatim and uppercase keys became unreachable
        # through the case-insensitive lookup.  Also avoids the mutable
        # default argument ``init={}``.
        dict.__init__(self)
        if init is not None:
            for key, value in dict(init).items():
                self[key] = value

    def __getitem__(self, name):
        return super(AttrDict, self).__getitem__(name.lower())

    def __setitem__(self, key, value):
        return super(AttrDict, self).__setitem__(key.lower(), value)

    __getattr__ = __getitem__
    __setattr__ = __setitem__
    __call__ = __getitem__
def readsav(file_name, idict=None, python_dict=False,
            uncompressed_file_name=None, verbose=False):
    """
    Read an IDL .sav file.

    Parameters
    ----------
    file_name : str
        Name of the IDL save file.
    idict : dict, optional
        Dictionary in which to insert .sav file variables.
    python_dict : bool, optional
        By default, the object return is not a Python dictionary, but a
        case-insensitive dictionary with item, attribute, and call access
        to variables. To get a standard Python dictionary, set this option
        to True.
    uncompressed_file_name : str, optional
        This option only has an effect for .sav files written with the
        /compress option. If a file name is specified, compressed .sav
        files are uncompressed to this file. Otherwise, readsav will use
        the `tempfile` module to determine a temporary filename
        automatically, and will remove the temporary file upon successfully
        reading it in.
    verbose : bool, optional
        Whether to print out information about the save file, including
        the records read, and available variables.

    Returns
    -------
    idl_dict : AttrDict or dict
        If `python_dict` is set to False (default), this function returns a
        case-insensitive dictionary with item, attribute, and call access
        to variables. If `python_dict` is set to True, this function
        returns a Python dictionary with all variable names in lowercase.
        If `idict` was specified, then variables are written to the
        dictionary specified, and the updated dictionary is returned.
    """
    # Initialize record and variable holders
    records = []
    if python_dict or idict:
        variables = {}
    else:
        variables = AttrDict()
    # Open the IDL file
    f = open(file_name, 'rb')
    # Read the signature, which should be 'SR'
    signature = _read_bytes(f, 2)
    if signature != b'SR':
        raise Exception("Invalid SIGNATURE: %s" % signature)
    # Next, the record format, which is '\x00\x04' for normal .sav
    # files, and '\x00\x06' for compressed .sav files.
    recfmt = _read_bytes(f, 2)
    if recfmt == b'\x00\x04':
        pass
    elif recfmt == b'\x00\x06':
        if verbose:
            print("IDL Save file is compressed")
        if uncompressed_file_name:
            fout = open(uncompressed_file_name, 'w+b')
        else:
            fout = tempfile.NamedTemporaryFile(suffix='.sav')
        if verbose:
            print(" -> expanding to %s" % fout.name)
        # Write header
        fout.write(b'SR\x00\x04')
        # Cycle through records
        while True:
            # Read record type
            rectype = _read_long(f)
            fout.write(struct.pack('>l', int(rectype)))
            # Read position of next record and return as int
            nextrec = _read_uint32(f)
            nextrec += _read_uint32(f) * 2 ** 32
            # Read the unknown 4 bytes
            unknown = f.read(4)
            # Check if the end of the file has been reached
            if RECTYPE_DICT[rectype] == 'END_MARKER':
                fout.write(struct.pack('>I', int(nextrec) % 2 ** 32))
                fout.write(struct.pack('>I', int((nextrec - (nextrec % 2 ** 32)) / 2 ** 32)))
                fout.write(unknown)
                break
            # Find current position
            pos = f.tell()
            # Decompress record
            rec_string = zlib.decompress(f.read(nextrec - pos))
            # Find new position of next record
            nextrec = fout.tell() + len(rec_string) + 12
            # Write out record
            fout.write(struct.pack('>I', int(nextrec % 2 ** 32)))
            fout.write(struct.pack('>I', int((nextrec - (nextrec % 2 ** 32)) / 2 ** 32)))
            fout.write(unknown)
            fout.write(rec_string)
        # Close the original compressed file
        f.close()
        # Set f to be the decompressed file, and skip the first four bytes
        f = fout
        f.seek(4)
    else:
        raise Exception("Invalid RECFMT: %s" % recfmt)
    # Loop through records, and add them to the list
    while True:
        r = _read_record(f)
        records.append(r)
        if 'end' in r:
            if r['end']:
                break
    # Close the file
    f.close()
    # Find heap data variables
    heap = {}
    for r in records:
        if r['rectype'] == "HEAP_DATA":
            heap[r['heap_index']] = r['data']
    # Find all variables
    for r in records:
        if r['rectype'] == "VARIABLE":
            replace, new = _replace_heap(r['data'], heap)
            if replace:
                r['data'] = new
            variables[r['varname'].lower()] = r['data']
    if verbose:
        # Print out timestamp info about the file
        for record in records:
            if record['rectype'] == "TIMESTAMP":
                print("-" * 50)
                print("Date: %s" % record['date'])
                print("User: %s" % record['user'])
                print("Host: %s" % record['host'])
                break
        # Print out version info about the file
        for record in records:
            if record['rectype'] == "VERSION":
                print("-" * 50)
                print("Format: %s" % record['format'])
                print("Architecture: %s" % record['arch'])
                print("Operating System: %s" % record['os'])
                print("IDL Version: %s" % record['release'])
                break
        # Print out identification info about the file
        for record in records:
            if record['rectype'] == "IDENTIFICATION":
                # BUG FIX: was "IDENTIFICATON" (missing 'I'), which never
                # matches a RECTYPE_DICT value, so this info was never shown.
                print("-" * 50)
                print("Author: %s" % record['author'])
                print("Title: %s" % record['title'])
                print("ID Code: %s" % record['idcode'])
                break
        # Print out descriptions saved with the file
        for record in records:
            if record['rectype'] == "DESCRIPTION":
                print("-" * 50)
                print("Description: %s" % record['description'])
                break
        print("-" * 50)
        print("Successfully read %i records of which:" %
              (len(records)))
        # Create convenience list of record types
        rectypes = [r['rectype'] for r in records]
        for rt in set(rectypes):
            if rt != 'END_MARKER':
                print(" - %i are of type %s" % (rectypes.count(rt), rt))
        print("-" * 50)
        if 'VARIABLE' in rectypes:
            print("Available variables:")
            for var in variables:
                print(" - %s [%s]" % (var, type(variables[var])))
            print("-" * 50)
    if idict:
        for var in variables:
            idict[var] = variables[var]
        return idict
    else:
        return variables
| {
"content_hash": "b5a8e699784196e7b39bb9fd1f9586d6",
"timestamp": "",
"source": "github",
"line_count": 853,
"max_line_length": 100,
"avg_line_length": 28.417350527549825,
"alnum_prop": 0.5359323432343235,
"repo_name": "DailyActie/Surrogate-Model",
"id": "f07b9b68395642abd1a18143c61c85805f6d22a3",
"size": "25805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/scipy-master/scipy/io/idl.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
"""news_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from news_project.views import HomeView
from django.contrib.auth import views as auth_views
urlpatterns = [
    # Django admin site
    url(r'^admin/', admin.site.urls),
    # Landing page
    url(r'^$', HomeView.as_view(), name='home'),
    # Article routes live in the articles app
    url(r'^articles/', include("articles.urls")),
    # Two-step (HMAC) signup flow from django-registration
    url(r'^registration/', include("registration.backends.hmac.urls")),
    # NOTE(review): auth_views.login/logout are the function-based auth
    # views, deprecated in Django 1.11 and removed in 2.1 -- fine for the
    # Django 1.10 target documented above, but verify before upgrading.
    url(r'^login/$', auth_views.login, name='login'),
    url(r'^logout/$', auth_views.logout, name='logout'),
]
| {
"content_hash": "9250c2508c011e19db94c0743550f414",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 79,
"avg_line_length": 39.758620689655174,
"alnum_prop": 0.6921075455333912,
"repo_name": "julienawilson/news-project",
"id": "28f707e18273476d8e9d24eee4a52c7269185d64",
"size": "1153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "news_project/news_project/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "8482"
},
{
"name": "Python",
"bytes": "19006"
}
],
"symlink_target": ""
} |
from __future__ import division
# NOTE(review): get_data() below accepts several more names than listed here
# (sketch*, colormnist, cifar10, svhn2, mnist, tfd) -- this list looks stale.
supported_datasets = ['bmnist', 'silhouettes']
# ToDo: # 'mnist' and 'tfd' are not normalized (0<= x <=1.)
def get_data(data_name):
    """Look up a dataset by name and return its metadata plus Fuel splits.

    Parameters
    ----------
    data_name : str
        One of 'mnist', 'sketch', 'sketch_uint8', 'sketch_uint8_shuffle',
        'bmnist', 'colormnist', 'cifar10', 'svhn2', 'silhouettes', 'tfd'.

    Returns
    -------
    tuple
        ``(img_size, channels, data_train, data_valid, data_test)``.

    Raises
    ------
    ValueError
        If `data_name` is not one of the known dataset names.

    Notes
    -----
    Fuel dataset classes are imported lazily inside each branch so only the
    backend actually requested needs to be installed.
    """
    if data_name == 'mnist':
        from fuel.datasets import MNIST
        img_size = (28, 28)
        channels = 1
        data_train = MNIST(which_sets=["train"], sources=['features'])
        data_valid = MNIST(which_sets=["test"], sources=['features'])
        data_test = MNIST(which_sets=["test"], sources=['features'])
    elif data_name == 'sketch':
        from fuel.datasets.hdf5 import H5PYDataset
        img_size = (32, 32)
        channels = 1
        # NOTE(review): absolute, user-specific path -- consider making it
        # configurable.
        path = '/Users/drewlinsley/Documents/draw/draw/datasets/sketch/sketch.hdf5'
        data_train = H5PYDataset(path, which_sets=('train',), sources=['features', 'targets'])
        data_test = H5PYDataset(path, which_sets=('test',), sources=['features', 'targets'])
        data_valid = H5PYDataset(path, which_sets=('test',), sources=['features', 'targets'])
    elif data_name in ('sketch_uint8', 'sketch_uint8_shuffle'):
        # Both names load the same file; the original code had two
        # byte-identical branches.
        from fuel.datasets.hdf5 import H5PYDataset
        img_size = (32, 32)
        channels = 1
        path = '/Users/drewlinsley/Documents/draw/draw/datasets/sketch_uint8/sketch_uint8.hdf5'
        data_train = H5PYDataset(path, which_sets=('train',), sources=['features', 'targets'])
        data_test = H5PYDataset(path, which_sets=('test',), sources=['features', 'targets'])
        data_valid = H5PYDataset(path, which_sets=('test',), sources=['features', 'targets'])
    elif data_name == 'bmnist':
        from fuel.datasets.binarized_mnist import BinarizedMNIST
        img_size = (28, 28)
        channels = 1
        data_train = BinarizedMNIST(which_sets=['train'], sources=['features'])
        data_valid = BinarizedMNIST(which_sets=['valid'], sources=['features'])
        data_test = BinarizedMNIST(which_sets=['test'], sources=['features'])
    # TODO: make a generic catch-all for loading custom datasets like "colormnist"
    elif data_name == 'colormnist':
        from draw.colormnist import ColorMNIST
        img_size = (28, 28)
        channels = 3
        data_train = ColorMNIST(which_sets=['train'], sources=['features'])
        data_valid = ColorMNIST(which_sets=['test'], sources=['features'])
        data_test = ColorMNIST(which_sets=['test'], sources=['features'])
    elif data_name == 'cifar10':
        from fuel.datasets.cifar10 import CIFAR10
        img_size = (32, 32)
        channels = 3
        data_train = CIFAR10(which_sets=['train'], sources=['features'])
        data_valid = CIFAR10(which_sets=['test'], sources=['features'])
        data_test = CIFAR10(which_sets=['test'], sources=['features'])
    elif data_name == 'svhn2':
        from fuel.datasets.svhn import SVHN
        img_size = (32, 32)
        channels = 3
        data_train = SVHN(which_format=2, which_sets=['train'], sources=['features'])
        data_valid = SVHN(which_format=2, which_sets=['test'], sources=['features'])
        data_test = SVHN(which_format=2, which_sets=['test'], sources=['features'])
    elif data_name == 'silhouettes':
        from fuel.datasets.caltech101_silhouettes import CalTech101Silhouettes
        size = 28
        img_size = (size, size)
        channels = 1
        data_train = CalTech101Silhouettes(which_sets=['train'], size=size, sources=['features'])
        data_valid = CalTech101Silhouettes(which_sets=['valid'], size=size, sources=['features'])
        data_test = CalTech101Silhouettes(which_sets=['test'], size=size, sources=['features'])
    elif data_name == 'tfd':
        from fuel.datasets.toronto_face_database import TorontoFaceDatabase
        # BUG FIX: `size` was read below without ever being assigned in this
        # branch, raising NameError at runtime.  28 matches the img_size this
        # branch already declared -- TODO confirm against the sizes
        # TorontoFaceDatabase actually provides.
        size = 28
        img_size = (size, size)
        channels = 1
        data_train = TorontoFaceDatabase(which_sets=['unlabeled'], size=size, sources=['features'])
        data_valid = TorontoFaceDatabase(which_sets=['valid'], size=size, sources=['features'])
        data_test = TorontoFaceDatabase(which_sets=['test'], size=size, sources=['features'])
    else:
        raise ValueError("Unknown dataset %s" % data_name)
    return img_size, channels, data_train, data_valid, data_test
| {
"content_hash": "dfab36f6bf2f281288a439dc4fd86846",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 168,
"avg_line_length": 60.24096385542169,
"alnum_prop": 0.6518,
"repo_name": "drewlinsley/draw_classify",
"id": "769b8b979852012d85da17cc64e3498054d8eaa4",
"size": "5001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "draw/datasets/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "173005"
},
{
"name": "Matlab",
"bytes": "1212"
},
{
"name": "Python",
"bytes": "297291"
},
{
"name": "Shell",
"bytes": "449"
}
],
"symlink_target": ""
} |
import sys
import os
import time
import collections
import re
from androguard.core import bytecode
from androguard.core.bytecodes.dvm import DalvikVMFormat
from androguard.core.bytecodes import apk, dvm
from androguard.core.analysis.ganalysis import GVMAnalysis
from androguard.core.analysis.analysis import uVMAnalysis
from stopwatch import Stopwatch
#our config
import config
def fun2():
    """Factory for the inner level of the nested tables: a dict whose
    missing keys yield a fresh empty list (needed because lambdas cannot
    be pickled when these structures are serialized)."""
    inner_table = collections.defaultdict(list)
    return inner_table
def fun():
    """Factory for a two-level nested table: d[a][b] yields a fresh list.

    Built from the module-level fun2 factory so the result stays
    picklable (a lambda default_factory would not be).
    """
    outer_table = collections.defaultdict(fun2)
    return outer_table
# Module-level "external tunnel" tables shared between parse_methods() (which
# fills them) and dfg_forward() (which reads them).  Every entry is a
# [method_key, pc] pair locating one instruction in the analysed APK.
#EXTERNAL_TUNNEL_ - TO STORE READS & WRITES FROM/TO EXTERNAL SOURCES (Files, DB, SharedPreferences, Sockets)
# NOTE(review): the two stream lists look name-swapped (parse_methods records
# write-style APIs in STREAMS_READ and read-style APIs in STREAMS_WRITE), but
# dfg_forward() depends on exactly this orientation — confirm before renaming.
STREAMS_WRITE = []
STREAMS_READ = []
#EXTERNAL_TUNNEL_2 - TO STORE SENT / RECEIVED INTENTS
INTENTS_READ = []
INTENTS_WRITE = []
#EXTERNAL_TUNNEL_4 - TO STORE SENT INTENTS THROUGH BROADCAST RECEIVER (they are received in the onReceive() method)
INTENTS_BROADCAST_SENT = []
#EXTERNAL_TUNNEL_4 - TO STORE THE [key,pc] OF THE FIRST LINE OF THE onReceive(Intent) method
FIRST_LINE_ONRECEIVE = []
#EXTERNAL_TUNNEL_5 - TO STORE CONTENT PROVIDERS INSERTS AND QUERIES
CONTENT_PROV_WRITE = []
CONTENT_PROV_READ = []
def parse_methods(infile, search_source, search_sink):
METHODS = {}
MEMBERS = collections.defaultdict(lambda: collections.defaultdict(list)) # hash of hash to list
SOURCES = []
SINKS = []
STREAMS_WRITE = []
STREAMS_READ = []
INTENTS_READ = []
INTENTS_WRITE = []
INTENTS_BROADCAST_SENT = []
FIRST_LINE_ONRECEIVE = []
CONTENT_PROV_WRITE = []
CONTENT_PROV_READ = []
#create system METHOD's code
# framework_dex = []#'./framework/core.dex']
# for f in framework_dex:
# #print "Parsing %s" % f
#
# new_methods = {}
# d = DalvikVMFormat(open(f, 'rb').read())
# dx = analysis.VMAnalysis(d)
# for current_method in dx.get_methods():
# key = '%s %s %s' % (current_method.method.get_class_name(), current_method.method.get_name(), current_method.method.get_descriptor())
# #create bb-graphs
# #bytecode.method2png('/home/user/projects/android-instrumentation/methods/'+key.replace('/', '.')+'.png', current_method)
# line = 0
# pc = 0
# line_to_pc = {}
# if current_method.method.get_code() is not None:
# for ins in current_method.method.get_code().get_bc().get_instructions():
# line_to_pc[line] = [pc, ins]
# pc += ins.get_length()
# line += 1
# new_methods[key] = [current_method, line_to_pc]
#
# METHODS.update(new_methods)
my_apk = apk.APK(infile)
dex = DalvikVMFormat(my_apk.get_dex())
dex.create_python_export()
dx = uVMAnalysis(dex)
gx = GVMAnalysis(dx, my_apk)
dex.set_vmanalysis(dx)
dex.set_gvmanalysis(gx)
dex.create_xref()
#print "Generating Methods for %s" % input
for current_method in dx.get_methods():
key = '%s %s %s' % (current_method.method.get_class_name(), current_method.method.get_name(), current_method.method.get_descriptor())
#OUTPUT method name
#print "\n", key
#create bb-graphs
#bytecode.method2png('/home/user/projects/android-instrumentation/methods/'+key.replace('/', '.')+'.png', current_method)
line = 0
pc = 0
line_to_pc = {}
pc_to_line = {}
out_bb = []
if current_method.method.get_code() is not None:
# callbacks
if config.ENABLE_EXTERNAL_PATHS:
if ('onReceive' in current_method.method.get_name()
and 'Landroid/content/Intent' in current_method.method.proto):
FIRST_LINE_ONRECEIVE.append([key,pc])
for ins in current_method.method.get_code().get_bc().get_instructions():
if 'invoke' in ins.get_name():
if config.ENABLE_EXTERNAL_PATHS:
function = ins.get_output(pc).split(', ')[-1];
#EXTERNAL_TUNNEL_8 - Fixed PrintWriter issues : it also has write, and println may also be relevant
if ('java/io/PrintWriter;->print' in function
or 'java/io/PrintWriter;->write' in function
or 'java/io/OutputStream;->write' in function
or 'java/io/ByteArrayOutputStream;->write' in function
or 'java/io/DataOutputStream;->write' in function
or 'java/io/FileOutputStream;->write' in function
or 'java/util/zip/GZIPInputStream;->write' in function
or 'java/io/BufferedOutputStream;->write' in function
or 'java/io/OutputStreamWriter;->write' in function
or 'java/io/FileWriter;->write' in function
# EXTERNAL_TUNNEL_9 - This works but it is too generic or 'java/io/Writer;->write' in function
or 'android/content/SharedPreferences$Editor;->put' in function
or 'android/database/sqlite/SQLiteDatabase;->insert' in function):
STREAMS_READ.append([key,pc])
# EXTERNAL_TUNNEL_7 - Improved class specification to avoid unexpected, app-specific method calls
elif ('java/io/InputStream;->read' in function
or 'java/io/BufferedInputStream;->read' in function
or 'java/io/ByteArrayInputStream;->read' in function
or 'java/io/DataInputStream;->read' in function
or 'java/io/FileInputStream;->read' in function
or 'java/io/InputStreamReader;->read' in function
or 'java/util/zip/GZIPOutputStream;->read' in function
or 'java/io/BufferedReader;->read' in function
or 'java/io/FileReader;->read' in function
# EXTERNAL_TUNNEL_9 - This works but it is too generic or 'java/io/Reader;->read' in function
or 'android/content/SharedPreferences;->get' in function
or 'android/database/sqlite/SQLiteDatabase;->query' in function):
STREAMS_WRITE.append([key,pc])
#EXTERNAL_TUNNEL_2 - to address leakage through intents
#EXTERNAL_TUNNEL_3 - Bugfix to avoid startActivity methods that do not receive an intent
#EXTERNAL_TUNNEL_8 - Bugfix to avoid static startActivity methods (they are not from Context)- protection also added in sendBroadcast
elif ('startActivity' in function and 'Landroid/content/Intent' in function # TODO fix
and not 'makeRestartActivity' in function):
INTENTS_WRITE.append([key,pc])
elif ('getIntent' in function):
INTENTS_READ.append([key,pc])
#EXTERNAL_TUNNEL_4 - Use of broadcast receivers to send intents
elif ('sendBroadcast' in function # TODO: Fix
and 'Landroid/content/Intent' in function):
INTENTS_BROADCAST_SENT.append([key,pc])
#EXTERNAL_TUNNEL_5 - Use of content providers
#EXTERNAL_TUNNEL_8 - Code rewriting to make it easier to understand
elif 'android/content/ContentResolver;->insert' in function:
CONTENT_PROV_WRITE.append([key,pc])
elif 'android/content/ContentResolver;->query' in function:
CONTENT_PROV_READ.append([key,pc])
# ENDIF config.ENABLE_EXTERNAL_PATHS
if [i for i in search_source if i in ins.get_output(pc)]:
SOURCES.append([key, pc])
if [i for i in search_sink if i in ins.get_output(pc)]:
SINKS.append([key, pc])
if 'return-wide' in ins.get_name():
out_bb.append([pc, [ins.get_output(), 'v'+str(int(ins.get_output().replace('v', ''))+1)]])
elif 'return-void' in ins.get_name():
out_bb.append([pc, []])
elif 'return' in ins.get_name():
out_bb.append([pc, [ins.get_output()]])
if 'sput' in ins.get_name() or 'iput' in ins.get_name():
var_name = ins.get_output().split(', ')[-1]
MEMBERS[var_name]['write'].append([key, pc])
if 'sget' in ins.get_name() or 'iget' in ins.get_name():
var_name = ins.get_output().split(', ')[-1]
MEMBERS[var_name]['read'].append([key, pc])
bb = None
for block in current_method.basic_blocks.bb:
if block.start <= pc < block.end:
bb = block
line_to_pc[line] = [pc, ins]
pc_to_line[pc] = [line, ins, bb]
# print line, pc, ins.get_name(), ins.get_output(pc)
pc += ins.get_length()
line += 1
METHODS[key] = [current_method, line_to_pc, out_bb, pc_to_line]
print "\t%d methods parsed (size: %.2fkB)" % (len(METHODS), float(sys.getsizeof(METHODS))/1024)
return METHODS, MEMBERS, SOURCES, SINKS, dex
def dfg_forward(METHODS, MEMBERS, SOURCES, dex):
    """Forward taint propagation over the parsed Dalvik code.

    Starting from every source site in SOURCES ([method_key, pc] pairs), a
    worklist algorithm walks the instructions and records, per method and
    per pc, which registers are tainted AT that instruction (MARKED_VARS).
    Tainted class/instance fields are tracked in MARKED_MEMBERS so later
    iget/sget of such a field re-taints its destination register.

    Returns a dict: method_key -> pc -> [line_nr, [tainted registers]].
    """
    MARKED_VARS = collections.defaultdict(lambda: collections.defaultdict(list)) # hash from node to pc to marked variables
    MARKED_MEMBERS = []           # field names currently considered tainted
    investigated_functions = []   # callees already scheduled at pc 0
    class FA_WorklistEntry:
        # One pending instruction: method key, pc, and a callstack of
        # [function, pc] pairs used to jump back to the caller on return.
        def __init__(self, function, pc, callstack):
            self.function = function
            self.pc = pc
            self.callstack = callstack
        def __eq__(self, other):
            return (self.function == other.function and\
                    self.pc == other.pc and\
                    self.callstack == other.callstack)
    TODO_INSTRUCTIONS = []
    for source in SOURCES:
        TODO_INSTRUCTIONS.append(FA_WorklistEntry(source[0], source[1], []))
    # process entries of these functions first; others wait in NEXT_TODO
    current_calculating_nodes = [i.function for i in TODO_INSTRUCTIONS]
    first_run = True #for the first run, we have to add all variables to the marked vars!
    while TODO_INSTRUCTIONS:
        cnt_fun = []
        # for i in TODO_INSTRUCTIONS:
        #     if i.function not in cnt_fun:
        #         cnt_fun.append(i.function)
        # print "Functions: %d, Instructions: %d" % (len(cnt_fun), len(TODO_INSTRUCTIONS))
        # if len(cnt_fun) > 600:
        #     import code
        #     code.interact(local=locals())
        NEXT_TODO = []
        #handle all blocks + function calls, before stepping one function down
        todo = []
        for i in TODO_INSTRUCTIONS:
            if i.function in current_calculating_nodes:
                todo.append(i)
            else:
                NEXT_TODO.append(i)
        if not todo:
            # nothing left in the current functions: move to the next one
            NEXT_TODO = []
            current_calculating_nodes = [TODO_INSTRUCTIONS[0].function]
            continue
        # investigate lowest pcs first
        todo = sorted(todo, key=lambda i: i.pc, reverse=False)
        if first_run:
            # seed pass: step each source to its follow-up instruction and
            # taint the registers its move-result writes
            for ins in todo:
                current_function = ins.function
                pc = ins.pc
                pc, instruction = get_next_instruction_and_pc(current_function, pc, METHODS)
                if 'move-' in instruction.get_name():
                    variables = instruction.get_output().split(', ')
                    if '-wide' in instruction.get_name():
                        # wide values also occupy the following register
                        variables.append('v' + str(int(variables[0].replace('v', ''))+1))
                    MARKED_VARS[current_function][pc].extend(variables)
                NEXT_TODO.append(FA_WorklistEntry(current_function, pc, []))
                #print "(%s) 0x%x" % (current_function, pc), instruction.get_name(), instruction.get_output()
            first_run = False
            TODO_INSTRUCTIONS = []
            for e in NEXT_TODO:
                if e not in TODO_INSTRUCTIONS: #more complex test, unhashable -> list(set( not available
                    TODO_INSTRUCTIONS.append(e)
        else:
            for ins in todo:
                current_function = ins.function
                pc = ins.pc
                instruction = get_instruction(current_function, pc, METHODS)
                # print "(%s) 0x%x %s %s [%s]" % (current_function, pc, instruction.get_name(), instruction.get_output(), ", ".join(MARKED_VARS[current_function][pc]))
                # if 'getUrl' in current_function:
                #     print "stop"
                # --- control transfers: push marks to every successor pc ---
                if ('goto' in instruction.get_name()
                        or 'if' in instruction.get_name()
                        or 'throw' in instruction.get_name()
                        or 'packed-switch' in instruction.get_name()
                        ):
                    block = get_block(METHODS, current_function, pc)
                    line = pc_to_line(current_function, pc, METHODS)
                    next_pc = []
                    for pre in block.childs: #there will/should be only one child
                        child = pre[2]
                        next_pc.append(child.start)
                    if 'throw' in instruction.get_name():
                        if block.exception_analysis:
                            for ex in block.exception_analysis.exceptions:
                                next_pc.append(ex[1]) # ex[1] = next pc
                    if 'if' in instruction.get_name():
                        # fall-through successor of a conditional branch
                        next_pc.append(METHODS[current_function][1][line+1][0]) #next pc
                    next_pc = unique(next_pc)
                    for nex in next_pc:
                        if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, nex):
                            NEXT_TODO.append(FA_WorklistEntry(current_function, nex, ins.callstack))
                    continue
                # --- returns: propagate taint back into the caller(s) ---
                if 'return' in instruction.get_name():
                    variables = instruction.get_output().split(', ')
                    if '-wide' in instruction.get_name():
                        variables.append('v' + str(int(variables[0].replace('v', ''))+1))
                    if ins.callstack:
                        # if return tainted-> taint move-result of parent (in ins.callstack)
                        if [i for i in MARKED_VARS[current_function][pc] if i in variables]:
                            #jump back to function and mark assigned variables
                            new_marked = []
                            callee = ins.callstack[-1]
                            callee_callee = ins.callstack[:-1]
                            callee_function = callee[0]
                            callee_pc = callee[1]
                            instruction = pc_to_instruction(callee_function, callee_pc, METHODS)
                            if 'static' not in instruction.get_name():
                                # sync the taint of the instance ("this")
                                # register between callee and call site
                                mi = get_method_info(METHODS[current_function][0].method)
                                instance_var = 'v%d' % mi['registers'][1]
                                variables = get_variables_from_invoke(instruction)
                                this_var = variables[0]
                                if instance_var in MARKED_VARS[current_function][0]:
                                    if this_var not in set(MARKED_VARS[callee_function][callee_pc]):
                                        MARKED_VARS[callee_function][callee_pc].append(this_var)
                                        new_marked.append(this_var)
                                else:
                                    if this_var in set(MARKED_VARS[callee_function][callee_pc]):
                                        MARKED_VARS[callee_function][callee_pc].remove(this_var)
                            callee_next_pc = get_next_pc(callee_function, callee_pc, METHODS)
                            next_instruction = pc_to_instruction(callee_function, callee_next_pc, METHODS)
                            #print "Jumping back to %s %d:" % (callee_function, callee_pc), instruction.get_name(), instruction.get_output()
                            if 'move-' in next_instruction.get_name():
                                new_marked.extend(next_instruction.get_output().split(', '))
                                if '-wide' in next_instruction.get_name():
                                    new_marked.append('v' + str(int(new_marked[0].replace('v', ''))+1))
                            if update_vars_if_not_equal(MARKED_VARS, new_marked, callee_function, callee_next_pc):
                                NEXT_TODO.append(FA_WorklistEntry(callee_function, callee_next_pc, callee_callee))
                    else: # if the return is tainted, and no callstack is set, jump to ALL calling functions
                        if [i for i in MARKED_VARS[current_function][pc] if i in variables]:
                            print "we are now in " + current_function
                            new_marked = []
                            for parent in METHODS[current_function][0].method.XREFfrom.items:
                                parent_key = '%s %s %s' % (parent[0].get_class_name(), parent[0].get_name(), parent[0].get_descriptor())
                                print "Returning to " + parent_key
                                for entry in parent[1]:
                                    parent_next_pc = get_next_pc(parent_key, entry.idx, METHODS)
                                    next_instruction = pc_to_instruction(parent_key, parent_next_pc, METHODS)
                                    if 'move-' in next_instruction.get_name():
                                        new_marked.extend(next_instruction.get_output().split(', '))
                                        if '-wide' in next_instruction.get_name():
                                            new_marked.append('v' + str(int(new_marked[0].replace('v', ''))+1))
                                    if update_vars_if_not_equal(MARKED_VARS, new_marked, parent_key, parent_next_pc):
                                        NEXT_TODO.append(FA_WorklistEntry(parent_key, parent_next_pc, []))
                    continue
                # --- straight-line instructions: dispatch by opcode ---
                next_pc = get_next_pc(current_function, pc, METHODS)
                if instruction.get_name() == 'move-exception':
                    # taint the caught-exception register if the register the
                    # throwing block last used was tainted
                    target = instruction.get_output().split(', ')[0]
                    for bb in METHODS[current_function][0].basic_blocks.bb:
                        if bb.exception_analysis:
                            for ex in bb.exception_analysis.exceptions:
                                if pc == ex[1]:
                                    prev_pc = bb.end - bb.last_length
                                    variables_last = get_instruction(current_function, prev_pc, METHODS).get_output().split(', ')[0]
                                    new_marked = MARKED_VARS[current_function][pc]
                                    if variables_last in MARKED_VARS[current_function][prev_pc] and target not in MARKED_VARS[current_function][pc]:
                                        new_marked.append(target)
                    if update_vars_if_not_equal(MARKED_VARS, new_marked, current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif instruction.get_name() in config.UNTAINT or 'iget' in instruction.get_name() or 'sget' in instruction.get_name():
                    # destination register gets a fresh value: remove taint,
                    # unless it is loaded from an already-tainted member
                    variables = instruction.get_output().split(', ')
                    if '-wide' in instruction.get_name():
                        variables.append('v' + str(int(variables[0].replace('v', ''))+1))
                    if 'iget' in instruction.get_name() or 'sget' in instruction.get_name():
                        member_name = variables[-1]
                        if member_name in MARKED_MEMBERS:
                            #this member is already tainted. do not config.untaint
                            variables = []
                        else:
                            variables = [variables[0]]
                    new_marked = [i for i in MARKED_VARS[current_function][pc] if i not in variables]
                    MARKED_VARS[current_function][pc] = new_marked # config.untaint current line
                    if update_vars_if_not_equal(MARKED_VARS, new_marked, current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif instruction.get_name() in config.NOPS or 'move-result' in instruction.get_name():
                    # no taint effect: carry the marks to the next pc
                    if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif instruction.get_name() in config.INSTRUCTION_PROPAGATION:
                    # table-driven propagation: config maps opcode ->
                    # ([output operand indices], [input operand indices])
                    variables = instruction.get_output().split(', ')
                    input_variables = [variables[i] for i in config.INSTRUCTION_PROPAGATION[instruction.get_name()][1]]
                    output_variables = [variables[i] for i in config.INSTRUCTION_PROPAGATION[instruction.get_name()][0]]
                    if 'aget-wide' in instruction.get_name():
                        output_variables.append('v' + str(int(variables[0].replace('v', ''))+1))
                    if 'aput-wide' in instruction.get_name():
                        input_variables.append('v' + str(int(variables[0].replace('v', ''))+1))
                    vars_nextline = MARKED_VARS[current_function][pc]
                    if [i for i in MARKED_VARS[current_function][pc] if i in input_variables]:
                        vars_nextline = vars_nextline + output_variables
                    MARKED_VARS[current_function][pc] = vars_nextline # taint current line
                    MARKED_VARS[current_function][pc] = unique(MARKED_VARS[current_function][pc])
                    if update_vars_if_not_equal(MARKED_VARS, vars_nextline, current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif 'iput' in instruction.get_name() or 'sput' in instruction.get_name():
                    # storing a tainted register into a field taints the field
                    # and every known read site of that field
                    temp = instruction.get_output().split(', ')
                    variable = temp[0]
                    member = temp[-1]
                    if variable in MARKED_VARS[current_function][pc]:
                        MARKED_MEMBERS.append(member)
                        MARKED_MEMBERS = unique(MARKED_MEMBERS)
                        if MEMBERS.get(member) and MEMBERS[member].get('read'):
                            for ref in MEMBERS[member]['read']:
                                calling_function = ref[0]
                                location = ref[1]
                                instruction_inner = pc_to_instruction(calling_function, location, METHODS)
                                variables_inner = [instruction_inner.get_output().split(', ')[0]] #get var name, remove field name, first entry is the var, second is This
                                if update_vars_if_not_equal(MARKED_VARS, variables_inner, calling_function, location):
                                    NEXT_TODO.append(FA_WorklistEntry(calling_function, location, []))
                    if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif 'filled-new-array' in instruction.get_name():
                    # new array built from registers; taints the move-result
                    # destination if any input register was tainted
                    MARKED_VARS[current_function][next_pc].extend(MARKED_VARS[current_function][pc])
                    MARKED_VARS[current_function][next_pc] = unique(MARKED_VARS[current_function][next_pc])
                    nins = get_instruction(current_function, next_pc, METHODS)
                    if 'move-result' in nins.get_name():
                        variables = get_variables_from_invoke(instruction)
                        if [i for i in variables if i in MARKED_VARS[current_function][pc]]:
                            next_variables = nins.get_output().split(', ')
                            if 'wide' in nins.get_name():
                                next_variables = [next_variables[0], 'v' + str(int(next_variables[0].replace('v', ''))+1)]
                            MARKED_VARS[current_function][next_pc].extend(next_variables)
                            # NOTE(review): this overwrites the list just
                            # extended with only next_variables — looks like it
                            # should be unique(MARKED_VARS[...][next_pc]); verify.
                            MARKED_VARS[current_function][next_pc] = unique(next_variables)
                        pc = next_pc
                        next_pc = get_next_pc(current_function, next_pc, METHODS)
                    if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                elif 'invoke-' in instruction.get_name():
                    temp = instruction.get_output().split(', ')
                    function = temp[-1]
                    variables = get_variables_from_invoke(instruction)
                    instance_tainted = False
                    if 'static' not in instruction.get_name():
                        if variables[0] in MARKED_VARS[current_function][pc]: # instance var
                            instance_tainted = True
                        variables = variables[1:] # removed instance var
                    # indices of tainted argument registers
                    marked = []
                    for i, var in enumerate(variables):
                        if var in MARKED_VARS[current_function][pc]:
                            marked.append(i)
                    ex = function.split('(')
                    ex2 = ex[0].split('->')
                    fun_name = '%s %s (%s' % (ex2[0], ex2[1], ex[1])
                    if marked or instance_tainted:
                        if fun_name in METHODS and METHODS[fun_name][1].items(): #if the function is abstract, it does not have code
                            # known callee: map tainted argument indices onto
                            # its parameter registers and schedule it at pc 0
                            mi = get_method_info(METHODS[fun_name][0].method)
                            params = mi.get('params')
                            last_reg = mi.get('registers')[1]
                            propagated=[]
                            for i in range(len(params)):
                                if i in marked:
                                    propagated.append('v%d' % params[i][0])
                            # # this seems to be an error in Androguard?? a double only uses one internal register.
                            # # if we have a function with I D I, the params look like:
                            # # (7, int), (8, double), (9, int)
                            # # where is double_high gone?
                            if instance_tainted:
                                propagated.append('v%d' % last_reg)
                            if propagated:
                                var_changed = False
                                if update_vars_if_not_equal(MARKED_VARS, propagated, fun_name, -1):
                                    var_changed = True
                                if update_vars_if_not_equal(MARKED_VARS, propagated, fun_name, 0):
                                    var_changed = True
                                if var_changed or fun_name not in investigated_functions:
                                    if fun_name not in investigated_functions:
                                        investigated_functions.append(fun_name)
                                    NEXT_TODO.append(FA_WorklistEntry(fun_name, 0, ins.callstack + [[current_function, pc]]))
                        else:
                            # unknown/library callee: be conservative
                            variables = get_variables_from_invoke(instruction)
                            #print "Function %s not found. Assuming all tainted:\n 0x%x %s %s" % (fun_name, pc, instruction.get_name(), instruction.get_output())
                            if 'static' not in instruction.get_name():
                                MARKED_VARS[current_function][pc].append(variables[0]) # just taint instance var!
                                MARKED_VARS[current_function][pc] = unique(MARKED_VARS[current_function][pc])
                            if config.ENABLE_EXTERNAL_PATHS:
                                #EXTERNAL_TUNNEL_ - TO SOLVE THE LEAKAGE USING INTERMEDIATE WRITE&READ FILES
                                #EXTERNAL_TUNNEL_3 - Code rewriting. TO SOLVE THE LEAKAGE USING INTERMEDIATE WRITE&READ FILES/SOCKETS/SHAREDPREF/DATABASES
                                #@todo - is there any better chance to identify the use of sockets? Streams might be used for other purposes...
                                #EXTERNAL_TUNNEL_4 - removed 'else' clause to make it more compact (suggested by Dennis). Also removed taintedFile/DB...
                                #EXTERNAL_TUNNEL_7 - Improved the detection of file/socket reads and writes. Added instructions to next_todo only if there is a move-result Removed the next_pc-next_pc addition to next_todo (discovered by Dennis).
                                if (('java/io/FileOutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/OutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/ByteArrayOutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/DataOutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/BufferedOutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/OutputStreamWriter; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/io/FileWriter; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        # EXTERNAL_TUNNEL_9 - This works but it is too generic ('java/io/Writer; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('java/util/zip/GZIPOutputStream; write' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ((('java/io/PrintWriter; print' in fun_name and not 'java/io/PrintWriter; printf' in fun_name)
                                            or 'java/io/PrintWriter; write' in fun_name) and len(variables)>1 and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('android/database/sqlite/SQLiteDatabase; insert' in fun_name and variables[1] in MARKED_VARS[current_function][pc]) or
                                        ('android/content/SharedPreferences$Editor; put' in fun_name and variables[2] in MARKED_VARS[current_function][pc])):
                                    #print"writing something tainted to external tunnel!"
                                    # taint the result of every known read site
                                    # (STREAMS_WRITE holds the read locations)
                                    for i in STREAMS_WRITE:
                                        current_function_jm = i[0]
                                        pc_jm = i[1]
                                        instruction_jm = get_instruction(current_function_jm, pc_jm, METHODS)
                                        temp_jm = instruction_jm.get_output().split(', ')
                                        function_jm = temp_jm[-1]
                                        #EXTERNAL_TUNNEL_7 - Bugfix for special read operations. For example, ByteArrayInputStream has a read(byte[],offset,length). We should taint byte[] -- there is no move-result afterwards
                                        #take the variables of the read method and check if it has 4 -- this length only appears in these special read methods
                                        variables_jm = temp_jm[:-1]
                                        if(len(variables_jm) == 4):
                                            if update_vars_if_not_equal(MARKED_VARS, variables_jm[1], current_function_jm, pc_jm):
                                                #print"CHECK: instruction %s , fun_name %s , variables %s"%(fun_name,instruction.get_output(), variables)
                                                #print"added the read location to NEXT_TODO, tainting the read result! (len == 4)"
                                                NEXT_TODO.append(FA_WorklistEntry(current_function_jm, pc_jm, []))
                                        else:
                                            #these are read operations followed by a move-result
                                            #we are addressing read/write through sockets/files. It may lead to false positives (e.g. file write <-> socket read)
                                            next_pc_jm = get_next_pc(current_function_jm, pc_jm, METHODS)
                                            next_instruction_jm = get_instruction(current_function_jm, next_pc_jm, METHODS)
                                            #next_instruction_jm should be move-result-object vX
                                            variables_next_jm = next_instruction_jm.get_output().split(', ')
                                            #in variables_next_jm[0] we have the variable storing the line read
                                            if('move-result' in next_instruction_jm.get_name()):
                                                dataRead = variables_next_jm[0]
                                                if 'wide' in next_instruction_jm.get_name():
                                                    dataRead = [variables_next_jm[0], 'v' + str(int(variables_next_jm[0].replace('v', ''))+1)]
                                                if update_vars_if_not_equal(MARKED_VARS, dataRead, current_function_jm, next_pc_jm):
                                                    #print"added the read location to NEXT_TODO, tainting the read result!"
                                                    NEXT_TODO.append(FA_WorklistEntry(current_function_jm, next_pc_jm, []))
                                #EXTERNAL_TUNNEL_2 - ADDRESSING EXTERNAL INTENTS
                                elif 'startActivity' in fun_name and 'Landroid/content/Intent' in instruction.get_output():
                                    if (variables[1] in MARKED_VARS[current_function][pc] or
                                            (('FromChild' in fun_name or 'FromFragment' in fun_name ) and variables[2] in MARKED_VARS[current_function][pc])):
                                        # taint the result of every getIntent()
                                        for i in INTENTS_READ:
                                            current_function_jm = i[0]
                                            pc_jm = i[1]
                                            next_pc_jm = get_next_pc(current_function_jm, pc_jm, METHODS)
                                            next_instruction_jm = pc_to_instruction(current_function_jm, next_pc_jm, METHODS)
                                            #next_instruction should be move-result-object vX
                                            variables_next_jm = next_instruction_jm.get_output().split(', ')
                                            #in variables[0] we have the variable storing the intent retrieved
                                            if('move-result' in next_instruction_jm.get_name()):
                                                if update_vars_if_not_equal(MARKED_VARS, variables_next_jm[0], current_function_jm, next_pc_jm):
                                                    #print "WARNING - SENDING SOMETHING TAINTED THROUGH AN INTENT"
                                                    NEXT_TODO.append(FA_WorklistEntry(current_function_jm, next_pc_jm, []))
                                #EXTERNAL_TUNNEL_4 - To address intents sent through broadcast receiver
                                elif 'sendBroadcast' in fun_name and 'Landroid/content/Intent' in instruction.get_output()\
                                        and variables[1] in MARKED_VARS[current_function][pc]:
                                    #we have to append the first line of the onReceive method (there may be more than one!)
                                    for onrcv in FIRST_LINE_ONRECEIVE:
                                        #We would be in the first line of the onReceive method. Taint the received intent if it was tainted
                                        mi = get_method_info(METHODS[onrcv[0]][0].method)
                                        #signature of the method is onReceive(Context, Intent) --> Intent parameter is [1]
                                        #print"list of parameters. . . %s" % (mi['params'][1][0])
                                        parameter_intent = 'v%s' % mi['params'][1][0]
                                        if update_vars_if_not_equal(MARKED_VARS, parameter_intent, onrcv[0], onrcv[1]):
                                            #print "WARNING - SENDING SOMETHING TAINTED THROUGH A BROADCAST"
                                            #print"parameter_intent %s, marked_vars %s" % (parameter_intent, MARKED_VARS[FIRST_LINE_ONRECEIVE[0][0]][FIRST_LINE_ONRECEIVE[0][1]])
                                            NEXT_TODO.append(FA_WorklistEntry(onrcv[0],onrcv[1],[]))
                                #EXTERNAL_TUNNEL_5 - ADDRESSING CONTENT PROVIDERS
                                elif 'android/content/ContentResolver; insert' in fun_name\
                                        and variables[2] in MARKED_VARS[current_function][pc]:
                                    # taint the result of every ContentResolver.query()
                                    for i in CONTENT_PROV_READ:
                                        current_function_jm = i[0]
                                        pc_jm = i[1]
                                        next_pc_jm = get_next_pc(current_function_jm, pc_jm, METHODS)
                                        next_instruction_jm = pc_to_instruction(current_function_jm, next_pc_jm, METHODS)
                                        #next_instruction should be move-result-object vX
                                        variables_next_jm = next_instruction_jm.get_output().split(', ')
                                        #in variables[0] we have the variable storing the result of the query
                                        if('move-result' in next_instruction_jm.get_name()):
                                            if update_vars_if_not_equal(MARKED_VARS, variables_next_jm[0], current_function_jm, next_pc_jm):
                                                #print "WARNING - SENDING SOMETHING TAINTED THROUGH A CONTENT PROVIDER"
                                                NEXT_TODO.append(FA_WorklistEntry(current_function_jm, next_pc_jm, []))
                            # ENDIF config.ENABLE_EXTERNAL_PATHS
                            # unknown callee: taint whatever its move-result writes
                            next_instruction = pc_to_instruction(current_function, next_pc, METHODS)
                            if 'move-' in next_instruction.get_name():
                                variables_next = next_instruction.get_output().split(', ')
                                if '-wide' in next_instruction.get_name():
                                    v1 = 'v%s' % (str(int(variables_next[0].replace('v', ''))+1))
                                    variables_next += [v1]
                                variables_next = MARKED_VARS[current_function][pc] + variables_next
                                if update_vars_if_not_equal(MARKED_VARS, variables_next, current_function, next_pc):
                                    NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                    # always continue past the invoke with the current marks
                    if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
                else:
                    print instruction.get_name(), " not configured!"
                    if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, next_pc):
                        NEXT_TODO.append(FA_WorklistEntry(current_function, next_pc, ins.callstack))
            TODO_INSTRUCTIONS = []
            for e in NEXT_TODO:
                if e not in TODO_INSTRUCTIONS: #more complex test, unhashable -> list(set( not available
                    TODO_INSTRUCTIONS.append(e)
            # NOTE(review): this assignment discards the dedup just built
            # above (dead code there) — verify whether it can be removed.
            TODO_INSTRUCTIONS = NEXT_TODO
    #return infos for smali instrumentation (class, method, instruction nr)
    ret = {}
    #lines = {}
    for m, e in MARKED_VARS.items():
        ret[m] = {}
        #lines[m] = {}
        for pc, tainted in e.items():
            if pc == -1:
                line_nr = -1
            else:
                line_nr = pc_to_line(m, pc, METHODS) # having twice the same info is a bit ugly, didn't want to mess up ret, though.
            ret[m][pc] = [line_nr, tainted]
            #lines[m][line_nr] = [pc,tainted]
    return ret#, lines
''' creates the DFG for the input APK, from the functions indicated by search, to all entry_nodes.
a function can e.g. be input as "Lcom/example/android/skeletonapp/SkeletonActivity;->why(Ljava/lang/String; I I)".
since this is a string match, it could be "why(" as well...
each line is annotated with a list of variables which are tainted AT the current line of code
'''
def dfg_backward(METHODS, MEMBERS, SINKS, dex, MARKED_forward):
'''
General Analysis workflow:
BACKWARDS ANALYSIS:
every line is marked with the local variables which could lead to a taint leak
FORWARD ANALYSIS:
if we reach a marking for a static or a member-var, update the tainting table
at every decision point (= the beginning of a basic block):
we have a list of currently marked variables
maybe have a list which indicates which variables will be tainted at the end of the block, depending on the currently marked variables
this list would also consider, that we can jump into functions inside a basic block
'''
'''@todo: object stuff:
backwards analysis:
remember which variables of objects are modified:
v* are only relevant inside a function
member variables are potentially tainted when iget/iput is called. at this point, store the instance (as id or address, or ...)
static variables are relevant only for the Class, but not for the instance
forward analysis:
at iput, store the instance id and the name of the tainted variable
at iget, check if the variable is tainted for the given instance id
'''
class BA_WorklistEntry:
''' a worklist entry contains:
function: the name of the function, the instruction is in
program_counter: the actual code location (as pc, NOT as instruction of code!)
[callstack]: list of the parents of the function. this list is filled, if a function is called in the current node.
Then the called function gets a "pointer" to the callee, to be able to jump back. the pointer consists of function name and pc
skip_line: Boolean to indicate, if this instruction was generated by jumping back from a function, or if it is generated "normally"
If this is true, the entry will not investigate the line further, but just jump back to the "line above"
'''
def __init__(self, function, pc, callstack, skip_line):
self.function = function
self.pc = pc
self.callstack = callstack
self.skip_line = skip_line
def __eq__(self, other):
return (self.function == other.function and\
self.pc == other.pc and\
self.callstack == other.callstack and\
self.skip_line == other.skip_line )
MARKED_VARS = collections.defaultdict(lambda: collections.defaultdict(list)) # hash from function to pc to marked variables
MARKED_MEMBERS = []
TODO_INSTRUCTIONS = []
if not SINKS:
return {},{}
for sink in SINKS:
sink_function_key = sink[0]
if MARKED_forward.get(sink_function_key): #!!SHORTCUT!!
TODO_INSTRUCTIONS.append(BA_WorklistEntry(sink_function_key, sink[1], [], False))
current_calculating_nodes = [i.function for i in TODO_INSTRUCTIONS] # these nodes (=functions) are currently checked. if we would step out, we wait until the node is completely done, including its function calls
#for the first run, we have to add all variables to the marked vars!
NEXT_TODO = []
for ins in TODO_INSTRUCTIONS:
current_function = ins.function
pc = ins.pc
instruction = get_instruction(current_function, pc, METHODS)
variables = get_variables_from_invoke(instruction)
MARKED_VARS[current_function][pc].extend(variables)
NEXT_TODO.append(BA_WorklistEntry(current_function, pc, [], True))
TODO_INSTRUCTIONS = NEXT_TODO
# len_before = 0
while TODO_INSTRUCTIONS:
# print "Instructions to investitate: ", len(TODO_INSTRUCTIONS)
cnt_fun = []
# for i in TODO_INSTRUCTIONS:
# if i.function not in cnt_fun:
# cnt_fun.append(i.function)
# print "Functions: %d, Instructions: %d" % (len(cnt_fun), len(TODO_INSTRUCTIONS))
NEXT_TODO = []
#handle all blocks + function calls, before stepping one function up
todo = []
for i in TODO_INSTRUCTIONS:
if i.function in current_calculating_nodes:
todo.append(i)
else:
NEXT_TODO.append(i)
if not todo:
NEXT_TODO = []
current_calculating_nodes = [TODO_INSTRUCTIONS[0].function]
continue
# investigate highest pcs first
todo = sorted(todo, key=lambda i: i.pc, reverse=True)
for ins in todo:
current_function = ins.function
pc = ins.pc
instruction = get_instruction(current_function, pc, METHODS)
#EXTERNAL_TUNNEL_4 - To address broadcast receivers
# callbacks
if config.ENABLE_EXTERNAL_PATHS and pc == 0:
for onrcv in FIRST_LINE_ONRECEIVE:
if current_function == onrcv[0]:
#We are at the beginning of onReceive - we should continue where sendBroadcast appears
#signature of the method is onReceive(Context, Intent) --> Intent parameter is [1]
mi = get_method_info(METHODS[current_function][0].method)
parameter_intent = 'v%s' % mi['params'][1][0]
#if the intent is tainted, then taint the sendBroadcast parameter (variable[1])
#print("parameter_intent %s ,MARKED_VARS[FIRST_LINE_ONRECEIVE[0][0]][FIRST_LINE_ONRECEIVE[0][1] %s")%(parameter_intent, MARKED_VARS[FIRST_LINE_ONRECEIVE[0][0]][FIRST_LINE_ONRECEIVE[0][1]])
if parameter_intent in MARKED_VARS[onrcv[0]][onrcv[1]]:
for intn in INTENTS_BROADCAST_SENT:
current_function_jm = intn[0]
pc_jm = intn[1]
instruction_jm = get_instruction(current_function_jm, pc_jm, METHODS)
variables_jm = get_variables_from_invoke(instruction_jm)
if update_vars_if_not_equal(MARKED_VARS, variables_jm[1], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
if not ins.skip_line:
variables = []
if instruction.get_name() not in config.NOPS and 'iput' not in instruction.get_name() and 'sput' not in instruction.get_name():
variables = instruction.get_output().split(', ')
function = None
if 'invoke-' in instruction.get_name(): #function call
function = variables[-1]
#indirect result storing
if 'move-resul' in instruction.get_name():
if '-wide' in instruction.get_name():
target = [variables[0], 'v' + str(int(variables[0].replace('v', ''))+1)]
else:
target = [variables[0]]
prev_pc = get_prev_pc(current_function, pc, METHODS)
MARKED_VARS[current_function][prev_pc].extend(MARKED_VARS[current_function][pc])
MARKED_VARS[current_function][prev_pc] = unique(MARKED_VARS[current_function][prev_pc])
instruction = get_prev_instruction(current_function, pc, METHODS)
if 'filled-new-arr' in instruction.get_name():
taint_propagation = False
for t in target:
if t in MARKED_VARS[current_function][pc]:
taint_propagation = True
if taint_propagation:
variables = get_variables_from_invoke(instruction)
update_vars_if_not_equal(MARKED_VARS, variables, current_function, pc)
else:
variables = instruction.get_output().split(', ')
function = variables[-1]
if config.ENABLE_EXTERNAL_PATHS:
if (variables[0] in MARKED_VARS[current_function][pc]): #EXTERNAL_TUNNEL_3 - variables[0] is the storing variable of move-resul--> we have to do something only if it is tainted
#now we are working with tainted instructions!
#EXTERNAL_TUNNEL_ - TO ADDRESS WRITE&READ FILE ISSUE
#EXTERNAL_TUNNEL_2 - changed TODO_INSTRUCTIONS to NEXT_TODO
#EXTERNAL_TUNNEL_3 - Code rewriting. TO ADDRESS WRITE&READ THROUGH FILES/DATABASES/SHAREDPREFS
#EXTERNAL_TUNNEL_7 - Code rewriting to improve effectiveness (full name of class added to avoid unexpected methods)
if (('java/io/InputStream;->read' in function) or
('java/io/InputStreamReader;->read' in function) or
('java/io/BufferedInputStream;->read' in function) or
('java/io/ByteArrayInputStream;->read' in function) or
('java/io/DataInputStream;->read' in function) or
('java/io/FileInputStream;->read' in function) or
('java/io/FileReader;->read' in function) or
# EXTERNAL_TUNNEL_9 - This works but it is too generic ('java/io/Reader;->read' in function) or
('java/io/BufferedReader;->read' in function) or
('java/util/zip/GZIPInputStream;->read' in function) or
('android/database/sqlite/SQLiteDatabase;->query' in function) or
('android/content/SharedPreferences;->get' in function)):
#@todo - readLine serves for reading both files and sockets. Try to discover which one it is.
for i in STREAMS_READ:
current_function_jm = i[0]
pc_jm = i[1]
instruction_jm = get_instruction(current_function_jm, pc_jm, METHODS)
temp_jm = instruction_jm.get_output().split(', ')
function_jm = temp_jm[-1]
variables_jm = get_variables_from_invoke(instruction_jm)
#EXTERNAL_TUNNEL_8 - To prevent connecting a read with a print() with no arguments
if ('read' in function and (('write' in function_jm) or ('print' in function_jm and len(variables_jm)>1))):
if update_vars_if_not_equal(MARKED_VARS, variables_jm[1], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
elif ('query' in function and 'SQLiteDatabase;->insert' in function_jm):
if update_vars_if_not_equal(MARKED_VARS, variables_jm[1:4], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
elif ('SharedPreferences;->get' in function and 'SharedPreferences$Editor;->put' in function_jm):
if update_vars_if_not_equal(MARKED_VARS, variables_jm[2], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
#EXTERNAL_TUNNEL_2 - TO ADDRESS LEAKAGE THROUGH INTENTS
elif 'getIntent' in function:
for intn in INTENTS_WRITE:
current_function_jm = intn[0]
pc_jm = intn[1]
instruction_jm = get_instruction(current_function_jm, pc_jm, METHODS)
temp_jm = instruction_jm.get_output().split(', ')
function_jm = temp_jm[-1]
variables_jm = get_variables_from_invoke(instruction_jm)
if ('FromChild' in function_jm or 'FromFragment' in function_jm):
if update_vars_if_not_equal(MARKED_VARS, variables_jm[2], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
else:
if update_vars_if_not_equal(MARKED_VARS, variables_jm[1], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
#EXTERNAL_TUNNEL_5 - TO ADDRESS LEAKAGE THROUGH CONTENT PROVIDERS
#EXTERNAL_TUNNEL_8 - Including android/content in the condition to make it as precise as possible
elif 'android/content/ContentResolver; query' in function:
for intn in CONTENT_PROV_WRITE:
current_function_jm = intn[0]
pc_jm = intn[1]
instruction_jm = get_instruction(current_function_jm, pc_jm, METHODS)
temp_jm = instruction_jm.get_output().split(', ')
function_jm = temp_jm[-1]
variables_jm = get_variables_from_invoke(instruction_jm)
if update_vars_if_not_equal(MARKED_VARS, variables_jm[2], current_function_jm, pc_jm):
NEXT_TODO.append(BA_WorklistEntry(current_function_jm, pc_jm, [], False))
#ENDIF config.ENABLE_EXTERNAL_PATHS
#shortcut, but remember target!
ins = BA_WorklistEntry(current_function, prev_pc, ins.callstack, ins.skip_line)
pc = ins.pc
elif 'throw' == instruction.get_name():
block = get_block(METHODS, current_function, pc)
if block.exception_analysis:
for ex in block.exception_analysis.exceptions:
#all exceptions are interesting in backward!
next_pc = ex[1]
instruction_next = get_instruction(current_function, next_pc, METHODS)
if 'move-exception' == instruction_next.get_name():
variable_next = instruction_next.get_output().split(', ')[0]
if variable_next in MARKED_VARS[current_function][next_pc]:
if variables[0] not in MARKED_VARS[current_function][pc]:
MARKED_VARS[current_function][pc].append(variables[0])
MARKED_VARS[current_function][pc] = unique(MARKED_VARS[current_function][pc])
elif 'iget' in instruction.get_name()\
or 'sget' in instruction.get_name():
if '-wide' in instruction.get_name():
target = [variables[0], 'v' + str(int(variables[0].replace('v', ''))+1)]
else:
target = [variables[0]]
#currently only for instances in the current DEX!
if [i for i in target if i in MARKED_VARS[current_function][pc]]: # do we have taint propagation?
member = variables[-1]
MARKED_MEMBERS.append(member)
MARKED_MEMBERS = unique(MARKED_MEMBERS)
variables = variables[0:-1] #remove variable name
if MEMBERS.get(member) and MEMBERS[member].get('write'):
for ref in MEMBERS[member]['write']:
calling_function = ref[0]
location = ref[1]
instruction_inner = pc_to_instruction(calling_function, location, METHODS)
variables_inner = instruction_inner.get_output().split(', ')[0:-1] #remove field name #last entry is THIS
if 'iput' in instruction_inner.get_name():
variables_inner = variables_inner[:1] #remove THIS
if update_vars_if_not_equal(MARKED_VARS, variables_inner, calling_function, location):
NEXT_TODO.append(BA_WorklistEntry(calling_function, location, [], True))
elif instruction.get_name() in config.INSTRUCTION_PROPAGATION:
propagation = False
for i, v in enumerate(variables):
if i in config.INSTRUCTION_PROPAGATION[instruction.get_name()][0]:
if v in MARKED_VARS[current_function][pc]:
propagation = True
if '-wide' in instruction.get_name() and 'v'+str(int(v.replace('v', ''))+1) in MARKED_VARS[current_function][pc]:
propagation = True
if propagation:
tainted = []
for i, v in enumerate(variables):
if i in config.INSTRUCTION_PROPAGATION[instruction.get_name()][1]:
tainted.append(v)
variables = tainted
else:
variables = []
MARKED_VARS[current_function][pc].extend(variables)
MARKED_VARS[current_function][pc] = unique(MARKED_VARS[current_function][pc])
elif instruction.get_name() in config.UNTAINT:
if '-wide' in instruction.get_name():
target = [variables[0], 'v' + str(int(variables[0].replace('v', ''))+1)]
else:
target = [variables[0]]
# for simple instructions only (e.g. add **). all other functions are handled via invoke-
for t in target:
if t in MARKED_VARS[current_function][pc]: # for simple instructions only (e.g. add **). all other functions are handled via invoke-
MARKED_VARS[current_function][pc].remove(t)
elif instruction.get_name() in config.NOPS:
pass
else:
if not function:
print instruction.get_name(), " not configured."
if function:
variables = get_variables_from_invoke(instruction)
#fix function name
ex = function.split('(')
ex2 = ex[0].split('->')
fun_name = '%s %s (%s' % (ex2[0], ex2[1], ex[1])
propagating = check_function(fun_name, METHODS) #also does void functions. needed for static stuff
if propagating[0] == 0:
#check if var is tainted in next_line
_, nins = get_next_instruction_and_pc(current_function, pc, METHODS)
taint_propagation = False
if 'move-result' in nins.get_name():
next_variables = nins.get_output().split(', ')
if 'wide' in nins.get_name():
next_variables = [next_variables[0], 'v' + str(int(next_variables[0].replace('v', ''))+1)]
if [i for i in next_variables if i in MARKED_VARS[current_function][pc]]:
taint_propagation = True
added_todo = False
for ret in propagating[1]:
investigate = True
if ret[1] and taint_propagation:
update_vars_if_not_equal(MARKED_VARS, ret[1], fun_name, ret[0])
else:
if '-static' not in instruction.get_name() and variables[0] in MARKED_VARS[current_function][pc]: #tainted this!
taint_propagation = True
mi = get_method_info(METHODS[fun_name][0].method)
instance_var = 'v%d' % mi['registers'][1]
if instance_var not in set(MARKED_VARS[fun_name][ret[0]]):
MARKED_VARS[fun_name][ret[0]].append(instance_var)
MARKED_VARS[fun_name][ret[0]] = unique(MARKED_VARS[fun_name][ret[0]])
else:
investigate = False
if investigate:
if [current_function, pc] in ins.callstack: # somewhere on the stack, we have the exact copy of the next_todo already. skip this, to prevent endless loop
#print "RECURSION! in", current_function
pass
elif not taint_propagation and MARKED_VARS[fun_name].get(ret[0]): #we were already at this location and do not need to step into. just add prev line
NEXT_TODO.append(BA_WorklistEntry(current_function, pc, ins.callstack, True))
added_todo = True
else:
NEXT_TODO.append(BA_WorklistEntry(fun_name, ret[0], ins.callstack + [[current_function, pc]], False))
added_todo = True
if added_todo:
continue
else:
#error, function not found, assuming all tainted
#assume parameters tainted, but only if the result is also tainted
# or the instance is tainted
block = get_block(METHODS, current_function, pc)
next_pc, next_instruction = get_next_instruction_and_pc(current_function, pc, METHODS)
while next_pc < block.end - block.last_length and 'move-result' not in next_instruction.get_name():
next_pc, next_instruction = get_next_instruction_and_pc(current_function, next_pc, METHODS)
result_tainted = False
if 'move-result' not in next_instruction.get_name():
pass
else:
v = next_instruction.get_output()
if v in MARKED_VARS[current_function][next_pc]:
result_tainted = True
if '-wide' in next_instruction.get_name() and 'v'+str(int(v.replace('v', ''))+1) in MARKED_VARS[current_function][next_pc]:
result_tainted = True
if '-static' not in instruction.get_name() and variables[0] in MARKED_VARS[current_function][pc]:
result_tainted = True
if result_tainted:
MARKED_VARS[current_function][pc].extend(variables)
MARKED_VARS[current_function][pc] = unique(MARKED_VARS[current_function][pc])
if pc == 0: #method start
MARKED_VARS[current_function][-1] = unique(MARKED_VARS[current_function][0])
#here, the function call itself is already handled, go one up!
method = METHODS[current_function][0].method
params = get_method_info(method).get('params')
mi = get_method_info(METHODS[current_function][0].method)
instance_var = 'v%d' % mi['registers'][1]
callees = []
callees_callees = []
if ins.callstack: # this was a function call from an investigated basic block, just jump back and restore the callee
callees = [ins.callstack[-1]]
callees_callees = ins.callstack[:-1]
else: #this is a "normal" function beginning. search all callees and continue at the points where they jumped into this function.
# This can only happen in the current dex file, since all sinks and sources will be in it, and all direkt paths will also be.
key2 = '%s %s %s' % (method.get_class_name(), method.get_name(), method.get_descriptor())
for parent in METHODS[key2][0].method.XREFfrom.items:
parent_key = '%s %s %s' % (parent[0].get_class_name(), parent[0].get_name(), parent[0].get_descriptor())
for entry in parent[1]:
if entry:
callees.append([parent_key, entry.idx])
for entry in callees:
# print "\tJumping back to", entry
calling_function = entry[0]
location = entry[1]
instruction = pc_to_instruction(calling_function, location, METHODS)
variables = get_variables_from_invoke(instruction)
new_marked = []
this_var = None
if 'invoke-static' in instruction.get_name():
pass
else:
this_var = variables[0]
variables = variables[1:len(variables)] #first variable is THIS
if instance_var in MARKED_VARS[current_function][-1]:
#print 'THIS Instance %s is tainted, tainting instance var %s of caller %s' % (instance_var, this_var, calling_function)
new_marked.append(this_var)
elif MARKED_VARS.get(calling_function)\
and MARKED_VARS[calling_function].get(location)\
and this_var in MARKED_VARS[calling_function].get(location)\
and instance_var not in MARKED_VARS[current_function][-1]: #remove instance var
new_marked = [i for i in MARKED_VARS[calling_function][location] if i != this_var]
MARKED_VARS[calling_function][location] = new_marked
MARKED_VARS[calling_function][location] = unique(MARKED_VARS[calling_function][location])
var_changed = True
for i in range(len(params)):
if "v%s" % params[i][0] in MARKED_VARS[current_function][-1]:
new_marked.append(variables[i])
var_changed = False
if new_marked\
and update_vars_if_not_equal(MARKED_VARS, new_marked, calling_function, location):
var_changed = True
if (var_changed and not calling_function == current_function) or ins.callstack:
NEXT_TODO.append(BA_WorklistEntry(calling_function, location, callees_callees, True))
else:
block = get_block(METHODS, current_function, pc)
if pc == block.start: #block start (but not first block!)
#search for exceptions:
for bb in METHODS[current_function][0].basic_blocks.bb:
if bb.exception_analysis:
for ex in bb.exception_analysis.exceptions:
if pc == ex[1]:
prev_pc = bb.end - bb.last_length
if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, prev_pc):
NEXT_TODO.append(BA_WorklistEntry(current_function, prev_pc, ins.callstack, False))
elif ins.callstack is not None: #no tainting is changed, but we are called from a function. So we go to the first instruction of the function, and then jump back in the following round
NEXT_TODO.append(BA_WorklistEntry(current_function, 0, ins.callstack, True))
elif 'throw' in get_instruction(method, prev_pc, METHODS).get_name(): #if a th1row, we can have several paths, and always have to check...
NEXT_TODO.append(BA_WorklistEntry(current_function, prev_pc, ins.callstack, False))
for pre in block.fathers: #continue at all father.ends of the block
father = pre[2]
prev_pc = father.end - father.last_length
if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, prev_pc):
NEXT_TODO.append(BA_WorklistEntry(current_function, prev_pc, ins.callstack, False))
elif ins.callstack is not None: #no tainting is changed, but we are called from a function. So we go to the first instruction of the function, and then jump back in the following round
NEXT_TODO.append(BA_WorklistEntry(current_function, 0, ins.callstack, True))
elif 'throw' in get_instruction(method, prev_pc, METHODS).get_name(): #if a throw, we can have several paths, and always have to check...
NEXT_TODO.append(BA_WorklistEntry(current_function, prev_pc, ins.callstack, False))
else: #just a "normal" instruction in a BB
prev_pc = get_prev_pc(current_function, pc, METHODS)
if update_vars_if_not_equal(MARKED_VARS, MARKED_VARS[current_function][pc], current_function, prev_pc):
NEXT_TODO.append(BA_WorklistEntry(current_function, prev_pc, ins.callstack, False))
elif ins.callstack is not None: #no tainting is changed, but we are called from a function. So we go to the first instruction of the function, and then jump back in the following round
NEXT_TODO.append(BA_WorklistEntry(current_function, 0, ins.callstack, True))
TODO_INSTRUCTIONS = []
for e in NEXT_TODO:
if MARKED_forward.get(e.function): #only add the functions we really want to investigate (=have an annotation in the forward analysis)
if e not in TODO_INSTRUCTIONS: #more complex test, unhashable -> list(set( not available
TODO_INSTRUCTIONS.append(e)
#return infos for smali instrumentation (class, method, instruction nr)
ret,lines = {},{}
for m, e in MARKED_VARS.items():
ret[m], lines[m] = {}, {}
for pc, tainted in e.items():
if pc == -1:
line_nr = -1
else:
line_nr = pc_to_line(m, pc, METHODS) # having twice the same info is a bit ugly, didn't want to mess up ret, though.
ret[m][pc] = [line_nr, tainted]
lines[m][line_nr] = [pc,tainted]
return ret, lines
def dfg_combine(MARKED_VARS_backward, MARKED_VARS_forward, types, METHODS, SOURCES, SINKS):
    '''Intersect the forward and backward analyses and derive annotations.

    A register is only relevant if it is reachable from a source (forward
    pass) AND can reach a sink (backward pass).  The intersection is then
    walked line by line per function to produce instrumentation annotations
    (markings, unmarkings, SOURCE/SINK hits, static/member propagation).

    Returns (MARKED_combined, annotations, annotations_rev):
        MARKED_combined[function][pc]   = [line_nr, [vars]]
        annotations[function][pc]       = {category: [typed entries]}
        annotations_rev[function][line] = same dict keyed by source line
    '''
    MARKED_combined = collections.defaultdict(lambda: collections.defaultdict(list))
    MARKED_combined_reversed = collections.defaultdict(lambda: collections.defaultdict(list))
    # intersect: keep only vars marked by BOTH analyses at the same pc
    for function, entry in MARKED_VARS_forward.items():
        for pc, marked_forward in entry.items():
            if marked_forward and MARKED_VARS_backward.get(function) and MARKED_VARS_backward[function].get(pc):
                for var in marked_forward[1]:
                    if var in MARKED_VARS_backward[function][pc][1]:
                        inner = MARKED_combined[function][pc]
                        if not inner:
                            inner = [MARKED_VARS_backward[function][pc][0], []]  # [line_nr, vars]
                        inner[1].append(var)
                        MARKED_combined[function][pc] = inner
                        MARKED_combined_reversed[function][inner[0]] = [pc, inner[1]]
    # forward results keyed by line number as well
    MARKED_forward_reversed = collections.defaultdict(lambda: collections.defaultdict(list))
    for f, e in MARKED_VARS_forward.items():
        for pc, inner in e.items():
            MARKED_forward_reversed[f][inner[0]] = [pc, inner[1]]
    # annotation: display what happens in each line.
    # NOTE(review): 'fun' is a file-level defaultdict factory (presumably
    # returning a nested defaultdict of lists) defined elsewhere -- verify.
    annotations = collections.defaultdict(fun)
    for function, entry in MARKED_combined_reversed.items():
        entry_line = min(entry, key=entry.get)
        if entry_line == -1:  # -1 encodes the parameter state; start at line 0
            entry_line = 0
        if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(-1):
            # tainted parameters: write this annotation into line 0!
            current_marked = MARKED_combined_reversed[function][-1][1]
            with_type = get_variables_with_type_pc(current_marked, function, -1, METHODS, types)
            annotations[function][0]['marked_if_parameter_marked'].extend(with_type)
        worklist = [[-1, entry_line]]  # pairs of [prev_line, current_line]
        done_lines = []
        marked = collections.defaultdict(list)  # line -> vars marked AT that line
        while worklist:
            next_work = []
            for w in worklist:
                line = w[1]
                prev_line = w[0]
                done_lines.append(line)
                pc = line_to_pc(function, line, METHODS)
                if pc is None:
                    # can happen since we blindly queue line+1 and do not check
                    # beforehand whether this runs over the function borders
                    continue
                if pc == 0:
                    marked[prev_line] = []
                if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(line):
                    current_marked = MARKED_combined_reversed[function][line][1]
                else:
                    current_marked = []
                    if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(-1):
                        marked[prev_line] = MARKED_combined_reversed[function][-1][1]
                marked[line] = current_marked
                # queue the successors of this line
                block = get_block(METHODS, function, pc)
                next_line = []
                if pc == block.end - block.last_length:  # last instruction of the block
                    for c in block.childs:
                        nl = pc_to_line(function, c[2].start, METHODS)
                        next_work.append([line, nl])
                        next_line.append(nl)
                    if block.exception_analysis:
                        # (a duplicated nested copy of this guard+loop in the
                        # original was collapsed -- it appended every handler
                        # len(exceptions) times)
                        for ex in block.exception_analysis.exceptions:
                            npc = pc_to_line(function, ex[1], METHODS)
                            next_line.append(npc)  # next line
                else:
                    next_line = [line + 1]
                    next_work.append([line, line + 1])
                if entry.get(line):
                    current_marked = entry.get(line)[1]
                    marked[line] = current_marked
                elif entry.get(prev_line):
                    # everything got unmarked between prev_line and here
                    current_marked = []
                    marked[line] = current_marked
                else:
                    # nothing marked here or before: propagate and move on
                    marked[line] = marked[prev_line]
                    next_work.append([line, line + 1])
                    continue
                instruction = get_instruction(function, pc, METHODS)
                if 'if-' in instruction.get_name():
                    # conditionals also fall through to the next line
                    if [line, line + 1] not in next_work:
                        next_work.append([line, line + 1])
                hideMarking = False
                if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(line):
                    if 'invoke' in instruction.get_name():
                        variables = get_variables_from_invoke(instruction)
                        if [i for i in SINKS if i[0] == function and i[1] == pc]:
                            # the call itself is a sink: annotate tainted arguments
                            if MARKED_combined_reversed[function].get(prev_line):
                                marked_before = MARKED_combined_reversed[function][prev_line][1]
                            else:
                                marked_before = []
                            params = [i for i in variables if i in marked_before]
                            if params:
                                with_type = get_variables_with_type_pc(params, function, pc, METHODS, types)
                                with_type = [i for i in with_type if i.split(':')[0] in current_marked]
                                annotations[function][pc]['SINK'].extend(with_type)
                        else:
                            # an invoke whose code we do not have: only vars that
                            # were tainted in the previous line need to be
                            # treated as tainted
                            params = [i for i in variables if i in current_marked]
                            if params:
                                with_type = get_variables_with_type_pc(params, function, pc, METHODS, types)
                                current_marked = MARKED_combined_reversed[function][line][1]
                                with_type = [i for i in with_type if i.split(':')[0] in current_marked]
                                if with_type:
                                    annotations[function][pc]['function'].extend(with_type)
                        current_marked = []
                    elif 'move-result' in instruction.get_name():
                        ppc = line_to_pc(function, line - 1, METHODS)  # pc of the invoke
                        if [i for i in SOURCES if i[0] == function and i[1] == ppc]:
                            # we are at a source: mark the return value
                            variables = [instruction.get_output()]
                            with_type = get_variables_with_type_pc(variables, function, pc, METHODS, types)
                            annotations[function][pc]['SOURCE'].extend(with_type)
                        else:
                            if annotations[function][ppc].get('function'):
                                variables = [instruction.get_output()]
                                if '-wide' in instruction.get_name():
                                    # wide results occupy a register pair
                                    v1 = (str(int(variables[0].replace('v', '')) + 1))
                                    variables.append('v%s' % v1)
                                with_type = get_variables_with_type_pc(variables, function, pc, METHODS, types)
                                if [i for i in variables if i in current_marked]:
                                    annotations[function][pc]['propagate_function_return'].append(with_type[0])
                                    if '-wide' in instruction.get_name():
                                        annotations[function][pc]['propagate_function_return'].append(with_type[1])
                                else:
                                    annotations[function][pc]['unmarking'].append(with_type[0])
                                    if '-wide' in instruction.get_name():
                                        annotations[function][pc]['unmarking'].append(with_type[1])
                        current_marked = []
                        continue
                    elif 'return' in instruction.get_name():
                        variables = instruction.get_output().split(', ')
                        params = [i for i in variables if i in current_marked]
                        # @todo: return wide?!
                        if params:
                            with_type = get_variables_with_type_pc(params, function, pc, METHODS, types)
                            current_marked = MARKED_combined_reversed[function][line][1]
                            with_type = [i for i in with_type if i.split(':')[0] in current_marked]
                            annotations[function][pc]['return'].extend(with_type)
                        continue
                    elif 'sput-' in instruction.get_name():  # storing in static
                        variables = instruction.get_output().split(', ')
                        if variables[0] in current_marked:
                            inst = variables[-1].split(' ')
                            typ = inst[1][1:-1].replace('/', '.')
                            inst = inst[0][1:].replace('/', '.').replace(';->', '.')  # *wink*
                            with_type = '%s: %s' % (inst, typ)
                            annotations[function][pc]['marking_staticvar'].append(with_type)
                            hideMarking = True
                    elif 'iput-' in instruction.get_name():  # storing in member
                        variables = instruction.get_output().split(', ')
                        if variables[0] in current_marked:
                            inst = variables[-1].split(' ')
                            typ = inst[1][1:-1].replace('/', '.')
                            inst = inst[0][1:].replace('/', '.').replace(';->', '.')  # *wink*
                            with_type = '%s: %s: %s' % (variables[1], inst, typ)
                            annotations[function][pc]['marking_instancevar'].append(with_type)
                            hideMarking = True
                    elif 'sget-' in instruction.get_name():  # loading static
                        variables = instruction.get_output().split(', ')
                        if variables[0] in current_marked:
                            inst = variables[-1].split(' ')
                            typ = inst[1][1:-1].replace('/', '.')
                            inst = inst[0][1:].replace('/', '.').replace(';->', '.')  # *wink*
                            with_type = '%s: %s: %s' % (inst, variables[0], typ)
                            annotations[function][pc]['propagate_staticvar'].append(with_type)
                            hideMarking = True
                    elif 'iget-' in instruction.get_name():  # loading member
                        variables = instruction.get_output().split(', ')
                        if variables[0] in current_marked:
                            inst = variables[-1].split(' ')
                            typ = inst[1][1:-1].replace('/', '.')
                            inst = inst[0][1:].replace('/', '.').replace(';->', '.')  # *wink*
                            with_type = '%s: %s: %s: %s' % (variables[1], inst, variables[0], typ)
                            annotations[function][pc]['propagate_instancevar'].append(with_type)
                            hideMarking = True
                # vars that became marked at this line
                new_marked = [i for i in current_marked if i not in marked[prev_line]]
                if next_line:
                    # vars that lose their marking on the way to a successor line
                    new_un_marked = []
                    for nl in next_line:
                        if entry.get(nl):
                            tmp = [i for i in current_marked if i not in entry[nl][1]]
                            if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(line):
                                tmp = [i for i in tmp if i in MARKED_combined_reversed[function][line][1]]
                            if tmp:
                                npc = line_to_pc(function, nl, METHODS)
                                new_un_marked.append([npc, tmp])
                else:
                    new_un_marked = []
                if new_marked and not hideMarking:
                    instruction = get_instruction(function, pc, METHODS).get_output().split(', ')
                    if '->' in instruction[-1]:
                        instruction = instruction[:-1]  # drop field/method name
                    with_type = get_variables_with_type_pc(new_marked, function, pc, METHODS, types)
                    # only annotate markings if these are set in the current line:
                    # a var could be untainted in prev_line but tainted in line,
                    # if the var is only marked via a different path
                    if pc != 0:
                        with_type = [i for i in with_type if i.split(':')[0] in instruction]
                        if MARKED_combined_reversed.get(function) and MARKED_combined_reversed[function].get(line):
                            current_marked = MARKED_combined_reversed[function][line][1]
                            with_type = [i for i in with_type if i.split(':')[0] in current_marked]
                    if with_type:
                        annotations[function][pc]['marking'].extend(with_type)
                if new_un_marked:
                    for i in new_un_marked:
                        next_pc = i[0]
                        with_type = get_variables_with_previous_type_pc(i[1], function, pc, METHODS, types)
                        annotations[function][next_pc]['unmarking'].extend(with_type)
            # schedule unvisited successors, without duplicates
            worklist = []
            for j in [i for i in next_work if i[1] not in done_lines]:
                if j not in worklist:
                    worklist.append(j)
    annotations_rev = collections.defaultdict(fun)
    for m, e in annotations.items():
        for pc, inner in e.items():
            line_nr = pc_to_line(m, pc, METHODS)
            annotations_rev[m][line_nr] = inner
    return MARKED_combined, annotations, annotations_rev
def type_checking(MARKED_VARS_backward, MARKED_VARS_forward, METHODS, dex):
'''
codes which are not generating output:
return-void
goto
switch
'''
TYPES_line = collections.defaultdict(lambda: collections.defaultdict(list))
TYPES_total = collections.defaultdict(lambda: collections.defaultdict(list))
TYPES_total_cpy = collections.defaultdict(lambda: collections.defaultdict(list))
for m in dex.methods.methods:
current_function = '%s %s %s' % (m.get_class_name(), m.get_name(), m.get_descriptor())
if not current_function in METHODS or (not MARKED_VARS_backward.get(current_function) and not MARKED_VARS_forward.get(current_function)):
continue
# print "Creating types for %s" % current_function
mi = get_method_info(METHODS[current_function][0].method)
if 'params' in mi:
params = ['v%d: %s' % (i[0], i[1]) for i in mi['params']]
else:
params = []
if METHODS[current_function][0].method.get_access_flags_string() != 'static':
#add THIS to parameters.
params.append('v%d: %s' % (mi['registers'][1], m.get_class_name()[1:-1].replace('/', '.')))
todo_registers = ['v%d' % t for t in range(mi['registers'][0], mi['registers'][1]+1)]
entry = check_function(current_function, METHODS)
#add throw blocks (function could throw to parent)
for bb in METHODS[current_function][0].basic_blocks.bb:
if not bb.exception_analysis:
last_instruction_pc = bb.end - bb.last_length
last_instruction = get_instruction(current_function, last_instruction_pc, METHODS)
# name = last_instruction.get_name()
# output = last_instruction.get_output()
if last_instruction.get_name() == 'throw':
if entry[0] == -1:
entry[0] = 0
entry[1] = [[last_instruction_pc, [last_instruction.get_output()]]]
else:
entry[1].append([last_instruction_pc, [last_instruction.get_output()]])
if entry[0] == -1:
continue
return_type = dvm.get_type(m.get_proto()[1])
done_lines = []
worklist = [i[0] for i in entry[1]]
while worklist:
new_worklist = []
for pc in worklist:
done_lines.append(pc)
block = get_block(METHODS, current_function, pc)
instruction = get_instruction(current_function, pc, METHODS)
variables = instruction.get_output().split(', ')
name = instruction.get_name()
if 'return' in name:
if 'void' in name:
pass
else:
TYPES_line[current_function][pc] = ["%s: %s" % (variables[0], return_type)]
TYPES_total[current_function][variables[0]].append(return_type)
elif 'invoke' in name:
function = variables[-1]
variables = get_variables_from_invoke(instruction)
cl = function.split(';->')[0][1:]
variable_types_function = function.split('(')[1].split(')')[0].split(' ')
ret_type = function.split(')')[1]
variable_types = []
for v in variable_types_function:
if v == '':
continue
t = dvm.get_type(v)
if t in ('long', 'double'):
variable_types.append('%s_high' % t)
variable_types.append('%s_low' % t)
else:
variable_types.append('%s' % t)
if len(variable_types) < len(variables): #static function call
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], cl.replace('/', '.')))
TYPES_total[current_function][variables[0]].append(cl.replace('/', '.'))
variables = variables[1:]
for i, v in enumerate(variables):
TYPES_line[current_function][pc].append('%s: %s' % (v, variable_types[i]))
TYPES_total[current_function][variables[0]].append(variable_types[i])
if ret_type != 'V':
next_pc, next_instruction = get_next_instruction_and_pc(current_function, pc, METHODS)
next_variables = next_instruction.get_output().split(', ')
if 'move-result-wide' in next_instruction.get_name():
TYPES_line[current_function][next_pc].append('%s: %s_high' % (next_variables[0], dvm.get_type(ret_type)))
TYPES_total[current_function][next_variables[0]].append('%s_high'%dvm.get_type(ret_type))
v1 = 'v%s' % (str(int(next_variables[0].replace('v', ''))+1))
TYPES_line[current_function][next_pc].append('%s: %s_low' % (v1, dvm.get_type(ret_type)))
TYPES_total[current_function][v1].append('%s_low'%dvm.get_type(ret_type))
elif 'move' in next_instruction.get_name():
TYPES_line[current_function][next_pc].append('%s: %s' % (next_variables[0], dvm.get_type(ret_type)))
TYPES_total[current_function][next_variables[0]].append(dvm.get_type(ret_type))
elif 'filled-new-arra' in name:
typ = dvm.get_type(variables[-1])
next_pc, next_instruction = get_next_instruction_and_pc(current_function, pc, METHODS)
next_variables = next_instruction.get_output().split(', ')
if 'move-result-wide' in next_instruction.get_name():
TYPES_line[current_function][next_pc].append('%s: %s_high' % (next_variables[0], dvm.get_type(typ)))
TYPES_total[current_function][next_variables[0]].append('%s_high'%dvm.get_type(typ))
v1 = 'v%s' % (str(int(next_variables[0].replace('v', ''))+1))
TYPES_line[current_function][next_pc].append('%s: %s_low' % (v1, dvm.get_type(typ)))
TYPES_total[current_function][v1].append('%s_low'%dvm.get_type(typ))
elif 'move' in next_instruction.get_name():
TYPES_line[current_function][next_pc].append('%s: %s' % (next_variables[0], dvm.get_type(typ)))
TYPES_total[current_function][next_variables[0]].append(dvm.get_type(typ))
elif 'new-instance' == name:
function = variables[-1]
variables = variables[:-1]
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], function[1:-1].replace('/', '.')))
TYPES_total[current_function][variables[0]].append(function[1:-1].replace('/', '.'))
elif 'new-array' == name:
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], dvm.get_type(variables[-1])))
TYPES_total[current_function][variables[0]].append(dvm.get_type(variables[-1]))
elif 'sput' in name or 'sget' in name:
typ = dvm.get_type(variables[-1].split(' ')[1])
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], typ))
TYPES_total[current_function][variables[0]].append(typ)
elif 'iput' in name or 'iget' in name:
typ = dvm.get_type(variables[-1].split(' ')[1])
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], typ))
TYPES_total[current_function][variables[0]].append(typ)
typ_inst = dvm.get_type(variables[-1].split(';->')[0])
TYPES_line[current_function][pc].append('%s: %s' % (variables[1], typ_inst))
TYPES_total[current_function][variables[1]].append(typ_inst)
elif 'cmp' in name:
TYPES_line[current_function][pc].append('%s: int8' % (variables[0]))
TYPES_total[current_function][variables[0]].append('int8')
elif 'array-length' == name:
TYPES_line[current_function][pc].append('%s: int' % (variables[0]))
TYPES_total[current_function][variables[0]].append('int')
elif 'instance-of' == name:
TYPES_line[current_function][pc].append('%s: int' % (variables[0]))
TYPES_total[current_function][variables[0]].append('int')
elif 'const' in name:
if 'wide' in name:
if name in ('const-wide/16', 'const-wide/32', 'const-wide/high16'):
typ = 'long'
elif name == 'const-wide':
typ = 'int64'
else:
typ = 'undefined'
TYPES_line[current_function][pc].append('%s: %s_high' % (variables[0], typ))
TYPES_total[current_function][variables[0]].append('%s_high'%typ)
v1 = 'v%s' % (str(int(variables[0].replace('v', ''))+1))
TYPES_line[current_function][pc].append('%s: %s_low' % (v1, typ))
TYPES_total[current_function][v1].append('%s_low'%typ)
else:
if '/high16' in name:
typ = 'int' #maybe?
elif '/' in name:
typ = 'int%s' % name.split('/')[1]
elif 'string' in name:
typ = 'java.lang.String'
elif name == 'const':
typ = 'int'
elif 'class' in name:
typ = 'class.%s' % variables[1]
else:
typ = 'undef'
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], typ))
TYPES_total[current_function][variables[0]].append(typ)
if pc == block.start:
#search for exceptions:
for bb in METHODS[current_function][0].basic_blocks.bb:
if bb.exception_analysis:
for ex in bb.exception_analysis.exceptions:
prev_pc = bb.end - bb.last_length
if pc == ex[1] and prev_pc not in done_lines:
new_worklist.append(prev_pc)
if pc == ex[1]:
if 'move-exception' == name:
if ex[0] == 'any':
typ = 'java.lang.Exception'
else:
typ = ex[0][1:-1].replace('/', '.')
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], typ))
TYPES_total[current_function][variables[0]].append(typ)
for pre in block.fathers: #continue at all father.ends of the block
father = pre[2]
prev_pc = father.end - father.last_length
if prev_pc not in done_lines:
new_worklist.append(prev_pc)
else:
prev_pc = get_prev_pc(current_function, pc, METHODS)
new_worklist.append(prev_pc)
worklist = new_worklist
#forward checking if we can find the other variables.
for line in METHODS[current_function][1].items():
pc = line[1][0]
instruction = line[1][1]
variables = instruction.get_output().split(', ')
not_found = [i for i in variables if i != '' and i[0] == 'v' and i not in "".join(TYPES_line[current_function][pc])]
if not_found:
source = []
destination = []
search = []
special = ''
type_change = False
if 0x01 <= instruction.OP <= 0x09:
search = [variables[1]]
source = [variables[1]]
destination = [variables[0]]
elif 0x2d <= instruction.OP <= 0x31:
search = [variables[1], variables[2]]
special = 'fixed'
destination = instruction.get_name().split('-')[-1]
elif 0x32 <= instruction.OP <= 0x3d:
search = [variables[0], variables[1]]
#binop
elif 0x7b <= instruction.OP <= 0x80 \
or 0xb0 <= instruction.OP <= 0xe2:
search = [variables[1]]
source = [variables[1]]
#destination = [variables[0]]
special = 'fixed'
destination = instruction.get_name().split('-')[-1].split('/')[0]
#type change!
elif 0x81 <= instruction.OP <= 0x8f:
# this instruction can change the type. this is important for e.g. int-to-long v0 v0
type_change = True
search = [variables[1]]
source = [variables[1]]
#destination = [variables[0]]
special = 'fixed'
destination = instruction.get_name().split('-')[-1].split('/')[0]
elif 0x90 <= instruction.OP <= 0xaf:
search = [variables[1], variables[2]]
source = [variables[1]] #same type?!!
special = 'fixed'
destination = instruction.get_name().split('-')[-1]
elif instruction.get_name() == 'check-cast':
search = [variables[0]]
elif 'iput' in instruction.get_name() or 'iget' in instruction.get_name():
search = [variables[1]]
elif 'aput' in instruction.get_name() or 'aget' in instruction.get_name():
special = 'array'
search = [variables[1], variables[2]]
source = [variables[1]]
destination = [variables[0]]
elif 'array-length' == instruction.get_name():
search = [variables[1]]
elif 'monitor-' in instruction.get_name():
search = [variables[0]]
elif '-switch' in instruction.get_name():
search = [variables[0]]
elif 'fill-array-data' == instruction.get_name():
search = [variables[0]]
elif 'instance-of' == instruction.get_name():
search = [variables[1]]
elif 'filled-new-array' == instruction.get_name():
search = get_variables_from_invoke(instruction)
elif 'throw' == instruction.get_name():
search = [variables[0]]
search = [i for i in search if i in not_found]
worklist = [[pc, search]]
first = True
done_lines = []
while worklist and search:
new_worklist = []
for runner_pc, searchlist in worklist:
done_lines.append(runner_pc)
for s in searchlist[:]:
found = [i for i in TYPES_line[current_function][runner_pc] if s == i.split(':')[0]]
if not found and runner_pc == 0:
found = [i for i in params if s == i.split(':')[0]]
if found and (not first or runner_pc == 0):
found = unique(found)
#print "Found %s. Adding to %d: %s" % (s, pc, "".join(found))
searchlist.remove(s)
TYPES_line[current_function][pc].extend(found)#("%s" % ",".join(found))
for f in found:
temp = f.split(': ')
TYPES_total[current_function][temp[0]].append(temp[1])
first = False
block = get_block(METHODS, current_function, runner_pc)
if runner_pc == block.start:
#search for exceptions:
for bb in METHODS[current_function][0].basic_blocks.bb:
if bb.exception_analysis:
for ex in bb.exception_analysis.exceptions:
prev_pc = bb.end - bb.last_length
if runner_pc == ex[1] and prev_pc not in done_lines:
new_worklist.append([prev_pc, searchlist])
for pre in block.fathers: #continue at all father.ends of the block
father = pre[2]
prev_pc = father.end - father.last_length
if prev_pc not in done_lines:
new_worklist.append([prev_pc, searchlist])
else:
prev_pc = get_prev_pc(current_function, runner_pc, METHODS)
new_worklist.append([prev_pc, searchlist])
worklist = new_worklist
post_indicator = ''
if variables[0] in source:
# we overwrite a source. currently, assume that the destination type is the same as the source type.
# this must not be the case:
# v2 = float
# neg-int v2 v2
# not v2 would be int.
# BUT, maybe this is not allowed from the verifier.
if not type_change:
continue
else:
post_indicator = '_after'
if special == 'fixed':
if '-long' in instruction.get_name() or '-double' in instruction.get_name():
TYPES_line[current_function][pc].append('%s%s: %s_high' % (variables[0], post_indicator, destination))
TYPES_total[current_function][variables[0]].append('%s_high'% destination)
v1 = 'v%s' % (str(int(variables[0].replace('v', ''))+1))
TYPES_line[current_function][pc].append('%s%s: %s_low' % (v1, post_indicator, destination))
TYPES_total[current_function][v1].append('%s_low' % destination)
else:
TYPES_line[current_function][pc].append('%s: %s' % (variables[0], destination))
TYPES_total[current_function][variables[0]].append('%s'% destination)
else:
for s in source:
found = None
for i in TYPES_line[current_function][pc]:
if '%s_after: ' % s in i:
found = i
break
elif s == i.split(':')[0]:
found = i
#found = list(set([i for i in TYPES_line[current_function][pc] if s == i.split(':')[0]]))
if found:
for d in destination:
found = found.replace(s, d)
if special == 'array':
found = found.replace('[]', '')
temp = found.split(': ')
if '-wide' in instruction.get_name():
# remove _high, if such a type is "copied"
if temp[1][-5:] == '_high':
temp[1] = temp[1][:-5]
if temp[1][-4:] == '_low':
temp[1] = temp[1][:-4]
TYPES_line[current_function][pc].append('%s%s: %s_high' % (temp[0], post_indicator, temp[1]))
TYPES_total[current_function][temp[0]].append('%s_high'% temp[1])
v1 = 'v%s' % (str(int(temp[0].replace('v', ''))+1))
TYPES_line[current_function][pc].append('%s%s: %s_low' % (v1, post_indicator, temp[1]))
TYPES_total[current_function][v1].append('%s_low' % temp[1])
else:
TYPES_line[current_function][pc].append("%s%s" % (found, post_indicator))
TYPES_total[current_function][temp[0]].append(temp[1])
else:
print current_function, '0x%x' % pc
print "Source %s not found during forward type checking" % s
for v in TYPES_total[current_function]:
if v in todo_registers:
TYPES_total_cpy[current_function][v] = unique(TYPES_total[current_function][v])
for l in TYPES_line[current_function]:
TYPES_line[current_function][l] = list(set(TYPES_line[current_function][l]))
return TYPES_total_cpy, TYPES_line
def get_variables_with_type_pc(params, function, pc, METHODS, types):
    """Attach an inferred type to every register name in *params*.

    Each register is looked up in the per-pc type table *types* first; when
    absent there, the method's declared parameter registers are consulted,
    then the implicit receiver register, and finally 'n/A' is used.
    Returns *params* untouched when no type information exists at all.
    """
    if len(types) == 0:
        return params
    annotated = []
    for reg in params:
        hits = [entry for entry in types[function][pc] if reg in entry]
        if hits:
            reg_type = hits[0].replace('%s: ' % reg, '')
        else:
            method_info = get_method_info(METHODS[function][0].method)
            declared = ['%s' % item[1] for item in method_info['params'] if item[0] == int(reg[1:])]
            if declared:
                reg_type = declared[0]
            elif int(reg[1:]) == method_info['registers'][1]:
                # register just past the locals holds the implicit *this*
                reg_type = function.split(';')[0][1:].replace('/', '.')
            else:
                reg_type = 'n/A'
        annotated.append('%s: %s' % (reg, reg_type))
    return annotated
def get_variables_with_previous_type_pc(params, function, pc, METHODS, types):
    """Resolve the types of the registers in *params* by walking backwards
    from *pc* through the control-flow graph of *function*.

    At pc 0 the method's declared parameters are used directly; otherwise a
    backward worklist over basic-block predecessors (including exception
    handler edges) is searched until every register is resolved.
    NOTE: *params* is consumed — resolved entries are removed from it.
    Returns a list of 'vN: type' strings.
    """
    with_type = []
    if pc == 0:
        mi = get_method_info(METHODS[function][0].method)
        for p in params:
            param_function = ['%s' % i[1] for i in mi['params'] if i[0] == int(p[1:])]
            if param_function:
                t = param_function[0]
            else:
                t = 'n/A'
            with_type.append('%s: %s' % (p, t))
    else:
        worklist = [pc]
        done_lines = []
        while worklist and params:
            new_worklist = []
            for pc in worklist:
                done_lines.append(pc)
                for p in params[:]:
                    in_line = [i for i in types[function][pc] if "%s:"%p in i]
                    if in_line:
                        t = in_line[0].replace('%s: ' % p, '')
                        with_type.append('%s: %s' % (p, t))
                        params.remove(p)
                block = get_block(METHODS, function, pc)
                if pc == block.start:
                    #search for exceptions:
                    for bb in METHODS[function][0].basic_blocks.bb:
                        if bb.exception_analysis:
                            for ex in bb.exception_analysis.exceptions:
                                prev_pc = bb.end - bb.last_length
                                if pc == ex[1] and prev_pc not in done_lines:
                                    new_worklist.append(prev_pc)
                    for pre in block.fathers: #continue at all father.ends of the block
                        father = pre[2]
                        prev_pc = father.end - father.last_length
                        if prev_pc not in done_lines:
                            new_worklist.append(prev_pc)
                elif pc == 0:
                    # reached method entry without resolving these registers
                    for p in params:
                        # BUG FIX: old code did '%s: n/A' % (p, t) — a 2-tuple
                        # against one placeholder raised TypeError (and t could
                        # be unbound here).
                        with_type.append('%s: n/A' % p)
                else:
                    new_worklist.append(get_prev_pc(function, pc, METHODS))
            worklist = new_worklist
    return with_type
def pc_to_line(method, pc, METHODS):
    """Map bytecode offset *pc* of *method* to its instruction line number."""
    entry = METHODS[method][3][pc]  # [line, instruction, basic block]
    return entry[0]
def line_to_pc(method, line, METHODS):
    """Map instruction *line* of *method* back to its bytecode pc.

    Returns None when the line has no entry. The old code performed the
    dictionary lookup twice; a single .get() is used instead.
    """
    entry = METHODS[method][1].get(line)  # [pc, instruction]
    if entry:
        return entry[0]
    return None
def pc_to_instruction(method, pc, METHODS):
    """Return the decoded instruction object located at *pc* in *method*."""
    entry = METHODS[method][3][pc]  # [line, instruction, basic block]
    return entry[1]
def get_prev_pc(method, pc, METHODS):
    """Return the pc of the instruction on the line preceding *pc*."""
    previous_line = pc_to_line(method, pc, METHODS) - 1
    return METHODS[method][1][previous_line][0]
def get_next_pc(method, pc, METHODS):
    """Return the pc of the instruction on the line following *pc*."""
    next_line = pc_to_line(method, pc, METHODS) + 1
    return METHODS[method][1][next_line][0]
def get_prev_instruction(method, pc, METHODS):
    """Return the instruction object on the line preceding *pc*."""
    previous_line = pc_to_line(method, pc, METHODS) - 1
    return METHODS[method][1][previous_line][1]
def get_next_instruction_and_pc(method, pc, METHODS):
    """Return the [pc, instruction] pair of the line following *pc*."""
    next_line = pc_to_line(method, pc, METHODS) + 1
    return METHODS[method][1][next_line]
def get_instruction(method, pc, METHODS):
    """Return the instruction object located at *pc* in *method*."""
    current_line = pc_to_line(method, pc, METHODS)
    return METHODS[method][1][current_line][1]
def check_function(fun_name, METHODS):
    """Look up *fun_name* in METHODS.

    Returns [ret_code, out_locations]:
        ret_code       -1 when the function is unknown, 0 when found.
        out_locations  the method's precomputed return sites (None when
                       unknown); each entry carries the pc of a return
                       location and the variable returned there (can be
                       none for return-void).
    """
    if fun_name in METHODS:
        return [0, METHODS[fun_name][2]]
    #print fun_name, "not found!"
    return [-1, None]
def get_variables_from_invoke(instruction):
    """Extract the argument register names of an invoke-style instruction.

    The last comma-separated operand (the callee descriptor) is dropped;
    'invoke-*/range' forms written as 'vA ... vB' are expanded into the
    full list of registers.
    """
    registers = instruction.get_output().split(", ")[:-1]
    if 'range' in instruction.get_name():
        bounds = registers[0].replace('v', '').split(' ... ')
        if len(bounds) > 1:
            first, last = int(bounds[0]), int(bounds[1])
            registers = ['v%d' % idx for idx in range(first, last + 1)]
    return registers
def update_vars_if_not_equal(MARKED_VARS, new_vars, destination_function, destination_pc):
    """Merge *new_vars* into MARKED_VARS[destination_function][destination_pc].

    Returns True when the entry was extended (i.e. the new variables were
    not already a subset of the stored ones), False when nothing changed.
    Scalars are wrapped into a one-element list first.
    """
    if type(new_vars) is not list:
        new_vars = [new_vars]
    per_function = MARKED_VARS.get(destination_function)
    existing = per_function.get(destination_pc) if per_function is not None else None
    if existing is not None:
        same = set(new_vars) == set(existing)
        if same or set(new_vars).issubset(set(existing)):
            return False
    MARKED_VARS[destination_function][destination_pc].extend(new_vars)
    MARKED_VARS[destination_function][destination_pc] = unique(MARKED_VARS[destination_function][destination_pc])
    return True
''' get parameters and register information of the function '''
def get_method_info(method):
    """Collect register layout and parameter typing for *method*.

    Returns a dict with:
        'registers': (first, last) index range of the method's local
                     (non-parameter) registers.
        'params':    list of (register index, type name) pairs for the
                     declared parameters; wide types (long/double) occupy
                     two consecutive registers.

    Changes vs. the original: the dead local ``j`` (written, never read)
    and the redundant re-initialisation of info["params"] were removed.
    """
    info = {'registers':(0,0),'params':[]}
    if method.code is None:
        # abstract/native methods carry no code item — nothing to report
        #print "WARN: Method without code skipped: %s.%s"%(method.class_name,method.name)
        return info
    nb = method.code.registers_size
    ret = method.proto.split(')')
    params = ret[0][1:].split()
    # wide parameters (long/double) consume one extra register each
    inner_offset = 0
    for p in params:
        if dvm.get_type(p) in ['long', 'double']:
            inner_offset += 1
    if params:
        info["registers"] = (0, nb - 1 - len(params) - inner_offset)
        inner_counter = nb - len(params) - inner_offset
        for p in params:
            t = dvm.get_type(p)
            if t in ['long', 'double']:
                # NOTE(review): the lower register is tagged '_low' here while
                # the type-checking code tags the base register '_high' —
                # confirm which convention is intended.
                info["params"].append((inner_counter, t + '_low'))
                info["params"].append((inner_counter + 1, t + '_high'))
                inner_counter += 2
            else:
                info["params"].append((inner_counter, t))
                inner_counter += 1
    else:
        info["registers"] = (0, nb - 1)
    return info
''' get the block of function m that contains the location pc '''
def get_block(METHODS, method, pc):
    """Return the basic block of *method* that contains the location *pc*."""
    entry = METHODS[method][3][pc]  # [line, instruction, basic block]
    return entry[2]
''' print basic blocks of a function '''
def print_bb_childs(bb, show_code = True, prefix=''):
    """Print each basic block in *bb* (name plus [start, end) pc range) and,
    when *show_code* is set, the disassembled instructions it contains."""
    for block in bb:
        print prefix, block.name, (block.start, block.end)
        #display block part of method
        if show_code:
            m = block.method
            line = 0
            pc = 0
            # walk the whole method; only print instructions inside this block
            for ins in m.get_code().get_bc().get_instructions():
                if block.start <= pc < block.end:
                    print "\t", line, "0x%x(%d)" % (pc,pc), ins.get_name() + " " + ins.get_output(pc)
                pc += ins.get_length()
                line += 1
        #for c in block.childs:
        #    print_bb_intern(c, prefix + '\t')
''' internal function to display the childs of a bb '''
def print_bb_intern(bb, prefix=''):
    """Recursively print a child entry *bb* = (start, end, block) and the
    block's own children, indenting one tab per level."""
    print prefix, bb[0], bb[1], bb[2].name
    for c in bb[2].childs:
        print_bb_intern(c, prefix + '\t')
def unique(l):
    """Return the distinct elements of *l* (order is not preserved)."""
    return [element for element in set(l)]
def output_path_json(source, target, typ, data=None):
    """Print one graph edge as a pseudo-JSON line; generate_call_stack_json()
    captures stdout and parses these lines back with a regex."""
    if data:
        print '{source="%s", target="%s", type="%s", data="%s"}' % (source, target, typ, data)
    else:
        print '{source="%s", target="%s", type="%s"}' % (source, target, typ)
def generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, m, pc, path, combined):
    """Recursively emit the taint-propagation edges reachable from method *m*
    as pseudo-JSON lines via output_path_json() (captured from stdout by
    generate_call_stack_json).

    Follows direct calls, returns into annotated callers, static/instance
    field propagation and — when config.ENABLE_EXTERNAL_PATHS is set —
    synthetic <<FILE>>/<<INTENT>>/<<BROADCAST>>/<<CONTENTPROVIDER>> nodes.
    *path* collects visited methods and acts as the cycle guard (mutated).
    """
    path.append(m)
    # visit this method's annotations in pc order
    for call_pc, ann in iter(sorted(annotations[m].iteritems())):
        if ann.get('function'):
            ins = get_instruction(m, call_pc, METHODS)
            # the callee descriptor is the last operand: 'Lcls;->name(args)ret'
            tmp = ins.get_output().split(',')[-1].split('(')[0]
            tmp = tmp.split('->')
            function_name = tmp[1]
            key = tmp[0]
            if config.ENABLE_EXTERNAL_PATHS:
                # data written to a file/stream: link through <<FILE>>
                if ('println' in function_name
                    or 'write' in function_name
                    or 'SharedPreferences$Editor;->put' in function_name
                    or 'SQLiteDatabase;->insert' in function_name) and not key.startswith('Landroid/support'):
                    #STREAMS_READ.append([key,pc])
                    output_path_json(m, '<<FILE>>', 'file')
                    for i in STREAMS_WRITE:
                        output_path_json('<<FILE>>', i[0], 'file')
                        if i[0] not in path:
                            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], path, combined)
                # intent sent: link through <<INTENT>> to all intent readers
                elif (('startActivity' in function_name and 'Landroid/content/Intent' in key) and not 'makeRestartActivity' in function_name):
                    #INTENTS_WRITE.append([key,pc])
                    output_path_json(m, '<<INTENT>>', 'intent')
                    for i in INTENTS_READ:
                        output_path_json('<<INTENT>>', i[0], 'intent')
                        if i[0] not in path:
                            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], path, combined)
                # broadcast: link through <<BROADCAST>> to onReceive entry points
                elif 'sendBroadcast' in function_name and 'Landroid/content/Intent' in ins.get_output(pc):
                    #INTENTS_BROADCAST_SENT.append([key,pc])
                    output_path_json(m, '<<BROADCAST>>', 'broadcast')
                    for i in FIRST_LINE_ONRECEIVE:
                        output_path_json('<<BROADCAST>>', i[0], 'broadcast')
                        if i[0] not in path:
                            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], path, combined)
                # content provider insert: link through <<CONTENTPROVIDER>>
                elif 'insert' in function_name and 'ContentResolver' in ins.get_output(pc):
                    #CONTENT_PROV_WRITE.append([key,pc])
                    output_path_json(m, '<<CONTENTPROVIDER>>', 'contentprovider')
                    for i in CONTENT_PROV_READ:
                        output_path_json('<<CONTENTPROVIDER>>', i[0], 'contentprovider')
                        if i[0] not in path:
                            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], path, combined)
                else:
                    # ordinary (non-external) call edge
                    spl = ins.get_output().split(', L')
                    if len(spl) == 2:
                        fun = 'L' + spl[1]
                        fun = fun.replace('->', ' ')
                        fun = fun.replace('(', ' (')
                        output_path_json(m, fun, 'call')
                        if fun not in path:
                            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, fun, 0, path, combined)
                    else:
                        print "error"
        if ann.get('SINK'):
            # edge from this method into a sink call
            ins = get_instruction(m, call_pc, METHODS)
            spl = ins.get_output().split(', L')
            if len(spl) == 2:
                fun = 'L' + spl[1]
                fun = fun.replace('->', ' ')
                fun = fun.replace('(', ' (')
                output_path_json(m, fun, 'sink')
            else:
                print "error"
        if ann.get('return'):
            # tainted return value: follow back into every annotated caller
            for parent in METHODS[m][0].method.XREFfrom.items:
                parent_key = '%s %s %s' % (parent[0].get_class_name(), parent[0].get_name(), parent[0].get_descriptor())
                #check if caller is annotated
                parent_annotated = False;
                for entry in parent[1]:
                    if combined.get(parent_key) and combined.get(parent_key).get(entry.idx):
                        parent_annotated = True
                if parent_annotated:
                    output_path_json(m, parent_key, 'return')
                    if parent_key not in path:
                        generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, parent_key, 0, path, combined)
        if ann.get('marking_staticvar'):
            # tainted static field write: connect to every method reading it
            for i in ann.get('marking_staticvar'):
                var = i.split(':')
                if len(var) == 2:
                    for m2 in [j for j in annotations]:
                        for call_pc2, ann in annotations[m2].iteritems():
                            if ann.get('propagate_staticvar'):
                                for j in ann.get('propagate_staticvar'):
                                    var2 = j.split(':')
                                    if len(var2) == 3:
                                        if var2[0] == var[0]:
                                            output_path_json(m, m2, 'static')
                                            if m2 not in path:
                                                generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, m2, call_pc2, path, combined)
                                    else:
                                        print "error"
                else:
                    print "error"
        if ann.get('marking_instancevar'):
            # tainted instance field write: connect to every method reading it
            for i in ann.get('marking_instancevar'):
                var = i.split(':')
                if len(var) == 3:
                    for m2 in [j for j in annotations]:
                        for call_pc2, ann in annotations[m2].iteritems():
                            if ann.get('propagate_instancevar'):
                                for j in ann.get('propagate_instancevar'):
                                    var2 = j.split(':')
                                    if len(var2) == 4:
                                        if var2[1] == var[1]:
                                            output_path_json(m, m2, 'instance')
                                            if m2 not in path:
                                                generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, m2, call_pc2, path, combined)
                                    else:
                                        print "error"
                else:
                    print "error"
def print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, m, pc, path, prefix=''):
    """Pretty-print the annotated call stack rooted at method *m*, one tab
    deeper per level; *path* guards against cycles ('(loop)' marker)."""
    path.append(m)
    for call_pc, ann in iter(sorted(annotations[m].iteritems())):
        if ann.get('function'):
            ins = get_instruction(m, call_pc, METHODS)
            spl = ins.get_output().split(', L')
            if len(spl) == 2:
                fun = 'L' + spl[1]
                fun = fun.replace('->', ' ')
                fun = fun.replace('(', ' (')
                if fun in path:
                    #print '%s0x%x\tcall %s (loop)' % (prefix, call_pc, fun)
                    print '%s\tcall %s (loop)' % (prefix, fun)
                else:
                    if fun in [i[0] for i in SINKS]:
                        print '%s0x%x\t(SINK) call %s' % (prefix, call_pc, fun)
                        #print '%s\t(SINK) call %s' % (prefix, fun)
                    else:
                        #print '%s0x%x\tcall %s' % (prefix, call_pc, fun)
                        print '%s\tcall %s' % (prefix, fun)
                    print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, fun, 0, path, '\t' + prefix)
            else:
                print prefix, ins.get_name(), ins.get_output()
        if ann.get('return'):
            # tainted return value: continue printing inside every caller
            for parent in METHODS[m][0].method.XREFfrom.items:
                parent_key = '%s %s %s' % (parent[0].get_class_name(), parent[0].get_name(), parent[0].get_descriptor())
                if parent_key in path:
                    #print '%s0x%x\treturn %s (loop)' % (prefix, call_pc, parent_key)
                    print '%s\treturn %s (loop)' % (prefix, parent_key)
                else:
                    #print '%s0x%x\treturn %s' % (prefix, call_pc, parent_key)
                    print '%s\treturn %s' % (prefix, parent_key)
                    print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, parent_key, 0, path, '\t' + prefix)
        if ann.get('marking_staticvar'):
            # static field written with tainted data: show all its readers
            for i in ann.get('marking_staticvar'):
                var = i.split(':')
                if len(var) == 2:
                    #print '%s0x%x\tstatic_var write %s' % (prefix, call_pc, var[0])
                    print '%s\tstatic_var write %s' % (prefix, var[0])
                    for m2 in [j for j in annotations]:
                        for call_pc2, ann in annotations[m2].iteritems():
                            if ann.get('propagate_staticvar'):
                                for j in ann.get('propagate_staticvar'):
                                    var2 = j.split(':')
                                    if len(var2) == 3:
                                        if var2[0] == var[0]:
                                            if m2 in [j[0] for j in SINKS]:
                                                #print '\t%s0x%x\t(SINK) static_var read %s' % (prefix, call_pc2, m2)
                                                print '\t%s\t(SINK) static_var read %s' % (prefix, m2)
                                            else:
                                                #print '\t%s0x%x\tstatic_var read %s' % (prefix, call_pc2, m2)
                                                print '\t%s\tstatic_var read %s' % (prefix, m2)
                                            if m2 in path:
                                                #print "\t\t%s0x%x\t%s (loop)" % (prefix, call_pc2, m2)
                                                print "\t\t%s\t%s (loop)" % (prefix, m2)
                                            else:
                                                print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, m2, call_pc2, path, '\t\t' + prefix)
                                    else:
                                        print ann.get('propagate_staticvar')
                else:
                    print "ERROR", ann.get('marking_staticvar')
        if ann.get('marking_instancevar'):
            # instance field written with tainted data: show all its readers
            for i in ann.get('marking_instancevar'):
                var = i.split(':')
                if len(var) == 3:
                    #print '%s0x%x\tinstance_var write %s' % (prefix, call_pc, var[1])
                    print '%s\tinstance_var write %s' % (prefix, var[1])
                    for m2 in [j for j in annotations]:
                        for call_pc2, ann in annotations[m2].iteritems():
                            if ann.get('propagate_instancevar'):
                                for j in ann.get('propagate_instancevar'):
                                    var2 = j.split(':')
                                    if len(var2) == 4:
                                        if var2[1] == var[1]:
                                            if m2 in [j[0] for j in SINKS]:
                                                #print '\t%s0x%x\t(SINK) instance_var read %s' % (prefix, call_pc2, m2)
                                                print '\t%s\t(SINK) instance_var read %s' % (prefix, m2)
                                            else:
                                                #print '\t%s0x%x\tinstance_var read %s' % (prefix, call_pc2, m2)
                                                print '\t%s\tinstance_var read %s' % (prefix, m2)
                                            if m2 in path:
                                                #print "\t\t%s0x%x\t%s (loop)" % (prefix, call_pc2, m2)
                                                print "\t\t%s\t%s (loop)" % (prefix, m2)
                                            else:
                                                print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, m2, call_pc2, path, '\t\t' + prefix)
                                    else:
                                        print ann.get('propagate_staticvar')
                else:
                    print "ERROR", ann.get('marking_instancevar')
def generate_call_stack_json(METHODS, SOURCES, SINKS, annotations, combined):
    """Build a d3-style JSON graph ("nodes"/"links") of the taint paths from
    all sources; returns the JSON string, or None when the graph is empty.

    sys.stdout is temporarily redirected to capture the pseudo-JSON lines
    printed by generate_call_stack_json_helper()/output_path_json(); the
    lines are then parsed back with a regex and dead-end nodes are pruned.
    """
    class WritableObject:
        # stand-in for sys.stdout that collects every printed line
        def __init__(self):
            self.content = []
        def write(self, string):
            if string != '\n':
                self.content.append(string)
    class Content:
        # accumulated graph; every add_* helper de-duplicates
        sources = []
        sinks = []
        nodes = []
        links = []
        def add_node(self, s):
            if s not in self.nodes:
                self.nodes.append(s)
        def add_source(self, s):
            if s not in self.sources:
                self.sources.append(s)
        def add_sink(self, s):
            if s not in self.sinks:
                self.sinks.append(s)
        def add_link(self, s):
            if s not in self.links:
                self.links.append(s)
    def simplyfy_functionname(s):
        # currently a no-op; shortening logic kept commented for reference
        tmp = s
        #tmp = s.split(';')
        #tmp = tmp[0].split('/')[-1]
        #tmp = tmp.split('$')[0]
        return tmp
    def simplyfy_source_sink(s):
        # 'Lpkg/Cls;->method(args)ret' -> 'Cls.method(...)'
        if not ';->' in s:
            return s
        s = s.replace(';->', '; ')
        spl = s.split('; ')
        cl = spl[0].split('/')[-1]
        method = spl[1].split('(')[0] + "(...)"
        sig = cl + "." + method
        return sig.replace(' ', '')
    c = Content()
    out = WritableObject()
    # capture everything the helper prints
    remember = sys.stdout
    sys.stdout = out
    for i in SOURCES:
        if annotations.get(i[0]):
            s = METHODS[i[0]][3][i[1]][1].get_output().split('L')[1]
            c.add_source(simplyfy_source_sink(s))
            c.add_node(simplyfy_source_sink(s))
            c.add_link([simplyfy_source_sink(s), simplyfy_functionname(i[0]), 'call'])
            generate_call_stack_json_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], [], combined)
    sys.stdout = remember
    # parse the captured pseudo-JSON lines back into nodes and links
    reg = re.compile(r'{source="(.*)", target="(.*)", type="(.*?)".*')
    for line in out.content:
        m = re.search(reg, line)
        if m:
            source = simplyfy_functionname(m.group(1))
            target = m.group(2)
            c.add_node(source)
            if m.group(3) == 'sink':
                c.add_sink(simplyfy_source_sink(target))
                c.add_node(simplyfy_source_sink(target))
                c.add_link([source, simplyfy_source_sink(target), 'call']) #, m.group(3) would contain the data
            else:
                target = simplyfy_functionname(m.group(2))
                c.add_node(source)
                c.add_node(target)
                c.add_link([source, target, m.group(3)]) #, m.group(3) would contain the data
        else:
            print "error, cannot parse line: ", line
    #optimize graph to remove all nodes without any link out (e.g. function calls which just return a value)
    list_changed = True
    while list_changed:
        list_changed = False
        for i, n in enumerate(c.nodes[:]):
            if n == '' or n in c.sinks or n in c.sources:
                continue
            found = False
            relevant_links = []
            for l in c.links:
                if l[0] == n or l[1] == n:
                    relevant_links.append(l)
            participating_nodes = []
            for l in relevant_links:
                participating_nodes.append(l[0])
                participating_nodes.append(l[1])
                if l[0] == n:
                    found = True
            participating_nodes = list(set(participating_nodes))
            if not found or len(participating_nodes) <= 2:
                # dead end: drop the node and every link touching it
                list_changed = True
                c.nodes.remove(n)
                for l in c.links[:]:
                    if l[0] == n or l[1] == n:
                        c.links.remove(l)
    # final cleanup: drop links to removed nodes and self-loops
    for l in c.links[:]:
        if l[1] not in c.nodes:
            c.links.remove(l)
            continue
        if l[0] == l[1]:
            c.links.remove(l)
    # serialize to the d3 force-layout JSON format
    ret = []
    ret.append("{")
    ret.append( ' "nodes":[')
    entry = []
    for n in c.nodes:
        group = 0
        if n in c.sources:
            group = 1
        if n in c.sinks:
            group = 2
        entry.append('    {"name":"%s", "group":"%d"}' % (n, group))
    ret.append( ',\n'.join(entry))
    ret.append( ' ],')
    ret.append( ' "links":[')
    entry = []
    for l in c.links:
        entry.append('    {"source":%d, "target":%s, "type":"%s"}' % (c.nodes.index(l[0]), c.nodes.index(l[1]), l[2]))
    ret.append( ',\n'.join(entry))
    ret.append( ' ]')
    ret.append( '}')
    if c.nodes or c.links:
        return "\n".join(ret)
    else:
        return None;
def print_call_stack(METHODS, SOURCES, SINKS, annotations):
    """Print a textual call stack for every source whose method carries
    annotations."""
    for i in SOURCES:
        if annotations.get(i[0]):
            print "SRC: %s 0x%x"% (i[0], i[1])
            print_call_stack_helper(METHODS, SOURCES, SINKS, annotations, i[0], i[1], [], '\t')
# needed for sys.out redirection
class NullDevice():
    """Minimal file-like sink that discards every write; assigned to
    sys.stdout to silence the analysis output."""
    def write(self, s):
        """Swallow *s* without doing anything."""
        return None
def main():
    """Run the full taint-analysis pipeline on the APK named in argv[1].

    Pipeline: parse methods/sources/sinks, forward data-flow analysis,
    backward data-flow analysis, type checking, combination of both
    directions, then report printing, per-method image generation and a
    JSON call-stack export for the d3 visualization.
    """
    #global MARKED_VARS_backward, MARKED_VARS_forward, MARKED_combined, annotations, VARIABLE_TYPES_pc, VARIABLE_TYPES_total, apk_name, STREAMS_WRITE, STREAMS_READ
    watch = Stopwatch()
    beg = time.time()
    # APK comes from the command line; falls back to a bundled demo app.
    if len(sys.argv) > 1:
        apk_name = sys.argv[1]
    else:
        apk_name = 'SkeletonApp.apk'
    print "Analysing %s:" % apk_name
    #comment in to disable printing
    orig = sys.stdout
    #sys.stdout = NullDevice()
    with watch:
        METHODS, MEMBERS, SOURCES, SINKS, dex = parse_methods(apk_name,
                                                              config.SOURCE_DEFINITION,
                                                              config.SINK_DEFINITION)
        print "Sources: %d, Sinks: %d" % (len(SOURCES), len(SINKS))
    print "\tParsing methods took %fs" % watch.get_duration()
    # Nothing to analyse without at least one source and one sink.
    if len(SOURCES) == 0 or len(SINKS) == 0:
        sys.stdout = orig
        print "Complete Analysis took %fs" % (time.time() - beg)
        print "%d sources, %d sinks" % (len(SOURCES), len(SINKS))
        return
    with watch:
        MARKED_VARS_forward = dfg_forward(METHODS, MEMBERS, SOURCES, dex)
        pass
    print "\tForward analysis took %fs" % watch.get_duration()
    with watch:
        MARKED_VARS_backward, _ = dfg_backward(METHODS, MEMBERS, SINKS, dex, MARKED_VARS_forward)
    print "\tBackward analysis took %fs" % watch.get_duration()
    with watch:
        VARIABLE_TYPES_total, VARIABLE_TYPES_pc = type_checking(MARKED_VARS_backward, MARKED_VARS_forward, METHODS, dex)
    print "\tType checking took %fs" % watch.get_duration()
    with watch:
        MARKED_combined, annotations, annotations_line = dfg_combine(MARKED_VARS_backward, MARKED_VARS_forward, VARIABLE_TYPES_pc, METHODS, SOURCES, SINKS)
    print "\tCombining forward and backward analysis took %fs" % watch.get_duration()
    sys.stdout = orig
    print "Complete Analysis took %fs\n" % (time.time() - beg)
    if False: # disable annotation output generation here
        import pickle
        annotations_line.__reduce__();
        with open('/tmp/annotations.pickle', 'wb') as f:
            pickle.dump(annotations_line, f)
    #check annotations, if a variable is not set
    # count = 0
    # for function, entry in annotations.items():
    #     for pc, inner in entry.items():
    #         for typ, var in inner.items():
    #             count += 1
    #             if 'n/A' in ", ".join(var):
    #                 print "Variable undefined:\n\t%s" % function
    #                 print "\t0x%x %s: %s" % (pc, typ, ", ".join(var))
    if True: #disable image creation here
        # Counter used to keep truncated (over-long) keys unique.
        overflow_counter = 0
        with watch:
            dx = uVMAnalysis(dex)
            all_methods = [i for i in dx.get_methods()]
            print "Generating %d images." % len(all_methods)
            for m in all_methods:
                key = '%s %s %s' % (m.method.get_class_name(), m.method.get_name(), m.method.get_descriptor())
                # Truncate keys that would exceed filesystem filename limits.
                if len(key) > 180:
                    key = key[:180] + str(overflow_counter)
                    overflow_counter += 1
                # Only render methods that carry analysis results.
                if MARKED_VARS_forward.get(key) or MARKED_VARS_backward.get(key):
                    filename = 'methods/ann_'+key.replace('/', '.')+'.png'
                    print "Creating Image for %s" % key
                    try:
                        os.remove(filename)
                    except:
                        pass
                    try:
                        buff = bytecode.method2dot(m , MARKED_VARS_backward.get(key), MARKED_VARS_forward.get(key), MARKED_combined.get(key), annotations.get(key), VARIABLE_TYPES_pc.get(key), VARIABLE_TYPES_total.get(key))
                        bytecode.method2format(filename, "png", m, buff )
                    except Exception as e:
                        print key
                        print e
        print "Creating method images took %fs" % watch.get_duration()
    print "%d sources, %d sinks" % (len(SOURCES), len(SINKS))
    print "Sources in:"
    cnt = 0
    sourceFound = False
    for i in SOURCES:
        # NOTE(review): unlike the SINKS loop below, the per-pc check is
        # commented out here, so any annotation in the function counts.
        if annotations.get(i[0]):# and annotations.get(i[0]).get(i[1]):
            # print "\t%s 0x%x"% (i[0], i[1])
            cnt += 1
            sourceFound = True
    print " found %d functions containing sources." % cnt
    print "Sinks in:"
    cnt = 0
    sinkFound = False
    for i in SINKS:
        if annotations.get(i[0]) and annotations.get(i[0]).get(i[1]):
            # print "\t%s 0x%x"% (i[0], i[1])
            cnt += 1
            sinkFound = True
    print " found %d functions containing sinks." % cnt
    print "%d functions contain annotations.\n" % len(annotations)
    # A leak is flagged when at least one source and one sink were annotated.
    if sourceFound and sinkFound:
        print "potential data leakage: YES"
    else:
        print "potential data leakage: NO"
    if True:
        print ""
        js = generate_call_stack_json(METHODS, SOURCES, SINKS, annotations, MARKED_combined)
        if js:
            json_name = 'd3-visualization/data/%s.json'%apk_name.split('/')[-1].split(':')[0]
            with open(json_name, 'w+') as f:
                f.write(js)
            print "JSON written to", json_name
    if False:
        print "\n\n\n"
        print_call_stack(METHODS, SOURCES, SINKS, annotations)
#create CFG:
#./androgexf.py -i YOURAPP.apk -o YOURAPP.gexf
# Script entry point.
if __name__ == "__main__" :
    main()
| {
"content_hash": "02044682ce98cfcac3dfb39cf03303f3",
"timestamp": "",
"source": "github",
"line_count": 2447,
"max_line_length": 249,
"avg_line_length": 60.28238659583163,
"alnum_prop": 0.47391719939529936,
"repo_name": "titze/apparecium",
"id": "ccca668b5d11143c511001850a554206db770ce0",
"size": "147533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "74988"
},
{
"name": "Python",
"bytes": "1582096"
},
{
"name": "Shell",
"bytes": "137"
}
],
"symlink_target": ""
} |
import mock
import requests
import requests_mock
import unittest
from collections import OrderedDict as odict
from appletea import forecastio
from appletea.forecastio.models import Forecast
def _urlq(items):
l = []
for k, v in items:
l.append('%s=%s' % (k, v))
return '&'.join(l)
class TestApi(unittest.TestCase):
    """Tests for forecastio.get_forecast: HTTP error propagation, URL
    construction, request timeout and the returned model type."""

    def setUp(self):
        # Fixed request parameters shared by every test.
        self.baseurl = 'https://api.forecast.io/forecast'
        self.apikey = '238ff8ab86e8245aa668b9d9cf8e8'
        self.latitude = 51.036391
        self.longitude = 3.699794

    @requests_mock.Mocker()
    def test_get_forecast_raises_client_error_404(self, mock):
        """A 4xx response must surface as requests.HTTPError with its code."""
        mock.get(requests_mock.ANY, status_code=404, json={})
        with self.assertRaises(requests.HTTPError) as e_cm:
            forecastio.get_forecast(self.apikey, self.latitude, self.longitude)
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
        self.assertEqual(e_cm.exception.response.status_code, 404)

    @requests_mock.Mocker()
    def test_get_forecast_raises_server_error_503(self, mock):
        """A 5xx response must surface as requests.HTTPError with its code."""
        mock.get(requests_mock.ANY, status_code=503, json={})
        with self.assertRaises(requests.HTTPError) as e_cm:
            forecastio.get_forecast(self.apikey, self.latitude, self.longitude)
        self.assertEqual(e_cm.exception.response.status_code, 503)

    def test_get_forecast_sets_correct_connect_timeout(self):
        """get_forecast must pass timeout=5 to requests.get."""
        def requests_get_mock(*args, **kwargs):
            # Assert on the timeout, then hand back a canned mock response.
            self.assertEqual(kwargs.get('timeout'), 5)
            with requests.sessions.Session() as session:
                from requests.packages import urllib3
                urllib3.disable_warnings()
                return requests_mock.create_response(
                    session.request('GET', args[0]), json={})
        with mock.patch('requests.get', requests_get_mock):
            forecastio.get_forecast(self.apikey, self.latitude, self.longitude)

    @requests_mock.Mocker()
    def test_get_forecast_calls_correct_url(self, mock):
        """Request URL is <baseurl>/<apikey>/<lat>,<lng> without a query."""
        mock.get(requests_mock.ANY, json={})
        forecastio.get_forecast(self.apikey, self.latitude, self.longitude)
        expected_url = '%s/%s/%s,%s' % (
            self.baseurl, self.apikey, self.latitude, self.longitude)
        req = mock.last_request
        url = '%s://%s%s' % (req.scheme, req.netloc, req.path)
        self.assertEqual(url, expected_url)

    @requests_mock.Mocker()
    def test_get_forecast_calls_correct_url_with_argument(self, mock):
        """One keyword argument becomes the query string."""
        mock.get(requests_mock.ANY, json={})
        kwargs = {'unit': 'si'}
        forecastio.get_forecast(
            self.apikey, self.latitude, self.longitude, **kwargs)
        expected_url = '%s/%s/%s,%s?%s' % (
            self.baseurl, self.apikey, self.latitude,
            self.longitude, _urlq(kwargs.items()))
        req = mock.last_request
        url = '%s://%s%s?%s' % (req.scheme, req.netloc, req.path, req.query)
        self.assertEqual(url, expected_url)

    @requests_mock.Mocker()
    def test_get_forecast_calls_correct_url_with_multi_argument(self, mock):
        """Several keyword arguments appear in insertion order in the query."""
        mock.get(requests_mock.ANY, json={})
        # Build the OrderedDict from a pair list: constructing it from a dict
        # literal does not guarantee ordering before Python 3.7.
        kwargs = odict([('unit', 'si'), ('lang', 'en')])
        forecastio.get_forecast(
            self.apikey, self.latitude, self.longitude, **kwargs)
        expected_url = '%s/%s/%s,%s?%s' % (
            self.baseurl, self.apikey, self.latitude,
            self.longitude, _urlq(kwargs.items()))
        req = mock.last_request
        url = '%s://%s%s?%s' % (req.scheme, req.netloc, req.path, req.query)
        self.assertEqual(url, expected_url)

    @requests_mock.Mocker()
    def test_get_forecast_returns_object_model(self, mock):
        """The JSON response is wrapped in a Forecast model object."""
        mock.get(requests_mock.ANY, json={})
        r = forecastio.get_forecast(self.apikey, self.latitude, self.longitude)
        self.assertIsInstance(r, Forecast)
| {
"content_hash": "8d7d92b5d7473f35bbf1f269da8dd98f",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 79,
"avg_line_length": 38.255102040816325,
"alnum_prop": 0.6303014137103228,
"repo_name": "beylsp/appletea",
"id": "13f34d4d1c80987cc50b6882ce3d8f1ec9d93166",
"size": "3749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/forecastio/test_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8562"
}
],
"symlink_target": ""
} |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Initial portfolio budget — presumably DKK, per the MDKK/DKK axis labels
# used further down; TODO confirm against the data files.
budget = 1000000
# Base level for expressing portfolio value as an index (start = 100).
index = 100
def prepare_data(df):
    '''
    Return: (pdf,sdf), dataframes containing data on
    portfolio and statistics.
    '''
    # Parse timestamps and index the raw frame by time.
    df['Time'] = pd.to_datetime(df['Time'])
    df = df.set_index('Time')
    statscolumns = ['Maximum Value', 'Minimum Value', 'Expected Value', 'CVaR', 'Trading Cost']
    # pdf: per-asset position values (every non-statistics column).
    # NOTE(review): this keeps the non-numeric 'Type' column in pdf as well,
    # so the sum(axis=1) below relies on pandas skipping it — confirm.
    pdf = df[[c for c in df.columns if c not in statscolumns]]
    # sdf: per-revision statistics plus the portfolio 'Type' label.
    sdf = df[[c for c in df.columns if c in statscolumns] + ['Type']]
    # Calculate some extra values
    sdf['Current Value'] = pdf.sum(axis=1)
    sdf['Expected Profit'] = sdf['Expected Value'] - sdf['Current Value']
    sdf['Actual Profit'] = sdf['Current Value'].diff(2) # Shift by two due to two types
    # Accumulate trading costs per portfolio type over time.
    sdf = sdf.reset_index().set_index(['Type', 'Time'])
    sdf['Cumulative Trading Cost'] = df.reset_index().groupby(by=['Type', 'Time'])['Trading Cost'].sum().groupby(level=[0]).cumsum()
    sdf = sdf.reset_index().set_index('Time')
    # Net value = market value minus accumulated trading costs.
    sdf['Net Value'] = sdf['Current Value'] - sdf['Cumulative Trading Cost']
    return pdf, sdf
# Load revision results for both scenario-generation methods.
df = pd.read_csv('../data/portfolio_revision_all_bootstrap.csv')
pdf, sdf = prepare_data(df)
mdf = pd.read_csv('../data/portfolio_revision_all_moment.csv')
mpdf, msdf = prepare_data(mdf)
# Figure: Portfolio revisions
gp = pdf.groupby('Type').get_group('risk_averse').drop('Type', 1)
gp2 = pdf.groupby('Type').get_group('risk_neutral').drop('Type', 1)
# Normalize each row to portfolio weights (fractions of total value).
gp = gp.T.div(gp.sum(1)).T
gp2 = gp2.T.div(gp2.sum(1)).T
# Tick axis every n months
n = 12
plt.figure(1, figsize=(6, 8), dpi=100)
ax1 = plt.subplot(211)
# Stacked bars of weights (%) for assets ever held after the first period.
p1 = (gp.loc[:, gp.loc[gp.index[1]:].sum() > 0]*100).plot(
    kind='bar',
    ax=ax1,
    stacked=True,
    colormap=sns.cubehelix_palette(
        8, start=.5, rot=-1, as_cmap=True
    ),
    width=1
)
ticks = p1.xaxis.get_ticklocs()
ticklabels = [l.get_text() for l in p1.xaxis.get_ticklabels()]
# Keep every n-th tick; x[:-9] strips the time-of-day part of the label.
p1.xaxis.set_ticks(ticks[::n])
p1.xaxis.set_ticklabels(map(lambda x: x[:-9], ticklabels[::n]))
plt.xticks(rotation=18)
# leg1 = p1.legend(loc='left', ncol=1, bbox_to_anchor=(1.0, 1.0), fontsize=8)
leg1 = p1.legend(
    bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
    ncol=7, mode="expand", borderaxespad=0.)
plt.ylim(0, 100)
plt.xlabel('')
ax2 = plt.subplot(212)
p2 = (gp2.loc[:, gp2.loc[gp2.index[1]:].sum() > 0]*100).plot(
    kind='bar',
    ax=ax2,
    stacked=True,
    colormap=sns.cubehelix_palette(
        8, start=.5, rot=-1, as_cmap=True
    ),
    width=1
)
ticks = p2.xaxis.get_ticklocs()
ticklabels = [l.get_text() for l in p2.xaxis.get_ticklabels()]
p2.xaxis.set_ticks(ticks[::n])
p2.xaxis.set_ticklabels(map(lambda x: x[:-9], ticklabels[::n]))
plt.xticks(rotation=18)
# leg2 = p2.legend(loc='center left', ncol=2, bbox_to_anchor=(1, 1.0), fontsize=8)
leg2 = p2.legend(
    bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
    ncol=7, mode="expand", borderaxespad=0.)
plt.ylim(0, 100)
plt.xlabel('')
plt.tight_layout(h_pad=2)
plt.subplots_adjust(top=0.93)
plt.savefig('../pic/trading_portfolio.pdf')
# Figure: Portfolio predictions vs. scenarios
gp = sdf.groupby('Type').get_group('risk_averse').drop('Type', 1)
gp2 = sdf.groupby('Type').get_group('risk_neutral').drop('Type', 1)
# Shift forecasts one period forward so each aligns with its realization.
gp['Forecasted Value'] = gp['Expected Value'].shift(1)
gp['Max Forecasted Value'] = gp['Maximum Value'].shift(1)
gp['Min Forecasted Value'] = gp['Minimum Value'].shift(1)
gp2['Forecasted Value'] = gp2['Expected Value'].shift(1)
gp2['Max Forecasted Value'] = gp2['Maximum Value'].shift(1)
gp2['Min Forecasted Value'] = gp2['Minimum Value'].shift(1)
plt.figure(2, figsize=(6, 5), dpi=100)
ax1 = plt.subplot(211)
# Risk-averse: forecast (salmon) vs. realized value (black).
(gp['Forecasted Value']/budget).plot(c=sns.xkcd_rgb['salmon'], lw=3, ax=ax1)
(gp['Current Value']/budget).plot(c=sns.xkcd_rgb['black'], lw=2, alpha=0.7, ax=ax1)
# Shade the forecasted min/max band.
plt.fill_between(
    gp.index,
    gp['Max Forecasted Value'].values/budget,
    gp['Min Forecasted Value'].values/budget,
    color=sns.xkcd_rgb['pale red']
)
plt.xlabel('')
plt.ylabel('Portfolio Value [MDKK]')
ax2 = plt.subplot(212)
# Risk-neutral: same layout as above.
(gp2['Forecasted Value']/budget).plot(c=sns.xkcd_rgb['salmon'], lw=3, ax=ax2)
(gp2['Current Value']/budget).plot(c=sns.xkcd_rgb['black'], lw=2, alpha=0.7, ax=ax2)
plt.xlabel('')
plt.ylabel('Portfolio Value [MDKK]')
plt.fill_between(
    gp2.index,
    gp2['Max Forecasted Value'].values/budget,
    gp2['Min Forecasted Value'].values/budget,
    color=sns.xkcd_rgb['pale red']
)
plt.tight_layout()
plt.savefig('../pic/trading_forecasted_value.pdf')
# Set up 1/N results
rawetfs = pd.read_csv('../data/etfs_max_mean_prices.csv', parse_dates=0)
rawetfs = rawetfs.rename(columns={u'Unnamed: 0': 'Time'})
rawetfs['Time'] = pd.to_datetime(rawetfs['Time'])
rawetfs = rawetfs.set_index('Time')
# Select date range of problem
rawetfs = rawetfs.ix[sdf.index]
# Normalize prices to initial prices
rawetfs = rawetfs / rawetfs.ix[0]
# Compute value of 1 over n portfolio
overnportfoliovalue = index * rawetfs.sum(1) / len(rawetfs.columns)
# Value of portfolio over time
plt.figure(3, figsize=(6, 3), dpi=100)
ax = plt.axes()
# Legend labels per portfolio type, bootstrap scenarios.
namedict = {
    'risk_averse': 'Risk Averse, bootstrap',
    'risk_neutral': 'Risk Neutral, bootstrap'
}
for l, d in sdf.groupby('Type'):
    (d['Net Value']*index/budget).plot(label=namedict[l], ax=ax)
# Legend labels per portfolio type, moment-matching scenarios.
mnamedict = {
    'risk_averse': 'Risk Averse, moment matching',
    'risk_neutral': 'Risk Neutral, moment matching'
}
for l, d in msdf.groupby('Type'):
    (d['Net Value']*index/budget).plot(label=mnamedict[l], ax=ax)
overnportfoliovalue.plot(label='1 over N', ax=ax)
plt.xlabel('')
plt.ylabel("Portfolio Net Value [DKK]")
plt.legend(ncol=1, loc='upper left')
plt.savefig('../pic/trading_portfolio_value.pdf')
# Build table for results
bra = sdf.groupby('Type').get_group('risk_averse')
brn = sdf.groupby('Type').get_group('risk_neutral')
mra = msdf.groupby('Type').get_group('risk_averse')
mrn = msdf.groupby('Type').get_group('risk_neutral')
# Index of the final trading period, counted from the end of the series.
lastperiodindex = -6
print 'Last trading day: {}'.format(bra.index[lastperiodindex])
# Total number of years in trading period
numyears = (bra.index[lastperiodindex]-bra.index[0]).days/365.
# Summary table: one row per strategy, values at the last trading period.
table = pd.DataFrame(
    index=[
        'Risk Averse, bootstrap',
        'Risk Neutral, bootstrap',
        'Risk Averse, moment matching',
        'Risk Neutral, moment matching',
        '1 over N'
    ],
    data={
        'Final Nominal Value': np.round([
            bra['Current Value'].ix[lastperiodindex],
            brn['Current Value'].ix[lastperiodindex],
            mra['Current Value'].ix[lastperiodindex],
            mrn['Current Value'].ix[lastperiodindex],
            # 1/N series is an index (base 100); rescale to nominal value.
            overnportfoliovalue.ix[lastperiodindex]*budget/100
        ], 0),
        'Trading Costs': np.round([
            bra['Cumulative Trading Cost'].ix[lastperiodindex],
            brn['Cumulative Trading Cost'].ix[lastperiodindex],
            mra['Cumulative Trading Cost'].ix[lastperiodindex],
            mrn['Cumulative Trading Cost'].ix[lastperiodindex],
            # 1/N is buy-and-hold, so no trading costs are accrued.
            0
        ], 0),
        'Final Net Profit': np.round([
            bra['Net Value'].ix[lastperiodindex] - budget,
            brn['Net Value'].ix[lastperiodindex] - budget,
            mra['Net Value'].ix[lastperiodindex] - budget,
            mrn['Net Value'].ix[lastperiodindex] - budget,
            overnportfoliovalue.ix[lastperiodindex]*budget/100 - budget
        ], 0),
        'Annualized Return': map(lambda x: '{} %'.format(x*100), np.round([
            (bra['Net Value'].ix[lastperiodindex]/budget)**(1/numyears)-1,
            (brn['Net Value'].ix[lastperiodindex]/budget)**(1/numyears)-1,
            (mra['Net Value'].ix[lastperiodindex]/budget)**(1/numyears)-1,
            (mrn['Net Value'].ix[lastperiodindex]/budget)**(1/numyears)-1,
            (overnportfoliovalue.ix[lastperiodindex]/100)**(1/numyears)-1
        ], 3))
    }
)
table.to_latex('../tex/trading_table.tex', columns=[
    'Final Nominal Value',
    'Trading Costs',
    'Final Net Profit',
    'Annualized Return'
])
| {
"content_hash": "70fa55065fc9be2f04c751b2a1e9cfe6",
"timestamp": "",
"source": "github",
"line_count": 243,
"max_line_length": 132,
"avg_line_length": 32.89300411522634,
"alnum_prop": 0.6421869135493556,
"repo_name": "TueVJ/OptFinFinalExam",
"id": "9cc9552165099220b3b8de4c8101c62cc27a341b",
"size": "7993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plot_results/plot_trading_results.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GAMS",
"bytes": "25462"
},
{
"name": "Python",
"bytes": "18145"
},
{
"name": "TeX",
"bytes": "29687"
}
],
"symlink_target": ""
} |
import argparse, glob, re, sys
from language import *
from parser import parse_llvm, parse_opt_file
from gen import generate_suite
def block_model(s, sneg, m):
    """Add a blocking clause to solver s ruling out type model m.

    The model is first minimized with the help of sneg (a solver holding
    the negated type constraints): only the assignments actually required
    to contradict sneg are kept in the blocking clause, so each clause
    excludes a whole family of equivalent models.
    """
    # First simplify the model.
    sneg.push()
    bools = []
    exprs = []
    req = []
    skip_model = get_pick_one_type()
    for n in m.decls():
        # Introduce an indicator boolean per model assignment.
        b = FreshBool()
        name = str(n)
        expr = (Int(name) == m[n])
        sneg.add(b == expr)
        if name in skip_model:
            # Pick-one-type variables are always kept in the core.
            req += [b]
        else:
            bools += [b]
            exprs += [expr]
    # Greedy core minimization: drop an assignment when the remainder
    # still contradicts sneg.
    req_exprs = []
    for i in range(len(bools)):
        if sneg.check(req + bools[i+1:]) != unsat:
            req += [bools[i]]
            req_exprs += [exprs[i]]
    assert sneg.check(req) == unsat
    sneg.pop()
    # Now block the simplified model.
    s.add(Not(mk_and(req_exprs)))
def pick_pre_types(s, s2):
    """Pin one representative type assignment for precondition variables.

    Fixes all non-size variables of s's current model in s2, then tries to
    grow each remaining size_* variable to >= 32 bits when s2 allows it,
    constraining s accordingly. s must remain sat afterwards.
    """
    m = s.model()
    skip_model = get_pick_one_type()
    vars = []
    for n in m.decls():
        name = str(n)
        # FIXME: only fix size_* variables?
        if name in skip_model and name.startswith('size_'):
            # Candidate size variable; left free for widening below.
            vars += [Int(name)]
        else:
            s2.add(Int(name) == m[n])
    for v in vars:
        b = FreshBool()
        e = v >= 32
        s2.add(b == e)
        # Prefer wide (>= 32 bit) types whenever feasible.
        if s2.check(b) == sat:
            s.add(e)
    res = s.check()
    assert res == sat
# Tactic used to simplify precondition formulas before display/reuse.
pre_tactic = AndThen(
    Tactic('propagate-values'),
    Repeat(AndThen(Tactic('simplify'), Tactic('ctx-solver-simplify')))
)

def simplify_pre(f):
    """Return a simplified, equivalent form of precondition formula f."""
    # TODO: extract set of implied things (iffs, tgt=0, etc).
    return pre_tactic.apply(f)[0].as_expr()
def z3_solver_to_smtlib(s):
    """Serialize the assertions of Z3 solver s to an SMT-LIB2 string.

    The Z3 benchmark API takes the final assertion separately as the
    benchmark formula, hence the len(a) - 1 split below.
    """
    a = s.assertions()
    size = len(a) - 1
    # Marshal all but the last assertion into a C array of ASTs.
    _a = (Ast * size)()
    for k in range(size):
        _a[k] = a[k].as_ast()
    return Z3_benchmark_to_smtlib_string(a[size].ctx_ref(), None, None, None, '',
                                         size, _a, a[size].as_ast())
def gen_benchmark(s):
    """Dump solver s as a numbered SMT-LIB2 file under bench/, if present.

    Files are named bench/NNN.smt2 where NNN is one greater than the
    highest number already there. No-op when the bench/ directory does
    not exist.
    """
    if not os.path.isdir('bench'):
        return

    header = ("(set-info :source |\n Generated by Alive 0.1\n"
              " More info in N. P. Lopes, D. Menendez, S. Nagarakatte, J. Regehr."
              "\n Provably Correct Peephole Optimizations with Alive. In PLDI'15."
              "\n|)\n\n")
    string = header + z3_solver_to_smtlib(s)

    files = glob.glob('bench/*.smt2')
    if len(files) == 0:
        filename = 0
    else:
        # %03d zero-padding below makes lexicographic order match numeric
        # order, so the reverse sort puts the highest number first.
        files.sort(reverse=True)
        # Raw string: avoids the invalid '\d' escape in the pattern.
        filename = int(re.search(r'(\d+)\.smt2', files[0]).group(1)) + 1
    filename = 'bench/%03d.smt2' % filename

    # Context manager guarantees the file is closed even if write() fails.
    with open(filename, 'w') as fd:
        fd.write(string)
def check_incomplete_solver(res, s):
    """Abort verification if the solver returned 'unknown' (gave up)."""
    if res == unknown:
        print '\nWARNING: The SMT solver gave up. Verification incomplete.'
        print 'Solver says: ' + s.reason_unknown()
        exit(-1)
# Main proof tactic: normalize the goal, then dispatch to the quantifier-free
# bit-vector solver when applicable, otherwise the general BV solver.
tactic = AndThen(
    Repeat(AndThen(Tactic('simplify'), Tactic('propagate-values'))),
    #Tactic('ctx-simplify')
    Tactic('elim-term-ite'),
    Tactic('simplify'),
    Tactic('propagate-values'),
    Tactic('solve-eqs'),
    Cond(Probe('is-qfbv'), Tactic('qfbv'), Tactic('bv'))
)

# Cache of already-proved expressions, keyed by Z3 AST id.
correct_exprs = {}
def check_expr(qvars, expr, error):
    """Prove (forall qvars. expr); print a counterexample and exit on failure.

    error is a callback that maps the satisfied solver to a diagnostic
    tuple (message, src value, tgt value, stop var, srcv, tgtv, types).
    Proved expressions are cached by AST id to skip duplicate queries.
    """
    expr = mk_forall(qvars, mk_and(expr))
    id = expr.get_id()
    if id in correct_exprs:
        return
    correct_exprs[id] = expr
    s = tactic.solver()
    s.add(expr)
    if __debug__:
        # Optionally record the query as an SMT-LIB benchmark.
        gen_benchmark(s)
    res = s.check()
    if res != unsat:
        # Either the solver gave up or we have a counterexample.
        check_incomplete_solver(res, s)
        e, src, tgt, stop, srcv, tgtv, types = error(s)
        print '\nERROR: %s' % e
        print 'Example:'
        print_var_vals(s, srcv, tgtv, stop, types)
        print 'Source value: ' + src
        print 'Target value: ' + tgt
        exit(-1)
def var_type(var, types):
    """Render the LLVM-style type of var (e.g. 'i32', 'i8*') from the model.

    Pointee types are looked up under '*var' and array element types under
    '[var]' in the types model.
    """
    t = types[Int('t_' + var)].as_long()
    if t == Type.Int:
        return 'i%s' % types[Int('size_' + var)]
    if t == Type.Ptr:
        return var_type('*' + var, types) + '*'
    if t == Type.Array:
        elems = types[Int('val_%s_%s' % (var, 'elems'))]
        return '[%s x %s]' % (elems, var_type('[' + var + ']', types))
    # Unknown type tag: the model should only contain the cases above.
    assert False
def val2binhex(v, bits):
    """Format unsigned value v as '0x...' padded to ceil(bits/4) hex digits."""
    # Floor division keeps the field width an int under both Python 2 and 3
    # (the original '/' would produce a float width under Python 3).
    return '0x%0*X' % ((bits + 3) // 4, v)
    #if bits % 4 == 0:
    #  return '0x%0*X' % (bits / 4, v)
    #return format(v, '#0'+str(bits)+'b')
def str_model(s, v):
    """Pretty-print the model value of v: hex plus decimal reading(s)."""
    # model completion=True: force a concrete value even for don't-cares.
    val = s.model().evaluate(v, True)
    if isinstance(val, BoolRef):
        return "true" if is_true(val) else "false"
    valu = val.as_long()
    vals = val.as_signed_long()
    bin = val2binhex(valu, val.size())
    if valu != vals:
        # Show both unsigned and signed readings when they differ.
        return "%s (%d, %d)" % (bin, valu, vals)
    return "%s (%d)" % (bin, valu)
def _print_var_vals(s, vars, stopv, seen, types):
    # Print model values for vars, stopping at stopv (exclusive) and
    # skipping names already printed (tracked in seen, updated in place).
    # NOTE(review): the stop condition depends on the dict's iteration
    # order — presumably insertion order of the SMT translation; confirm.
    for k,v in vars.iteritems():
        if k == stopv:
            return
        if k in seen:
            continue
        seen |= set([k])
        print "%s %s = %s" % (k, var_type(k, types), str_model(s, v[0]))

def print_var_vals(s, vs1, vs2, stopv, types):
    """Dump a counterexample: source vars first, then target-only vars."""
    seen = set()
    _print_var_vals(s, vs1, stopv, seen, types)
    _print_var_vals(s, vs2, stopv, seen, types)
def get_smt_vars(f):
    """Collect the named constants of formula f (or a list of formulas).

    Returns {name: z3 ref}. Bit-vector literals and bool-sorted constants
    are skipped.
    """
    if is_const(f):
        if is_bv_value(f) or is_bool(f):
            return {}
        return {str(f): f}
    ret = {}
    if isinstance(f, list):
        for v in f:
            ret.update(get_smt_vars(v))
        return ret
    # Recurse over the expression's children.
    for c in f.children():
        ret.update(get_smt_vars(c))
    return ret
def check_refinement(srcv, tgtv, types, extra_cnstrs, users):
    """Check that each target value refines the matching source value.

    For every register present on both sides, proves: (1) target is defined
    whenever source is, (2) target is non-poison whenever source is, and
    (3) the values agree — all under the source's definedness/poison
    assumptions, the extra constraints and the user-count constraints.
    Diagnostics are emitted (and the process exits) via check_expr.
    """
    for k,v in srcv.iteritems():
        # skip instructions only on one side; assumes they remain unchanged
        if k[0] == 'C' or not tgtv.has_key(k):
            continue
        (a, defa, poisona, qvars) = v
        (b, defb, poisonb, qvarsb) = tgtv[k]
        defb = mk_and(defb)
        poisonb = mk_and(poisonb)
        n_users = users[k]
        base_cnstr = defa + poisona + extra_cnstrs + n_users
        # Check if domain of defined values of Src implies that of Tgt.
        check_expr(qvars, base_cnstr + [mk_not(defb)], lambda s :
            ("Domain of definedness of Target is smaller than Source's for %s %s\n"
             % (var_type(k, types), k),
             str_model(s, a), 'undef', k, srcv, tgtv, types))
        # Check if domain of poison values of Src implies that of Tgt.
        check_expr(qvars, base_cnstr + [mk_not(poisonb)], lambda s :
            ("Domain of poisoness of Target is smaller than Source's for %s %s\n"
             % (var_type(k, types), k),
             str_model(s, a), 'poison', k, srcv, tgtv, types))
        # Check that final values of vars are equal.
        check_expr(qvars, base_cnstr + [a != b], lambda s :
            ("Mismatch in values of %s %s\n" % (var_type(k, types), k),
             str_model(s, a), str_model(s, b), k, srcv, tgtv, types))
def infer_flags(srcv, tgtv, types, extra_cnstrs, prev_flags, users):
    """Infer flag (nsw/nuw/exact) assignments making the optimization correct.

    Builds one forall-exists refinement query per shared register, leaving
    the f_*_src / f_*_tgt flag variables free, then enumerates all flag
    models and returns their disjunction. Falls back to check_refinement
    (which prints diagnostics and exits) when no assignment works.
    """
    query = []
    flag_vars_src = {}
    flag_vars_tgt = {}
    for k,v in srcv.iteritems():
        # skip instructions only on one side; assumes they remain unchanged
        if k[0] == 'C' or not tgtv.has_key(k):
            continue
        (a, defa, poisona, qvars) = v
        (b, defb, poisonb, qvarsb) = tgtv[k]
        pre = mk_and(defa + poisona + prev_flags + extra_cnstrs)
        eq = [] if a.eq(b) else [a == b]
        q = mk_implies(pre, mk_and(defb + poisonb + eq))
        if is_true(q):
            # Trivially valid; nothing to constrain.
            continue
        q = mk_and(users[k] + [q])
        # Partition the free variables: program inputs get universally
        # quantified; flag variables stay free for the solver to choose.
        input_vars = []
        for k,v in get_smt_vars(q).iteritems():
            if k[0] == '%' or k[0] == 'C' or k.startswith('icmp_') or\
               k.startswith('alloca') or k.startswith('mem_') or k.startswith('ana_'):
                input_vars.append(v)
            elif k.startswith('f_'):
                if k.endswith('_src'):
                    flag_vars_src[k] = v
                else:
                    assert k.endswith('_tgt')
                    flag_vars_tgt[k] = v
            elif k.startswith('u_') or k.startswith('undef'):
                continue
            else:
                print "Unknown smt var: " + str(v)
                exit(-1)
        q = mk_exists(qvars, q)
        q = mk_forall(input_vars, q)
        query.append(q)
    s = Solver()#tactic.solver()
    s.add(query)
    if __debug__:
        gen_benchmark(s)
    res = s.check()
    check_incomplete_solver(res, s)
    if s.check() == unsat:
        # optimization is incorrect. Run the normal procedure for nice diagnostics.
        check_refinement(srcv, tgtv, types, extra_cnstrs, users)
        assert False
    # enumerate all models (all possible flag assignments)
    models = []
    while True:
        m = s.model()
        # Keep only the informative literals: source flags that must be set
        # and target flags that must be cleared.
        min_model = []
        for v in flag_vars_src.itervalues():
            val = m[v]
            if val and val.as_long() == 1:
                min_model.append(v == 1)
        for v in flag_vars_tgt.itervalues():
            val = m[v]
            if val and val.as_long() == 0:
                min_model.append(v == 0)
        m = mk_and(min_model)
        models.append(m)
        # Block this assignment and look for the next one.
        s.add(mk_not(m))
        if __debug__:
            gen_benchmark(s)
        res = s.check()
        check_incomplete_solver(res, s)
        if s.check() == unsat:
            return mk_or(models)
# Flag constraints accumulated across type instantiations (flag-inference mode).
gbl_prev_flags = []

def check_typed_opt(pre, src, ident_src, tgt, ident_tgt, types, users):
    """Verify one fully-typed instantiation of an optimization.

    Checks (1) basic-block precondition equivalence, (2) register value
    refinement (or flag inference), and (3) final memory state equality.
    Any failure is reported via check_expr, which exits the process.
    """
    srcv = toSMT(src, ident_src, True)
    tgtv = toSMT(tgt, ident_tgt, False)
    pre_d, pre = pre.toSMT(srcv)
    extra_cnstrs = pre_d + pre +\
                   srcv.getAllocaConstraints() + tgtv.getAllocaConstraints()
    # 1) check preconditions of BBs
    tgtbbs = tgtv.bb_pres
    for k,v in srcv.bb_pres.iteritems():
        if not tgtbbs.has_key(k):
            continue
        # assume open world. May need to add language support to state that a BB is
        # complete (closed world)
        p1 = mk_and(v)
        p2 = mk_and(tgtbbs[k])
        check_expr([], [p1 != p2] + extra_cnstrs, lambda s :
            ("Mismatch in preconditions for BB '%s'\n" % k, str_model(s, p1),
             str_model(s, p2), None, srcv, tgtv, types))
    # 2) check register values
    if do_infer_flags():
        global gbl_prev_flags
        flgs = infer_flags(srcv, tgtv, types, extra_cnstrs, gbl_prev_flags, users)
        # Fold the new flag constraints into the running conjunction.
        gbl_prev_flags = [simplify_pre(mk_and(gbl_prev_flags + [flgs]))]
    else:
        check_refinement(srcv, tgtv, types, extra_cnstrs, users)
    # 3) check that the final memory state is similar in both programs
    if use_array_theory():
        # Array-theory encoding: compare the two memories at a symbolic index.
        qvars = []
        for blck in srcv.ptrs:
            qvars += blck.qvars
        mem = srcv.mem
        memb = tgtv.mem
        idx = BitVec('idx', get_ptr_size())
        check_expr(qvars, extra_cnstrs + [mem[idx] != memb[idx]], lambda s :
            ('Mismatch in final memory state in idx %s' % str_model(s, idx),
             str_model(s, mem[idx]), str_model(s,memb[idx]), None, srcv, tgtv, types))
        return
    # Per-block encoding: compare each source block against the target's
    # block of the same pointer (fresh/unconstrained if the target lacks it).
    memsb = {str(blck.ptr) : blck.mem for blck in tgtv.ptrs}
    for blck in srcv.ptrs:
        ptr = blck.ptr
        mem = blck.mem
        memb = memsb.get(str(ptr))
        if memb == None:
            memb = freshBV('mem', mem.size())
        check_expr(blck.qvars, [mem != memb] + extra_cnstrs, lambda s :
            ('Mismatch in final memory state for %s (%d bits)' % (ptr, mem.size()),
             str_model(s, mem), str_model(s, memb), None, srcv, tgtv, types))
def check_opt(opt):
    """Verify a parsed optimization for every feasible type assignment.

    Type-checks the precondition, source and target, runs sanity checks on
    register usage, then enumerates type models (blocking each one after a
    successful proof) until the space is exhausted.
    """
    name, pre, src, tgt, ident_src, ident_tgt, used_src, used_tgt, skip_tgt = opt
    print '----------------------------------------'
    print 'Optimization: ' + name
    print 'Precondition: ' + str(pre)
    print_prog(src, set([]))
    print '=>'
    print_prog(tgt, skip_tgt)
    print
    reset_pick_one_type()
    global gbl_prev_flags
    gbl_prev_flags = []
    # infer allowed types for registers
    type_src = getTypeConstraints(ident_src)
    type_tgt = getTypeConstraints(ident_tgt)
    type_pre = pre.getTypeConstraints()
    s = SolverFor('QF_LIA')
    s.add(type_pre)
    if s.check() != sat:
        print 'Precondition does not type check'
        exit(-1)
    # Only one type per variable/expression in the precondition is required.
    for v in s.model().decls():
        register_pick_one_type(v)
    s.add(type_src)
    unregister_pick_one_type(get_smt_vars(type_src))
    if s.check() != sat:
        print 'Source program does not type check'
        exit(-1)
    s.add(type_tgt)
    unregister_pick_one_type(get_smt_vars(type_tgt))
    if s.check() != sat:
        print 'Source and Target programs do not type check'
        exit(-1)
    # Pointers are assumed to be either 32 or 64 bits
    ptrsize = Int('ptrsize')
    s.add(Or(ptrsize == 32, ptrsize == 64))
    # sneg holds the negated type constraints, used to minimize blocking
    # clauses in block_model().
    sneg = SolverFor('QF_LIA')
    sneg.add(Not(mk_and([type_pre] + type_src + type_tgt)))
    # Sanity checks: every source temporary must be used or overwritten,
    # and every target temporary must be used or overwrite a source one.
    has_unreach = any(v.startswith('unreachable') for v in ident_tgt.iterkeys())
    for v in ident_src.iterkeys():
        if v[0] == '%' and v not in used_src and v not in used_tgt and\
           v in skip_tgt and not has_unreach:
            print 'ERROR: Temporary register %s unused and not overwritten' % v
            exit(-1)
    for v in ident_tgt.iterkeys():
        if v[0] == '%' and v not in used_tgt and v not in ident_src:
            print 'ERROR: Temporary register %s unused and does not overwrite any'\
                  ' Source register' % v
            exit(-1)
    # build constraints that indicate the number of users for each register.
    users_count = countUsers(src)
    users = {}
    for k in ident_src.iterkeys():
        n_users = users_count.get(k)
        users[k] = [get_users_var(k) != n_users] if n_users else []
    # pick one representative type for types in Pre
    res = s.check()
    assert res != unknown
    if res == sat:
        s2 = SolverFor('QF_LIA')
        s2.add(s.assertions())
        pick_pre_types(s, s2)
    # now check for correctness
    proofs = 0
    while res == sat:
        # Verify one concrete typing, then block it and fetch the next.
        types = s.model()
        set_ptr_size(types)
        fixupTypes(ident_src, types)
        fixupTypes(ident_tgt, types)
        pre.fixupTypes(types)
        check_typed_opt(pre, src, ident_src, tgt, ident_tgt, types, users)
        block_model(s, sneg, types)
        proofs += 1
        sys.stdout.write('\rDone: ' + str(proofs))
        sys.stdout.flush()
        res = s.check()
        assert res != unknown
    if res == unsat:
        print '\nOptimization is correct!'
        if do_infer_flags():
            print 'Flags: %s' % gbl_prev_flags[0]
        print
    else:
        print '\nVerification incomplete; did not check all bit widths\n'
def main():
    """Parse CLI options and the input files, then verify and/or generate.

    Extra arguments may be injected via the ALIVE_EXTRA_ARGS environment
    variable (prepended to argv).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--match', action='append', metavar='name',
        help='run tests containing this text')
    parser.add_argument('--infer-flags', action='store_true', default=False,
        help='Infer NSW/NUW/exact flags automaically', dest='infer_flags')
    parser.add_argument('-V', '--verify', action='store_true', default=True,
        help='check correctness of optimizations (default: True)')
    parser.add_argument('--no-verify', action='store_false', dest='verify')
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), metavar='file',
        help='Write generated code to <file> ("-" for stdout)')
    parser.add_argument('--use-array-th', action='store_true', default=False,
        help='Use array theory to encode memory operations (default: False)',
        dest='array_th')
    parser.add_argument('file', type=argparse.FileType('r'), nargs='*',
        default=[sys.stdin],
        help='optimization file (read from stdin if none given)',)
    args = os.getenv('ALIVE_EXTRA_ARGS', '').split() + sys.argv[1:]
    args = parser.parse_args(args)
    set_infer_flags(args.infer_flags)
    set_use_array_theory(args.array_th)
    gen = []
    for f in args.file:
        if f.isatty():
            sys.stderr.write('[Reading from terminal...]\n')
        opts = parse_opt_file(f.read())
        for opt in opts:
            # opt[0] is the optimization name; filter on --match substrings.
            if not args.match or any(pat in opt[0] for pat in args.match):
                if args.output:
                    gen.append(opt)
                if args.verify:
                    check_opt(opt)
                elif not args.output:
                    print opt[0]
    if args.output:
        generate_suite(gen, args.output)
if __name__ == "__main__":
try:
main()
except IOError, e:
print >> sys.stderr, 'ERROR:', e
exit(-1)
except KeyboardInterrupt:
print '\nCaught Ctrl-C. Exiting..'
| {
"content_hash": "95d64599d41440a471d4152e2d1c58b4",
"timestamp": "",
"source": "github",
"line_count": 530,
"max_line_length": 84,
"avg_line_length": 28.49622641509434,
"alnum_prop": 0.5996159703370192,
"repo_name": "rutgers-apl/alive-loops",
"id": "6435ad6a8a94cfb7a34d00467661af84bb6a0a13",
"size": "15715",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alive.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "220"
},
{
"name": "C++",
"bytes": "149069"
},
{
"name": "LLVM",
"bytes": "656"
},
{
"name": "Makefile",
"bytes": "79716"
},
{
"name": "Python",
"bytes": "520504"
},
{
"name": "Shell",
"bytes": "210366"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
# URL routes for the auth_chat app.
# NOTE(review): patterns() with string view names was deprecated in
# Django 1.8 and removed in 1.10; newer versions use a plain list of url()
# entries with view callables.
urlpatterns = patterns('',
    url(r'^ajax/get_user/$', 'auth_chat.views.get_user',),
)
| {
"content_hash": "3a9885a33cfe200eaf95fcd50b5bbc81",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 58,
"avg_line_length": 26.4,
"alnum_prop": 0.6742424242424242,
"repo_name": "relicode/auth-chat",
"id": "d33786a44057e526a732e52adc1baffe58644865",
"size": "132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/auth_chat/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "51921"
},
{
"name": "JavaScript",
"bytes": "2872"
},
{
"name": "Python",
"bytes": "3253"
},
{
"name": "Ruby",
"bytes": "869"
}
],
"symlink_target": ""
} |
"""
This file is part of the private API. Please do not use directly these classes as they will be modified on
future versions without warning. The classes should be accessed only via the transforms argument of Weights.
"""
from typing import List, Optional, Tuple, Union
import PIL.Image
import torch
from torch import Tensor
from . import functional as F, InterpolationMode
__all__ = ["StereoMatching"]
class StereoMatching(torch.nn.Module):
    """Inference-time preprocessing for stereo-matching models.

    Applies optional resizing and grayscale conversion, converts to float
    in [0, 1] and normalizes both images of a stereo pair with the given
    mean/std.
    """

    def __init__(
        self,
        *,
        use_gray_scale: bool = False,
        resize_size: Optional[Tuple[int, ...]],
        mean: Tuple[float, ...] = (0.5, 0.5, 0.5),
        std: Tuple[float, ...] = (0.5, 0.5, 0.5),
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
    ) -> None:
        super().__init__()

        # pacify mypy
        self.resize_size: Union[None, List]

        # Store a list copy so callers' tuples are never aliased.
        if resize_size is not None:
            self.resize_size = list(resize_size)
        else:
            self.resize_size = None

        self.mean = list(mean)
        self.std = list(std)
        self.interpolation = interpolation
        self.use_gray_scale = use_gray_scale

    def forward(self, left_image: Tensor, right_image: Tensor) -> Tuple[Tensor, Tensor]:
        """Preprocess a stereo pair; returns the transformed (left, right)."""
        def _process_image(img: PIL.Image.Image) -> Tensor:
            if self.resize_size is not None:
                img = F.resize(img, self.resize_size, interpolation=self.interpolation)
            if not isinstance(img, Tensor):
                img = F.pil_to_tensor(img)
            # Truthiness test instead of 'is True': the flag is declared bool.
            if self.use_gray_scale:
                img = F.rgb_to_grayscale(img)
            img = F.convert_image_dtype(img, torch.float)
            img = F.normalize(img, mean=self.mean, std=self.std)
            img = img.contiguous()
            return img

        left_image = _process_image(left_image)
        right_image = _process_image(right_image)
        return left_image, right_image

    def __repr__(self) -> str:
        format_string = self.__class__.__name__ + "("
        format_string += f"\n resize_size={self.resize_size}"
        format_string += f"\n mean={self.mean}"
        format_string += f"\n std={self.std}"
        format_string += f"\n interpolation={self.interpolation}"
        format_string += "\n)"
        return format_string

    def describe(self) -> str:
        """Return a human-readable summary of the preprocessing pipeline."""
        return (
            "Accepts ``PIL.Image``, batched ``(B, C, H, W)`` and single ``(C, H, W)`` image ``torch.Tensor`` objects. "
            f"The images are resized to ``resize_size={self.resize_size}`` using ``interpolation={self.interpolation}``. "
            f"Finally the values are first rescaled to ``[0.0, 1.0]`` and then normalized using ``mean={self.mean}`` and "
            f"``std={self.std}``."
        )
| {
"content_hash": "181b324c13a5b4a906e9b1ad134bc3a7",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 122,
"avg_line_length": 37.351351351351354,
"alnum_prop": 0.5850217076700435,
"repo_name": "pytorch/vision",
"id": "a6980f3e135ac382fc2df817d3e10184ddac7a37",
"size": "2764",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "torchvision/prototype/transforms/_presets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "20242"
},
{
"name": "C",
"bytes": "930"
},
{
"name": "C++",
"bytes": "366825"
},
{
"name": "CMake",
"bytes": "18266"
},
{
"name": "Cuda",
"bytes": "90174"
},
{
"name": "Dockerfile",
"bytes": "1608"
},
{
"name": "Java",
"bytes": "21833"
},
{
"name": "Objective-C",
"bytes": "2715"
},
{
"name": "Objective-C++",
"bytes": "3284"
},
{
"name": "PowerShell",
"bytes": "2874"
},
{
"name": "Python",
"bytes": "3952070"
},
{
"name": "Ruby",
"bytes": "1086"
},
{
"name": "Shell",
"bytes": "35660"
}
],
"symlink_target": ""
} |
"""engine.SCons.Tool.f77
Tool-specific initialization for the generic Posix f77 Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/f77.py 3603 2008/10/10 05:46:45 scons"
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env
compilers = ['f77']
def generate(env):
    """Add Builders and construction variables for the generic POSIX f77
    Fortran compiler to the given Environment."""
    add_all_to_env(env)
    add_f77_to_env(env)
    # Prefer a compiler actually found on the system; fall back to 'f77'.
    compiler = env.Detect(compilers) or 'f77'
    for var in ('F77', 'SHF77', 'FORTRAN', 'SHFORTRAN'):
        env[var] = compiler
def exists(env):
    """Return a truthy value when an f77 compiler can be detected."""
    detected = env.Detect(compilers)
    return detected
| {
"content_hash": "1658b3e9a23c388946504dba1f76a93f",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 83,
"avg_line_length": 34.517857142857146,
"alnum_prop": 0.7480600103466115,
"repo_name": "frew/simpleproto",
"id": "6fe0c3e6365a1d9dde59360d7336581c36080123",
"size": "1933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scons-local-1.1.0/SCons/Tool/f77.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "30217"
},
{
"name": "Protocol Buffer",
"bytes": "1960"
},
{
"name": "Python",
"bytes": "1704215"
}
],
"symlink_target": ""
} |
"""Client annotated ACME challenges.
Please use names such as ``achall`` to distiguish from variables "of type"
:class:`acme.challenges.Challenge` (denoted by ``chall``)
and :class:`.ChallengeBody` (denoted by ``challb``)::
from acme import challenges
from acme import messages
from letsencrypt import achallenges
chall = challenges.DNS(token='foo')
challb = messages.ChallengeBody(chall=chall)
achall = achallenges.DNS(chall=challb, domain='example.com')
Note, that all annotated challenges act as a proxy objects::
achall.token == challb.token
"""
import logging
from acme import challenges
from acme import jose
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class AnnotatedChallenge(jose.ImmutableMap):
    """Client annotated challenge.
    Wraps around server provided challenge and annotates with data
    useful for the client.
    :ivar challb: Wrapped `~.ChallengeBody`.
    """
    __slots__ = ('challb',)
    # Concrete subclasses set this to the acme.challenges type they annotate.
    acme_type = NotImplemented
    def __getattr__(self, name):
        # Delegate unknown attribute lookups to the wrapped ChallengeBody so
        # the annotated challenge is a transparent proxy
        # (e.g. achall.token == challb.token, per the module docstring).
        return getattr(self.challb, name)
class KeyAuthorizationAnnotatedChallenge(AnnotatedChallenge):
    """Client annotated `KeyAuthorizationChallenge` challenge."""
    # Adds the validated domain and the account key needed to compute the
    # key authorization for the wrapped challenge.
    __slots__ = ('challb', 'domain', 'account_key')
    def response_and_validation(self, *args, **kwargs):
        """Generate response and validation."""
        # Forward to the wrapped challenge, supplying our account key; extra
        # positional/keyword arguments are passed through unchanged.
        return self.challb.chall.response_and_validation(
            self.account_key, *args, **kwargs)
class DNS(AnnotatedChallenge):
    """Client annotated "dns" ACME challenge."""
    # Annotates the wrapped challenge with the domain being validated.
    __slots__ = ('challb', 'domain')
    acme_type = challenges.DNS
| {
"content_hash": "4da67ff855185bdd78484d807ff54c6d",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 74,
"avg_line_length": 26.672131147540984,
"alnum_prop": 0.7000614628149969,
"repo_name": "mitnk/letsencrypt",
"id": "0cdec06dfabe2835f714d72ca4ccfc78c5d42461",
"size": "1627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "letsencrypt/achallenges.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "48432"
},
{
"name": "Augeas",
"bytes": "5062"
},
{
"name": "Batchfile",
"bytes": "35037"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37309"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "1374377"
},
{
"name": "Shell",
"bytes": "124081"
}
],
"symlink_target": ""
} |
"""
.. moduleauthor:: ZackZK <silajoin@sina.com>
"""
import Queue
import datetime
import threading
import time
from collections import deque
import pytz
import tushare as ts
# from tushare.util.dateu import is_holiday # use our own is_holiday currently as tushare does not include 2016 holiday
import engine.logger
from engine.barfeed import bar
from engine import barfeed
from engine import dataseries
from engine import resamplebase
from engine.utils import dt
from engine.bar import Frequency
from engine.xignite.barfeed import utcnow
logger = engine.logger.getLogger("tushare")
def to_market_datetime(dateTime):
    """Localize a naive datetime to the Shanghai exchange timezone."""
    return dt.localize(dateTime, pytz.timezone('Asia/Shanghai'))
# Mainland-China market holidays for 2015-2016 as ISO 'YYYY-MM-DD' strings.
# Consulted by is_holiday(); weekends are handled separately by its weekday
# check, so only weekday holidays need to be listed here.
holiday = ['2015-01-01', '2015-01-02', '2015-02-18', '2015-02-19', '2015-02-20', '2015-02-23', '2015-02-24',
           '2015-04-06', '2015-05-01', '2015-06-22', '2015-09-03', '2015-09-04', '2015-10-01', '2015-10-02',
           '2015-10-05', '2015-10-06', '2015-10-07',
           '2016-01-01', '2016-02-08', '2016-02-09', '2016-02-10', '2016-02-11', '2016-02-12', '2016-04-04',
           '2016-05-02', '2016-06-09', '2016-06-10', '2016-09-15', '2016-09-16', '2016-10-03', '2016-10-04',
           '2016-10-05', '2016-10-06', '2016-10-07']
def is_holiday(date):
    """Return True if *date* is a weekend day or a listed market holiday.

    Accepts either an ISO 'YYYY-MM-DD' string or a datetime/date object.
    (The original implementation only bound ``today`` in the string branch
    and raised NameError for any non-string input.)
    """
    if isinstance(date, str):
        day = datetime.datetime.strptime(date, '%Y-%m-%d')
    else:
        day = date
        date = day.strftime('%Y-%m-%d')
    # Saturday/Sunday (isoweekday 6/7) short-circuits the holiday lookup.
    return day.isoweekday() in (6, 7) or date in holiday
class TickDataSeries(object):
    """Accumulates per-tick price/volume/amount/time values for one instrument.

    Backed by deques so appends are O(1); reset() empties all four series at
    the start of each bar period.
    """
    def __init__(self):
        self.__priceDS = deque()
        self.__volumeDS = deque()
        self.__amountDS = deque()
        self.__dateTimes = deque()  # just for debug
    def reset(self):
        """Drop all accumulated ticks."""
        self.__priceDS.clear()
        self.__volumeDS.clear()
        self.__amountDS.clear()
        self.__dateTimes.clear()
    def getPriceDS(self):
        return self.__priceDS
    def getAmountDS(self):
        return self.__amountDS
    def getVolumeDS(self):
        return self.__volumeDS
    def getDateTimes(self):
        return self.__dateTimes
    def append(self, price, volume, amount, dateTime):
        """Record one tick.

        Note: the original ``assert(bar is not None)`` was removed -- it
        checked an unrelated module-level import that can never be None once
        the module has loaded, so it was dead code.
        """
        self.__priceDS.append(price)
        self.__volumeDS.append(volume)
        self.__amountDS.append(amount)
        self.__dateTimes.append(dateTime)
    def empty(self):
        """Return True if no ticks have been recorded since the last reset."""
        return len(self.__priceDS) == 0
def get_trading_days(start_day, days):
    """Return the *days* most recent trading days before *start_day*, oldest first.

    Uses the SSE composite index ('sh') history from tushare to decide which
    calendar days actually traded; returns an empty list if the download fails.
    """
    try:
        df = ts.get_hist_data('sh')
    except Exception as e:  # "as" form works on Python 2.6+ and 3.x
        logger.error("Tushare get hist data exception", exc_info=e)
        return []
    trading_days = list()
    # Count of non-trading days skipped so far.  (Renamed from ``holiday``,
    # which shadowed the module-level holiday list.)
    skipped = 0
    for i in range(days):
        while True:
            day = start_day - datetime.timedelta(days=i + 1 + skipped)
            if day.date().isoformat() in df.index:
                trading_days.append(day)
                break
            else:
                skipped += 1
    trading_days.reverse()  # oldest date is put to head
    return trading_days
def build_bar(dateTime, ds):
    """Collapse one period's accumulated ticks into a single BasicBar."""
    price_values = ds.getPriceDS()
    # float() is applied AFTER max/min on purpose: tick values may arrive as
    # (unicode) strings straight from tushare, matching the original behavior.
    open_ = float(price_values[0])
    close = float(price_values[-1])
    high = float(max(price_values))
    low = float(min(price_values))
    volume = sum(int(v) for v in ds.getVolumeDS())
    amount = sum(float(a) for a in ds.getAmountDS())
    return bar.BasicBar(dateTime, open_, high, low, close, volume, None, Frequency.DAY, amount)
class TuSharePollingThread(threading.Thread):
    """Worker thread that polls tushare realtime quotes for a set of
    instruments and buffers accepted ticks into per-instrument
    TickDataSeries objects.

    Subclasses must implement getNextCallDateTime() and doCall().
    """
    # Not using xignite polling thread is because two underscores functions can't be override, e.g. __wait()
    TUSHARE_INQUERY_PERIOD = 3  # tushare read period, default is 3s
    def __init__(self, identifiers):
        super(TuSharePollingThread, self).__init__()
        self._identifiers = identifiers
        self._tickDSDict = {}  # identifier -> TickDataSeries of buffered ticks
        self._last_quotation_time = {}  # identifier -> time of last accepted quote
        for identifier in self._identifiers:
            self._tickDSDict[identifier] = TickDataSeries()
            self._last_quotation_time[identifier] = None
        self.__stopped = False
    def __wait(self):
        # first reset ticks info in one cycle, maybe we need save it if NO quotation in this period
        for identifier in self._identifiers:
            self._tickDSDict[identifier].reset()
        nextCall = self.getNextCallDateTime()
        # Poll roughly every TUSHARE_INQUERY_PERIOD seconds until the next bar
        # boundary is reached or stop() is requested.
        while not self.__stopped and utcnow() < nextCall:
            start_time = datetime.datetime.now()
            self.get_tushare_tick_data()
            end_time = datetime.datetime.now()
            time_diff = (end_time - start_time).seconds
            if time_diff < TuSharePollingThread.TUSHARE_INQUERY_PERIOD:
                time.sleep(TuSharePollingThread.TUSHARE_INQUERY_PERIOD - time_diff)
    def valid_tick_data(self, identifier, tick_info):
        # Accept only quotes strictly newer than the last accepted one, and
        # whose price is within +/-10% of the previous close (presumably the
        # exchange's daily price limit -- confirm).
        if self._last_quotation_time[identifier] is None or \
                self._last_quotation_time[identifier] < tick_info.time:
            self._last_quotation_time[identifier] = tick_info.time
        else:
            return False
        return float(tick_info.pre_close) * 0.9 <= float(tick_info.price) <= float(tick_info.pre_close) * 1.1
    def get_tushare_tick_data(self):
        # One polling pass: fetch realtime quotes for all instruments and
        # buffer the valid ones.  Any failure is logged and swallowed so the
        # polling loop keeps running.
        try:
            df = ts.get_realtime_quotes(self._identifiers)
            for index, identifier in enumerate(self._identifiers):
                tick_info = df.ix[index]
                if self.valid_tick_data(identifier, tick_info):
                    # tushare use unicode type, another way is convert it to int/float here. refer to build_bar
                    self._tickDSDict[identifier].append(tick_info.price, tick_info.volume, tick_info.amount,
                                                        tick_info.time)
        except Exception, e:
            logger.error("Tushare polling exception", exc_info=e)
    def stop(self):
        # Request the polling loop to exit; run() returns after the current cycle.
        self.__stopped = True
    def stopped(self):
        return self.__stopped
    def run(self):
        logger.debug("Thread started.")
        while not self.__stopped:
            self.__wait()
            if not self.__stopped:
                try:
                    self.doCall()
                except Exception, e:
                    logger.critical("Unhandled exception", exc_info=e)
        logger.debug("Thread finished.")
    # Must return a non-naive datetime.
    def getNextCallDateTime(self):
        raise NotImplementedError()
    def doCall(self):
        # Invoked once per completed period; subclasses consume the buffered ticks.
        raise NotImplementedError()
class TushareBarFeedThread(TuSharePollingThread):
    """Polling thread that, at each bar-period boundary, assembles the
    buffered ticks into Bars and publishes them on the feed's queue."""
    # Events
    ON_BARS = 1
    def __init__(self, queue, identifiers, frequency):
        super(TushareBarFeedThread, self).__init__(identifiers)
        self.__queue = queue
        self.__frequency = frequency
        self.__updateNextBarClose()
    def __updateNextBarClose(self):
        # Next bar boundary derived from "now" and the bar frequency.
        self.__nextBarClose = resamplebase.build_range(utcnow(), self.__frequency).getEnding()
    def getNextCallDateTime(self):
        return self.__nextBarClose
    def doCall(self):
        # Called once per bar period: build one bar per instrument that
        # received ticks and publish them as a single ON_BARS event.
        endDateTime = self.__nextBarClose
        self.__updateNextBarClose()
        bar_dict = {}
        for identifier in self._identifiers:
            try:
                if not self._tickDSDict[identifier].empty():
                    bar_dict[identifier] = build_bar(to_market_datetime(endDateTime), self._tickDSDict[identifier])
            except Exception, e:
                logger.error(e)
        if len(bar_dict):
            bars = bar.Bars(bar_dict)
            self.__queue.put((TushareBarFeedThread.ON_BARS, bars))
def get_bar_list(df, frequency, date=None):
    """Slice a tick-data DataFrame into fixed-frequency bars.

    Returns one entry per period from 09:30 up to the newest tick time;
    periods without ticks are represented by None.

    NOTE(review): df appears to be ordered newest-first (df.ix[0].time is
    used as the overall end time and .get_values()[-1] as the period open) --
    confirm against the tushare tick-data format.
    """
    bar_list = []
    end_time = df.ix[0].time
    if date is None:
        date = datetime.datetime.now()
    slice_start_time = to_market_datetime(datetime.datetime(date.year, date.month , date.day, 9, 30, 0))
    while slice_start_time.strftime("%H:%M:%S") < end_time:
        slice_end_time = slice_start_time + datetime.timedelta(seconds=frequency)
        # Select ticks with start <= time < end; lexicographic comparison is
        # valid because the timestamps are zero-padded HH:MM:SS strings.
        ticks_slice = df.ix[(df.time < slice_end_time.strftime("%H:%M:%S")) &
                            (df.time >= slice_start_time.strftime("%H:%M:%S"))]
        if not ticks_slice.empty:
            open_ = ticks_slice.price.get_values()[-1]
            high = max(ticks_slice.price)
            low = min(ticks_slice.price)
            close = ticks_slice.price.get_values()[0]
            volume = sum(ticks_slice.volume)
            amount = sum(ticks_slice.amount)
            bar_list.append(bar.BasicBar(slice_start_time, open_, high, low,
                                         close, volume, 0, frequency, amount))
        else:
            bar_list.append(None)
        slice_start_time = slice_end_time
    return bar_list
class TuShareLiveFeed(barfeed.BaseBarFeed):
    """Live bar feed backed by tushare realtime quotes.

    Optionally replays bars built from historical tick data (today's and/or
    previous trading days') before streaming live bars produced by the
    polling thread.
    """
    QUEUE_TIMEOUT = 0.01  # seconds to block waiting for an event from the polling thread
    def __init__(self, identifiers, frequency, maxLen=dataseries.DEFAULT_MAX_LEN, replayDays=-1):
        barfeed.BaseBarFeed.__init__(self, frequency, maxLen)
        if not isinstance(identifiers, list):
            raise Exception("identifiers must be a list")
        self.__identifiers = identifiers
        self.__frequency = frequency
        self.__queue = Queue.Queue()
        self.__fill_today_history_bars(replayDays) # should run before polling thread start
        self.__thread = TushareBarFeedThread(self.__queue, identifiers, frequency)
        for instrument in identifiers:
            self.registerInstrument(instrument)
    ######################################################################
    # observer.Subject interface
    def start(self):
        if self.__thread.is_alive():
            raise Exception("Already strated")
        # Start the thread that runs the client.
        self.__thread.start()
    def stop(self):
        self.__thread.stop()
    def join(self):
        if self.__thread.is_alive():
            self.__thread.join()
    def eof(self):
        # The feed is exhausted once the polling thread has been stopped.
        return self.__thread.stopped()
    def peekDateTime(self):
        return None
    ######################################################################
    # barfeed.BaseBarFeed interface
    def getCurrentDateTime(self):
        return utcnow()
    def barsHaveAdjClose(self):
        return False
    def getNextBars(self):
        # Return the next Bars published by the polling thread, or None if no
        # event arrives within QUEUE_TIMEOUT seconds.
        ret = None
        try:
            eventType, eventData = self.__queue.get(True, TuShareLiveFeed.QUEUE_TIMEOUT)
            if eventType == TushareBarFeedThread.ON_BARS:
                ret = eventData
            else:
                logger.error("Invalid event received: %s - %s" % (eventType, eventData))
        except Queue.Empty:
            pass
        return ret
    ######################################################################
    # TuShareLiveFeed own interface
    def _fill_today_bars(self):
        # Replay today's bars built from today's tick history; no-op on holidays.
        # NOTE(review): weekday() in [5, 0] skips Saturday and Monday but not
        # Sunday -- looks suspicious since is_holiday() already covers the
        # weekend; verify the intent.
        today = datetime.date.today().isoformat()
        if is_holiday(today): # do nothing if holiday
            return
        elif datetime.date.today().weekday() in [5, 0]:
            return
        # #James:
        # if datetime.datetime.now().hour * 60 + 30 < 9*60 + 30:
        #     return
        today_bars = {}
        for identifier in self.__identifiers:
            try:
                df = ts.get_today_ticks(identifier)
                today_bars[identifier] = get_bar_list(df, self.__frequency, None)
            except Exception, e:
                logger.error(e)
        self.__fill_bars(today_bars)
    def __fill_bars(self, bars_dict):
        # Walk the per-period bar lists in lockstep (indexed by the first
        # instrument's list) and publish one ON_BARS event per period that has
        # data for at least one instrument.
        for index, value in enumerate(bars_dict[self.__identifiers[0]]):
            bar_dict = dict()
            for identifier in self.__identifiers:
                if bars_dict[identifier][index] is not None:
                    bar_dict[identifier] = bars_dict[identifier][index]
            if len(bar_dict):
                bars = bar.Bars(bar_dict)
                self.__queue.put((TushareBarFeedThread.ON_BARS, bars))
    def _fill_history_bars(self, replay_days):
        # Replay bars for the last replay_days trading days, oldest first.
        now = datetime.datetime.now()
        for day in get_trading_days(now, replay_days):
            bars_dict = {}
            for identifier in self.__identifiers:
                df = ts.get_tick_data(identifier, date=day.date().isoformat())
                bars_dict[identifier] = get_bar_list(df, self.__frequency, day)
            self.__fill_bars(bars_dict)
    def __fill_today_history_bars(self, replayDays):
        # replayDays: -1 = no replay, 0 = replay today only,
        # N > 0 = replay N previous trading days plus today.
        if replayDays < 0: # only allow -1 and >=0 integer value
            replayDays = -1
        if replayDays == -1:
            pass
        elif replayDays == 0: # replay today's quotation
            self._fill_today_bars()
        else:
            self._fill_history_bars(replayDays)
            self._fill_today_bars()
if __name__ == '__main__':
    # Manual smoke test: replay 2 days of minute bars for instrument 000581,
    # then stream live bars, printing each bar's high and timestamp.
    liveFeed = TuShareLiveFeed(['000581'], Frequency.MINUTE, dataseries.DEFAULT_MAX_LEN, 2)
    liveFeed.start()
    while not liveFeed.eof():
        bars = liveFeed.getNextBars()
        if bars is not None:
            print bars['000581'].getHigh(), bars['000581'].getDateTime()
    # test/
| {
"content_hash": "3668cceb49d59c85b1b7ab3ff0cd9ace",
"timestamp": "",
"source": "github",
"line_count": 411,
"max_line_length": 120,
"avg_line_length": 31.447688564476884,
"alnum_prop": 0.5806576402321083,
"repo_name": "Yam-cn/potato",
"id": "3fa16d3558b36a34a2d3dacf94621e889f915761",
"size": "13499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "engine/barfeed/tusharefeed.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "482582"
},
{
"name": "C++",
"bytes": "499680"
},
{
"name": "CSS",
"bytes": "269141"
},
{
"name": "HTML",
"bytes": "3627157"
},
{
"name": "JavaScript",
"bytes": "1343865"
},
{
"name": "PHP",
"bytes": "34371"
},
{
"name": "Python",
"bytes": "1484158"
},
{
"name": "Shell",
"bytes": "1044"
}
],
"symlink_target": ""
} |
from BaseThrusters import BaseThrusters
class Thrusters(BaseThrusters):
    """No-op mock of the thruster interface for running without hardware.

    Every operation is intentionally a stub that returns None.
    """

    def __init__(self):
        """Nothing to initialize on the mock."""

    def set(self, values):
        """Discard the requested thrust values."""

    def get(self):
        """Return None; there is no hardware state to report."""

    def stop(self):
        """Nothing to stop on the mock."""
| {
"content_hash": "a1493d0be93ddbbc6f0f21148f5a6c4e",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 39,
"avg_line_length": 13.75,
"alnum_prop": 0.5772727272727273,
"repo_name": "purduerov/XX-Core",
"id": "fd07825ac9f5f2dc11636e4b60ea421487164e7c",
"size": "220",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "rov/movement/hardware/Thrusters_Mock.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "2366"
},
{
"name": "CSS",
"bytes": "24789"
},
{
"name": "Go",
"bytes": "22577"
},
{
"name": "HTML",
"bytes": "3240"
},
{
"name": "JavaScript",
"bytes": "103794"
},
{
"name": "Makefile",
"bytes": "374"
},
{
"name": "Python",
"bytes": "228753"
},
{
"name": "Shell",
"bytes": "1761"
}
],
"symlink_target": ""
} |
"""Sample simulation parameters."""
import collections
_ParameterSamplerConfig = collections.namedtuple(
    'ParameterSamplerConfig', ['name', 'sampler'])


class ParameterSamplerConfig(_ParameterSamplerConfig):
  """Named-tuple configuration pairing a parameter name with a sampler.

  Fields:
    name: The parameter name. It should be one of the keyword arguments of
      the set generator.
    sampler: A zero-argument callable returning a value for the named
      parameter; the returned values may be deterministic or random.
      For example,

      universe_size_sampler = ParameterSamplerConfig(
          name='universe_size',
          sampler=lambda: np.random.randint(low=1e6, high=2e6, size=1)[0]
      )
  """
class ParameterSampler(object):
  """Callable that draws one value from each configured parameter sampler.

  Calling an instance returns a dict mapping each configured parameter name
  to a freshly sampled value; the dict can be passed to set generators via
  **kwargs.
  """

  def __init__(self, parameter_sampler_config_list):
    """Construct the samplers for set generator parameters.

    Args:
      parameter_sampler_config_list: An iterable of ParameterSamplerConfig.
    """
    self.parameter_sampler_config_list = parameter_sampler_config_list

  def __call__(self):
    sampled = {}
    for config in self.parameter_sampler_config_list:
      sampled[config.name] = config.sampler()
    return sampled
| {
"content_hash": "19b918cb8f06f4fb1c624595db5b36a9",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 78,
"avg_line_length": 30.58695652173913,
"alnum_prop": 0.6958066808813077,
"repo_name": "world-federation-of-advertisers/cardinality_estimation_evaluation_framework",
"id": "b9257569e9e0fe4fc96380c443d55171ce0da6fd",
"size": "2024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/simulations/parameters_sampler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "30965"
},
{
"name": "Jupyter Notebook",
"bytes": "124222"
},
{
"name": "Python",
"bytes": "650173"
}
],
"symlink_target": ""
} |
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b"\n\x1fgoogle/api/field_behavior.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto*\xa6\x01\n\rFieldBehavior\x12\x1e\n\x1a\x46IELD_BEHAVIOR_UNSPECIFIED\x10\x00\x12\x0c\n\x08OPTIONAL\x10\x01\x12\x0c\n\x08REQUIRED\x10\x02\x12\x0f\n\x0bOUTPUT_ONLY\x10\x03\x12\x0e\n\nINPUT_ONLY\x10\x04\x12\r\n\tIMMUTABLE\x10\x05\x12\x12\n\x0eUNORDERED_LIST\x10\x06\x12\x15\n\x11NON_EMPTY_DEFAULT\x10\x07:Q\n\x0e\x66ield_behavior\x12\x1d.google.protobuf.FieldOptions\x18\x9c\x08 \x03(\x0e\x32\x19.google.api.FieldBehaviorBp\n\x0e\x63om.google.apiB\x12\x46ieldBehaviorProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3"
)
# Everything below is protoc-generated wiring; do not edit by hand.
_FIELDBEHAVIOR = DESCRIPTOR.enum_types_by_name["FieldBehavior"]
FieldBehavior = enum_type_wrapper.EnumTypeWrapper(_FIELDBEHAVIOR)
# Module-level aliases for the FieldBehavior enum values.
FIELD_BEHAVIOR_UNSPECIFIED = 0
OPTIONAL = 1
REQUIRED = 2
OUTPUT_ONLY = 3
INPUT_ONLY = 4
IMMUTABLE = 5
UNORDERED_LIST = 6
NON_EMPTY_DEFAULT = 7
FIELD_BEHAVIOR_FIELD_NUMBER = 1052
field_behavior = DESCRIPTOR.extensions_by_name["field_behavior"]
if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: register the extension and set the serialized
    # options/offsets that the C descriptor implementation derives itself.
    google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
        field_behavior
    )
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\022FieldBehaviorProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI"
    _FIELDBEHAVIOR._serialized_start = 82
    _FIELDBEHAVIOR._serialized_end = 248
# @@protoc_insertion_point(module_scope)
| {
"content_hash": "42bfed4853f583ca170e0180bc7d6053",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 666,
"avg_line_length": 48.25,
"alnum_prop": 0.7927461139896373,
"repo_name": "googleapis/python-api-common-protos",
"id": "8863b05e5b011225e4065659a5dc608eb3b0552c",
"size": "2824",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/api/field_behavior_pb2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "267972"
},
{
"name": "Shell",
"bytes": "26583"
}
],
"symlink_target": ""
} |
import viennagrid
class BadFileFormatError(Exception):
    """Raised when a mesh file is malformed and therefore cannot be read."""
def read_netgen(filepath, domain, segmentation=None):
    """
    Read mesh data from a Netgen file and save the read domain data into the given domain.
    If a segmentation is provided, also save segmentation data into the given segmentation.

    :param filepath: Path to the mesh file.
    :type filepath: str
    :param domain: Domain
    :type domain: :class:`viennagrid.Domain`
    :param segmentation: Segmentation
    :type segmentation: :class:`viennagrid.Segmentation`

    :raises: IOError, :exc:`~viennagrid.io.BadFileFormatError`, TypeError
    """
    # Unwrap the high-level objects to the low-level counterparts expected by
    # the C++ wrapper.
    try:
        if isinstance(domain, viennagrid.Domain):
            domain = domain._domain
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid domain')
    try:
        if isinstance(segmentation, viennagrid.Segmentation):
            segmentation = segmentation._segmentation
    except AttributeError:
        raise TypeError('parameter at position 3 is not a valid segmentation')
    try:
        viennagrid.wrapper.read_netgen(filepath, domain, segmentation)
    except RuntimeError as e:
        # "as e" / str(e) instead of the Python-2-only "except X, e" and
        # deprecated e.message.
        raise BadFileFormatError(str(e))
def read_vtk(filepath, domain, segmentation=None, accessors={}):
    """
    Read mesh data from a VTK file and save the read domain data into the given domain.
    If a segmentation is provided, also save segmentation data into the given segmentation.

    :param filepath: Path to the mesh file.
    :type filepath: str
    :param domain: Domain
    :type domain: :class:`viennagrid.Domain`
    :param segmentation: Segmentation
    :type segmentation: :class:`viennagrid.Segmentation`
    :param accessors: Accessors to be used to get quantities that should be read from the mesh file,
                      in the form of a dictionary where keys are the names of the quantities (`str`)
                      and where the values are accessors (:class:`viennagrid.accessors.Accessor`) or
                      fields(:class:`viennagrid.accessors.Field`)
    :type accessors: dict

    :raises: IOError, :exc:`~viennagrid.io.BadFileFormatError`, TypeError
    """
    try:
        if isinstance(domain, viennagrid.Domain):
            domain = domain._domain
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid domain')
    try:
        if isinstance(segmentation, viennagrid.Segmentation):
            segmentation = segmentation._segmentation
    except AttributeError:
        raise TypeError('parameter at position 3 is not a valid segmentation')
    # Unwrap the high-level accessor objects to their low-level counterparts.
    low_level_accessors = {}
    for quantity_name, accessor in accessors.items():
        low_level_accessors[quantity_name] = accessor._accessor
    try:
        # Bug fix: pass the unwrapped accessors to the wrapper (the original
        # passed the high-level ``accessors`` dict, leaving
        # ``low_level_accessors`` dead).
        viennagrid.wrapper.read_vtk(filepath, domain, segmentation, low_level_accessors)
    except RuntimeError as e:
        raise BadFileFormatError(str(e))
def write_opendx(filepath, domain, accessors={}):
    """
    Write mesh data from the given domain to an OpenDX file.

    :param filepath: Path to the mesh file.
    :type filepath: str
    :param domain: Domain
    :type domain: :class:`viennagrid.Domain`
    :param accessors: Accessors to be used to get quantities that should be written to the mesh file,
                      in the form of a dictionary where keys are the names of the quantities (`str`)
                      and where the values are accessors (:class:`viennagrid.accessors.Accessor`) or
                      fields(:class:`viennagrid.accessors.Field`)
    :type accessors: dict

    :raises: IOError, :exc:`~viennagrid.io.BadFileFormatError`, TypeError
    """
    try:
        if isinstance(domain, viennagrid.Domain):
            domain = domain._domain
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid domain')
    # Unwrap the high-level accessor objects to their low-level counterparts.
    low_level_accessors = {}
    for quantity_name, accessor in accessors.items():
        low_level_accessors[quantity_name] = accessor._accessor
    try:
        # Bug fix: pass the unwrapped accessors to the wrapper (the original
        # passed the high-level ``accessors`` dict, leaving
        # ``low_level_accessors`` dead).
        viennagrid.wrapper.write_opendx(filepath, domain, low_level_accessors)
    except RuntimeError as e:
        raise BadFileFormatError(str(e))
def write_vtk(filepath, domain, segmentation=None, accessors={}):
    """
    Write mesh data from the given domain to a VTK file.
    If a segmentation is provided, also write the segmentation data to the file.

    :param filepath: Path to the mesh file.
    :type filepath: str
    :param domain: Domain
    :type domain: :class:`viennagrid.Domain`
    :param segmentation: Segmentation
    :type segmentation: :class:`viennagrid.Segmentation`
    :param accessors: Accessors to be used to get quantities that should be written to the mesh file,
                      in the form of a dictionary where keys are the names of the quantities (`str`)
                      and where the values are accessors (:class:`viennagrid.accessors.Accessor`) or
                      fields(:class:`viennagrid.accessors.Field`)
    :type accessors: dict

    :raises: IOError, :exc:`~viennagrid.io.BadFileFormatError`, TypeError
    """
    try:
        if isinstance(domain, viennagrid.Domain):
            domain = domain._domain
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid domain')
    try:
        if isinstance(segmentation, viennagrid.Segmentation):
            segmentation = segmentation._segmentation
    except AttributeError:
        raise TypeError('parameter at position 3 is not a valid segmentation')
    # Unwrap the high-level accessor objects to their low-level counterparts.
    low_level_accessors = {}
    for quantity_name, accessor in accessors.items():
        low_level_accessors[quantity_name] = accessor._accessor
    try:
        # Bug fix: pass the unwrapped accessors to the wrapper (the original
        # passed the high-level ``accessors`` dict, leaving
        # ``low_level_accessors`` dead).
        viennagrid.wrapper.write_vtk(filepath, domain, segmentation, low_level_accessors)
    except RuntimeError as e:
        raise BadFileFormatError(str(e))
| {
"content_hash": "651d66cff5ac6bb6eaa4538adc0c4fd8",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 98,
"avg_line_length": 37.156462585034014,
"alnum_prop": 0.7385573050164774,
"repo_name": "jonancm/viennagrid-python",
"id": "6862264545116840e1fe2091fb53233017fbe2a3",
"size": "5486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "viennagrid-python/viennagrid/io.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1287098"
},
{
"name": "Python",
"bytes": "434735"
},
{
"name": "Shell",
"bytes": "1916"
}
],
"symlink_target": ""
} |
'''
Copyright (C) 2012-2017 Diego Torres Milano
Created on Feb 2, 2015
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: Diego Torres Milano
'''
__version__ = '12.5.4'
import json
import os
import platform
import re
import subprocess
import sys
import threading
try:
import requests
REQUESTS_AVAILABLE = True
except:
REQUESTS_AVAILABLE = False
import time
from com.dtmilano.android.adb.adbclient import AdbClient
from com.dtmilano.android.common import obtainAdbPath
__author__ = 'diego'
DEBUG = False
lock = threading.Lock()
class RunTestsThread(threading.Thread):
    """
    Runs the instrumentation for the specified package in a new thread.
    """
    def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None, adbClient=None, testClass=None, testRunner=None):
        # args/kwargs are accepted for Thread-signature compatibility but are
        # not forwarded; only group/target/name/verbose reach the base class.
        threading.Thread.__init__(self, group=group, target=target, name=name, verbose=verbose)
        self.adbClient = adbClient
        self.testClass = testClass
        self.testRunner = testRunner
        # Package under test: the test class with its trailing ".test" stripped.
        self.pkg = re.sub('\.test$', '', self.testClass)
    def run(self):
        # Force-stop the target package, then run the instrumentation via
        # "am instrument"; raises RuntimeError when it exits non-zero.
        if DEBUG:
            print >> sys.stderr, "RunTestsThread: Acquiring lock"
        lock.acquire()
        if DEBUG:
            print >> sys.stderr, "RunTestsThread: Lock acquired"
        self.forceStop()
        time.sleep(3)
        if DEBUG:
            print >> sys.stderr, "Starting test..."
            print >> sys.stderr, "RunTestsThread: Releasing lock"
        lock.release()
        # The trailing echo surfaces the shell exit status in the command
        # output so it can be parsed below.
        out = self.adbClient.shell('am instrument -w ' + self.testClass + '/' + self.testRunner + '; echo "ERROR: $?"')
        if DEBUG:
            print >> sys.stderr, "\nFinished test."
        errmsg = out.splitlines()[-1]
        m = re.match('ERROR: (\d+)', errmsg)
        if m:
            exitval = int(m.group(1))
            if exitval != 0:
                raise RuntimeError('Cannot start test on device: ' + out)
        else:
            raise RuntimeError('Unknown message')
    def forceStop(self):
        # Kill the app under test so the instrumentation starts from a clean state.
        if DEBUG:
            print >> sys.stderr, "Cleaning up before start. Stopping '%s'" % self.pkg
        self.adbClient.shell('am force-stop ' + self.pkg)
class UiAutomatorHelper:
PACKAGE = 'com.dtmilano.android.culebratester'
TEST_CLASS = PACKAGE + '.test'
TEST_RUNNER = 'com.dtmilano.android.uiautomatorhelper.UiAutomatorHelperTestRunner'
def __init__(self, adbclient, adb=None, localport=9999, remoteport=9999, hostname='localhost'):
if not REQUESTS_AVAILABLE:
raise Exception('''Python Requests is needed for UiAutomatorHelper to work.
On Ubuntu install
$ sudo apt-get install python-requests
On OSX install
$ easy_install requests
''')
self.adbClient = adbclient
''' The adb client (a.k.a. device) '''
instrumentation = self.adbClient.shell('pm list instrumentation %s' % self.PACKAGE)
if not instrumentation:
raise RuntimeError('The target device does not contain the instrumentation for %s' % self.PACKAGE)
if not re.match('instrumentation:%s/%s \(target=%s\)' % (self.TEST_CLASS, self.TEST_RUNNER, self.PACKAGE), instrumentation):
raise RuntimeError('The instrumentation found for %s does not match the expected %s/%s' % (self.PACKAGE, self.TEST_CLASS, self.TEST_RUNNER))
self.adb = self.__whichAdb(adb)
''' The adb command '''
self.osName = platform.system()
''' The OS name. We sometimes need specific behavior. '''
self.isDarwin = (self.osName == 'Darwin')
''' Is it Mac OSX? '''
self.hostname = hostname
''' The hostname we are connecting to. '''
if hostname in ['localhost', '127.0.0.1']:
self.__redirectPort(localport, remoteport)
self.__runTests()
self.baseUrl = 'http://%s:%d' % (hostname, localport)
try:
self.session = self.__connectSession()
except RuntimeError, ex:
self.thread.forceStop()
raise ex
def __connectSession(self):
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: Acquiring lock"
lock.acquire()
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: Lock acquired"
print >> sys.stderr, "UiAutomatorHelper: Connecting session"
session = requests.Session()
if not session:
raise RuntimeError("Cannot create session")
tries = 10
while tries > 0:
time.sleep(0.5)
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: Attempting to connect to", self.baseUrl, '(tries=%s)' % tries
try:
response = session.head(self.baseUrl)
if response.status_code == 200:
break
except requests.exceptions.ConnectionError, ex:
tries -= 1
lock.release()
if tries == 0:
raise RuntimeError("Cannot connect to " + self.baseUrl)
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: HEAD", response
print >> sys.stderr, "UiAutomatorHelper: Releasing lock"
#lock.release()
return session
def __whichAdb(self, adb):
if adb:
if not os.access(adb, os.X_OK):
raise Exception('adb="%s" is not executable' % adb)
else:
# Using adbclient we don't need adb executable yet (maybe it's needed if we want to
# start adb if not running) or to redirect ports
adb = obtainAdbPath()
return adb
def __redirectPort(self, localport, remoteport):
self.localPort = localport
self.remotePort = remoteport
subprocess.check_call([self.adb, '-s', self.adbClient.serialno, 'forward', 'tcp:%d' % self.localPort,
'tcp:%d' % self.remotePort])
def __runTests(self):
if DEBUG:
print >> sys.stderr, "__runTests: start"
# We need a new AdbClient instance with timeout=None (means, no timeout) for the long running test service
newAdbClient = AdbClient(self.adbClient.serialno, self.adbClient.hostname, self.adbClient.port, timeout=None)
self.thread = RunTestsThread(adbClient=newAdbClient, testClass=self.TEST_CLASS, testRunner=self.TEST_RUNNER)
if DEBUG:
print >> sys.stderr, "__runTests: starting thread"
self.thread.start()
if DEBUG:
print >> sys.stderr, "__runTests: end"
def __httpCommand(self, url, params=None, method='GET'):
if method == 'GET':
if params:
response = self.session.get(self.baseUrl + url, params=params)
else:
response = self.session.get(self.baseUrl + url)
elif method == 'PUT':
response = self.session.put(self.baseUrl + url, params=params)
else:
raise RuntimeError("method not supported: " + method)
return response.content
#
# Device
#
def getDisplayRealSize(self):
return self.__httpCommand('/Device/getDisplayRealSize')
#
# UiAutomatorHelper internal commands
#
def quit(self):
try:
self.__httpCommand('/UiAutomatorHelper/quit')
except:
pass
self.session.close()
#
# UiDevice
#
def click(self, **kwargs):
params = kwargs
if not ((params.has_key('x') and params.has_key('y')) or params.has_key('oid')):
raise RuntimeError('click: (x, y) or oid must have a value')
if params.has_key('oid'):
return self.__httpCommand('/UiObject2/%d/click' % params['oid'])
else:
return self.__httpCommand('/UiDevice/click', params)
def dumpWindowHierarchy(self):
dump = self.__httpCommand('/UiDevice/dumpWindowHierarchy').decode(encoding='UTF-8', errors='replace')
if DEBUG:
print >> sys.stderr, "DUMP: ", dump
return dump
def findObject(self, **kwargs):
params = kwargs
if not (params.has_key('resourceId') or params.has_key('bySelector')):
raise RuntimeError('findObject: resourceId or bySelector must have a value')
response = self.__httpCommand('/UiDevice/findObject', params)
# { "status": "OK", "oid": 1, "className": "android.view.View"}
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: findObject: response=", response
r = json.loads(response)
if r[u'status'] == 'OK':
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: findObject: returning", int(r[u'oid'])
return UiObject2(self, int(r[u'oid']))
elif r[u'status'] == 'ERROR':
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: findObject: returning", int(r[u'oid'])
if r[u'statusCode'] == -1:
# Object not found
return None
raise RuntimeError("Error: " + response)
def longClick(self, **kwargs):
params = kwargs
if not ((params.has_key('x') and params.has_key('y')) or params.has_key('oid')):
raise RuntimeError('longClick: (x, y) or oid must have a value')
if params.has_key('oid'):
return self.__httpCommand('/UiObject2/%d/longClick' % params['oid'])
else:
return self.__httpCommand('/UiDevice/longClick', params)
def openNotification(self):
return self.__httpCommand('/UiDevice/openNotification')
def openQuickSettings(self):
return self.__httpCommand('/UiDevice/openQuickSettings')
def pressBack(self):
return self.__httpCommand('/UiDevice/pressBack')
def pressHome(self):
return self.__httpCommand('/UiDevice/pressHome')
def pressKeyCode(self, keyCode, metaState=0):
params = {'keyCode': keyCode, 'metaState': metaState}
return self.__httpCommand('/UiDevice/pressKeyCode', params)
def pressRecentApps(self):
return self.__httpCommand('/UiDevice/pressRecentApps')
def swipe(self, startX=-1, startY=-1, endX=-1, endY=-1, steps=10, segments=[], segmentSteps=5):
if startX != -1 and startY != -1:
params = {'startX': startX, 'startY': startY, 'endX': endX, 'endY': endY, 'steps': steps}
elif segments:
params = {'segments': ','.join(str(p) for p in segments), "segmentSteps": segmentSteps}
else:
raise RuntimeError("Cannot determine method invocation from provided parameters. startX and startY or segments must be provided.")
return self.__httpCommand('/UiDevice/swipe', params)
def takeScreenshot(self, scale=1.0, quality=90):
params = {'scale': scale, 'quality': quality}
return self.__httpCommand('/UiDevice/takeScreenshot', params)
def waitForIdle(self, timeout):
params = {'timeout': timeout}
return self.__httpCommand('/UiDevice/waitForIdle')
#
# UiObject
#
def setText(self, uiObject, text):
params = {'text': text}
return self.__httpCommand('/UiObject/0x%x/setText' % (uiObject.oid), params)
#
# UiObject2
#
def clickAndWait(self, uiObject2, eventCondition, timeout):
params = {'eventCondition': eventCondition, 'timeout': timeout}
return self.__httpCommand('/UiObject2/%d/clickAndWait' % (uiObject2.oid), params)
def getText(self, uiObject=None, uiObject2=None):
if uiObject:
path = '/UiObject/%d/getText' % (uiObject.oid)
elif uiObject2:
path = '/UiObject2/%d/getText' % (uiObject2.oid)
else:
raise ValueError("No uiObject or uiObject2 specified")
response = self.__httpCommand(path, None)
r = json.loads(response)
if r[u'status'] == 'OK':
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: getText: returning", r[u'text']
return r[u'text']
raise RuntimeError("Error: " + response)
def isChecked(self, uiObject=None):
# This path works for UiObject and UiObject2, so there's no need to handle both cases differently
path = '/UiObject/%d/isChecked' % (uiObject.oid)
response = self.__httpCommand(path, None)
r = json.loads(response)
if r[u'status'] == 'OK':
return r[u'checked']
raise RuntimeError("Error: " + response)
#
# UiScrollable
#
def uiScrollable(self, path, params = None):
response = self.__httpCommand('/UiScrollable/' + path, params)
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: uiScrollable: response=", response
r = None
try:
r = json.loads(response)
except:
print >> sys.stderr, "===================================="
print >> sys.stderr, "Invalid JSON RESPONSE: ", response
if r[u'status'] == 'OK':
if u'oid' in r:
if DEBUG:
print >> sys.stderr, "UiAutomatorHelper: uiScrollable: returning", int(r[u'oid'])
return int(r[u'oid']), r
else:
return r
if DEBUG:
print >> sys.stderr, "RESPONSE: ", response
print >> sus.stderr, "r=", r
raise RuntimeError("Error: " + response)
class UiObject:
    """Client-side proxy for a remote UiObject living in the CulebraTester server.

    Every operation is forwarded to the owning UiAutomatorHelper, identifying
    the remote object by its oid.
    """

    def __init__(self, uiAutomatorHelper, oid, response):
        self.uiAutomatorHelper = uiAutomatorHelper  # transport used to reach the server
        self.oid = oid  # remote object id
        self.className = response['className']  # class of the underlying Android view

    def getOid(self):
        """Return the remote object id."""
        return self.oid

    def getClassName(self):
        """Return the class name of the underlying view."""
        return self.className

    def click(self):
        """Click this object."""
        self.uiAutomatorHelper.click(oid=self.oid)

    def longClick(self):
        """Long-click this object."""
        self.uiAutomatorHelper.longClick(oid=self.oid)

    def getText(self):
        """Return this object's text."""
        return self.uiAutomatorHelper.getText(uiObject=self)

    def setText(self, text):
        """Set this object's text to *text*."""
        self.uiAutomatorHelper.setText(uiObject=self, text=text)
class UiObject2:
    """Client-side proxy for a remote UiObject2.

    All operations are delegated to the owning UiAutomatorHelper using this
    object's oid (or the proxy itself for text/condition calls).
    """

    def __init__(self, uiAutomatorHelper, oid):
        self.uiAutomatorHelper = uiAutomatorHelper  # transport used to reach the server
        self.oid = oid  # remote object id

    def click(self):
        """Click this object."""
        self.uiAutomatorHelper.click(oid=self.oid)

    def clickAndWait(self, eventCondition, timeout):
        """Click this object and wait for *eventCondition* (up to *timeout*)."""
        self.uiAutomatorHelper.clickAndWait(uiObject2=self, eventCondition=eventCondition, timeout=timeout)

    def isChecked(self):
        """
        :rtype: bool
        """
        return self.uiAutomatorHelper.isChecked(uiObject=self)

    def longClick(self):
        """Long-click this object."""
        self.uiAutomatorHelper.longClick(oid=self.oid)

    def getText(self):
        """Return this object's text."""
        return self.uiAutomatorHelper.getText(uiObject2=self)

    def setText(self, text):
        """Set this object's text to *text*."""
        self.uiAutomatorHelper.setText(uiObject2=self, text=text)
class UiScrollable:
    """Client-side proxy for a remote UiScrollable.

    Creation registers the object on the server; every other call is routed
    through the owning UiAutomatorHelper as '<oid>/<action>'.
    """

    def __init__(self, uiAutomatorHelper, uiSelector):
        self.uiAutomatorHelper = uiAutomatorHelper
        self.uiSelector = uiSelector
        # The server answers creation with (oid, full json response).
        self.oid, self.response = self.__createUiScrollable()

    def __createUiScrollable(self):
        """Create the remote UiScrollable for this selector; return (oid, response)."""
        return self.uiAutomatorHelper.uiScrollable('new', {'uiSelector': self.uiSelector})

    def _invoke(self, action, params=None):
        """Forward *action* on this remote object, e.g. '<oid>/flingForward'."""
        return self.uiAutomatorHelper.uiScrollable(str(self.oid) + '/' + action, params)

    def flingBackward(self):
        return self._invoke('flingBackward')

    def flingForward(self):
        return self._invoke('flingForward')

    def flingToBeginning(self, maxSwipes=20):
        return self._invoke('flingToBeginning', {'maxSwipes': maxSwipes})

    def flingToEnd(self, maxSwipes=20):
        return self._invoke('flingToEnd', {'maxSwipes': maxSwipes})

    def getChildByDescription(self, uiSelector, description, allowScrollSearch):
        """Scroll (optionally) to the child matching *description* and wrap it in a UiObject."""
        oid, response = self._invoke('getChildByDescription', {'uiSelector': uiSelector, 'contentDescription': description, 'allowScrollSearch': allowScrollSearch})
        return UiObject(self.uiAutomatorHelper, oid, response)

    def getChildByText(self, uiSelector, text, allowScrollSearch):
        """Scroll (optionally) to the child matching *text* and wrap it in a UiObject."""
        oid, response = self._invoke('getChildByText', {'uiSelector': uiSelector, 'text': text, 'allowScrollSearch': allowScrollSearch})
        return UiObject(self.uiAutomatorHelper, oid, response)

    def setAsHorizontalList(self):
        self._invoke('setAsHorizontalList')
        return self

    def setAsVerticalList(self):
        self._invoke('setAsVerticalList')
        return self
| {
"content_hash": "d382041e0ec3315b0512713ab91b79eb",
"timestamp": "",
"source": "github",
"line_count": 455,
"max_line_length": 204,
"avg_line_length": 37.48571428571429,
"alnum_prop": 0.618609287054409,
"repo_name": "mwasilew/AndroidViewClient",
"id": "087b68fa9cd97e0b281d6970dbe62ef91f0ee440",
"size": "17080",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/dtmilano/android/uiautomator/uiautomatorhelper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "9419"
},
{
"name": "Python",
"bytes": "1023420"
},
{
"name": "Shell",
"bytes": "1924"
}
],
"symlink_target": ""
} |
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
class Migration(migrations.Migration):
    """Squashed users migration: collapses users migrations 0001-0016 into one initial migration."""

    # Historical migrations this squash stands in for; Django skips them when all are applied.
    replaces = [('users', '0001_initial'), ('users', '0002_expanded_user_profiles'), ('users', '0003_fixed_default_avatar_link'), ('users', '0004_added_user_country_field'), ('users', '0005_blank_birthday_and_country_ok'), ('users', '0006_about_and_location_can_be_blank'), ('users', '0007_fixed_default_avatar_filepath'), ('users', '0008_fixed_default_avatar_filepath_v2'), ('users', '0009_fixed_default_avatar_filepath_v3'), ('users', '0010_count_user_problems_solved_and_submissions_made'), ('users', '0011_validators_and_help_text_for_user_model'), ('users', '0012_added_date_format_help_text'), ('users', '0013_fixed_default_avatar_filepath_v4'), ('users', '0014_fixed_default_avatar_filepath_v5'), ('users', '0015_subscribe_to_email_option'), ('users', '0016_remove_userprofile_avatar')]

    initial = True

    dependencies = [
        # UserProfile below has a OneToOneField to the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('display_name', models.CharField(default='', help_text='Minimum and maximum length of 3-40 characters.', max_length=40, validators=[django.core.validators.MinLengthValidator(3), django.core.validators.MaxLengthValidator(40)])),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('about', models.CharField(blank=True, help_text='Maximum length of 1000 characters.', max_length=1000, validators=[django.core.validators.MaxLengthValidator(1000)])),
                ('birthday', models.DateField(blank=True, help_text='Date format: YYYY-MM-DD.', null=True)),
                ('location', models.CharField(blank=True, help_text='Maximum length of 50 characters.', max_length=50, validators=[django.core.validators.MaxLengthValidator(50)])),
                ('country', django_countries.fields.CountryField(blank=True, max_length=2, null=True)),
                ('problems_solved', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('submissions_made', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('subscribe_to_emails', models.BooleanField(default=True, help_text='Subscribe to emails.')),
            ],
        ),
    ]
| {
"content_hash": "c37997f704c38d18601268c508e7559c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 793,
"avg_line_length": 77.73529411764706,
"alnum_prop": 0.6814226258040106,
"repo_name": "project-lovelace/lovelace-website",
"id": "74b66d9b028cb14b6ec6c89e1ef0fb858de8ef78",
"size": "2692",
"binary": false,
"copies": "1",
"ref": "refs/heads/dependabot/pip/src/sqlparse-0.4.2",
"path": "src/users/migrations/0001_squashed_0016_remove_userprofile_avatar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "262866"
},
{
"name": "HTML",
"bytes": "190701"
},
{
"name": "JavaScript",
"bytes": "3246"
},
{
"name": "Python",
"bytes": "53765"
}
],
"symlink_target": ""
} |
'''
Entry point module (keep at root):
This module starts the debugger.
'''
from __future__ import nested_scopes # Jython 2.1 support
import atexit
import os
import sys
import traceback
from _pydevd_bundle.pydevd_constants import IS_JYTH_LESS25, IS_PY3K, IS_PY34_OR_GREATER, IS_PYCHARM, get_thread_id, dict_keys, \
dict_contains, dict_iter_items, DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame, xrange, \
clear_cached_thread_id, INTERACTIVE_MODE_AVAILABLE
from _pydev_bundle import fix_getpass
from _pydev_bundle import pydev_imports, pydev_log
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
from _pydev_imps._pydev_saved_modules import threading
from _pydev_imps._pydev_saved_modules import time
from _pydev_imps._pydev_saved_modules import thread
from _pydevd_bundle import pydevd_io, pydevd_vm_type, pydevd_tracing
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo
from _pydevd_bundle.pydevd_breakpoints import ExceptionBreakpoint, update_exception_hook
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO, CMD_STEP_OVER, \
CMD_STEP_RETURN, CMD_STEP_INTO_MY_CODE, CMD_THREAD_SUSPEND, CMD_RUN_TO_LINE, \
CMD_ADD_EXCEPTION_BREAK, CMD_SMART_STEP_INTO, InternalConsoleExec, NetCommandFactory, \
PyDBDaemonThread, _queue, ReaderThread, GetGlobalDebugger, get_global_debugger, \
set_global_debugger, WriterThread, pydevd_find_thread_by_id, pydevd_log, \
start_client, start_server, InternalGetBreakpointException, InternalSendCurrExceptionTrace, \
InternalSendCurrExceptionTraceProceeded
from _pydevd_bundle.pydevd_custom_frames import CustomFramesContainer, custom_frames_container_init
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads
from _pydevd_bundle.pydevd_trace_dispatch import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips, show_tracing_warning
from _pydevd_frame_eval.pydevd_frame_eval_main import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, \
dummy_trace_dispatch, show_frame_eval_warning
from _pydevd_bundle.pydevd_utils import save_main_module
from pydevd_concurrency_analyser.pydevd_concurrency_logger import ThreadingLogger, AsyncioLogger, send_message, cur_time
from pydevd_concurrency_analyser.pydevd_thread_wrappers import wrap_threads
__version_info__ = (1, 0, 0)
__version_info_str__ = []
for v in __version_info__:
    __version_info_str__.append(str(v))
# Dotted version string built from __version_info__, e.g. '1.0.0'.
__version__ = '.'.join(__version_info_str__)
#IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
# Plugin support (django/jinja2 breakpoints etc.) requires features missing from Jython < 2.5.
SUPPORT_PLUGINS = not IS_JYTH_LESS25
PluginManager = None
if SUPPORT_PLUGINS:
    from _pydevd_bundle.pydevd_plugin_utils import PluginManager
# Cache the original threading functions (user code / wrappers may patch them later).
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.currentThread
try:
    'dummy'.encode('utf-8') # Added because otherwise Jython 2.2.1 wasn't finding the encoding (if it wasn't loaded in the main thread).
except:
    pass
# Module-level session state flags, mutated by the setup/connection code elsewhere in this module.
connected = False
bufferStdOutToServer = False
bufferStdErrToServer = False
remote = False
forked = False
file_system_encoding = getfilesystemencoding()
#=======================================================================================================================
# PyDBCommandThread
#=======================================================================================================================
class PyDBCommandThread(PyDBDaemonThread):
    """Daemon thread that periodically drains the debugger's internal command queues."""

    def __init__(self, py_db):
        PyDBDaemonThread.__init__(self)
        # Shared event used to wake this thread as soon as new commands are posted.
        self._py_db_command_thread_event = py_db._py_db_command_thread_event
        self.py_db = py_db
        self.setName('pydevd.CommandThread')

    def _on_run(self):
        # Delayed start (~4.5s total) so non-daemon program threads exist before we begin.
        for i in xrange(1, 10):
            time.sleep(0.5) #this one will only start later on (because otherwise we may not have any non-daemon threads
            if self.killReceived:
                return
        if self.pydev_do_not_trace:
            self.py_db.SetTrace(None) # no debugging on this thread
        try:
            while not self.killReceived:
                try:
                    self.py_db.process_internal_commands()
                except:
                    pydevd_log(0, 'Finishing debug communication...(2)')
                # Block until new commands arrive or 0.5s elapses, whichever is first.
                self._py_db_command_thread_event.clear()
                self._py_db_command_thread_event.wait(0.5)
        except:
            # Typically only seen during interpreter shutdown.
            pydev_log.debug(sys.exc_info()[0])
            #only got this error in interpreter shutdown
            #pydevd_log(0, 'Finishing debug communication...(3)')
#=======================================================================================================================
# CheckOutputThread
# Non-daemonic thread guaranties that all data is written even if program is finished
#=======================================================================================================================
class CheckOutputThread(PyDBDaemonThread):
    """Watchdog thread: flushes redirected output and ends the debug session
    once no program threads remain. Deliberately non-daemonic (see __init__)."""

    def __init__(self, py_db):
        PyDBDaemonThread.__init__(self)
        self.py_db = py_db
        self.setName('pydevd.CheckAliveThread')
        # Non-daemonic on purpose: guarantees all data is written even if the program finished.
        self.daemon = False
        py_db.output_checker = self

    def _on_run(self):
        if self.pydev_do_not_trace:
            disable_tracing = True
            if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
                # don't run untraced threads if we're in jython 2.2.1 or lower
                # jython bug: if we start a thread and another thread changes the tracing facility
                # it affects other threads (it's not set only for the thread but globally)
                # Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
                disable_tracing = False
            if disable_tracing:
                pydevd_tracing.SetTrace(None) # no debugging on this thread
        while not self.killReceived:
            time.sleep(0.3)
            # Finish once no user threads are alive and all pending/redirected output was sent.
            if not self.py_db.has_threads_alive() and self.py_db.writer.empty() \
                    and not has_data_to_redirect():
                try:
                    pydev_log.debug("No alive threads, finishing debug session")
                    self.py_db.finish_debugging_session()
                    kill_all_pydev_threads()
                except:
                    traceback.print_exc()
                self.killReceived = True
            self.py_db.check_output_redirect()

    def do_kill_pydev_thread(self):
        # Overridden: just flag the loop above to stop on its next iteration.
        self.killReceived = True
#=======================================================================================================================
# PyDB
#=======================================================================================================================
class PyDB:
""" Main debugging class
Lots of stuff going on here:
PyDB starts two threads on startup that connect to remote debugger (RDB)
The threads continuously read & write commands to RDB.
PyDB communicates with these threads through command queues.
Every RDB command is processed by calling process_net_command.
Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue
Some commands need to be executed on the right thread (suspend/resume & friends)
These are placed on the internal command queue.
"""
    def __init__(self):
        """Initialize all debugger state and register this instance as the global debugger."""
        set_global_debugger(self)
        pydevd_tracing.replace_sys_set_trace_func()
        # Network threads (created later by initialize_network).
        self.reader = None
        self.writer = None
        self.output_checker = None
        self.quitting = None
        self.cmd_factory = NetCommandFactory()
        self._cmd_queue = {} # the hash of Queues. Key is thread id, value is thread
        self.breakpoints = {}
        self.file_to_id_to_line_breakpoint = {}
        self.file_to_id_to_plugin_breakpoint = {}
        # Note: breakpoints dict should not be mutated: a copy should be created
        # and later it should be assigned back (to prevent concurrency issues).
        self.break_on_uncaught_exceptions = {}
        self.break_on_caught_exceptions = {}
        self.ready_to_run = False
        self._main_lock = thread.allocate_lock()
        self._lock_running_thread_ids = thread.allocate_lock()
        self._py_db_command_thread_event = threading.Event()
        CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event
        self._finish_debugging_session = False
        self._termination_event_set = False
        self.signature_factory = None
        self.SetTrace = pydevd_tracing.SetTrace
        self.break_on_exceptions_thrown_in_same_context = False
        self.ignore_exceptions_thrown_in_lines_with_ignore_exception = True
        # Suspend debugger even if breakpoint condition raises an exception
        SUSPEND_ON_BREAKPOINT_EXCEPTION = True
        self.suspend_on_breakpoint_exception = SUSPEND_ON_BREAKPOINT_EXCEPTION
        # By default user can step into properties getter/setter/deleter methods
        self.disable_property_trace = False
        self.disable_property_getter_trace = False
        self.disable_property_setter_trace = False
        self.disable_property_deleter_trace = False
        #this is a dict of thread ids pointing to thread ids. Whenever a command is passed to the java end that
        #acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not
        #find that thread alive anymore, we must remove it from this list and make the java side know that the thread
        #was killed.
        self._running_thread_ids = {}
        self._set_breakpoints_with_id = False
        # This attribute holds the file-> lines which have an @IgnoreException.
        self.filename_to_lines_where_exceptions_are_ignored = {}
        #working with plugins (lazily initialized)
        self.plugin = None
        self.has_plugin_line_breaks = False
        self.has_plugin_exception_breaks = False
        # Concurrency visualization loggers (set up elsewhere when requested).
        self.thread_analyser = None
        self.asyncio_analyser = None
        # matplotlib support in debugger and debug console
        self.mpl_in_use = False
        self.mpl_hooks_in_debug_console = False
        self.mpl_modules_for_patching = {}
        self._filename_to_not_in_scope = {}
        self.first_breakpoint_reached = False
        # User-configured stepping filters.
        self.is_filter_enabled = pydevd_utils.is_filter_enabled()
        self.is_filter_libraries = pydevd_utils.is_filter_libraries()
        self.show_return_values = False
        self.remove_return_values_flag = False
        # this flag disables frame evaluation even if it's available
        self.do_not_use_frame_eval = False
def get_plugin_lazy_init(self):
if self.plugin is None and SUPPORT_PLUGINS:
self.plugin = PluginManager(self)
return self.plugin
    def not_in_scope(self, filename):
        # True when *filename* lies outside the configured project roots.
        return pydevd_utils.not_in_project_roots(filename)
    def is_ignored_by_filters(self, filename):
        # True when the user-configured stepping filters exclude *filename*.
        return pydevd_utils.is_ignored_by_filter(filename)
def first_appearance_in_scope(self, trace):
if trace is None or self.not_in_scope(trace.tb_frame.f_code.co_filename):
return False
else:
trace = trace.tb_next
while trace is not None:
frame = trace.tb_frame
if not self.not_in_scope(frame.f_code.co_filename):
return False
trace = trace.tb_next
return True
    def has_threads_alive(self):
        """Return True while at least one non-pydevd, non-daemon program thread is alive."""
        for t in threadingEnumerate():
            if getattr(t, 'is_pydev_daemon_thread', False):
                #Important: Jython 2.5rc4 has a bug where a thread created with thread.start_new_thread won't be
                #set as a daemon thread, so, we also have to check for the 'is_pydev_daemon_thread' flag.
                #See: https://github.com/fabioz/PyDev.Debugger/issues/11
                continue
            if isinstance(t, PyDBDaemonThread):
                pydev_log.error_once(
                    'Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n')
            if is_thread_alive(t):
                # The main thread is always counted even when marked daemonic.
                if not t.isDaemon() or hasattr(t, "__pydevd_main_thread"):
                    return True
        return False
    def finish_debugging_session(self):
        # Flag polled by worker threads (e.g. CheckOutputThread) to wind the session down.
        self._finish_debugging_session = True
    def initialize_network(self, sock):
        """Start the reader/writer threads that talk to the IDE over *sock*."""
        try:
            sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it
        except:
            pass
        self.writer = WriterThread(sock)
        self.reader = ReaderThread(sock)
        self.writer.start()
        self.reader.start()
        time.sleep(0.1) # give threads time to start
def connect(self, host, port):
if host:
s = start_client(host, port)
else:
s = start_server(port)
self.initialize_network(s)
def get_internal_queue(self, thread_id):
""" returns internal command queue for a given thread.
if new queue is created, notify the RDB about it """
if thread_id.startswith('__frame__'):
thread_id = thread_id[thread_id.rfind('|') + 1:]
try:
return self._cmd_queue[thread_id]
except KeyError:
return self._cmd_queue.setdefault(thread_id, _queue.Queue()) #@UndefinedVariable
def post_internal_command(self, int_cmd, thread_id):
""" if thread_id is *, post to all """
if thread_id == "*":
threads = threadingEnumerate()
for t in threads:
thread_id = get_thread_id(t)
queue = self.get_internal_queue(thread_id)
queue.put(int_cmd)
else:
queue = self.get_internal_queue(thread_id)
queue.put(int_cmd)
    def check_output_redirect(self):
        """Send any buffered stdout/stderr to the IDE, (re)installing redirection first."""
        global bufferStdOutToServer
        global bufferStdErrToServer
        if bufferStdOutToServer:
            # init_stdout_redirect/init_stderr_redirect are defined elsewhere in this module.
            init_stdout_redirect()
            self.check_output(sys.stdoutBuf, 1) #@UndefinedVariable
        if bufferStdErrToServer:
            init_stderr_redirect()
            self.check_output(sys.stderrBuf, 2) #@UndefinedVariable
    def check_output(self, out, outCtx):
        '''Checks the output to see if we have to send some buffered output to the debug server
        @param out: sys.stdout or sys.stderr
        @param outCtx: the context indicating: 1=stdout and 2=stderr (to know the colors to write it)
        '''
        try:
            v = out.getvalue()
            if v:
                # NOTE(review): the return value is unused — make_io_message presumably also
                # posts the command when given the debugger; confirm against its definition.
                self.cmd_factory.make_io_message(v, outCtx, self)
        except:
            traceback.print_exc()
    def init_matplotlib_in_debug_console(self):
        """Register the pending matplotlib patches with the debug console's import hook."""
        # import hook and patches for matplotlib support in debug console
        from _pydev_bundle.pydev_import_hook import import_hook_manager
        # dict_keys yields a snapshot, so popping entries while iterating is safe here.
        for module in dict_keys(self.mpl_modules_for_patching):
            import_hook_manager.add_module_name(module, self.mpl_modules_for_patching.pop(module))
    def init_matplotlib_support(self):
        """Set up lazy activation hooks so matplotlib GUI event loops keep running under the debugger."""
        # prepare debugger for integration with matplotlib GUI event loop
        from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot, do_enable_gui
        # enable_gui_function in activate_matplotlib should be called in main thread. Unlike integrated console,
        # in the debug console we have no interpreter instance with exec_queue, but we run this code in the main
        # thread and can call it directly.
        class _MatplotlibHelper:
            # Oscillating flag shared by the return_control callback below.
            _return_control_osc = False
        def return_control():
            # Some of the input hooks (e.g. Qt4Agg) check return control without doing
            # a single operation, so we don't return True on every
            # call when the debug hook is in place to allow the GUI to run
            _MatplotlibHelper._return_control_osc = not _MatplotlibHelper._return_control_osc
            return _MatplotlibHelper._return_control_osc
        from pydev_ipython.inputhook import set_return_control_callback
        set_return_control_callback(return_control)
        # The actual patching happens lazily in _activate_mpl_if_needed / the import hook.
        self.mpl_modules_for_patching = {"matplotlib": lambda: activate_matplotlib(do_enable_gui),
                                         "matplotlib.pyplot": activate_pyplot,
                                         "pylab": activate_pylab }
def _activate_mpl_if_needed(self):
if len(self.mpl_modules_for_patching) > 0:
for module in dict_keys(self.mpl_modules_for_patching):
if module in sys.modules:
activate_function = self.mpl_modules_for_patching.pop(module)
activate_function()
self.mpl_in_use = True
def _call_mpl_hook(self):
try:
from pydev_ipython.inputhook import get_inputhook
inputhook = get_inputhook()
if inputhook:
inputhook()
except:
pass
    def suspend_all_other_threads(self, thread_suspended_at_bp):
        """Suspend every traceable program thread except *thread_suspended_at_bp*."""
        all_threads = threadingEnumerate()
        for t in all_threads:
            if getattr(t, 'is_pydev_daemon_thread', False):
                pass # I.e.: skip the DummyThreads created from pydev daemon threads
            elif hasattr(t, 'pydev_do_not_trace'):
                pass # skip some other threads, i.e. ipython history saving thread from debug console
            else:
                if t is thread_suspended_at_bp:
                    continue
                additional_info = None
                try:
                    additional_info = t.additional_info
                except AttributeError:
                    pass # that's ok, no info currently set
                if additional_info is not None:
                    # Install the trace function on the thread's live frames so the suspend takes effect.
                    for frame in additional_info.iter_frames(t):
                        self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True)
                        del frame
                    self.set_suspend(t, CMD_THREAD_SUSPEND)
                else:
                    # Without additional_info there is no frame registry to hook into.
                    sys.stderr.write("Can't suspend thread: %s\n" % (t,))
def process_internal_commands(self):
    '''This function processes internal commands.

    Under the main lock it: refreshes output redirection, registers any
    newly-seen live threads with the IDE, drains each thread's internal
    command queue (executing commands allowed on this thread, re-queueing the
    rest), notifies the IDE about threads that died, and finishes the
    debugging session once no program thread remains alive.
    '''
    self._main_lock.acquire()
    try:
        self.check_output_redirect()
        curr_thread_id = get_thread_id(threadingCurrentThread())
        program_threads_alive = {}
        all_threads = threadingEnumerate()
        program_threads_dead = []
        self._lock_running_thread_ids.acquire()
        try:
            for t in all_threads:
                if getattr(t, 'is_pydev_daemon_thread', False):
                    pass  # I.e.: skip the DummyThreads created from pydev daemon threads
                elif isinstance(t, PyDBDaemonThread):
                    pydev_log.error_once('Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n')
                elif is_thread_alive(t):
                    if not self._running_thread_ids:
                        # Fix multiprocessing debug with breakpoints in both main and child processes
                        # (https://youtrack.jetbrains.com/issue/PY-17092) When the new process is created, the main
                        # thread in the new process already has the attribute 'pydevd_id', so the new thread doesn't
                        # get new id with its process number and the debugger loses access to both threads.
                        # Therefore we should update thread_id for every main thread in the new process.
                        # TODO: Investigate: should we do this for all threads in threading.enumerate()?
                        # (i.e.: if a fork happens on Linux, this seems likely).
                        old_thread_id = get_thread_id(t)
                        clear_cached_thread_id(t)
                        clear_cached_thread_id(threadingCurrentThread())
                        thread_id = get_thread_id(t)
                        curr_thread_id = get_thread_id(threadingCurrentThread())
                        if pydevd_vars.has_additional_frames_by_id(old_thread_id):
                            # carry any console/post-mortem frames over to the new id
                            frames_by_id = pydevd_vars.get_additional_frames_by_id(old_thread_id)
                            pydevd_vars.add_additional_frame_by_id(thread_id, frames_by_id)
                    else:
                        thread_id = get_thread_id(t)
                    program_threads_alive[thread_id] = t
                    if not dict_contains(self._running_thread_ids, thread_id):
                        if not hasattr(t, 'additional_info'):
                            # see http://sourceforge.net/tracker/index.php?func=detail&aid=1955428&group_id=85796&atid=577329
                            # Let's create the additional info right away!
                            t.additional_info = PyDBAdditionalThreadInfo()
                        self._running_thread_ids[thread_id] = t
                        self.writer.add_command(self.cmd_factory.make_thread_created_message(t))
                    queue = self.get_internal_queue(thread_id)
                    cmdsToReadd = []  # some commands must be processed by the thread itself... if that's the case,
                                      # we will re-add the commands to the queue after executing.
                    try:
                        while True:
                            int_cmd = queue.get(False)
                            if not self.mpl_hooks_in_debug_console and isinstance(int_cmd, InternalConsoleExec):
                                # add import hooks for matplotlib patches if only debug console was started
                                try:
                                    self.init_matplotlib_in_debug_console()
                                    self.mpl_in_use = True
                                except:
                                    pydevd_log(2, "Matplotlib support in debug console failed", traceback.format_exc())
                                self.mpl_hooks_in_debug_console = True
                            if int_cmd.can_be_executed_by(curr_thread_id):
                                pydevd_log(2, "processing internal command ", str(int_cmd))
                                int_cmd.do_it(self)
                            else:
                                pydevd_log(2, "NOT processing internal command ", str(int_cmd))
                                cmdsToReadd.append(int_cmd)
                    except _queue.Empty:  #@UndefinedVariable
                        # queue drained: put back commands meant for other threads
                        for int_cmd in cmdsToReadd:
                            queue.put(int_cmd)
                        # this is how we exit
            thread_ids = list(self._running_thread_ids.keys())
            for tId in thread_ids:
                if not dict_contains(program_threads_alive, tId):
                    program_threads_dead.append(tId)
        finally:
            self._lock_running_thread_ids.release()
        for tId in program_threads_dead:
            try:
                self._process_thread_not_alive(tId)
            except:
                sys.stderr.write('Error iterating through %s (%s) - %s\n' % (
                    program_threads_alive, program_threads_alive.__class__, dir(program_threads_alive)))
                raise
        if len(program_threads_alive) == 0:
            # no user threads left: shut the debugger down
            self.finish_debugging_session()
            for t in all_threads:
                if hasattr(t, 'do_kill_pydev_thread'):
                    t.do_kill_pydev_thread()
    finally:
        self._main_lock.release()
def disable_tracing_while_running_if_frame_eval(self):
    """Swap to the no-op dispatcher while running under frame evaluation."""
    pydevd_tracing.settrace_while_running_if_frame_eval(self, self.dummy_trace_dispatch)
def enable_tracing_in_frames_while_running_if_frame_eval(self):
    """Re-install the real trace dispatcher while running under frame evaluation."""
    pydevd_tracing.settrace_while_running_if_frame_eval(self, self.trace_dispatch)
def set_tracing_for_untraced_contexts_if_not_frame_eval(self, ignore_frame=None, overwrite_prev_trace=False):
    """Like set_tracing_for_untraced_contexts, but a no-op when frame
    evaluation is active (frame eval traces frames without f_trace hooks)."""
    if self.frame_eval_func is not None:
        return
    self.set_tracing_for_untraced_contexts(ignore_frame, overwrite_prev_trace)
def set_tracing_for_untraced_contexts(self, ignore_frame=None, overwrite_prev_trace=False):
    """Install the trace function on the live frames of all user threads.

    *ignore_frame* (typically the caller's own frame) is skipped; existing
    f_trace hooks are replaced only when *overwrite_prev_trace* is true.
    No-op when frame evaluation is in use.
    """
    # Enable the tracing for existing threads (because there may be frames being executed that
    # are currently untraced).
    if self.frame_eval_func is not None:
        return
    threads = threadingEnumerate()
    try:
        for t in threads:
            if getattr(t, 'is_pydev_daemon_thread', False):
                continue
            # TODO: optimize so that we only actually add that tracing if it's in
            # the new breakpoint context.
            additional_info = None
            try:
                additional_info = t.additional_info
            except AttributeError:
                pass  # that's ok, no info currently set
            if additional_info is not None:
                for frame in additional_info.iter_frames(t):
                    if frame is not ignore_frame:
                        self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=overwrite_prev_trace)
    finally:
        # drop all frame/thread references so nothing is kept alive by locals
        frame = None
        t = None
        threads = None
        additional_info = None
def consolidate_breakpoints(self, file, id_to_breakpoint, breakpoints):
    """Rebuild the line -> breakpoint map for *file* and invalidate caches.

    The per-file entry in *breakpoints* is replaced wholesale from the
    id -> breakpoint mapping, and the global "skip this frame/line" caches
    are cleared since cached decisions may now be stale.
    """
    breakpoints[file] = dict((bp.line, bp) for bp in id_to_breakpoint.values())
    global_cache_skips.clear()
    global_cache_frame_skips.clear()
def add_break_on_exception(
    self,
    exception,
    condition,
    expression,
    notify_always,
    notify_on_terminate,
    notify_on_first_raise_only,
    ignore_libraries=False
):
    """Register an exception breakpoint and return it.

    Returns None (after logging) if *exception* names a class that cannot be
    imported.  Depending on the notify flags the breakpoint is stored in the
    uncaught and/or caught maps; those dicts are replaced via copy-and-swap
    so concurrent readers never observe a partially-updated mapping.
    """
    try:
        eb = ExceptionBreakpoint(
            exception,
            condition,
            expression,
            notify_always,
            notify_on_terminate,
            notify_on_first_raise_only,
            ignore_libraries
        )
    except ImportError:
        pydev_log.error("Error unable to add break on exception for: %s (exception could not be imported)\n" % (exception,))
        return None
    if eb.notify_on_terminate:
        # break on uncaught: swap in an updated copy of the map
        cp = self.break_on_uncaught_exceptions.copy()
        cp[exception] = eb
        if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
            pydev_log.error("Exceptions to hook on terminate: %s\n" % (cp,))
        self.break_on_uncaught_exceptions = cp
    if eb.notify_always:
        # break on caught: same copy-and-swap approach
        cp = self.break_on_caught_exceptions.copy()
        cp[exception] = eb
        if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
            pydev_log.error("Exceptions to hook always: %s\n" % (cp,))
        self.break_on_caught_exceptions = cp
    return eb
def update_after_exceptions_added(self, added):
    """React to newly added exception breakpoints.

    Walks *added* in order and, at the first breakpoint of each kind,
    refreshes the matching machinery: the sys.excepthook wrapper for
    break-on-terminate, and frame tracing for break-always.  Each refresh
    happens at most once.
    """
    hooked_uncaught = False
    hooked_caught = False
    for exc_break in added:
        if exc_break.notify_on_terminate and not hooked_uncaught:
            hooked_uncaught = True
            update_exception_hook(self)
        if exc_break.notify_always and not hooked_caught:
            hooked_caught = True
            self.set_tracing_for_untraced_contexts_if_not_frame_eval()
def _process_thread_not_alive(self, threadId):
    """ if thread is not alive, cancel trace_dispatch processing """
    with self._lock_running_thread_ids:
        thread = self._running_thread_ids.pop(threadId, None)
        if thread is None:
            # already removed (or never registered) -- nothing to notify
            return
        if not thread.additional_info.pydev_notify_kill:
            thread.additional_info.pydev_notify_kill = True
    # notify the IDE outside the lock
    self.writer.add_command(self.cmd_factory.make_thread_killed_message(threadId))
def set_suspend(self, thread, stop_reason):
    """Flag *thread* as suspended so its trace function parks it in
    do_wait_suspend() on the next traced event; record why it stopped."""
    thread.additional_info.suspend_type = PYTHON_SUSPEND
    thread.additional_info.pydev_state = STATE_SUSPEND
    thread.stop_reason = stop_reason
    # If conditional breakpoint raises any exception during evaluation send details to Java
    if stop_reason == CMD_SET_BREAK and self.suspend_on_breakpoint_exception:
        self._send_breakpoint_condition_exception(thread)
def _send_breakpoint_condition_exception(self, thread):
    """If conditional breakpoint raises an exception during evaluation
    send exception details to java
    """
    thread_id = get_thread_id(thread)
    conditional_breakpoint_exception_tuple = thread.additional_info.conditional_breakpoint_exception
    # conditional_breakpoint_exception_tuple - should contain 2 values (exception_type, stacktrace)
    if conditional_breakpoint_exception_tuple and len(conditional_breakpoint_exception_tuple) == 2:
        exc_type, stacktrace = conditional_breakpoint_exception_tuple
        int_cmd = InternalGetBreakpointException(thread_id, exc_type, stacktrace)
        # Reset the conditional_breakpoint_exception details to None so it is reported only once
        thread.additional_info.conditional_breakpoint_exception = None
        self.post_internal_command(int_cmd, thread_id)
def send_caught_exception_stack(self, thread, arg, curr_frame_id):
    """Sends details on the exception which was caught (and where we stopped) to the java side.
    arg is: exception type, description, traceback object
    """
    tid = get_thread_id(thread)
    self.post_internal_command(InternalSendCurrExceptionTrace(tid, arg, curr_frame_id), tid)
def send_caught_exception_stack_proceeded(self, thread):
    """Sends that some thread was resumed and is no longer showing an exception trace.
    """
    tid = get_thread_id(thread)
    self.post_internal_command(InternalSendCurrExceptionTraceProceeded(tid), tid)
    # flush the queued command right away so the IDE updates promptly
    self.process_internal_commands()
def send_process_created_message(self):
    """Sends a message that a new process has been created.
    """
    self.writer.add_command(self.cmd_factory.make_process_created_message())
def set_next_statement(self, frame, event, func_name, next_line):
    """Try to jump execution of *frame* to *next_line* (run-to-line /
    set-next-statement).

    Returns (stop, old_line, response_msg).  The jump is only attempted for
    'line'/'exception' events and only within the bottom frame (the frame's
    function must match *func_name*).  Assigning frame.f_lineno performs the
    actual jump and may raise ValueError for invalid targets (callers in
    do_wait_suspend catch it).
    """
    stop = False
    response_msg = ""
    old_line = frame.f_lineno
    if event == 'line' or event == 'exception':
        #If we're already in the correct context, we have to stop it now, because we can act only on
        #line events -- if a return was the next statement it wouldn't work (so, we have this code
        #repeated at pydevd_frame).
        curr_func_name = frame.f_code.co_name
        #global context is set with an empty name
        if curr_func_name in ('?', '<module>'):
            curr_func_name = ''
        if curr_func_name == func_name:
            line = next_line
            if frame.f_lineno == line:
                # already on the target line: just stop
                stop = True
            else:
                # f_lineno can only be written while a local trace function
                # is set; install one temporarily if needed.
                if frame.f_trace is None:
                    frame.f_trace = self.trace_dispatch
                frame.f_lineno = line
                frame.f_trace = None
                stop = True
        else:
            response_msg = "jump is available only within the bottom frame"
    return stop, old_line, response_msg
def do_wait_suspend(self, thread, frame, event, arg, suspend_type="trace", send_suspend_message=True):  #@UnusedVariable
    """ busy waits until the thread state changes to RUN
    it expects thread's state as attributes of the thread.
    Upon running, processes any outstanding Stepping commands.

    Also announces the suspension (and any custom frames belonging to this
    thread) to the IDE, keeps matplotlib GUI hooks alive while waiting, and,
    once resumed, prepares tracing state for whichever step command was
    requested.  May recurse into itself for run-to-line handling.
    """
    self.process_internal_commands()
    if send_suspend_message:
        message = thread.additional_info.pydev_message
        cmd = self.cmd_factory.make_thread_suspend_message(get_thread_id(thread), frame, thread.stop_reason, message, suspend_type)
        self.writer.add_command(cmd)
    CustomFramesContainer.custom_frames_lock.acquire()  # @UndefinedVariable
    try:
        from_this_thread = []
        for frame_id, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
            if custom_frame.thread_id == thread.ident:
                # print >> sys.stderr, 'Frame created: ', frame_id
                self.writer.add_command(self.cmd_factory.make_custom_frame_created_message(frame_id, custom_frame.name))
                self.writer.add_command(self.cmd_factory.make_thread_suspend_message(frame_id, custom_frame.frame, CMD_THREAD_SUSPEND, "", suspend_type))
            from_this_thread.append(frame_id)
    finally:
        CustomFramesContainer.custom_frames_lock.release()  # @UndefinedVariable
    info = thread.additional_info
    if info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session:
        # before every stop check if matplotlib modules were imported inside script code
        self._activate_mpl_if_needed()
    while info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session:
        if self.mpl_in_use:
            # call input hooks if only matplotlib is in use
            self._call_mpl_hook()
        self.process_internal_commands()
        time.sleep(0.01)
    # process any stepping instructions
    if info.pydev_step_cmd == CMD_STEP_INTO or info.pydev_step_cmd == CMD_STEP_INTO_MY_CODE:
        info.pydev_step_stop = None
        info.pydev_smart_step_stop = None
    elif info.pydev_step_cmd == CMD_STEP_OVER:
        info.pydev_step_stop = frame
        info.pydev_smart_step_stop = None
        self.set_trace_for_frame_and_parents(frame)
    elif info.pydev_step_cmd == CMD_SMART_STEP_INTO:
        self.set_trace_for_frame_and_parents(frame)
        info.pydev_step_stop = None
        info.pydev_smart_step_stop = frame
    elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT:
        self.set_trace_for_frame_and_parents(frame)
        stop = False
        response_msg = ""
        old_line = frame.f_lineno
        if not IS_PYCHARM:
            stop, _, response_msg = self.set_next_statement(frame, event, info.pydev_func_name, info.pydev_next_line)
            if stop:
                # jump succeeded: suspend again at the new location
                info.pydev_state = STATE_SUSPEND
                self.do_wait_suspend(thread, frame, event, arg, "trace")
                return
        else:
            try:
                stop, old_line, response_msg = self.set_next_statement(frame, event, info.pydev_func_name, info.pydev_next_line)
            except ValueError as e:
                # invalid jump target (raised by the f_lineno assignment)
                response_msg = "%s" % e
            finally:
                seq = info.pydev_message
                cmd = self.cmd_factory.make_set_next_stmnt_status_message(seq, stop, response_msg)
                self.writer.add_command(cmd)
                info.pydev_message = ''
            if stop:
                info.pydev_state = STATE_RUN
                # `f_line` should be assigned within a tracing function, so, we can't assign it here
                # for the frame evaluation debugger. For tracing debugger it will be assigned, but we should
                # revert the previous value, because both debuggers should behave the same way
                try:
                    self.set_next_statement(frame, event, info.pydev_func_name, old_line)
                except:
                    pass
            else:
                info.pydev_step_cmd = -1
                info.pydev_state = STATE_SUSPEND
                thread.stop_reason = CMD_THREAD_SUSPEND
                # return to the suspend state and wait for other command
                self.do_wait_suspend(thread, frame, event, arg, "trace", send_suspend_message=False)
                return
    elif info.pydev_step_cmd == CMD_STEP_RETURN:
        back_frame = frame.f_back
        if back_frame is not None:
            # steps back to the same frame (in a return call it will stop in the 'back frame' for the user)
            info.pydev_step_stop = frame
            self.set_trace_for_frame_and_parents(frame)
        else:
            # No back frame?!? -- this happens in jython when we have some frame created from an awt event
            # (the previous frame would be the awt event, but this doesn't make part of 'jython', only 'java')
            # so, if we're doing a step return in this situation, it's the same as just making it run
            info.pydev_step_stop = None
            info.pydev_step_cmd = -1
            info.pydev_state = STATE_RUN
    if self.frame_eval_func is not None and info.pydev_state == STATE_RUN:
        if info.pydev_step_cmd == -1:
            if not self.do_not_use_frame_eval:
                # plain resume: frame eval takes over; neutralize f_trace hooks
                self.SetTrace(self.dummy_trace_dispatch)
                self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True, dispatch_func=dummy_trace_dispatch)
        else:
            self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True)
            # enable old tracing function for stepping
            self.SetTrace(self.trace_dispatch)
    del frame
    cmd = self.cmd_factory.make_thread_run_message(get_thread_id(thread), info.pydev_step_cmd)
    self.writer.add_command(cmd)
    CustomFramesContainer.custom_frames_lock.acquire()  # @UndefinedVariable
    try:
        # The ones that remained on last_running must now be removed.
        for frame_id in from_this_thread:
            # print >> sys.stderr, 'Removing created frame: ', frame_id
            self.writer.add_command(self.cmd_factory.make_thread_killed_message(frame_id))
    finally:
        CustomFramesContainer.custom_frames_lock.release()  # @UndefinedVariable
def handle_post_mortem_stop(self, thread, frame, frames_byid, exception):
    """Suspend *thread* on *frame* so the user can inspect an unhandled
    *exception* post-mortem.

    *frames_byid* (id(frame) -> frame) is registered so the IDE can browse
    the whole traceback; it is always unregistered on the way out.
    """
    pydev_log.debug("We are stopping in post-mortem\n")
    thread_id = get_thread_id(thread)
    pydevd_vars.add_additional_frame_by_id(thread_id, frames_byid)
    try:
        try:
            add_exception_to_frame(frame, exception)
            self.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
            self.do_wait_suspend(thread, frame, 'exception', None, "trace")
        except:
            # never let a debugger error escape the post-mortem path
            pydev_log.error("We've got an error while stopping in post-mortem: %s\n"%sys.exc_info()[0])
    finally:
        pydevd_vars.remove_additional_frame_by_id(thread_id)
def set_trace_for_frame_and_parents(self, frame, also_add_to_passed_frame=True, overwrite_prev_trace=False, dispatch_func=None):
    """Install *dispatch_func* (default: self.trace_dispatch) on *frame*'s
    whole parent chain; *frame* itself is included only when
    *also_add_to_passed_frame* is true."""
    if dispatch_func is None:
        dispatch_func = self.trace_dispatch
    if also_add_to_passed_frame:
        self.update_trace(frame, dispatch_func, overwrite_prev_trace)
    current = frame.f_back
    while current is not None:
        self.update_trace(current, dispatch_func, overwrite_prev_trace)
        current = current.f_back
def update_trace(self, frame, dispatch_func, overwrite_prev):
    """Install *dispatch_func* as *frame*'s local trace function.

    An existing trace function is replaced only when *overwrite_prev* is
    true; otherwise a leftover 'trace_exception' hook is swapped back to the
    owning debugger's trace_dispatch.
    """
    if frame.f_trace is None:
        frame.f_trace = dispatch_func
    else:
        if overwrite_prev:
            frame.f_trace = dispatch_func
        else:
            try:
                #If it's the trace_exception, go back to the frame trace dispatch!
                # (im_func/im_self only exist on Python 2 bound methods; elsewhere
                # the AttributeError below is expected and ignored.)
                if frame.f_trace.im_func.__name__ == 'trace_exception':
                    frame.f_trace = frame.f_trace.im_self.trace_dispatch
            except AttributeError:
                pass
    # NOTE(review): this trailing rebind is dead code (the local is deleted on
    # the next line and never used) -- kept as-is; only the `del` matters for
    # dropping the frame reference.
    frame = frame.f_back
    del frame
def prepare_to_run(self):
    ''' Shared code to prepare debugging by installing traces and registering threads '''
    if self.signature_factory is not None or self.thread_analyser is not None:
        # we need all data to be sent to IDE even after program finishes
        CheckOutputThread(self).start()
        # turn off frame evaluation for concurrency visualization
        self.frame_eval_func = None
    self.patch_threads()
    pydevd_tracing.SetTrace(self.trace_dispatch, self.frame_eval_func, self.dummy_trace_dispatch)
    # There is no need to set tracing function if frame evaluation is available. Moreover, there is no need to patch thread
    # functions, because frame evaluation function is set to all threads by default.
    PyDBCommandThread(self).start()
    if show_tracing_warning or show_frame_eval_warning:
        # tell the IDE the fast (cython/frame-eval) path is unavailable
        cmd = self.cmd_factory.make_show_cython_warning_message()
        self.writer.add_command(cmd)
def patch_threads(self):
    """Arrange for every thread created from now on to be traced.

    threading.settrace covers threads started through the threading module
    (unavailable on some Jython versions, hence the blanket except), while
    patch_thread_modules covers the lower-level thread-start entry points.
    """
    try:
        import threading  # not available in jython!
        threading.settrace(self.trace_dispatch)  # for all future threads
    except:
        pass
    from _pydev_bundle.pydev_monkey import patch_thread_modules
    patch_thread_modules()
def get_fullname(self, mod_name):
    """Resolve a module name to its source filename via its loader.

    Returns None when the loader cannot be obtained or exposes no filename
    accessor.  Tries the public get_filename first, then the legacy private
    _get_filename.
    """
    if IS_PY3K:
        import pkgutil
    else:
        from _pydev_imps import _pydev_pkgutil_old as pkgutil
    try:
        loader = pkgutil.get_loader(mod_name)
    except:
        return None
    if loader is None:
        return None
    for attr in ("get_filename", "_get_filename"):
        meth = getattr(loader, attr, None)
        if meth is not None:
            return meth(mod_name)
    return None
def run(self, file, globals=None, locals=None, is_module=False, set_trace=True):
    """Run the user's program under the debugger.

    *file* is a script path (or a module name when *is_module* is true, in
    which case it is resolved to a filename first).  When *globals* is None a
    fresh __main__-like module namespace is created.  With *set_trace* the
    method fixes sys.path to match a plain run, waits for the IDE's run
    command and installs tracing before executing.  Returns the globals dict
    the program ran in.
    """
    module_name = None
    if is_module:
        module_name = file
        filename = self.get_fullname(file)
        if filename is None:
            sys.stderr.write("No module named %s\n" % file)
            return
        else:
            file = filename
    if os.path.isdir(file):
        # directory target: run its __main__.py, mirroring `python <dir>`
        new_target = os.path.join(file, '__main__.py')
        if os.path.isfile(new_target):
            file = new_target
    if globals is None:
        m = save_main_module(file, 'pydevd')
        globals = m.__dict__
        try:
            globals['__builtins__'] = __builtins__
        except NameError:
            pass  # Not there on Jython...
    if locals is None:
        locals = globals
    if set_trace:
        # Predefined (writable) attributes: __name__ is the module's name;
        # __doc__ is the module's documentation string, or None if unavailable;
        # __file__ is the pathname of the file from which the module was loaded,
        # if it was loaded from a file. The __file__ attribute is not present for
        # C modules that are statically linked into the interpreter; for extension modules
        # loaded dynamically from a shared library, it is the pathname of the shared library file.
        # I think this is an ugly hack, bug it works (seems to) for the bug that says that sys.path should be the same in
        # debug and run.
        # NOTE(review): 'm' is only bound when globals was None above; calling
        # run(..., globals=<dict>, set_trace=True) would raise NameError here --
        # confirm callers always pass globals=None on this path.
        if m.__file__.startswith(sys.path[0]):
            # print >> sys.stderr, 'Deleting: ', sys.path[0]
            del sys.path[0]
        if not is_module:
            # now, the local directory has to be added to the pythonpath
            # sys.path.insert(0, os.getcwd())
            # Changed: it's not the local directory, but the directory of the file launched
            # The file being run must be in the pythonpath (even if it was not before)
            sys.path.insert(0, os.path.split(file)[0])
        while not self.ready_to_run:
            time.sleep(0.1)  # busy wait until we receive run command
        if self.break_on_caught_exceptions or (self.plugin and self.plugin.has_exception_breaks()) or self.signature_factory:
            # disable frame evaluation if there are exception breakpoints with 'On raise' activation policy
            # or if there are plugin exception breakpoints or if collecting run-time types is enabled
            self.frame_eval_func = None
        # call prepare_to_run when we already have all information about breakpoints
        self.prepare_to_run()
    if self.thread_analyser is not None:
        wrap_threads()
        t = threadingCurrentThread()
        self.thread_analyser.set_start_time(cur_time())
        send_message("threading_event", 0, t.getName(), get_thread_id(t), "thread", "start", file, 1, None, parent=get_thread_id(t))
    if self.asyncio_analyser is not None:
        # we don't have main thread in asyncio graph, so we should add a fake event
        send_message("asyncio_event", 0, "Task", "Task", "thread", "stop", file, 1, frame=None, parent=None)
    try:
        if INTERACTIVE_MODE_AVAILABLE:
            self.init_matplotlib_support()
    except:
        sys.stderr.write("Matplotlib support in debugger failed\n")
        traceback.print_exc()
    if hasattr(sys, 'exc_clear'):
        # we should clean exception information in Python 2, before user's code execution
        sys.exc_clear()
    if not is_module:
        pydev_imports.execfile(file, globals, locals)  # execute the script
    else:
        # Run with the -m switch
        import runpy
        if hasattr(runpy, '_run_module_as_main'):
            # Newer versions of Python actually use this when the -m switch is used.
            runpy._run_module_as_main(module_name, alter_argv=False)
        else:
            runpy.run_module(module_name)
    return globals
def exiting(self):
    """Flush the standard streams, push any redirected output to the IDE
    and send the final exit notification."""
    sys.stdout.flush()
    sys.stderr.flush()
    self.check_output_redirect()
    self.writer.add_command(self.cmd_factory.make_exit_message())
def wait_for_commands(self, globals):
    """Serve debugger-console commands forever.

    Builds a fake 'Console' frame exposing *globals* so the IDE can evaluate
    expressions against it, announces the console, then loops processing
    internal commands (and matplotlib input hooks).  This call never returns.
    """
    self._activate_mpl_if_needed()
    thread = threading.currentThread()
    from _pydevd_bundle import pydevd_frame_utils
    frame = pydevd_frame_utils.Frame(None, -1, pydevd_frame_utils.FCode("Console",
                                                                       os.path.abspath(os.path.dirname(__file__))), globals, globals)
    thread_id = get_thread_id(thread)
    from _pydevd_bundle import pydevd_vars
    # register the fake frame so variable lookups by frame id work
    pydevd_vars.add_additional_frame_by_id(thread_id, {id(frame): frame})
    cmd = self.cmd_factory.make_show_console_message(thread_id, frame)
    self.writer.add_command(cmd)
    while True:
        if self.mpl_in_use:
            # call input hooks if only matplotlib is in use
            self._call_mpl_hook()
        self.process_internal_commands()
        time.sleep(0.01)
# Bind the module-level dispatch helpers as class attributes so instances can
# reach them as self.trace_dispatch / self.frame_eval_func / etc. (and so the
# frame-eval path can swap them per-debugger).
trace_dispatch = _trace_dispatch
frame_eval_func = frame_eval_func
dummy_trace_dispatch = dummy_trace_dispatch
enable_cache_frames_without_breaks = enable_cache_frames_without_breaks
def set_debug(setup):
    """Turn on verbose debugger-internal tracing flags in the *setup* dict."""
    setup.update({
        'DEBUG_RECORD_SOCKET_READS': True,
        'DEBUG_TRACE_BREAKPOINTS': 1,
        'DEBUG_TRACE_LEVEL': 3,
    })
def enable_qt_support(qt_support_mode):
    """Monkey-patch the selected Qt binding so its threads/event loop stay
    debuggable (mode string selects the binding)."""
    from _pydev_bundle import pydev_monkey_qt
    pydev_monkey_qt.patch_qt(qt_support_mode)
def usage(doExit=0):
    """Print command-line usage; exit the process when *doExit* is truthy."""
    for line in ('Usage:\n',
                 'pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n'):
        sys.stdout.write(line)
    if doExit:
        sys.exit(0)
def init_stdout_redirect():
    """Start mirroring sys.stdout into a buffer for the IDE (idempotent)."""
    if getattr(sys, 'stdoutBuf', None):
        return  # already redirected
    sys.stdoutBuf = pydevd_io.IOBuf()
    sys.stdout_original = sys.stdout  # keep the real stream for restoration
    sys.stdout = pydevd_io.IORedirector(sys.stdout, sys.stdoutBuf)  #@UndefinedVariable
def init_stderr_redirect():
    """Start mirroring sys.stderr into a buffer for the IDE (idempotent)."""
    if getattr(sys, 'stderrBuf', None):
        return  # already redirected
    sys.stderrBuf = pydevd_io.IOBuf()
    sys.stderr_original = sys.stderr  # keep the real stream for restoration
    sys.stderr = pydevd_io.IORedirector(sys.stderr, sys.stderrBuf)  #@UndefinedVariable
def has_data_to_redirect():
    """Return True if either redirected stream buffer holds unread data."""
    for buf_name in ('stdoutBuf', 'stderrBuf'):
        buf = getattr(sys, buf_name, None)
        if buf and not buf.empty():
            return True
    return False
#=======================================================================================================================
# settrace
#=======================================================================================================================
def settrace(
    host=None,
    stdoutToServer=False,
    stderrToServer=False,
    port=5678,
    suspend=True,
    trace_only_current_thread=False,
    overwrite_prev_trace=False,
    patch_multiprocessing=False,
):
    '''Sets the tracing function with the pydev debug function and initializes needed facilities.

    @param host: the user may specify another host, if the debug server is not in the same machine (default is the local
        host)
    @param stdoutToServer: when this is true, the stdout is passed to the debug server
    @param stderrToServer: when this is true, the stderr is passed to the debug server
        so that they are printed in its console and not in this process console.
    @param port: specifies which port to use for communicating with the server (note that the server must be started
        in the same port). @note: currently it's hard-coded at 5678 in the client
    @param suspend: whether a breakpoint should be emulated as soon as this function is called.
    @param trace_only_current_thread: determines if only the current thread will be traced or all current and future
        threads will also have the tracing enabled.
    @param overwrite_prev_trace: if True we'll reset the frame.f_trace of frames which are already being traced
    @param patch_multiprocessing: if True we'll patch the functions which create new processes so that launched
        processes are debugged.
    '''
    # Serialize concurrent settrace() calls; all real work happens in
    # _locked_settrace under the module-level lock.
    with _set_trace_lock:
        _locked_settrace(
            host,
            stdoutToServer,
            stderrToServer,
            port,
            suspend,
            trace_only_current_thread,
            overwrite_prev_trace,
            patch_multiprocessing,
        )
# Module-level lock that serializes concurrent settrace() calls.
_set_trace_lock = thread.allocate_lock()
def _locked_settrace(
    host,
    stdoutToServer,
    stderrToServer,
    port,
    suspend,
    trace_only_current_thread,
    overwrite_prev_trace,
    patch_multiprocessing,
):
    """Body of settrace(); must be called with _set_trace_lock held.

    On first call (not yet connected) it creates the PyDB instance, connects
    to the IDE, sets up output redirection, installs tracing (optionally for
    all threads) and starts the helper threads.  On subsequent calls it only
    refreshes tracing for the current context using the existing debugger.
    """
    if patch_multiprocessing:
        try:
            from _pydev_bundle import pydev_monkey
        except:
            pass
        else:
            pydev_monkey.patch_new_process_functions()
    global connected
    global bufferStdOutToServer
    global bufferStdErrToServer
    if not connected:
        pydevd_vm_type.setup_type()
        if SetupHolder.setup is None:
            setup = {
                'client': host,  # dispatch expects client to be set to the host address when server is False
                'server': False,
                'port': int(port),
                'multiprocess': patch_multiprocessing,
            }
            SetupHolder.setup = setup
        debugger = PyDB()
        debugger.connect(host, port)  # Note: connect can raise error.
        # Mark connected only if it actually succeeded.
        connected = True
        bufferStdOutToServer = stdoutToServer
        bufferStdErrToServer = stderrToServer
        if bufferStdOutToServer:
            init_stdout_redirect()
        if bufferStdErrToServer:
            init_stderr_redirect()
        patch_stdin(debugger)
        debugger.set_trace_for_frame_and_parents(get_frame(), False, overwrite_prev_trace=overwrite_prev_trace)
        CustomFramesContainer.custom_frames_lock.acquire()  # @UndefinedVariable
        try:
            for _frameId, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
                debugger.set_trace_for_frame_and_parents(custom_frame.frame, False)
        finally:
            CustomFramesContainer.custom_frames_lock.release()  # @UndefinedVariable
        t = threadingCurrentThread()
        try:
            additional_info = t.additional_info
        except AttributeError:
            # first time the debugger sees this thread: create its bookkeeping
            additional_info = PyDBAdditionalThreadInfo()
            t.additional_info = additional_info
        while not debugger.ready_to_run:
            time.sleep(0.1)  # busy wait until we receive run command
        global forked
        frame_eval_for_tracing = debugger.frame_eval_func
        if frame_eval_func is not None and not forked:
            # Disable frame evaluation for Remote Debug Server
            frame_eval_for_tracing = None
        # note that we do that through pydevd_tracing.SetTrace so that the tracing
        # is not warned to the user!
        pydevd_tracing.SetTrace(debugger.trace_dispatch, frame_eval_for_tracing, debugger.dummy_trace_dispatch)
        if not trace_only_current_thread:
            # Trace future threads?
            debugger.patch_threads()
            # As this is the first connection, also set tracing for any untraced threads
            debugger.set_tracing_for_untraced_contexts(ignore_frame=get_frame(), overwrite_prev_trace=overwrite_prev_trace)
        # Stop the tracing as the last thing before the actual shutdown for a clean exit.
        atexit.register(stoptrace)
        PyDBCommandThread(debugger).start()
        CheckOutputThread(debugger).start()
        #Suspend as the last thing after all tracing is in place.
        if suspend:
            debugger.set_suspend(t, CMD_THREAD_SUSPEND)
    else:
        # ok, we're already in debug mode, with all set, so, let's just set the break
        debugger = get_global_debugger()
        debugger.set_trace_for_frame_and_parents(get_frame(), False)
        t = threadingCurrentThread()
        try:
            additional_info = t.additional_info
        except AttributeError:
            additional_info = PyDBAdditionalThreadInfo()
            t.additional_info = additional_info
        pydevd_tracing.SetTrace(debugger.trace_dispatch, debugger.frame_eval_func, debugger.dummy_trace_dispatch)
        if not trace_only_current_thread:
            # Trace future threads?
            debugger.patch_threads()
        if suspend:
            debugger.set_suspend(t, CMD_THREAD_SUSPEND)
def stoptrace():
    """Tear down debugging: remove all trace functions, undo thread patches,
    neutralize still-traced frames, notify the IDE and kill helper threads.
    Safe to call when not connected (no-op); registered via atexit."""
    global connected
    if connected:
        pydevd_tracing.restore_sys_set_trace_func()
        sys.settrace(None)
        try:
            #not available in jython!
            threading.settrace(None)  # for all future threads
        except:
            pass
        from _pydev_bundle.pydev_monkey import undo_patch_thread_modules
        undo_patch_thread_modules()
        debugger = get_global_debugger()
        if debugger:
            # replace any per-frame trace hooks with a do-nothing function
            debugger.set_trace_for_frame_and_parents(
                get_frame(), also_add_to_passed_frame=True, overwrite_prev_trace=True, dispatch_func=lambda *args:None)
            debugger.exiting()
            kill_all_pydev_threads()
        connected = False
class Dispatcher(object):
    """Asks the already-running IDE which port a new debug process should use.

    connect() runs the reader inline (same thread) until the IDE answers;
    the received port is then available as self.port.
    """

    def __init__(self):
        self.port = None

    def connect(self, host, port):
        """Open a client socket to the IDE and block reading until dispatched."""
        self.host = host
        self.port = port
        self.client = start_client(self.host, self.port)
        self.reader = DispatchReader(self)
        # we run reader in the same thread so we don't want to lose tracing
        self.reader.pydev_do_not_trace = False
        self.reader.run()

    def close(self):
        """Best-effort shutdown of the reader (ignores any error)."""
        try:
            self.reader.do_kill_pydev_thread()
        except:
            pass
class DispatchReader(ReaderThread):
    """Reader used by Dispatcher: runs inline in the caller's thread and waits
    for the IDE to send the port assigned to a new debug process."""

    def __init__(self, dispatcher):
        self.dispatcher = dispatcher
        ReaderThread.__init__(self, self.dispatcher.client)

    def _on_run(self):
        # We run in the caller's thread, so make sure it is NOT treated as a
        # pydev daemon thread (it must keep being traced normally).
        dummy_thread = threading.currentThread()
        dummy_thread.is_pydev_daemon_thread = False
        return ReaderThread._on_run(self)

    def handle_except(self):
        # NOTE(review): delegates unchanged to the base class -- looks
        # removable; confirm it isn't kept as an explicit extension point.
        ReaderThread.handle_except(self)

    def process_command(self, cmd_id, seq, text):
        # Command id 99 carries the dispatched port; once received we can stop.
        if cmd_id == 99:
            self.dispatcher.port = int(text)
            self.killReceived = True
# Strategy dispatch() uses to obtain the port for a child/forked debug process:
DISPATCH_APPROACH_NEW_CONNECTION = 1 # Used by PyDev
DISPATCH_APPROACH_EXISTING_CONNECTION = 2 # Used by PyCharm
DISPATCH_APPROACH = DISPATCH_APPROACH_NEW_CONNECTION
def dispatch():
    """Return (host, port) a new debug process should connect to.

    With the existing-connection approach (PyCharm) the IDE is asked for a
    fresh port via a Dispatcher; otherwise the original setup values are
    reused as-is.
    """
    setup = SetupHolder.setup
    host = setup['client']
    port = setup['port']
    if DISPATCH_APPROACH == DISPATCH_APPROACH_EXISTING_CONNECTION:
        dispatcher = Dispatcher()
        try:
            dispatcher.connect(host, port)
            port = dispatcher.port
        finally:
            dispatcher.close()
    return host, port
def settrace_forked():
    '''
    When creating a fork from a process in the debugger, we need to reset the whole debugger environment!
    '''
    host, port = dispatch()
    from _pydevd_bundle import pydevd_tracing
    pydevd_tracing.restore_sys_set_trace_func()
    if port is not None:
        # reset the connection state so settrace() performs a full re-setup
        global connected
        connected = False
        global forked
        forked = True
        custom_frames_container_init()
        settrace(
            host,
            port=port,
            suspend=False,
            trace_only_current_thread=False,
            overwrite_prev_trace=True,
            patch_multiprocessing=True,
        )
#=======================================================================================================================
# SetupHolder
#=======================================================================================================================
class SetupHolder:
    # Process-wide storage for the parsed command-line setup dict
    # (filled in __main__, or lazily by _locked_settrace).
    setup = None
def apply_debugger_options(setup_options):
    """Apply optional debugger features requested on the command line.

    :type setup_options: dict[str, bool]
    :param setup_options: parsed options; missing keys fall back to the
        defaults below ('save-signatures' off, no qt support).
    """
    default_options = {'save-signatures': False, 'qt-support': ''}
    default_options.update(setup_options)
    setup_options = default_options
    # Use the canonical accessor for consistency with the rest of this module
    # (stoptrace/_locked_settrace); GetGlobalDebugger is the deprecated alias.
    debugger = get_global_debugger()
    if setup_options['save-signatures']:
        if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON:
            sys.stderr.write("Collecting run-time type information is not supported for Jython\n")
        else:
            # Only import it if we're going to use it!
            from _pydevd_bundle.pydevd_signature import SignatureFactory
            debugger.signature_factory = SignatureFactory()
    if setup_options['qt-support']:
        enable_qt_support(setup_options['qt-support'])
def patch_stdin(debugger):
    """Wrap sys.stdin in a DebugConsoleStdIn so the debugger is aware of
    console reads; the original stream is kept inside the wrapper."""
    from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn
    sys.stdin = DebugConsoleStdIn(debugger, sys.stdin)
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':

    # parse the command line. --file is our last argument that is required
    try:
        from _pydevd_bundle.pydevd_command_line_handling import process_command_line
        setup = process_command_line(sys.argv)
        SetupHolder.setup = setup
    except ValueError:
        traceback.print_exc()
        usage(1)

    if setup['print-in-debugger-startup']:
        try:
            pid = ' (pid: %s)' % os.getpid()
        except:
            pid = ''
        sys.stderr.write("pydev debugger: starting%s\n" % pid)

    fix_getpass.fix_getpass()

    pydev_log.debug("Executing file %s" % setup['file'])
    pydev_log.debug("arguments: %s"% str(sys.argv))

    pydevd_vm_type.setup_type(setup.get('vm_type', None))

    if os.getenv('PYCHARM_DEBUG') == 'True' or os.getenv('PYDEV_DEBUG') == 'True':
        set_debug(setup)

    # Debug-of-the-debugger knobs: fall back to current values when unset.
    DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', DebugInfoHolder.DEBUG_RECORD_SOCKET_READS)
    DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = setup.get('DEBUG_TRACE_BREAKPOINTS', DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS)
    DebugInfoHolder.DEBUG_TRACE_LEVEL = setup.get('DEBUG_TRACE_LEVEL', DebugInfoHolder.DEBUG_TRACE_LEVEL)

    port = setup['port']
    host = setup['client']
    f = setup['file']
    fix_app_engine_debug = False

    debugger = PyDB()

    # Multiprocess patching: make child processes started by the debuggee
    # also connect back to the IDE.
    try:
        from _pydev_bundle import pydev_monkey
    except:
        pass #Not usable on jython 2.1
    else:
        if setup['multiprocess']: # PyDev
            pydev_monkey.patch_new_process_functions()

        elif setup['multiproc']: # PyCharm
            pydev_log.debug("Started in multiproc mode\n")
            # Note: we're not inside method, so, no need for 'global'
            DISPATCH_APPROACH = DISPATCH_APPROACH_EXISTING_CONNECTION

            # Ask the IDE (through the dispatcher) for the port this new
            # process should debug-connect to.
            dispatcher = Dispatcher()
            try:
                dispatcher.connect(host, port)
                if dispatcher.port is not None:
                    port = dispatcher.port
                    pydev_log.debug("Received port %d\n" %port)
                    pydev_log.info("pydev debugger: process %d is connecting\n"% os.getpid())

                    try:
                        pydev_monkey.patch_new_process_functions()
                    except:
                        pydev_log.error("Error patching process functions\n")
                        traceback.print_exc()
                else:
                    pydev_log.error("pydev debugger: couldn't get port for new debug process\n")
            finally:
                dispatcher.close()
        else:
            pydev_log.info("pydev debugger: starting\n")

            try:
                pydev_monkey.patch_new_process_functions_with_warning()
            except:
                pydev_log.error("Error patching process functions\n")
                traceback.print_exc()

            # Only do this patching if we're not running with multiprocess turned on.
            if f.find('dev_appserver.py') != -1:
                if os.path.basename(f).startswith('dev_appserver.py'):
                    appserver_dir = os.path.dirname(f)
                    version_file = os.path.join(appserver_dir, 'VERSION')
                    if os.path.exists(version_file):
                        try:
                            stream = open(version_file, 'r')
                            try:
                                # Parse the App Engine SDK VERSION file; the
                                # special startup handling is needed from
                                # release 1.7 onwards.
                                for line in stream.read().splitlines():
                                    line = line.strip()
                                    if line.startswith('release:'):
                                        line = line[8:].strip()
                                        version = line.replace('"', '')
                                        version = version.split('.')
                                        if int(version[0]) > 1:
                                            fix_app_engine_debug = True

                                        elif int(version[0]) == 1:
                                            if int(version[1]) >= 7:
                                                # Only fix from 1.7 onwards
                                                fix_app_engine_debug = True
                                        break
                            finally:
                                stream.close()
                        except:
                            traceback.print_exc()

    try:
        # In the default run (i.e.: run directly on debug mode), we try to patch stackless as soon as possible
        # on a run where we have a remote debug, we may have to be more careful because patching stackless means
        # that if the user already had a stackless.set_schedule_callback installed, he'd loose it and would need
        # to call it again (because stackless provides no way of getting the last function which was registered
        # in set_schedule_callback).
        #
        # So, ideally, if there's an application using stackless and the application wants to use the remote debugger
        # and benefit from stackless debugging, the application itself must call:
        #
        # import pydevd_stackless
        # pydevd_stackless.patch_stackless()
        #
        # itself to be able to benefit from seeing the tasklets created before the remote debugger is attached.
        from _pydevd_bundle import pydevd_stackless
        pydevd_stackless.patch_stackless()
    except:
        # It's ok not having stackless there...
        try:
            sys.exc_clear() # the exception information should be cleaned in Python 2
        except:
            pass

    is_module = setup['module']
    patch_stdin(debugger)

    if fix_app_engine_debug:
        # Google App Engine: inject our startup script into dev_appserver's
        # own command line instead of connecting directly.
        sys.stderr.write("pydev debugger: google app engine integration enabled\n")
        curr_dir = os.path.dirname(__file__)
        app_engine_startup_file = os.path.join(curr_dir, 'pydev_app_engine_debug_startup.py')

        sys.argv.insert(1, '--python_startup_script=' + app_engine_startup_file)
        import json
        setup['pydevd'] = __file__
        sys.argv.insert(2, '--python_startup_args=%s' % json.dumps(setup),)
        sys.argv.insert(3, '--automatic_restart=no')
        sys.argv.insert(4, '--max_module_instances=1')

        # Run the dev_appserver
        debugger.run(setup['file'], None, None, is_module, set_trace=False)
    else:
        if setup['save-threading']:
            debugger.thread_analyser = ThreadingLogger()
        if setup['save-asyncio']:
            if IS_PY34_OR_GREATER:
                debugger.asyncio_analyser = AsyncioLogger()

        apply_debugger_options(setup)

        try:
            debugger.connect(host, port)
        except:
            sys.stderr.write("Could not connect to %s: %s\n" % (host, port))
            traceback.print_exc()
            sys.exit(1)

        connected = True  # Mark that we're connected when started from inside ide.

        globals = debugger.run(setup['file'], None, None, is_module)

        if setup['cmd-line']:
            debugger.wait_for_commands(globals)
| {
"content_hash": "48f5a058bc5e3ecf962ff1088e9cd2ba",
"timestamp": "",
"source": "github",
"line_count": 1645,
"max_line_length": 157,
"avg_line_length": 41.05775075987842,
"alnum_prop": 0.5842611785608528,
"repo_name": "apixandru/intellij-community",
"id": "6cec167b3d5b49ff37a1dab60e768b6881fb06b8",
"size": "67540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/helpers/pydev/pydevd.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "20665"
},
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "Batchfile",
"bytes": "60827"
},
{
"name": "C",
"bytes": "213044"
},
{
"name": "C#",
"bytes": "1264"
},
{
"name": "C++",
"bytes": "181491"
},
{
"name": "CMake",
"bytes": "1675"
},
{
"name": "CSS",
"bytes": "201445"
},
{
"name": "CoffeeScript",
"bytes": "1759"
},
{
"name": "DTrace",
"bytes": "578"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Groovy",
"bytes": "3329190"
},
{
"name": "HLSL",
"bytes": "57"
},
{
"name": "HTML",
"bytes": "1906377"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "168701434"
},
{
"name": "JavaScript",
"bytes": "570147"
},
{
"name": "Jupyter Notebook",
"bytes": "93222"
},
{
"name": "Kotlin",
"bytes": "5190572"
},
{
"name": "Lex",
"bytes": "147513"
},
{
"name": "Makefile",
"bytes": "2352"
},
{
"name": "NSIS",
"bytes": "51691"
},
{
"name": "Objective-C",
"bytes": "27309"
},
{
"name": "PHP",
"bytes": "1549"
},
{
"name": "Perl",
"bytes": "936"
},
{
"name": "Perl 6",
"bytes": "26"
},
{
"name": "Protocol Buffer",
"bytes": "6680"
},
{
"name": "Python",
"bytes": "25896118"
},
{
"name": "Roff",
"bytes": "37534"
},
{
"name": "Ruby",
"bytes": "1217"
},
{
"name": "Shell",
"bytes": "64132"
},
{
"name": "Smalltalk",
"bytes": "338"
},
{
"name": "TeX",
"bytes": "25473"
},
{
"name": "Thrift",
"bytes": "1846"
},
{
"name": "TypeScript",
"bytes": "9469"
},
{
"name": "Visual Basic",
"bytes": "77"
},
{
"name": "XSLT",
"bytes": "113040"
}
],
"symlink_target": ""
} |
import io
import os
import sys
from oslotest import base
from mistralclient import shell
class BaseShellTests(base.BaseTestCase):
    """Common helper for shell tests: runs the mistral CLI in-process."""

    def shell(self, argstr):
        """Run ``mistral <argstr>`` with captured stdio and an emptied
        environment, returning the (stdout, stderr) text produced.

        A SystemExit raised by the CLI is asserted to carry exit code 0;
        the original streams and environment are always restored.
        """
        saved_streams = (sys.stdout, sys.stderr)
        saved_env, os.environ = os.environ, {}
        try:
            sys.stdout = io.StringIO()
            sys.stderr = io.StringIO()
            shell.MistralShell().run(argstr.split())
        except SystemExit:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            self.assertEqual(0, exc_value.code)
        finally:
            stdout = sys.stdout.getvalue()
            stderr = sys.stderr.getvalue()
            sys.stdout.close()
            sys.stderr.close()
            sys.stdout, sys.stderr = saved_streams
            os.environ = saved_env
        return stdout, stderr
| {
"content_hash": "1330a1f7c4de6dd0615ef23ead269973",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 63,
"avg_line_length": 26.393939393939394,
"alnum_prop": 0.56601607347876,
"repo_name": "openstack/python-mistralclient",
"id": "f15dd8e891792c3be0406ba699186ea95904a299",
"size": "1492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistralclient/tests/unit/base_shell_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "499835"
},
{
"name": "Shell",
"bytes": "4068"
}
],
"symlink_target": ""
} |
"""
Copyright (2017) Chris Scuderi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import socket
import json
import os
SOCKFILE = "/tmp/alarm_socket"
class ServerSock(object):
    """Context manager owning the alarm daemon's listening UNIX socket.

    Entering binds SOCKFILE and starts listening; exiting closes the
    socket and removes the socket file from disk.
    """

    def __init__(self):
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    def __enter__(self):
        listener = self.sock
        listener.bind(SOCKFILE)
        listener.listen(5)
        return listener

    def __exit__(self, exc_type, exc_value, traceback):
        self.sock.close()
        os.remove(SOCKFILE)
def send_client_msg(request):
    """Send *request* to the alarm daemon over its UNIX socket.

    Returns a ``(response, error)`` tuple: on success ``error`` is None
    and ``response`` holds the decoded JSON reply; on socket failure
    ``response`` is None and ``error`` is a human-readable message.
    """
    serr = None
    rsp = None
    try:
        s = start_socket_client()
        try:
            send(s, request)
            rsp = recv(s)
        finally:
            # Close the socket even when send()/recv() raises, so a failed
            # exchange does not leak the file descriptor (the original only
            # closed on the success path).
            s.close()
    except socket.error as exc:
        serr = 'Exception: %s\nUnable to open socket, is alarmd running?' % exc
    return rsp, serr
def start_socket_client():
    # Open a client connection to the alarm daemon's UNIX socket
    # (SOCKFILE). SO_REUSEADDR mirrors the server-side setup; it has no
    # practical effect on a connecting socket but is kept for symmetry.
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.connect(SOCKFILE)
    return s
def send(sock, obj):
    """Serialize *obj* as JSON and write it to *sock*, prefixed with the
    zero-padded 5-digit ASCII byte length that recv() expects."""
    payload = json.dumps(obj)
    sock.sendall(('%05d%s' % (len(payload), payload)).encode())
def recv(sock):
    """Receive one length-prefixed JSON message from *sock*.

    The wire format (see send()) is a zero-padded 5-digit ASCII length
    followed by the JSON payload. Both the header and the payload are read
    in a loop, since a single recv() call may legally return fewer bytes
    than requested (the original read the header with one recv(5), which
    breaks framing on a short read).

    Raises socket.error if the peer closes before a full header arrives,
    so callers catching socket.error handle a dropped connection cleanly.
    """
    header = b''
    while len(header) < 5:
        chunk = sock.recv(5 - len(header))
        if not chunk:
            raise socket.error('connection closed while reading length header')
        header += chunk
    msg_len = int(header)
    msg = ''
    while len(msg) < msg_len:
        chunk = sock.recv(msg_len - len(msg)).decode()
        assert chunk != ''
        msg = msg + chunk
    return json.loads(msg)
| {
"content_hash": "cd7dc3064dbad591f98bf41c88cf02d6",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 79,
"avg_line_length": 25.4,
"alnum_prop": 0.6519685039370079,
"repo_name": "scudre/alarm-central-station-receiver",
"id": "22d50ec14692965883cc409896e5c1c978aefbae",
"size": "1905",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alarm_central_station_receiver/json_ipc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6250"
},
{
"name": "Makefile",
"bytes": "227"
},
{
"name": "Python",
"bytes": "58477"
}
],
"symlink_target": ""
} |
from functools import lru_cache
from typing import Optional
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models import QuerySet
from rest_framework.compat import coreapi, coreschema
from rest_framework.filters import BaseFilterBackend
from rest_framework.request import Request
from rest_framework.viewsets import GenericViewSet
from rest_flex_fields import (
FIELDS_PARAM,
EXPAND_PARAM,
OMIT_PARAM,
WILDCARD_VALUES
)
# Comma-joined wildcard tokens, appended to the documentation examples below.
WILDCARD_VALUES_JOINED = ",".join(WILDCARD_VALUES)
from rest_flex_fields.serializers import (
FlexFieldsModelSerializer,
FlexFieldsSerializerMixin,
)
class FlexFieldsDocsFilterBackend(BaseFilterBackend):
    """
    A dummy filter backend only for schema/documentation purposes.
    """

    def filter_queryset(self, request, queryset, view):
        # Documentation-only backend: never actually filters anything.
        return queryset

    @staticmethod
    @lru_cache()
    def _get_field(field_name: str, model: models.Model) -> Optional[models.Field]:
        """Return the model field named *field_name*, or None if absent."""
        try:
            # noinspection PyProtectedMember
            return model._meta.get_field(field_name)
        except FieldDoesNotExist:
            return None

    @staticmethod
    def _get_expandable_fields(serializer_class: FlexFieldsModelSerializer) -> list:
        """Collect dotted names of all (recursively) expandable fields
        declared on *serializer_class* and its nested flex serializers."""
        expandable_fields = list(getattr(serializer_class.Meta, 'expandable_fields').items())
        expand_list = []
        # Walk the declarations as a stack, prefixing nested entries with
        # their parent key ("parent.child").
        while expandable_fields:
            key, cls = expandable_fields.pop()
            # Entries may be (serializer, kwargs) tuples; take the serializer.
            cls = cls[0] if hasattr(cls, '__iter__') else cls
            expand_list.append(key)
            if hasattr(cls, "Meta") and issubclass(cls, FlexFieldsSerializerMixin):
                next_layer = getattr(cls.Meta, 'expandable_fields')
                expandable_fields.extend([(f"{key}.{k}", cls) for k, cls in list(next_layer.items())])
        return expand_list

    @staticmethod
    def _get_fields(serializer_class):
        # Comma-joined declared field names, used only as example text.
        fields = getattr(serializer_class.Meta, "fields", [])
        return ",".join(fields)

    def get_schema_fields(self, view):
        # Legacy coreapi schema generation (DRF < 3.10 style).
        assert (
            coreapi is not None
        ), "coreapi must be installed to use `get_schema_fields()`"
        assert (
            coreschema is not None
        ), "coreschema must be installed to use `get_schema_fields()`"

        serializer_class = view.get_serializer_class()
        if not issubclass(serializer_class, FlexFieldsSerializerMixin):
            return []

        fields = self._get_fields(serializer_class)
        expandable_fields = self._get_expandable_fields(serializer_class)

        return [
            coreapi.Field(
                name=FIELDS_PARAM,
                required=False,
                location="query",
                schema=coreschema.String(
                    title="Selected fields",
                    description="Specify required fields by comma",
                ),
                example=(fields or "field1,field2,nested.field") + "," + WILDCARD_VALUES_JOINED,
            ),
            coreapi.Field(
                name=OMIT_PARAM,
                required=False,
                location="query",
                schema=coreschema.String(
                    title="Omitted fields",
                    description="Specify omitted fields by comma",
                ),
                example=(fields or "field1,field2,nested.field") + "," + WILDCARD_VALUES_JOINED,
            ),
            coreapi.Field(
                name=EXPAND_PARAM,
                required=False,
                location="query",
                schema=coreschema.String(
                    title="Expanded fields",
                    description="Specify expanded fields by comma",
                ),
                example=(expandable_fields or "field1,field2,nested.field") + "," + WILDCARD_VALUES_JOINED,
            ),
        ]

    def get_schema_operation_parameters(self, view):
        # OpenAPI schema generation (DRF >= 3.10 style).
        serializer_class = view.get_serializer_class()
        if not issubclass(serializer_class, FlexFieldsSerializerMixin):
            return []

        fields = self._get_fields(serializer_class)
        expandable_fields = self._get_expandable_fields(serializer_class)
        # Wildcards are valid values of the expand parameter's enum.
        expandable_fields.extend(WILDCARD_VALUES)

        parameters = [
            {
                "name": FIELDS_PARAM,
                "required": False,
                "in": "query",
                "description": "Specify required fields by comma",
                "schema": {
                    "title": "Selected fields",
                    "type": "string",
                },
                "example": (fields or "field1,field2,nested.field") + "," + WILDCARD_VALUES_JOINED,
            },
            {
                "name": OMIT_PARAM,
                "required": False,
                "in": "query",
                "description": "Specify omitted fields by comma",
                "schema": {
                    "title": "Omitted fields",
                    "type": "string",
                },
                "example": (fields or "field1,field2,nested.field") + "," + WILDCARD_VALUES_JOINED,
            },
            {
                "name": EXPAND_PARAM,
                "required": False,
                "in": "query",
                "description": "Select fields to expand",
                "style": "form",
                "explode": False,
                "schema": {
                    "title": "Expanded fields",
                    "type": "array",
                    "items": {
                        "type": "string",
                        "enum": expandable_fields
                    }
                },
            },
        ]
        return parameters
class FlexFieldsFilterBackend(FlexFieldsDocsFilterBackend):
    """Filter backend that narrows the queryset (.only/.select_related/
    .prefetch_related) to match the flex-fields requested for a GET."""

    def filter_queryset(
        self, request: Request, queryset: QuerySet, view: GenericViewSet
    ):
        # Only optimize flex-fields serializers, and only for reads.
        if (
            not issubclass(view.get_serializer_class(), FlexFieldsSerializerMixin)
            or request.method != "GET"
        ):
            return queryset

        # Per-view opt-outs for the two optimizations below.
        auto_remove_fields_from_query = getattr(
            view, "auto_remove_fields_from_query", True
        )
        auto_select_related_on_query = getattr(
            view, "auto_select_related_on_query", True
        )
        required_query_fields = list(getattr(view, "required_query_fields", []))

        serializer = view.get_serializer(  # type: FlexFieldsSerializerMixin
            context=view.get_serializer_context()
        )
        # Resolve the requested fields/omits/expands up front so
        # serializer.fields reflects what will actually be rendered.
        serializer.apply_flex_fields(
            serializer.fields, serializer._flex_options_rep_only
        )
        serializer._flex_fields_rep_applied = True

        model_fields = []
        nested_model_fields = []
        for field in serializer.fields.values():
            model_field = self._get_field(field.source, queryset.model)
            if model_field:
                model_fields.append(model_field)
                # Expanded fields and to-many / non-concrete relations are
                # handled via select_related/prefetch_related below.
                if field.field_name in serializer.expanded_fields or \
                        (model_field.is_relation and not model_field.many_to_one) or \
                        (model_field.is_relation and model_field.many_to_one and not model_field.concrete):  # Include GenericForeignKey
                    nested_model_fields.append(model_field)

        if auto_remove_fields_from_query:
            # Load only concrete columns that the serializer will render.
            queryset = queryset.only(
                *(
                    required_query_fields
                    + [
                        model_field.name
                        for model_field in model_fields if (
                            not model_field.is_relation or
                            model_field.many_to_one and model_field.concrete)
                    ]
                )
            )

        if auto_select_related_on_query and nested_model_fields:
            # Concrete forward FKs are joined in the same query...
            queryset = queryset.select_related(
                *(
                    model_field.name
                    for model_field in nested_model_fields if (
                        model_field.is_relation and
                        model_field.many_to_one and
                        model_field.concrete)  # Exclude GenericForeignKey
                )
            )

            # ...while to-many and non-concrete relations need a second query.
            queryset = queryset.prefetch_related(*(
                model_field.name for model_field in nested_model_fields if
                (model_field.is_relation and not model_field.many_to_one) or
                (model_field.is_relation and model_field.many_to_one and not model_field.concrete)  # Include GenericForeignKey)
            )
            )

        return queryset

    # NOTE(review): identical to the base-class implementation; kept for
    # backward compatibility (each class gets its own lru_cache).
    @staticmethod
    @lru_cache()
    def _get_field(field_name: str, model: models.Model) -> Optional[models.Field]:
        try:
            # noinspection PyProtectedMember
            return model._meta.get_field(field_name)
        except FieldDoesNotExist:
            return None
| {
"content_hash": "ececd4e5e2a195d07304956fd0b4fef4",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 136,
"avg_line_length": 36.221311475409834,
"alnum_prop": 0.5435618918307309,
"repo_name": "rsinger86/drf-flex-fields",
"id": "cee952a11075242fd77d26cf0c160f9948be9aa6",
"size": "8838",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_flex_fields/filter_backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "59871"
}
],
"symlink_target": ""
} |
'''
File: budget_views.py
Author: Zachary King
Implements the views/handlers for Budget-related requests
'''
from django.shortcuts import render, redirect, reverse
from django.contrib.auth.decorators import login_required
import decimal
from datetime import date
from ..models import Budget, BudgetCategory, Wallet, Transaction
@login_required(login_url='/pynny/login')
def renew_budgets(request):
    """Copy each of last month's Budgets into the current month.

    Every Budget the user had last month (that hasn't already been
    renewed in this pass) is cloned, dated today, and given a zero
    balance. Renders the regular budgets view afterwards.
    """
    if request.user.is_authenticated():
        renewed = set()
        today = date.today()
        # First day of the previous month. Day 1 avoids a ValueError when
        # today.day does not exist in the previous month (e.g. May 31 ->
        # April 31), and the year must roll back in January (the original
        # pointed at December of the *current* year). Only the '%Y-%m'
        # portion is consumed by the filter below.
        if today.month > 1:
            last_month = date(today.year, today.month - 1, 1)
        else:
            last_month = date(today.year - 1, 12, 1)
        last_month_budgets = Budget.objects.filter(
            user=request.user,
            month__contains=date.strftime(last_month, '%Y-%m'))
        for budget in last_month_budgets:
            if budget.budget_id not in renewed:
                # Clearing pk makes save() INSERT a new row (a clone).
                renewed_budget = Budget.objects.get(pk=budget.pk)
                renewed_budget.pk = None
                renewed_budget.month = date.today()
                renewed_budget.balance = decimal.Decimal('0')
                renewed_budget.save()
                renewed.add(budget.budget_id)
    return budgets(request)
@login_required(login_url='/pynny/login')
def budgets(request):
    '''Display Budgets for a user'''
    # GET = display user's budgets
    if request.method == 'GET':
        data = {}
        # Get the wallets for this user
        data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
        today = date.today()
        # NOTE(review): this raises ValueError when the previous month is
        # shorter than today.day (e.g. May 31 -> April 31), and in January
        # it points at December of the *current* year rather than the
        # previous one — confirm and fix the month-rollback logic.
        last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
        data['last_month_budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(last_month, '%Y-%m'))
        data['categories'] = BudgetCategory.objects.filter(user=request.user)
        data['wallets'] = Wallet.objects.filter(user=request.user)
        return render(request, 'pynny/budgets/budgets.html', context=data)
    # POST = create a new Budget
    elif request.method == 'POST':
        # Get the form data from the request
        _category = int(request.POST['category'])
        _goal = float(request.POST['goal'])
        _start_balance = decimal.Decimal(0.0)
        _wallet = int(request.POST['wallet'])
        category = BudgetCategory.objects.get(id=_category)
        wallet = Wallet.objects.get(id=_wallet)
        # Calculate the starting balance
        for transaction in Transaction.objects.filter(category=category):
            _start_balance += transaction.amount
        # Check if the budget already exists
        if Budget.objects.filter(user=request.user, category=category, wallet=wallet, month__contains=date.strftime(date.today(), '%Y-%m')):
            data = {'alerts': {'errors': ['<strong>Oops!</strong> A Budget already exists for that Wallet and Category, for this month']}}
            today = date.today()
            # NOTE(review): same month-rollback issue as in the GET branch.
            last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
            data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                               month__contains=date.strftime(last_month, '%Y-%m'))
            data['budgets'] = Budget.objects.filter(user=request.user,
                                                    month__contains=date.strftime(date.today(), '%Y-%m'))
            return render(request, 'pynny/budgets/new_budget.html', context=data)
        # Create the new Budget
        try:
            latest_budget = Budget.objects.latest('budget_id')
        except Budget.DoesNotExist:
            latest_budget = None
        # Hand-rolled auto-increment for budget_id; racy under concurrency.
        new_id = latest_budget.budget_id + 1 if latest_budget is not None else 0
        Budget(category=category, wallet=wallet, goal=_goal, balance=_start_balance, user=request.user, budget_id=new_id).save()
        data = {'alerts': {'success': ['<strong>Done!</strong> New Budget created successfully!']}}
        data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
        today = date.today()
        last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
        data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                           month__contains=date.strftime(last_month, '%Y-%m'))
        data['categories'] = BudgetCategory.objects.filter(user=request.user)
        data['wallets'] = Wallet.objects.filter(user=request.user)
        return render(request, 'pynny/budgets/budgets.html', context=data, status=201)
@login_required(login_url='/pynny/login')
def new_budget(request):
    '''Render the "create a new Budget" form.

    Redirects the user to the new-category / new-wallet form instead when
    they have no Categories or no Wallets yet, since both are required
    before a Budget can be created.
    '''
    data = {}
    data['categories'] = BudgetCategory.objects.filter(user=request.user)
    data['wallets'] = Wallet.objects.filter(user=request.user)
    today = date.today()
    # First day of the previous month. Day 1 avoids a ValueError when the
    # previous month is shorter than the current day (e.g. May 31), and
    # January must roll back to December of the *previous* year. Only the
    # '%Y-%m' portion is consumed by the filter below.
    if today.month > 1:
        last_month = date(today.year, today.month - 1, 1)
    else:
        last_month = date(today.year - 1, 12, 1)
    data['last_month_budgets'] = Budget.objects.filter(
        user=request.user,
        month__contains=date.strftime(last_month, '%Y-%m'))
    # Check if they have any categories or wallets first
    if not data['categories']:
        data = {
            'alerts': {
                'errors': [
                    '<strong>Oy!</strong> You don\'t have any Categories yet! You need to create a Category before you can create a Budget!'
                ]
            },
        }
        return render(request, 'pynny/categories/new_category.html', context=data)
    if not data['wallets']:
        data = {
            'alerts': {
                'errors': [
                    '<strong>Oy!</strong> You don\'t have any Wallets yet! You need to create a Wallet before you can create a Budget!'
                ]
            },
        }
        return render(request, 'pynny/wallets/new_wallet.html', context=data)
    # They have a wallet and category so continue
    return render(request, 'pynny/budgets/new_budget.html', context=data)
@login_required(login_url='/pynny/login')
def one_budget(request, budget_id):
    '''View a specific Budget'''
    data = {}
    # Check if the budget is owned by the logged in user
    try:
        budget = Budget.objects.get(id=budget_id)
    except Budget.DoesNotExist:
        # DNE
        data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
        data['categories'] = BudgetCategory.objects.filter(user=request.user)
        today = date.today()
        # NOTE(review): date(year, month-1, day) raises ValueError when the
        # previous month is shorter than today.day, and in January it points
        # at December of the *current* year — same issue on every
        # last_month computation in this function; confirm and fix.
        last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
        data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                           month__contains=date.strftime(last_month, '%Y-%m'))
        data['wallets'] = Wallet.objects.filter(user=request.user)
        data['alerts'] = {'errors': ['<strong>Oh snap!</strong> That Budget does not exist.']}
        return render(request, 'pynny/budgets/budgets.html', context=data, status=404)
    # Ownership check: users may only see their own Budgets.
    if budget.user != request.user:
        data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
        today = date.today()
        last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
        data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                           month__contains=date.strftime(last_month, '%Y-%m'))
        data['categories'] = BudgetCategory.objects.filter(user=request.user)
        data['wallets'] = Wallet.objects.filter(user=request.user)
        data['alerts'] = {'errors': ['<strong>Oh snap!</strong> That Budget isn\'t yours! You don\'t have permission to view it']}
        return render(request, 'pynny/budgets/budgets.html', context=data, status=403)
    if request.method == "POST":
        # What kind of action?
        action = request.POST['action'].lower()
        if action == 'delete':
            # Delete the Budget
            budget.delete()
            # And return them to the budgets page
            data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
            data['categories'] = BudgetCategory.objects.filter(user=request.user)
            today = date.today()
            last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
            data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                               month__contains=date.strftime(last_month, '%Y-%m'))
            data['wallets'] = Wallet.objects.filter(user=request.user)
            data['alerts'] = {'success': ['<strong>Done!</strong> Budget was deleted successfully']}
            return render(request, 'pynny/budgets/budgets.html', context=data)
        elif action == 'edit':
            # Render the edit_budget view
            data['budget'] = budget
            today = date.today()
            last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
            data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                               month__contains=date.strftime(last_month, '%Y-%m'))
            data['categories'] = BudgetCategory.objects.filter(user=request.user)
            data['wallets'] = Wallet.objects.filter(user=request.user)
            return render(request, 'pynny/budgets/edit_budget.html', context=data)
        elif action == 'edit_complete':
            # Get the form data from the request
            _category = int(request.POST['category'])
            _wallet = int(request.POST['wallet'])
            _goal = float(request.POST['goal'])
            category = BudgetCategory.objects.get(id=_category)
            wallet = Wallet.objects.get(id=_wallet)
            # Edit the Budget
            budget.category = category
            budget.wallet = wallet
            budget.goal = _goal
            budget.save()
            data = {'alerts': {'success': ['<strong>Done!</strong> Budget updated successfully!']}}
            today = date.today()
            last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
            data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                               month__contains=date.strftime(last_month, '%Y-%m'))
            data['categories'] = BudgetCategory.objects.filter(user=request.user)
            data['wallets'] = Wallet.objects.filter(user=request.user)
            data['budgets'] = Budget.objects.filter(user=request.user, month__contains=date.strftime(date.today(), '%Y-%m'))
            return render(request, 'pynny/budgets/budgets.html', context=data)
    elif request.method == 'GET':
        # Show the specific Budget data
        data['budget'] = budget
        today = date.today()
        last_month = date(today.year, today.month - 1 if today.month > 1 else 12, today.day)
        data['last_month_budgets'] = Budget.objects.filter(user=request.user,
                                                           month__contains=date.strftime(last_month, '%Y-%m'))
        data['categories'] = BudgetCategory.objects.filter(user=request.user)
        data['wallets'] = Wallet.objects.filter(user=request.user)
        data['transactions'] = Transaction.objects.filter(category=budget.category).order_by('-created_time')
        return render(request, 'pynny/budgets/one_budget.html', context=data)
| {
"content_hash": "8569d74a75d1dcc335604e7c6f265613",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 140,
"avg_line_length": 52.18942731277533,
"alnum_prop": 0.5992234320925128,
"repo_name": "zcking/Pynny",
"id": "6ca6957d5a42ed6d598d2d04ddf245d7861abe7b",
"size": "11870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysite/pynny/views/budget_views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "164000"
},
{
"name": "HTML",
"bytes": "127545"
},
{
"name": "JavaScript",
"bytes": "203"
},
{
"name": "Python",
"bytes": "105317"
}
],
"symlink_target": ""
} |
import zmq
import json

# ZeroMQ REQ client: connects to a local REP server on port 5555 and
# performs a fixed number of request/reply round-trips.
context = zmq.Context()

# Socket to talk to server
print("开始连接服务器")
socket = context.socket(zmq.REQ)
socket.connect ("tcp://localhost:5555")

# Do 10 requests, waiting each time for a response
for request in range (1,10):
    print("Sending request ", request,"...")
    content = '信息的定义是什么'
    # NOTE(review): content is JSON-encoded here and send_json() encodes it
    # again, so the payload is double-encoded — presumably the server
    # mirrors this; confirm the protocol before changing it.
    data1 = json.dumps(content)
    socket.send_json(data1)

    # Get the reply.
    message = socket.recv_json()
    # print(type(message))
    # print(message)
    # NOTE(review): decodes the already-decoded reply a second time;
    # data2 is currently unused.
    data2 = json.loads(message)
print('Received reply ', message) | {
"content_hash": "d7dee593b73528a4a70c589e44fde38b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 50,
"avg_line_length": 27.3,
"alnum_prop": 0.673992673992674,
"repo_name": "donttal/test_jieba",
"id": "c0d60a686c41672999c2e322b06ac13b4c3236ab",
"size": "576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "try_zmq/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6254"
}
],
"symlink_target": ""
} |
"""
@author: Sebi
File: test_bfread.py
Date: 08.04.2016
Version. 0.6
"""
import numpy as np
import os
import bftools as bf
import pytest
import datetime
import zencom as zc
def setbfpath():
    # Point bftools at the specific bioformats_package.jar version used
    # for these tests.
    # specify bioformats_package.jar to use if required
    bfpath = r'c:\BioFormats_Package\5.1.9\bioformats_package.jar'
    bf.set_bfpath(bfpath)
def get_filename():
    """Return the absolute path of the CZI test data set."""
    czifile = r'e:\Data\BioFormats_CZI_Test\20160429\20160429_BF_CZI.czi'
    return czifile
def get_dims():
    """Expected [Series, T, Z, C, Y, X] dimensions of the small test CZI.

    The full experiment variant would be [16, 3, 5, 2, 640, 640]; the
    small variant used here has one time point and one z-plane.
    """
    return [16, 1, 1, 2, 640, 640]
def get_chwlex():
    """Expected per-channel excitation wavelengths [nm]."""
    return [493, 553]
def get_chwlem():
    """Expected per-channel emission wavelengths [nm]."""
    return [517, 568]
def get_dyes():
    """Expected per-channel dye names (unset in the test data)."""
    return ['None', 'None']
def get_chnames():
    """Expected channel names of the test CZI."""
    return ['AF488', 'AF555']
def create_testCZI():
    """Return the path of the CZI test file.

    The original ZEN acquisition (running a *.czexp experiment via
    zencom.runExperiment and saving into a date-stamped folder) is
    currently disabled; a pre-recorded CZI is returned instead.
    """
    # for testing
    czifilename_complete = r'c:\Users\M1SRH\Documents\Testdata_Zeiss\Pyramid_Test\20160502\20160502_BF_CZI.czi'
    return czifilename_complete
def test_metainfo():
    """End-to-end check of bftools metadata reading against the CZI test file.

    Requires the bioformats_package.jar configured by setbfpath() and the
    test acquisition returned by create_testCZI() to be present locally.
    """
    # set the correct path to the bioformats_package.jar
    setbfpath()
    # run the test experiment to create the CZI test data set
    czifilename_complete = create_testCZI()
    # get image meta-information
    MetaInfo = bf.get_relevant_metainfo_wrapper(czifilename_complete)
    img6d = bf.get_image6d(czifilename_complete, MetaInfo['Sizes'])
    fulldims = get_dims()
    print(MetaInfo['Sizes'])
    # check Sizes
    assert MetaInfo['Sizes'] == fulldims
    # check Dimension Order
    assert MetaInfo['DimOrder BF'] == 'XYCZT'
    # check Dimension Order CZI Style
    assert MetaInfo['OrderCZI'] == 'BSTCZYX0'
    # check Image Dimensions
    assert MetaInfo['TotalSeries'] == fulldims[0]
    assert MetaInfo['SizeT'] == fulldims[1]
    assert MetaInfo['SizeZ'] == fulldims[2]
    assert MetaInfo['SizeC'] == fulldims[3]
    assert MetaInfo['SizeY'] == fulldims[4]
    assert MetaInfo['SizeX'] == fulldims[5]
    # check Scaling (microns per pixel / per z-step)
    assert MetaInfo['XScale'] == 0.2
    assert MetaInfo['YScale'] == 0.2
    assert MetaInfo['ZScale'] == 0.5
    # check Objective Data
    assert MetaInfo['ObjMag'] == 5.0
    assert MetaInfo['NA'] == 0.35
    assert MetaInfo['Immersion'] == 'Air'
    # check objective Name
    assert MetaInfo['ObjModel'] == 'Plan-Apochromat 5x/0.35'
    # check properties of all channels
    for ch in range(0, MetaInfo['SizeC']):
        # check Excitation and Emission Wavelengths
        assert MetaInfo['WLEx'][ch] == get_chwlex()[ch]
        assert MetaInfo['WLEm'][ch] == get_chwlem()[ch]
        # check Dye Names
        assert MetaInfo['Dyes'][ch] == get_dyes()[ch]
        assert MetaInfo['Channels'][ch] == get_chnames()[ch]
    # check Channel Description (expected to be empty for this data set)
    assert MetaInfo['ChDesc'] == []
    # check Numpy Array Shape
    for i in range(0, len(fulldims)):
        assert np.shape(img6d)[i] == fulldims[i]
    # test timeseries
    seriesID = 0
    zplane = 0
    dims = [1, 2, 640, 640]
    # get the actual time series from the data set
    tseries, dimorder_out = bf.bftools.get_timeseries(czifilename_complete, MetaInfo['Sizes'], seriesID, zplane=zplane)
    for i in range(0, len(dims)):
        assert np.shape(tseries)[i] == dims[i]
    # check resulting dimension order
    assert dimorder_out == 'TCXY'
    # test zstack
    seriesID = 0
    timepoint = 0
    dims = [5, 2, 640, 640]
    # get the actual z-stack from the data set
    zstack, dimorder_out = bf.bftools.get_zstack(czifilename_complete, MetaInfo['Sizes'], seriesID, timepoint=timepoint)
    print(zstack.shape)
    # get plane with the brightest pixel
    zplane = (zstack == zstack.max()).nonzero()[0][0]
    # check found zplane (brightest plane expected to be the first one)
    assert zplane+1 == 1
    for i in range(0, len(dims)):
        assert np.shape(zstack)[i] == dims[i]
    # test getdimonly:
    sizes_czi = bf.czitools.read_dimensions_czi(czifilename_complete)
    # NOTE(review): 136216 next to 1216 looks like a possible typo for the
    # expected X size -- confirm against the actual acquisition.
    dims = [1, 4, 2, 1216, 136216, 1]
    dimorder = 'BSCYX0'
    for i in range(0, len(dims)):
        assert sizes_czi[0][i] == dims[i]
    assert sizes_czi[1] == dimorder
if __name__ == '__main__':
    # Allow running this test module directly without invoking pytest from the CLI.
    pytest.main()
| {
"content_hash": "bf7797b909d47e30ac355cd73e6ac274",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 120,
"avg_line_length": 29.171597633136095,
"alnum_prop": 0.639553752535497,
"repo_name": "sebi06/BioFormatsRead",
"id": "1fee62eb53d8f7fd8082e913e3c3f2fe3a274607",
"size": "4954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_bfread_ZEN_COM.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1564"
},
{
"name": "Python",
"bytes": "112970"
}
],
"symlink_target": ""
} |
import sqlite3
import os
class TweetList(object):
    """A persistent FIFO queue of tweets backed by a SQLite database.

    Tweets are stored as a doubly linked list in the ``tweets`` table;
    the ``tweetlist`` table holds the ``first_tweet`` and ``last_tweet``
    anchor references so head and tail are found in O(1).
    """

    def __init__(self, path):
        """Open (or create) the database file at `path` and ensure the schema."""
        self.path = os.path.abspath(path)
        self.connection = sqlite3.Connection(self.path)
        self.__init_db()

    def __init_db(self):
        """Create the tables and the anchor rows if they do not exist."""
        c = self.connection.cursor()
        c.execute(
            '''CREATE TABLE IF NOT EXISTS tweets (
                id integer primary key autoincrement,
                message text not null,
                previous_tweet integer,
                next_tweet integer,
                FOREIGN KEY(previous_tweet) REFERENCES tweets(id),
                FOREIGN KEY(next_tweet) REFERENCES tweets(id)
            )'''
        )
        c.execute(
            '''CREATE TABLE IF NOT EXISTS tweetlist (
                label text primary key,
                tweet integer,
                FOREIGN KEY(tweet) REFERENCES tweets(id)
            )'''
        )
        # Create the first_tweet/last_tweet anchors (NULL = empty list) if
        # they are missing. fetchone() returns None when the SELECT yields
        # no row, which is portable across Python 2 and 3 (the original
        # cursor.next() was Python 2 only).
        if c.execute("SELECT tweet from tweetlist where label='last_tweet'").fetchone() is None:
            c.execute("INSERT INTO tweetlist VALUES ('last_tweet', NULL)")
        if c.execute("SELECT tweet from tweetlist where label='first_tweet'").fetchone() is None:
            c.execute("INSERT INTO tweetlist VALUES ('first_tweet', NULL)")
        self.connection.commit()
        c.close()

    def __len__(self):
        """Return the number of queued tweets."""
        c = self.connection.cursor()
        # BUG FIX: the original referenced an undefined name `conn` here,
        # so len() always raised NameError.
        count = c.execute("SELECT COUNT(*) FROM tweets").fetchone()[0]
        c.close()
        return count

    def append(self, tweet):
        """Add a tweet to the end of the list."""
        c = self.connection.cursor()
        last_tweet = c.execute("SELECT tweet from tweetlist where label='last_tweet'").fetchone()[0]
        c.execute("INSERT INTO tweets(message, previous_tweet, next_tweet) VALUES (?,?,NULL)", (tweet, last_tweet))
        tweet_id = c.lastrowid
        # Set the current tweet as the last tweet
        c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet_id,))
        if last_tweet is None:
            # If there was no last_tweet the list was empty, so this tweet
            # is also the first tweet.
            c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet_id,))
        else:
            # Update the previous tail's forward reference to this one.
            c.execute("UPDATE tweets SET next_tweet = ? WHERE id= ? ", (tweet_id, last_tweet))
        self.connection.commit()
        c.close()

    def pop(self):
        """Remove and return the first tweet's message, or None if empty."""
        c = self.connection.cursor()
        first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").fetchone()[0]
        if first_tweet_id is None:
            # No tweets are in the list. Close the cursor (the original
            # leaked it on this path) and return None.
            c.close()
            return None
        tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (first_tweet_id,)).fetchone()
        # Advance the first tweet reference to the successor.
        c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],))
        if tweet[3] is not None:
            # The successor becomes the new head.
            c.execute("UPDATE tweets SET previous_tweet=NULL WHERE id=?", (tweet[3],))
        else:
            # This was the only tweet, so clear the last tweet reference too.
            c.execute("UPDATE tweetlist SET tweet=NULL WHERE label=?", ('last_tweet',))
        # Now remove the tweet from the list
        c.execute("DELETE FROM tweets WHERE id=?", (first_tweet_id,))
        self.connection.commit()
        c.close()
        return tweet[1]

    def peek(self):
        """Peek at the first tweet's message without removing it, or None."""
        c = self.connection.cursor()
        first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").fetchone()[0]
        if first_tweet_id is None:
            # No tweets are in the list; close the cursor and return None.
            c.close()
            return None
        tweet = c.execute("SELECT message from tweets WHERE id=?", (first_tweet_id,)).fetchone()[0]
        c.close()
        return tweet

    def __iter__(self):
        """Yield (id, message) pairs for every stored tweet."""
        c = self.connection.cursor()
        for tweet_id, tweet in c.execute("SELECT id, message from tweets"):
            yield (tweet_id, tweet)
        c.close()

    def delete(self, tweet_id):
        """Delete the tweet with the given id.

        :raises ValueError: if no tweet with `tweet_id` exists.
        """
        c = self.connection.cursor()
        tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (tweet_id,)).fetchone()
        if tweet is None:
            raise ValueError("No tweets were found with that ID")
        # Splice the neighbours' linked-list references around the node.
        c.execute("UPDATE tweets set next_tweet=? WHERE id=?", (tweet[3], tweet[2]))
        c.execute("UPDATE tweets set previous_tweet=? WHERE id=?", (tweet[2], tweet[3]))
        if tweet[3] is None:
            # Deleted the tail: the predecessor becomes the last tweet.
            c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet[2],))
        if tweet[2] is None:
            # Deleted the head: the successor becomes the first tweet.
            c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],))
        c.execute("DELETE from tweets WHERE id=?", (tweet_id,))
        self.connection.commit()
        c.close()
| {
"content_hash": "35f51f302b1a89033b33b22b6bf7606f",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 124,
"avg_line_length": 34.50326797385621,
"alnum_prop": 0.5840121235082402,
"repo_name": "billyoverton/tweetqueue",
"id": "5eff4bfb48e145f7d072e5695e0c5c9762dafff3",
"size": "5279",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tweetqueue/TweetList.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11095"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Schema changes for the core app: rename the OAuth verbose name, drop
    # Iteration.user and attach a user foreign key to Pearl.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0002_auto_20141216_2149'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='oauth',
            options={'verbose_name_plural': 'OAuths'},
        ),
        migrations.RemoveField(
            model_name='iteration',
            name='user',
        ),
        migrations.AddField(
            model_name='pearl',
            name='user',
            # default=1 backfills existing rows with the user of id 1;
            # preserve_default=False removes the default afterwards.
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=False,
        ),
    ]
| {
"content_hash": "929faf08a867fbfe9f33ed428274520c",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 76,
"avg_line_length": 26.413793103448278,
"alnum_prop": 0.5835509138381201,
"repo_name": "Matt-Deacalion/Mentalist",
"id": "d8daf7f53bf29298df8fce553c1fbdeddadce328",
"size": "790",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mentalist/apps/core/migrations/0003_auto_20141217_1953.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40370"
}
],
"symlink_target": ""
} |
import os
import time
import baseservice
import apache_log_parser
basepath = os.path.dirname(__file__)
class Fileaccesslog(baseservice.BaseService):
    """Service that parses Apache access-log files and stores the entries.

    File handling helpers (load_incoming_file, parselines, insert,
    movetofinish, and the self.file / self.filename attributes) come from
    baseservice.BaseService -- presumably; confirm against the base class.
    NOTE(review): all attributes below are class-level and therefore shared
    by every instance of this service.
    """

    # Mutable status record reported back to the service monitor.
    status = {
        'name':'unknown',
        'status':'stopped',
        'action':'stopped',
        'actiontime':'000-00-00 00:00:00',
        'progress':{
            'current':'0',
            'total':'0'
        },
        'lastawake':'0000-00-00 00:00:00'
    }
    dbname = 'logs'  # target database name
    collectionname = 'online_access_logs'  # target collection name
    # Apache "combined" log format specification consumed by apache_log_parser.
    logformat = "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\""
    parser = None  # set by setup()

    def __init__(self):
        self.status['name'] = "Access Logs Parser"
        self.initialize()

    def filelength(self):
        """Count the lines of self.file by exhausting it, then rewind.

        NOTE(review): raises NameError if the file is empty (`i` is never
        bound) -- confirm callers only pass non-empty files.
        """
        for i, l in enumerate(self.file):
            pass
        self.file.seek(0)
        return i + 1

    def setaction(self,theaction):
        """Record the current action and derive the running/stopped status."""
        if(theaction == 'stopped'):
            self.status['status'] = 'stopped'
        else:
            self.status['status'] = 'running'
        self.status['action'] = str(theaction)
        self.status['actiontime'] = time.strftime('%Y-%m-%d %H:%M:%S')

    def setup(self):
        # Build the line parser once from the configured log format.
        self.parser = apache_log_parser.make_parser(self.logformat)
        pass

    def run(self):
        """Main service loop: parse every incoming file until none remain."""
        self.setaction('test running')
        self.status['status'] = 'running'
        while self.load_incoming_file():
            self.setaction("loading access log "+self.filename)
            self.status['progress']['total'] = str(self.filelength())
            self.status['progress']['current'] = 0
            self.parselines(self.parseline)
            self.movetofinish()

    def parseline(self,line):
        """Parse a single log line and insert it; bad lines are logged and skipped."""
        try:
            log_line_data = self.parser(line)
            self.insert(log_line_data)
            self.status['progress']['current'] += 1
        except Exception, e:
            print "BAD LINE"+str(e)
def name():
    """Service identifier used by the service loader."""
    return "fileaccesslog"
def status():
    """Expose the shared status dictionary of the service class."""
    current_status = Fileaccesslog.status
    return current_status
def runservice():
    """Instantiate and return the service object."""
    service = Fileaccesslog()
    return service
"content_hash": "d4d98e19bd00a353e7ed974338b3b92f",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 78,
"avg_line_length": 26.72,
"alnum_prop": 0.5558882235528942,
"repo_name": "UQ-UQx/old_injestor",
"id": "fa5fe5c83721768bf27b643c1bb4c49e3c03b153",
"size": "2023",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/fileaccesslog/service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "971"
},
{
"name": "Python",
"bytes": "110812"
},
{
"name": "Shell",
"bytes": "518"
}
],
"symlink_target": ""
} |
"""Implements VideoFileClip, a class for video clips creation using video files."""
from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.decorators import convert_path_to_string
from moviepy.video.io.ffmpeg_reader import FFMPEG_VideoReader
from moviepy.video.VideoClip import VideoClip
class VideoFileClip(VideoClip):
    """
    A video clip originating from a movie file. For instance: ::

        >>> clip = VideoFileClip("myHolidays.mp4")
        >>> clip.close()
        >>> with VideoFileClip("myMaskVideo.avi") as clip2:
        >>>     pass  # Implicit close called by context manager.

    Parameters
    ----------

    filename:
      The name of the video file, as a string or a path-like object.
      It can have any extension supported by ffmpeg:
      .ogv, .mp4, .mpeg, .avi, .mov etc.

    has_mask:
      Set this to 'True' if there is a mask included in the videofile.
      Video files rarely contain masks, but some video codecs enable
      that. For instance if you have a MoviePy VideoClip with a mask you
      can save it to a videofile with a mask. (see also
      ``VideoClip.write_videofile`` for more details).

    audio:
      Set to `False` if the clip doesn't have any audio or if you do not
      wish to read the audio.

    target_resolution:
      Set to (desired_width, desired_height) to have ffmpeg resize the frames
      before returning them. This is much faster than streaming in high-res
      and then resizing. If either dimension is None, the frames are resized
      by keeping the existing aspect ratio.

    resize_algorithm:
      The algorithm used for resizing. Default: "bicubic", other popular
      options include "bilinear" and "fast_bilinear". For more information, see
      https://ffmpeg.org/ffmpeg-scaler.html

    fps_source:
      The fps value to collect from the metadata. Set by default to 'fps', but
      can be set to 'tbr', which may be helpful if you are finding that it is reading
      the incorrect fps from the file.

    pixel_format
      Optional: Pixel format for the video to read. If is not specified
      'rgb24' will be used as the default format unless ``has_mask`` is set
      as ``True``, then 'rgba' will be used.

    Attributes
    ----------

    filename:
      Name of the original video file.

    fps:
      Frames per second in the original file.

    Read docs for Clip() and VideoClip() for other, more generic, attributes.

    Lifetime
    --------

    Note that this creates subprocesses and locks files. If you construct one
    of these instances, you must call close() afterwards, or the subresources
    will not be cleaned up until the process ends.

    If copies are made, and close() is called on one, it may cause methods on
    the other copies to fail.
    """

    @convert_path_to_string("filename")
    def __init__(
        self,
        filename,
        decode_file=False,
        has_mask=False,
        audio=True,
        audio_buffersize=200000,
        target_resolution=None,
        resize_algorithm="bicubic",
        audio_fps=44100,
        audio_nbytes=2,
        fps_source="fps",
        pixel_format=None,
    ):
        VideoClip.__init__(self)

        # Make a reader. RGBA is needed when a mask channel must be decoded.
        if not pixel_format:
            pixel_format = "rgba" if has_mask else "rgb24"
        self.reader = FFMPEG_VideoReader(
            filename,
            decode_file=decode_file,
            pixel_format=pixel_format,
            target_resolution=target_resolution,
            resize_algo=resize_algorithm,
            fps_source=fps_source,
        )

        # Make some of the reader's attributes accessible from the clip
        self.duration = self.reader.duration
        self.end = self.reader.duration
        self.fps = self.reader.fps
        self.size = self.reader.size
        self.rotation = self.reader.rotation
        self.filename = filename

        if has_mask:
            # Frames are RGBA: channels 0-2 are the image, channel 3 the mask.
            self.make_frame = lambda t: self.reader.get_frame(t)[:, :, :3]

            def mask_make_frame(t):
                # Normalise the 0-255 alpha channel to a 0.0-1.0 mask.
                return self.reader.get_frame(t)[:, :, 3] / 255.0

            self.mask = VideoClip(
                is_mask=True, make_frame=mask_make_frame
            ).with_duration(self.duration)
            self.mask.fps = self.fps
        else:
            self.make_frame = lambda t: self.reader.get_frame(t)

        # Make a reader for the audio, if any.
        if audio and self.reader.infos["audio_found"]:
            self.audio = AudioFileClip(
                filename,
                buffersize=audio_buffersize,
                fps=audio_fps,
                nbytes=audio_nbytes,
            )

    def __deepcopy__(self, memo):
        """Implements ``copy.deepcopy(clip)`` behaviour as ``copy.copy(clip)``.

        VideoFileClip class instances can't be deeply copied because the locked Thread
        of ``proc`` isn't pickleable. Without this override, calls to
        ``copy.deepcopy(clip)`` would raise a ``TypeError``:

        ```
        TypeError: cannot pickle '_thread.lock' object
        ```
        """
        return self.__copy__()

    def close(self):
        """Close the internal reader."""
        if self.reader:
            self.reader.close()
            self.reader = None

        try:
            # self.audio only exists when audio was found; AttributeError
            # on clips without audio is expected and ignored.
            if self.audio:
                self.audio.close()
                self.audio = None
        except AttributeError:  # pragma: no cover
            pass
| {
"content_hash": "57279579c39e5f4c7ef0b998e5dc9b9b",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 86,
"avg_line_length": 31.28735632183908,
"alnum_prop": 0.6114988978692139,
"repo_name": "Zulko/moviepy",
"id": "1be47cb45ba267306b4e3ceeade146fd8b5c9862",
"size": "5444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moviepy/video/io/VideoFileClip.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "622"
},
{
"name": "Python",
"bytes": "536587"
}
],
"symlink_target": ""
} |
import wx
import armid
from EnvironmentListCtrl import EnvironmentListCtrl
from MisuseCaseEnvironmentProperties import MisuseCaseEnvironmentProperties
from MisuseCaseNotebook import MisuseCaseNotebook
class MisuseCaseEnvironmentPanel(wx.Panel):
    """wx panel showing per-environment properties of a misuse case.

    An environment list on the left drives a notebook of read-only risk
    details (threat, vulnerability, attackers, assets, objective) plus an
    editable narrative on the right. Per-environment narratives are cached
    in self.theEnvironmentDictionary keyed by environment name.
    """

    def __init__(self,parent,dp):
        """Build the widget tree and wire the environment-list events.

        :param parent: parent wx window.
        :param dp: database proxy used for all model lookups.
        """
        wx.Panel.__init__(self,parent,armid.MISUSECASE_PANELENVIRONMENT_ID)
        self.dbProxy = dp
        # environment name -> MisuseCaseEnvironmentProperties cache
        self.theEnvironmentDictionary = {}
        # index of the currently selected environment (-1 = none)
        self.theSelectedIdx = -1
        self.theSelectedRisk = ''
        self.theSelectedThreat = ''
        self.theSelectedVulnerability = ''
        mainSizer = wx.BoxSizer(wx.HORIZONTAL)
        environmentBox = wx.StaticBox(self)
        environmentListSizer = wx.StaticBoxSizer(environmentBox,wx.HORIZONTAL)
        mainSizer.Add(environmentListSizer,0,wx.EXPAND)
        self.environmentList = EnvironmentListCtrl(self,armid.MISUSECASE_LISTENVIRONMENTS_ID,self.dbProxy)
        # The context menu of the stock list control is not wanted here.
        self.environmentList.Unbind(wx.EVT_RIGHT_DOWN)
        environmentListSizer.Add(self.environmentList,1,wx.EXPAND)
        environmentDimSizer = wx.BoxSizer(wx.VERTICAL)
        mainSizer.Add(environmentDimSizer,1,wx.EXPAND)
        nbBox = wx.StaticBox(self,-1)
        nbSizer = wx.StaticBoxSizer(nbBox,wx.VERTICAL)
        environmentDimSizer.Add(nbSizer,1,wx.EXPAND)
        self.notebook = MisuseCaseNotebook(self)
        nbSizer.Add(self.notebook,1,wx.EXPAND)
        self.SetSizer(mainSizer)
        # Cache the notebook's child controls for later updates.
        self.objectiveCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTOBJECTIVE_ID)
        self.attackerList = self.notebook.FindWindowById(armid.MISUSECASE_LISTATTACKERS_ID)
        self.assetList = self.notebook.FindWindowById(armid.MISUSECASE_LISTASSETS_ID)
        self.threatCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTTHREAT_ID)
        self.lhoodCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTLIKELIHOOD_ID)
        self.vulCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTVULNERABILITY_ID)
        self.sevCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTSEVERITY_ID)
        self.ratingCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTSCORE_ID)
        self.narrativeCtrl = self.notebook.FindWindowById(armid.MISUSECASE_TEXTNARRATIVE_ID)
        self.environmentList.Bind(wx.EVT_LIST_INSERT_ITEM,self.OnAddEnvironment)
        self.environmentList.Bind(wx.EVT_LIST_DELETE_ITEM,self.OnDeleteEnvironment)
        # Narrative editing is enabled only once an environment is selected.
        self.narrativeCtrl.Disable()

    def unloadMCComponents(self):
        """Clear all read-only notebook controls."""
        self.ratingCtrl.SetValue('')
        self.threatCtrl.SetValue('')
        self.lhoodCtrl.SetValue('')
        self.vulCtrl.SetValue('')
        self.sevCtrl.SetValue('')
        self.attackerList.DeleteAllItems()
        self.assetList.DeleteAllItems()
        self.objectiveCtrl.SetValue('')

    def loadMCComponents(self):
        """Populate the notebook from the selected environment's risk data."""
        environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
        self.ratingCtrl.SetValue(self.dbProxy.riskRating(self.theSelectedThreat,self.theSelectedVulnerability,environmentName) )
        self.threatCtrl.SetValue(self.theSelectedThreat)
        threatId = self.dbProxy.getDimensionId(self.theSelectedThreat,'threat')
        environmentId = self.dbProxy.getDimensionId(environmentName,'environment')
        self.lhoodCtrl.SetValue(self.dbProxy.threatLikelihood(threatId,environmentId))
        self.vulCtrl.SetValue(self.theSelectedVulnerability)
        vulId = self.dbProxy.getDimensionId(self.theSelectedVulnerability,'vulnerability')
        self.sevCtrl.SetValue(self.dbProxy.vulnerabilitySeverity(vulId,environmentId))
        self.attackerList.DeleteAllItems()
        # De-duplicate the attacker names before listing them.
        attackers = self.dbProxy.threatAttackers(threatId,environmentId)
        attackerSet = set(attackers)
        for atidx,attacker in enumerate(attackerSet):
            self.attackerList.InsertStringItem(atidx,attacker)
        threatenedAssets = self.dbProxy.threatenedAssets(threatId,environmentId)
        vulnerableAssets = self.dbProxy.vulnerableAssets(vulId,environmentId)
        # Build the objective sentence from the vulnerable/threatened assets.
        objectiveText = 'Exploit vulnerabilities in '
        for idx,vulAsset in enumerate(vulnerableAssets):
            objectiveText += vulAsset
            if (idx != (len(vulnerableAssets) -1)):
                objectiveText += ','
        objectiveText += ' to threaten '
        for idx,thrAsset in enumerate(threatenedAssets):
            objectiveText += thrAsset
            if (idx != (len(threatenedAssets) -1)):
                objectiveText += ','
        objectiveText += '.'
        self.objectiveCtrl.SetValue(objectiveText)
        self.assetList.DeleteAllItems()
        # The asset list shows the union of threatened and vulnerable assets.
        assetSet = set(threatenedAssets + vulnerableAssets)
        for asidx,asset in enumerate(assetSet):
            self.assetList.InsertStringItem(asidx,asset)

    def loadMisuseCase(self,mc):
        """Load an existing misuse case and select its first environment."""
        self.theSelectedRisk = mc.risk()
        self.theSelectedThreat = mc.threat()
        self.theSelectedVulnerability = mc.vulnerability()
        # Unbind selection events while the list is repopulated so the
        # handlers do not fire on programmatic changes.
        self.environmentList.Unbind(wx.EVT_LIST_ITEM_SELECTED)
        self.environmentList.Unbind(wx.EVT_LIST_ITEM_DESELECTED)
        environmentNames = []
        for cp in mc.environmentProperties():
            environmentNames.append(cp.name())
        self.environmentList.load(environmentNames)
        for cp in mc.environmentProperties():
            environmentName = cp.name()
            self.theEnvironmentDictionary[environmentName] = cp
            environmentNames.append(environmentName)
        environmentName = environmentNames[0]
        p = self.theEnvironmentDictionary[environmentName]
        self.narrativeCtrl.SetValue(p.narrative())
        self.environmentList.Select(0)
        self.loadMCComponents()
        self.environmentList.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnEnvironmentSelected)
        self.environmentList.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnEnvironmentDeselected)
        self.narrativeCtrl.Enable()
        self.theSelectedIdx = 0

    def loadRiskComponents(self,threatName,vulName):
        """Load the environments shared by a threat/vulnerability pair."""
        self.theSelectedThreat = threatName
        self.theSelectedVulnerability = vulName
        # Suspend all list events during the programmatic reload.
        self.environmentList.Unbind(wx.EVT_LIST_INSERT_ITEM)
        self.environmentList.Unbind(wx.EVT_LIST_DELETE_ITEM)
        self.environmentList.Unbind(wx.EVT_LIST_ITEM_SELECTED)
        self.environmentList.Unbind(wx.EVT_LIST_ITEM_DESELECTED)
        environments = self.dbProxy.threatVulnerabilityEnvironmentNames(threatName,vulName)
        for environmentName in environments:
            self.theEnvironmentDictionary[environmentName] = MisuseCaseEnvironmentProperties(environmentName)
        self.environmentList.load(environments)
        self.environmentList.Select(0)
        self.theSelectedIdx = 0
        self.loadMCComponents()
        self.environmentList.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnEnvironmentSelected)
        self.environmentList.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnEnvironmentDeselected)
        self.environmentList.Bind(wx.EVT_LIST_INSERT_ITEM,self.OnAddEnvironment)
        self.environmentList.Bind(wx.EVT_LIST_DELETE_ITEM,self.OnDeleteEnvironment)

    def OnEnvironmentSelected(self,evt):
        """Show the narrative and risk details of the newly selected environment."""
        self.theSelectedIdx = evt.GetIndex()
        environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
        p = self.theEnvironmentDictionary[environmentName]
        self.narrativeCtrl.SetValue(p.narrative())
        self.loadMCComponents()
        self.narrativeCtrl.Enable()

    def OnEnvironmentDeselected(self,evt):
        """Stash the edited narrative before the selection moves away."""
        self.theSelectedIdx = evt.GetIndex()
        environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
        self.theEnvironmentDictionary[environmentName] = MisuseCaseEnvironmentProperties(environmentName,self.narrativeCtrl.GetValue())
        self.narrativeCtrl.SetValue('')
        self.narrativeCtrl.Disable()
        self.unloadMCComponents()

    def OnAddEnvironment(self,evt):
        """Register a freshly added environment and select it."""
        self.theSelectedIdx = evt.GetIndex()
        environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
        self.theEnvironmentDictionary[environmentName] = MisuseCaseEnvironmentProperties(environmentName)
        self.environmentList.Select(self.theSelectedIdx)
        self.loadMCComponents()
        self.narrativeCtrl.Enable()

    def OnDeleteEnvironment(self,evt):
        """Drop a removed environment from the cache and clear the display."""
        selectedIdx = evt.GetIndex()
        environmentName = self.environmentList.GetItemText(selectedIdx)
        del self.theEnvironmentDictionary[environmentName]
        self.theSelectedIdx = -1
        self.narrativeCtrl.SetValue('')
        self.narrativeCtrl.Disable()
        self.unloadMCComponents()

    def environmentProperties(self):
        """Return the per-environment properties, including unsaved narrative edits."""
        if (self.theSelectedIdx != -1):
            environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
            self.theEnvironmentDictionary[environmentName] = MisuseCaseEnvironmentProperties(environmentName,self.narrativeCtrl.GetValue())
        return self.theEnvironmentDictionary.values()
| {
"content_hash": "b8fa5d530e58b3ba501106b0a7d60c14",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 133,
"avg_line_length": 43.1875,
"alnum_prop": 0.7727930535455861,
"repo_name": "RobinQuetin/CAIRIS-web",
"id": "6bcfbbb3df8043d2c1812860474263953e1d7484",
"size": "9091",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cairis/cairis/MisuseCaseEnvironmentPanel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11265"
},
{
"name": "Mako",
"bytes": "13226"
},
{
"name": "Python",
"bytes": "3313365"
},
{
"name": "Shell",
"bytes": "19461"
},
{
"name": "XSLT",
"bytes": "35522"
}
],
"symlink_target": ""
} |
from django import forms
from blog.models import Entry
class EntryAdminForm(forms.ModelForm):
    """Admin form for blog entries, exposing only the editable fields."""

    class Meta:
        model = Entry
        fields = ('title', 'created_by', 'excerpt', 'content',
                  'status', 'is_micro', 'enable_comments')
| {
"content_hash": "80aee185bf41f06f91f7917f6b6e0d27",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 56,
"avg_line_length": 20.727272727272727,
"alnum_prop": 0.6973684210526315,
"repo_name": "SushiTee/teerace",
"id": "f5a007553b999ea8883330cd0a6859e69721451a",
"size": "228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "teerace/blog/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "40169"
},
{
"name": "HTML",
"bytes": "95280"
},
{
"name": "JavaScript",
"bytes": "17213"
},
{
"name": "Python",
"bytes": "409303"
}
],
"symlink_target": ""
} |
"""
chemspipy.search
~~~~~~~~~~~~~~~~
A wrapper for asynchronous search requests.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import datetime
import logging
import threading
import time
from six.moves import range
from . import errors, objects, utils
log = logging.getLogger(__name__)
# TODO: Use Sequence abc metaclass?
class Results(object):
    """Container class to perform a search on a background thread and hold the results when ready.

    The constructor starts the search immediately; accessors that need the
    outcome call wait() and therefore block until the thread finishes.
    """

    def __init__(self, cs, searchfunc, searchargs, raise_errors=False, max_requests=40):
        """Generally shouldn't be instantiated directly. See :meth:`~chemspipy.api.ChemSpider.search` instead.

        :param ChemSpider cs: ``ChemSpider`` session.
        :param function searchfunc: Search function that returns a transaction ID.
        :param tuple searchargs: Arguments for the search function.
        :param bool raise_errors: If True, raise exceptions. If False, store on ``exception`` property.
        :param int max_requests: Maximum number of times to check if search results are ready.
        """
        log.debug('Results init')
        self._cs = cs
        self._raise_errors = raise_errors
        self._max_requests = max_requests
        self._status = 'Created'
        self._exception = None  # exception raised by the worker, if any
        self._qid = None  # transaction/query ID returned by searchfunc
        self._message = None
        self._start = None  # UTC start/end times for the duration property
        self._end = None
        self._results = []
        # The search runs on a daemon-less background thread started here.
        self._searchthread = threading.Thread(name='SearchThread', target=self._search, args=(cs, searchfunc, searchargs))
        self._searchthread.start()

    def _search(self, cs, searchfunc, searchargs):
        """Perform the search and retrieve the results."""
        log.debug('Searching in background thread')
        self._start = datetime.datetime.utcnow()
        try:
            self._qid = searchfunc(*searchargs)
            log.debug('Setting qid: %s' % self._qid)
            # Poll the server until the search completes, fails, or the
            # request budget is exhausted.
            for _ in range(self._max_requests):
                log.debug('Checking status: %s' % self._qid)
                status = cs.filter_status(self._qid)
                self._status = status['status']
                self._message = status.get('message', '')
                log.debug(status)
                time.sleep(0.2)
                if status['status'] == 'Complete':
                    break
                elif status['status'] in {'Failed', 'Unknown', 'Suspended', 'Not Found'}:
                    raise errors.ChemSpiPyServerError('Search Failed: %s' % status.get('message', ''))
            else:
                raise errors.ChemSpiPyTimeoutError('Search took too long')
            log.debug('Search success!')
            self._end = datetime.datetime.utcnow()
            if status['count'] > 0:
                self._results = [objects.Compound(cs, csid) for csid in cs.filter_results(self._qid)]
                log.debug('Results: %s', self._results)
            elif not self._message:
                self._message = 'No results found'
        except Exception as e:
            # Catch and store exception so we can raise it in the main thread
            self._exception = e
            self._end = datetime.datetime.utcnow()
            if self._status == 'Created':
                self._status = 'Failed'

    def ready(self):
        """Return True if the search finished.

        :rtype: bool
        """
        return not self._searchthread.is_alive()

    def success(self):
        """Return True if the search finished with no errors.

        :rtype: bool
        """
        return self.ready() and not self._exception

    def wait(self):
        """Block until the search has completed and optionally raise any resulting exception."""
        log.debug('Waiting for search to finish')
        self._searchthread.join()
        if self._exception and self._raise_errors:
            raise self._exception

    @property
    def status(self):
        """Current status string returned by ChemSpider.

        :return: 'Unknown', 'Created', 'Scheduled', 'Processing', 'Suspended', 'PartialResultReady', 'ResultReady'
        :rtype: string
        """
        return self._status

    @property
    def exception(self):
        """Any Exception raised during the search. Blocks until the search is finished."""
        self.wait()  # TODO: If raise_errors=True this will raise the exception when trying to access it?
        return self._exception

    @property
    def qid(self):
        """Search query ID.

        :rtype: string
        """
        return self._qid

    @property
    def message(self):
        """A contextual message about the search. Blocks until the search is finished.

        :rtype: string
        """
        self.wait()
        return self._message

    @property
    def count(self):
        """The number of search results. Blocks until the search is finished.

        :rtype: int
        """
        return len(self)

    @property
    def duration(self):
        """The time taken to perform the search. Blocks until the search is finished.

        :rtype: :py:class:`datetime.timedelta`
        """
        self.wait()
        return self._end - self._start

    @utils.memoized_property
    def sdf(self):
        """Get an SDF containing all the search results.

        :return: SDF containing the search results.
        :rtype: bytes
        """
        self.wait()
        return self._cs.filter_results_sdf(self._qid)

    def __getitem__(self, index):
        """Get a single result or a slice of results. Blocks until the search is finished.

        This means a Results instance can be treated like a normal Python list. For example::

            cs.search('glucose')[2]
            cs.search('glucose')[0:2]

        An IndexError will be raised if the index is greater than the total number of results.
        """
        self.wait()
        return self._results.__getitem__(index)

    def __len__(self):
        # Blocks until the search is finished.
        self.wait()
        return self._results.__len__()

    def __iter__(self):
        # Blocks until the search is finished.
        self.wait()
        return iter(self._results)

    def __repr__(self):
        if self.success():
            return 'Results(%s)' % self._results
        else:
            return 'Results(%s)' % self.status
| {
"content_hash": "f12c1e7a18401c68dc95b386bed76e04",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 122,
"avg_line_length": 32.84293193717277,
"alnum_prop": 0.5895106009883628,
"repo_name": "mcs07/ChemSpiPy",
"id": "d3deab2d4817c766ff2470311420c2e36412d5dd",
"size": "6297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chemspipy/search.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "81379"
}
],
"symlink_target": ""
} |
"""
Wrapper around git command line interface
"""
from __future__ import division, print_function
import subprocess
import sys
import time
import re
import os
from collections import namedtuple
from pandas import Timestamp
from utils import Commit
# Python 2 only: force utf-8 as the process-wide default string encoding.
# reload() and sys.setdefaultencoding do not exist on Python 3, so the
# bare except lets this no-op there.
try:
    reload(sys)
    sys.setdefaultencoding('utf-8')
except:
    pass

#
# Configuration.
#
TIMEZONE = 'Australia/Melbourne'  # The timezone used for all commit times. TODO Make configurable
SHA_LEN = 8  # The number of characters used when displaying git SHA-1 hashes
STRICT_CHECKING = True  # For validating code.

# Result of running an external command: (command, return code, stdout, stderr).
ExecResult = namedtuple('ExecResult', ['command', 'ret', 'out', 'err'])
def _is_windows():
"""Returns: True if running on a MS-Windows operating system."""
try:
sys.getwindowsversion()
except:
return False
else:
return True
IS_WINDOWS = _is_windows()
# Define a platform-appropriate lowpriority(); the win32 modules are only
# importable (and only needed) on Windows.
if IS_WINDOWS:
    import win32api
    import win32process
    import win32con

    def lowpriority():
        """ Set the priority of the process to below-normal.
            http://stackoverflow.com/questions/1023038/change-process-priority-in-python-cross-platform
        """
        pid = win32api.GetCurrentProcessId()
        handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)
        win32process.SetPriorityClass(handle, win32process.BELOW_NORMAL_PRIORITY_CLASS)

else:
    def lowpriority():
        # POSIX equivalent: raise the niceness by 1 so this process yields
        # CPU to normal-priority work.
        os.nice(1)
def truncate_sha(sha):
    """Abbreviate the git SHA-1 hash `sha` for display in reports."""
    return sha[0:SHA_LEN]
def to_timestamp(date_s):
    """Convert the date string `date_s` into a pandas Timestamp in `TIMEZONE`.

    NOTE: The idea is to get all times in one timezone.
    """
    stamp = Timestamp(date_s)
    return stamp.tz_convert(TIMEZONE)
# Shorthand aliases used throughout this module.
concat = ''.join
path_join = os.path.join
def decode_to_str(bytes):
    """Decode byte list `bytes` to a unicode string trying utf-8 encoding first
    then latin-1.

    Returns None when `bytes` is None.
    NOTE: the parameter name shadows the `bytes` builtin; kept unchanged for
    backward compatibility with keyword callers.
    """
    if bytes is None:
        return None
    try:
        return bytes.decode('utf-8')
    except UnicodeDecodeError:
        # Narrowed from a bare `except`. latin-1 maps every byte value, so
        # this fallback cannot fail.
        return bytes.decode('latin-1')
def run_program(command, async_timeout=60.0):
    """Run `command` (a list of strings) as a child process and wait for it.

    command: Program and arguments, e.g. ['git', 'status'].
    async_timeout: Unused; kept for backward compatibility. communicate()
        already blocks until the child exits, so the original post-communicate
        polling loop (whose timeout comparison was inverted and therefore broke
        out on the first iteration) was dead code.
    Returns: (ret, out, err)
        ret: the child's return code
        out: raw bytes written to stdout
        err: raw bytes written to stderr
    """
    ps = subprocess.Popen(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
    # communicate() drains both pipes to EOF and waits for the process to
    # exit, avoiding the deadlock that poll()-with-full-pipes can cause.
    out, err = ps.communicate()[:2]
    return ps.returncode, out, err
def border(name):
    """Return `name` followed by a caret rule padding the line to 80 columns."""
    padding = (78 - len(name)) * '^'
    return '%s: %s' % (name, padding)
# Horizontal rules used by format_error() when rendering command output.
BORDER = '^' * 80
BORDER_FAILURE = '*' * 80
def format_error(exec_result, name=None, failure=None):
    """Render an ExecResult (plus optional `failure` text) as a readable report.

    exec_result: An ExecResult namedtuple.
    name: Heading for the summary line; defaults to the command itself.
    failure: Optional extra failure text appended before the summary.
    """
    command, ret, out, err = exec_result
    assert name is None or isinstance(name, str), (name, type(name))
    assert isinstance(ret, int), (ret, type(ret))
    if name is None:
        name = ' '.join(command)
    summary = '%s: ret=%d,out=%d,err=%d' % (name, ret, len(out), len(err))
    parts = [border('out'), out, border('err'), err]
    if failure:
        parts.append(BORDER_FAILURE)
        parts.append(failure)
    parts.append(BORDER)
    parts.append(summary)
    return '\n%s\n' % '\n'.join(parts)
_git_debugging = False  # When True, exec_output() traces every command it runs.
def git_debugging(debugging=None):
    """Get (and optionally set) the module-wide command-tracing flag.

    debugging: New value for the flag, or None to leave it unchanged.
    Returns: The value the flag had before this call.
    """
    global _git_debugging
    previous = _git_debugging
    if debugging is not None:
        _git_debugging = debugging
    return previous
# Sample `git log` entry: documents the format that RE_LOG_ENTRY parses and
# feeds the disabled smoke test further down this module.
test_commit = """
commit 66270d2f85404a3425917c1906a418ead6d2cf0e
Author: Smita Khanzo <smita.khanzode@papercut.com>
Date: Wed Mar 2 11:40:00 2016 +1100
PC-8368: Reverting the temporary change that was made for the test build.
Hello there
"""
# Matches one `git log` entry: the SHA-1, optional merge parents, author,
# date, and the free-form message body (VERBOSE mode ignores layout spaces).
RE_LOG_ENTRY = re.compile(r'''
commit\s+(?P<sha>[a-f0-9]{40})\s*\n
(?:Merge:\s*(?P<merge1>[a-f0-9]+)\s+(?P<merge2>[a-f0-9]+)\s*\n)?
Author:\s*(?P<author>.*?)\s*\n
Date:\s*(?P<date>.*?)\s*\n
\s*(?P<body>.*)\s*
$
''', re.VERBOSE | re.DOTALL | re.MULTILINE)
# Captures the email address inside the angle brackets of an Author line.
RE_AUTHOR = re.compile(r'<([^@]+@[^@]+)>')
# !@#$ Move to caller
# Known mistyped author emails mapped to their canonical form.
AUTHOR_ALIASES = {
    'peter.williams@papercut.cm': 'peter.williams@papercut.com',
}
def extract_commit(text, issue_extractor):
    """Parse one `git log` entry into a Commit.

    text: Raw text of a single log entry (see `test_commit` for the shape).
    issue_extractor: Callable mapping a commit message body to an issue id.
    Returns: A Commit built from the parsed fields.
    """
    assert issue_extractor is None or callable(issue_extractor)
    entry_match = RE_LOG_ENTRY.search(text)
    assert entry_match, text[:1000]
    fields = entry_match.groupdict()
    author_match = RE_AUTHOR.search(fields['author'])
    assert author_match, fields['author']
    author = author_match.group(1).lower()
    fields['author'] = AUTHOR_ALIASES.get(author, author)
    fields['date'] = to_timestamp(fields['date'])
    fields['body'] = fields['body'].strip()
    fields['issue'] = issue_extractor(fields['body'])
    return Commit(**fields)
# Disabled manual smoke test for RE_LOG_ENTRY / extract_commit.
# NOTE(review): extract_commit() takes two arguments, so this dead code would
# fail with a TypeError if re-enabled — update the call before turning it on.
if False:
    from pprint import pprint
    m = RE_LOG_ENTRY.search(test_commit)
    print(m.groups())
    pprint(m.groupdict())
    commit = extract_commit(test_commit)
    pprint(commit)
    for x in commit:
        print(type(x), x)
    assert False
def exec_output(command, require_output):
    """Execute `command` which is a list of strings. If `require_output` is True
    then raise an exception if there is no stdout.

    Returns: An ExecResult(command, ret, out, err) where
        ret: return code from exec'd process
        out: stdout of the child process as a string
        err: stderr of the child process as a string
    """
    exception = None
    output = None
    error = None
    ret = -1
    if _git_debugging:
        print('exec_output(%s)' % ' '.join(command))
    try:
        ret, output, error = run_program(command)
    except Exception as e:
        exception = e
    if exception is None and require_output and not output:
        exception = RuntimeError('exec_output: command=%s' % command)
    output_str = decode_to_str(output) if output is not None else ''
    error_str = decode_to_str(error) if error is not None else ''
    exec_result = ExecResult(command, ret, output_str, error_str)
    if exception is not None:
        # Show the captured output before propagating the failure.
        # BUG FIX: the original built this report but discarded the result.
        print(format_error(exec_result))
        raise exception
    return exec_result
def exec_output_lines(command, require_output, separator=None):
    """Execute `command` which is a list of strings and split its stdout.

    If `require_output` is True then raise an exception if there is no stdout.
    separator: String used to split stdout; defaults to newline. Pass NUL for
        the output of `git ... -z` commands.
    Returns: (output_lines, exec_result)
        output_lines: stdout of the child process as a list of strings
        exec_result: the full ExecResult from exec_output()
    """
    exec_result = exec_output(command, require_output)
    if separator is None:
        separator = '\n'
    output_lines = exec_result.out.split(separator)
    assert output_lines, format_error(exec_result)
    # Drop trailing empty fields produced by a terminating separator.
    while output_lines and not output_lines[-1]:
        output_lines.pop()
    return output_lines, exec_result
def exec_headline(command):
    """Execute `command` which is a list of strings.

    Returns: (output_line, exec_result)
        output_line: the first line of stdout of the child process
        exec_result: the full ExecResult from exec_output()
    Raises: whatever exec_output raises when the command produces no stdout.
    """
    output_lines, exec_result = exec_output_lines(command, True)
    return output_lines[0], exec_result
def git_config_set(key, value):
    """Set the git config entry `key` to `value` for the current repository."""
    command = ['git', 'config', key, value]
    return exec_output(command, False)
def git_config_unset(key):
    """Remove the git config entry `key` from the current repository."""
    command = ['git', 'config', '--unset', key]
    return exec_output(command, False)
def git_reset(obj, hard=True):
    """Run `git reset [--hard] <obj>`."""
    if hard:
        command = ['git', 'reset', '--hard', obj]
    else:
        command = ['git', 'reset', obj]
    return exec_output(command, False)
def git_file_list(path_patterns=()):
    """Returns: (file_list, exec_result) for files in the current git revision
    matching `path_patterns`.

    This is basically git ls-files.
    """
    # BUG FIX: `list + tuple` raises TypeError, and the default argument is a
    # tuple; coerce the patterns so both tuples and lists work.
    command = ['git', 'ls-files', '--exclude-standard'] + list(path_patterns)
    return exec_output_lines(command, False)
def git_commit_file_list(sha):
    """Return the files touched by commit `sha`.

    Equivalent to: git diff-tree --no-commit-id --name-only -r -M <sha>
    """
    command = ['git', 'diff-tree', '--no-commit-id', '--name-only', '-r', '-M',
               sha]
    return exec_output_lines(command, False)
def git_diff_lines(treeish, path):
    """Return the patch for the diff of a blob against its parent.

    treeish: A git tree-ish id, typically a commit id.
    path: Path to a file.
    Equivalent to:
        git diff-tree -p --full-index <treeish> -- <path>
    """
    return exec_output_lines(
        ['git', 'diff-tree', '-p', '--full-index', treeish, '--', path], False)
def git_pending_list(path_patterns=()):
    """Returns: (pending_files, exec_result) for git pending (modified but not
    committed) files matching `path_patterns`.
    """
    # BUG FIX: `list + tuple` raises TypeError, and the default argument is a
    # tuple; coerce the patterns so both tuples and lists work.
    command = ['git', 'diff', '--name-only'] + list(path_patterns)
    return exec_output_lines(command, False)
def git_file_list_no_pending(path_patterns=()):
    """Returns: List of non-pending files in current git revision matching
    `path_patterns`.
    """
    # BUG FIX: git_file_list / git_pending_list return (lines, exec_result)
    # tuples; the original code forgot to unpack them, so it iterated over a
    # 2-tuple and tried to build a set containing an (unhashable) list.
    file_list, _ = git_file_list(path_patterns)
    pending_lines, _ = git_pending_list(path_patterns)
    pending = set(pending_lines)
    return [path for path in file_list if path not in pending]
def git_diff(rev1, rev2):
    """Return the files that differ between git revisions `rev1` and `rev2`."""
    command = ['git', 'diff', '--name-only', rev1, rev2]
    return exec_output_lines(command, False)
def git_log(pattern=None, inclusions=None, exclusions=None):
    """Returns: (commit_list, exec_result) for commits in ancestors of the
    current git revision with commit messages matching `pattern`.

    This is basically git log --grep=<pattern>.
    pattern: Optional regex applied to commit messages via --grep.
    inclusions: Optional list of revisions to include.
    exclusions: Optional list of revisions to exclude (passed as ^<rev>).
    """
    # `git log -z` separates commit entries with '\0' and terminates the
    # stream with '\0\0'.
    command = ['git', 'log', '-z',
               '--perl-regexp',
               '--no-merges'
               ]
    if pattern:
        command.append("--grep='%s'" % pattern)
    if inclusions:
        command.extend(inclusions)
    if exclusions:
        command.extend(['^%s' % obj for obj in exclusions])
    bin_list, exec_result = exec_output_lines(command, False, '\0')
    # Keep entries up to the first empty one (the '\0\0' terminator).
    # NOTE: removed a leftover debug print of len(bin_list).
    commit_list = []
    for commit in bin_list:
        if not commit:
            break
        commit_list.append(commit)
    assert commit_list
    return commit_list, exec_result
def git_log_extract(issue_extractor, pattern=None, inclusions=None, exclusions=None):
    """Return (commits, exec_result): parsed Commit objects for the log entries
    selected by git_log(pattern, inclusions, exclusions).
    """
    assert issue_extractor is None or callable(issue_extractor)
    entry_list, exec_result = git_log(pattern, inclusions, exclusions)
    commits = [extract_commit(entry, issue_extractor) for entry in entry_list]
    return commits, exec_result
def git_show(obj=None, quiet=False):
    """Returns: Description of a git object `obj`, which is typically a commit.

    https://git-scm.com/docs/git-show
    """
    if obj is not None:
        assert isinstance(obj, str), obj
    command = ['git', 'show'] + (['--quiet'] if quiet else [])
    if obj is not None:
        command.append(obj)
    return exec_output_lines(command, True)
def git_show_extract(issue_extractor, obj=None):
    """Return (Commit, exec_result) parsed from `git show --quiet` of `obj`."""
    assert issue_extractor is None or callable(issue_extractor)
    lines, exec_result = git_show(obj=obj, quiet=True)
    commit = extract_commit('\n'.join(lines), issue_extractor)
    return commit, exec_result
def git_show_oneline(obj=None):
    """Returns: One-line description of a git object `obj`, which is typically
    a commit.

    https://git-scm.com/docs/git-show
    """
    command = ['git', 'show', '--oneline', '--quiet']
    command += [obj] if obj is not None else []
    return exec_headline(command)
def git_show_sha(obj=None):
    """Returns: SHA-1 hash of `obj`, which is typically a commit.

    https://git-scm.com/docs/git-show
    """
    command = ['git', 'show', '--format=%H']
    command += [obj] if obj is not None else []
    return exec_headline(command)
# Disabled manual smoke test for git_show_sha().
# NOTE(review): git_show_sha() returns a 2-tuple (line, exec_result), so this
# 3-way unpack would raise ValueError if re-enabled.
if False:
    ret, line, err = git_show_sha()
    print('ret=%d,err="%s"' % (ret, err))
    print(line)
    assert False
# def git_date(obj):
# """Returns: Date of a git object `obj`, which is typically a commit.
# NOTE: The returned date is standardized to timezone TIMEZONE.
# """
# date_s = exec_headline(['git', 'show', '--pretty=format:%ai', '--quiet', obj])
# return to_timestamp(date_s)
# Matches the fetch URL in a `git remote -v` output line.
RE_REMOTE_URL = re.compile(r'(https?://.*/[^/]+(?:\.git)?)\s+\(fetch\)')
# Extracts the repository short name (last path segment, minus .git) from a URL.
RE_REMOTE_NAME = re.compile(r'https?://.*/(.+?)(\.git)?$')
def git_remote():
    """Returns: (remote_url, remote_name) for the current repository, or
    (None, None) if `git remote -v` cannot be run.

    Raises: RuntimeError when the command runs but lists no fetch remote.
    """
    # Typical `git remote -v` output:
    # origin https://github.com/FFTW/fftw3.git (fetch)
    # origin https://github.com/FFTW/fftw3.git (push)
    try:
        output_lines, _ = exec_output_lines(['git', 'remote', '-v'], True)
    except Exception as e:
        print('git_remote error: %s' % e)
        return None, None
    for line in output_lines:
        url_match = RE_REMOTE_URL.search(line)
        if url_match:
            remote_url = url_match.group(1)
            remote_name = RE_REMOTE_NAME.search(remote_url).group(1)
            return remote_url, remote_name
    raise RuntimeError('No remote')
def git_describe():
    """Returns: `git describe` output for the current revision."""
    command = ['git', 'describe', '--always']
    return exec_headline(command)
def git_name():
    """Returns: git name of current revision (the `git name-rev HEAD` output
    with the leading SHA stripped).
    """
    # BUG FIX: exec_headline returns (line, exec_result); the original called
    # .split() on that tuple, which always raised AttributeError. Unpack the
    # headline first and return the name string as documented.
    headline, _ = exec_headline(['git', 'name-rev', 'HEAD'])
    return ' '.join(headline.split()[1:])
def git_current_branch():
    """Returns: (branch, exec_result); branch is None when there is no current
    branch (detached HEAD).
    """
    branch, exec_result = exec_headline(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])
    # `git rev-parse --abbrev-ref HEAD` prints the literal string HEAD when detached.
    return (None if branch == 'HEAD' else branch), exec_result
def git_current_revision():
    """Returns: SHA-1 of current revision."""
    command = ['git', 'rev-parse', 'HEAD']
    return exec_headline(command)
def git_revision_description():
    """Returns: Our best guess at describing the current revision (branch name
    if on a branch, otherwise `git describe` output).
    """
    # BUG FIX: git_current_branch() / git_describe() return
    # (value, exec_result) tuples; the original tested the tuple, which is
    # always truthy, so the git_describe fallback never ran and callers
    # received the tuple instead of a description string.
    branch, _ = git_current_branch()
    if branch:
        return branch
    description, _ = git_describe()
    return description
def git_blame_text(path):
    """Returns: git blame text for file `path`."""
    command = ['git', 'blame', '-l', '-f', '-w', '-M', path]
    return exec_output(command, False)
# Strips the 'origin/' prefix from a `git branch -r` output line.
RE_REMOTE = re.compile(r'origin/(.*?)\s*$')
# Strips the '* ' current-branch marker and whitespace from `git branch` output.
RE_LOCAL = re.compile(r'\*?\s*(.*?)\s*$')
def git_track_all(delete_local):
    """Track all remote branches.

    If `delete_local` is True then delete all local branches (except the
    checked out branch) before tracking.
    Returns: (local_branches, exec_result)
        local_branches: Dict of {local branch: remote branch} for local
            branches that have a remote
        exec_result: Result of the last exec_output()
    """
    local_branches0, exec_result = exec_output_lines(['git', 'branch'], False)
    assert exec_result.ret == 0, format_error(exec_result)
    local_branches = set()
    for local in local_branches0:
        m = RE_LOCAL.search(local)
        assert m, 'local="%s"' % local
        local_branches.add(m.group(1))
    current_branch, exec_result = git_current_branch()
    assert exec_result.ret == 0, format_error(exec_result)
    if delete_local:
        removed_branches = set()
        for local in local_branches:
            if local == current_branch:
                continue  # Never delete the checked-out branch.
            exec_result = exec_output(['git', 'branch', '-D', local], False)
            assert exec_result.ret == 0, format_error(exec_result)
            removed_branches.add(local)
        local_branches -= removed_branches
    remote_branches0, exec_result = exec_output_lines(['git', 'branch', '-r'], False)
    assert exec_result.ret == 0, format_error(exec_result)
    local_remote = {}
    for remote in remote_branches0:
        if 'origin/HEAD ->' in remote:
            continue  # Symbolic ref, not a real branch.
        m = RE_LOCAL.search(remote)
        assert m, remote
        remote = m.group(1)
        m = RE_REMOTE.search(remote)
        assert m, remote
        local_remote[m.group(1)] = remote
    for i, (local, remote) in enumerate(sorted(local_remote.items())):
        if local in local_branches:
            continue
        print('%3d: Tracking "%s" from "%s"' % (i, local, remote))
        # BUG FIX: the remote start-point argument was missing, so the new
        # branch was created from HEAD and tracked nothing.
        exec_result = exec_output(['git', 'branch', '--track', local, remote], False)
        if exec_result.ret != 0:
            print('Could not track remote branch local="%s",remote="%s"' % (local, remote))
        assert exec_result.ret == 0, format_error(exec_result)
        local_branches.add(local)
    # Only interested in branches that have a remote.
    # BUG FIX: the original comprehension used the stale loop variable
    # `remote` as every value; map each local branch to its own remote.
    local_branches = {local: local_remote[local] for local in local_branches
                      if local in local_remote}
    return local_branches, exec_result
def git_fetch():
    """Fetch all remotes and tags, pruning deleted refs (forced)."""
    command = ['git', 'fetch', '--all', '--tags', '--prune', '--force']
    return exec_output(command, False)
def git_checkout(branch, force=False):
    """Check out `branch`, optionally discarding local changes with --force."""
    command = ['git', 'checkout'] + (['--force'] if force else [])
    command.append(branch)
    return exec_output(command, False)
def _find_conflicts(out_lines):
    """Return the lines of `out_lines` that report a merge conflict."""
    conflicts = []
    for line in out_lines:
        if line.startswith('CONFLICT'):
            conflicts.append(line)
    return conflicts
def git_pull(branch):
    """Pull `branch` from origin (forced fast-forward).

    Returns: (conflicts, out_lines, exec_result)
    """
    command = ['git', 'pull', '--force', '--ff', 'origin', branch]
    out_lines, exec_result = exec_output_lines(command, False)
    return _find_conflicts(out_lines), out_lines, exec_result
def git_push(branch):
    """Push `branch` to origin.

    NOTE(review): the assert below makes this function fail unconditionally;
    it looks like a deliberate safety guard to prevent accidental pushes —
    confirm intent before removing it (and note `assert` is stripped under
    python -O).
    """
    assert False, branch
    return exec_output(['git', 'push', 'origin', branch], False)
def git_merge(branch):
    """Merge `branch` into the current branch.

    Returns: (conflicts, out_lines, exec_result)
    """
    out_lines, exec_result = exec_output_lines(['git', 'merge', branch], False)
    conflicts = _find_conflicts(out_lines)
    return conflicts, out_lines, exec_result
| {
"content_hash": "842373339d28fba4dd6e414820758ae5",
"timestamp": "",
"source": "github",
"line_count": 610,
"max_line_length": 103,
"avg_line_length": 29.432786885245903,
"alnum_prop": 0.6172440681742231,
"repo_name": "peterwilliams97/git-stats",
"id": "e47183e14c63a145b64fea1db85f30dd07747e48",
"size": "17978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "git_calls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "93790"
}
],
"symlink_target": ""
} |
"""Unit tests for the Quobyte driver module."""
import errno
import os
import six
import StringIO
import traceback
import mock
from oslo_concurrency import processutils as putils
from oslo_config import cfg
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common import imageutils
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers import quobyte
CONF = cfg.CONF
class DumbVolume(object):
    """Minimal dict-like stand-in for a volume object in tests."""

    def __init__(self):
        # BUG FIX: the original declared `fields = {}` at class level, so the
        # dict was shared by every DumbVolume instance; give each instance its
        # own storage.
        self.fields = {}

    def __setitem__(self, key, value):
        self.fields[key] = value

    def __getitem__(self, item):
        return self.fields[item]
class FakeDb(object):
    """Stub database interface handed to the driver under test."""
    msg = "Tests are broken: mock this out."

    def volume_get(self, *args, **kwargs):
        # Any unmocked access is a test bug; fail loudly.
        raise Exception(self.msg)

    def snapshot_get_all_for_volume(self, *args, **kwargs):
        """Mock this if you want results from it."""
        return []
class QuobyteDriverTestCase(test.TestCase):
"""Test case for Quobyte driver."""
TEST_QUOBYTE_VOLUME = 'quobyte://quobyte-host/openstack-volumes'
TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL = 'quobyte-host/openstack-volumes'
TEST_SIZE_IN_GB = 1
TEST_MNT_POINT = '/mnt/quobyte'
TEST_MNT_POINT_BASE = '/mnt'
TEST_LOCAL_PATH = '/mnt/quobyte/volume-123'
TEST_FILE_NAME = 'test.txt'
TEST_SHARES_CONFIG_FILE = '/etc/cinder/test-shares.conf'
TEST_TMP_FILE = '/tmp/tempfile'
VOLUME_UUID = 'abcdefab-cdef-abcd-efab-cdefabcdefab'
SNAP_UUID = 'bacadaca-baca-daca-baca-dacadacadaca'
SNAP_UUID_2 = 'bebedede-bebe-dede-bebe-dedebebedede'
def setUp(self):
    """Build a QuobyteDriver wired to a mocked configuration and a FakeDb."""
    super(QuobyteDriverTestCase, self).setUp()
    config = mock.Mock(conf.Configuration)
    config.append_config_values(mock.ANY)
    config.quobyte_volume_url = self.TEST_QUOBYTE_VOLUME
    config.quobyte_client_cfg = None
    config.quobyte_sparsed_volumes = True
    config.quobyte_qcow2_volumes = False
    config.quobyte_mount_point_base = self.TEST_MNT_POINT_BASE
    self._configuration = config
    self._driver = quobyte.QuobyteDriver(configuration=config, db=FakeDb())
    self._driver.shares = {}
    self._driver.set_nas_security_options(is_new_cinder_install=False)
def assertRaisesAndMessageMatches(
        self, excClass, msg, callableObj, *args, **kwargs):
    """Ensure calling `callableObj` raises exactly `excClass` and that the
    exception's message includes the string `msg`.
    """
    try:
        callableObj(*args, **kwargs)
    except Exception as exc:
        self.assertEqual(excClass, type(exc),
                         'Wrong exception caught: %s Stacktrace: %s' %
                         (exc, traceback.format_exc()))
        self.assertIn(msg, six.text_type(exc))
    else:
        self.fail('Expected raised exception but nothing caught.')
def test_local_path(self):
    """local_path common use case."""
    volume = DumbVolume()
    volume['provider_location'] = self.TEST_QUOBYTE_VOLUME
    volume['name'] = 'volume-123'
    expected = '/mnt/1331538734b757ed52d0e18c0a7210cd/volume-123'
    self.assertEqual(expected, self._driver.local_path(volume))
def test_mount_quobyte_should_mount_correctly(self):
with mock.patch.object(self._driver, '_execute') as mock_execute, \
mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
'.read_proc_mount') as mock_open:
# Content of /proc/mount (not mounted yet).
mock_open.return_value = StringIO.StringIO(
"/dev/sda5 / ext4 rw,relatime,data=ordered 0 0")
self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT)
mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
mount_call = mock.call(
'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT, run_as_root=False)
getfattr_call = mock.call(
'getfattr', '-n', 'quobyte.info', self.TEST_MNT_POINT,
run_as_root=False)
mock_execute.assert_has_calls(
[mkdir_call, mount_call, getfattr_call], any_order=False)
def test_mount_quobyte_already_mounted_detected_seen_in_proc_mount(self):
with mock.patch.object(self._driver, '_execute') as mock_execute, \
mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
'.read_proc_mount') as mock_open:
# Content of /proc/mount (already mounted).
mock_open.return_value = StringIO.StringIO(
"quobyte@%s %s fuse rw,nosuid,nodev,noatime,user_id=1000"
",group_id=100,default_permissions,allow_other 0 0"
% (self.TEST_QUOBYTE_VOLUME, self.TEST_MNT_POINT))
self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT)
mock_execute.assert_called_once_with(
'getfattr', '-n', 'quobyte.info', self.TEST_MNT_POINT,
run_as_root=False)
def test_mount_quobyte_should_suppress_and_log_already_mounted_error(self):
"""Based on /proc/mount, the file system is not mounted yet. However,
mount.quobyte returns with an 'already mounted' error.
This is a last-resort safe-guard in case /proc/mount parsing was not
successful.
Because _mount_quobyte gets called with ensure=True, the error will
be suppressed and logged instead.
"""
with mock.patch.object(self._driver, '_execute') as mock_execute, \
mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
'.read_proc_mount') as mock_open, \
mock.patch('cinder.volume.drivers.quobyte.LOG') as mock_LOG:
# Content of /proc/mount (empty).
mock_open.return_value = StringIO.StringIO()
mock_execute.side_effect = [None, putils.ProcessExecutionError(
stderr='is busy or already mounted')]
self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT,
ensure=True)
mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
mount_call = mock.call(
'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT, run_as_root=False)
mock_execute.assert_has_calls([mkdir_call, mount_call],
any_order=False)
mock_LOG.warning.assert_called_once_with('%s is already mounted',
self.TEST_QUOBYTE_VOLUME)
def test_mount_quobyte_should_reraise_already_mounted_error(self):
"""Same as
test_mount_quobyte_should_suppress_and_log_already_mounted_error
but with ensure=False.
"""
with mock.patch.object(self._driver, '_execute') as mock_execute, \
mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
'.read_proc_mount') as mock_open:
mock_open.return_value = StringIO.StringIO()
mock_execute.side_effect = [
None, # mkdir
putils.ProcessExecutionError( # mount
stderr='is busy or already mounted')]
self.assertRaises(putils.ProcessExecutionError,
self._driver._mount_quobyte,
self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT,
ensure=False)
mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
mount_call = mock.call(
'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
self.TEST_MNT_POINT, run_as_root=False)
mock_execute.assert_has_calls([mkdir_call, mount_call],
any_order=False)
def test_get_hash_str(self):
    """Verify _get_hash_str produces the expected digest for the volume URL."""
    expected = '1331538734b757ed52d0e18c0a7210cd'
    self.assertEqual(expected,
                     self._driver._get_hash_str(self.TEST_QUOBYTE_VOLUME))
def test_get_available_capacity_with_df(self):
"""_get_available_capacity should calculate correct value."""
drv = self._driver
df_total_size = 2620544
df_avail = 1490560
df_head = 'Filesystem 1K-blocks Used Available Use% Mounted on\n'
df_data = 'quobyte@%s %d 996864 %d 41%% %s' % \
(self.TEST_QUOBYTE_VOLUME, df_total_size, df_avail,
self.TEST_MNT_POINT)
df_output = df_head + df_data
drv._get_mount_point_for_share = mock.Mock(return_value=self.
TEST_MNT_POINT)
drv._execute = mock.Mock(return_value=(df_output, None))
self.assertEqual((df_avail, df_total_size),
drv._get_available_capacity(self.TEST_QUOBYTE_VOLUME))
(drv._get_mount_point_for_share.
assert_called_once_with(self.TEST_QUOBYTE_VOLUME))
(drv._execute.
assert_called_once_with('df',
'--portability',
'--block-size',
'1',
self.TEST_MNT_POINT,
run_as_root=self._driver._execute_as_root))
def test_get_capacity_info(self):
with mock.patch.object(self._driver, '_get_available_capacity') \
as mock_get_available_capacity:
drv = self._driver
df_size = 2620544
df_avail = 1490560
mock_get_available_capacity.return_value = (df_avail, df_size)
size, available, used = drv._get_capacity_info(mock.ANY)
mock_get_available_capacity.assert_called_once_with(mock.ANY)
self.assertEqual(df_size, size)
self.assertEqual(df_avail, available)
self.assertEqual(size - available, used)
def test_load_shares_config(self):
"""_load_shares_config only puts the Volume URL into shares and strips
quobyte://.
"""
drv = self._driver
drv._load_shares_config()
self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL, drv.shares)
def test_load_shares_config_without_protocol(self):
"""Same as test_load_shares_config, but this time the URL was specified
without quobyte:// in front.
"""
drv = self._driver
drv.configuration.quobyte_volume_url = \
self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL
drv._load_shares_config()
self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL, drv.shares)
def test_ensure_share_mounted(self):
"""_ensure_share_mounted simple use case."""
with mock.patch.object(self._driver, '_get_mount_point_for_share') as \
mock_get_mount_point, \
mock.patch.object(self._driver, '_mount_quobyte') as \
mock_mount:
drv = self._driver
drv._ensure_share_mounted(self.TEST_QUOBYTE_VOLUME)
mock_get_mount_point.assert_called_once_with(
self.TEST_QUOBYTE_VOLUME)
mock_mount.assert_called_once_with(
self.TEST_QUOBYTE_VOLUME,
mock_get_mount_point.return_value,
ensure=True)
def test_ensure_shares_mounted_should_save_mounting_successfully(self):
"""_ensure_shares_mounted should save share if mounted with success."""
with mock.patch.object(self._driver, '_ensure_share_mounted') \
as mock_ensure_share_mounted:
drv = self._driver
drv._ensure_shares_mounted()
mock_ensure_share_mounted.assert_called_once_with(
self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL)
self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL,
drv._mounted_shares)
def test_ensure_shares_mounted_should_not_save_mounting_with_error(self):
"""_ensure_shares_mounted should not save if mount raised an error."""
with mock.patch.object(self._driver, '_ensure_share_mounted') \
as mock_ensure_share_mounted:
drv = self._driver
mock_ensure_share_mounted.side_effect = Exception()
drv._ensure_shares_mounted()
mock_ensure_share_mounted.assert_called_once_with(
self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL)
self.assertEqual(1, len(drv.shares))
self.assertEqual(0, len(drv._mounted_shares))
def test_do_setup(self):
"""do_setup runs successfully."""
drv = self._driver
drv.do_setup(mock.create_autospec(context.RequestContext))
def test_check_for_setup_error_throws_quobyte_volume_url_not_set(self):
"""check_for_setup_error throws if 'quobyte_volume_url' is not set."""
drv = self._driver
drv.configuration.quobyte_volume_url = None
self.assertRaisesAndMessageMatches(exception.VolumeDriverException,
'no Quobyte volume configured',
drv.check_for_setup_error)
def test_check_for_setup_error_throws_client_not_installed(self):
"""check_for_setup_error throws if client is not installed."""
drv = self._driver
drv._execute = mock.Mock(side_effect=OSError
(errno.ENOENT, 'No such file or directory'))
self.assertRaisesAndMessageMatches(exception.VolumeDriverException,
'mount.quobyte is not installed',
drv.check_for_setup_error)
drv._execute.assert_called_once_with('mount.quobyte',
check_exit_code=False,
run_as_root=False)
def test_check_for_setup_error_throws_client_not_executable(self):
"""check_for_setup_error throws if client cannot be executed."""
drv = self._driver
drv._execute = mock.Mock(side_effect=OSError
(errno.EPERM, 'Operation not permitted'))
self.assertRaisesAndMessageMatches(OSError,
'Operation not permitted',
drv.check_for_setup_error)
drv._execute.assert_called_once_with('mount.quobyte',
check_exit_code=False,
run_as_root=False)
def test_find_share_should_throw_error_if_there_is_no_mounted_shares(self):
"""_find_share should throw error if there is no mounted share."""
drv = self._driver
drv._mounted_shares = []
self.assertRaises(exception.NotFound,
drv._find_share,
self.TEST_SIZE_IN_GB)
def test_find_share(self):
"""_find_share simple use case."""
drv = self._driver
drv._mounted_shares = [self.TEST_QUOBYTE_VOLUME]
self.assertEqual(self.TEST_QUOBYTE_VOLUME,
drv._find_share(self.TEST_SIZE_IN_GB))
def test_find_share_does_not_throw_error_if_there_isnt_enough_space(self):
"""_find_share intentionally does not throw when df reports no
available space left.
"""
with mock.patch.object(self._driver, '_get_available_capacity') \
as mock_get_available_capacity:
drv = self._driver
df_size = 2620544
df_avail = 0
mock_get_available_capacity.return_value = (df_avail, df_size)
drv._mounted_shares = [self.TEST_QUOBYTE_VOLUME]
self.assertEqual(self.TEST_QUOBYTE_VOLUME,
drv._find_share(self.TEST_SIZE_IN_GB))
# The current implementation does not call _get_available_capacity.
# Future ones might do and therefore we mocked it.
self.assertGreaterEqual(mock_get_available_capacity.call_count, 0)
def _simple_volume(self, uuid=None):
    """Build a DumbVolume with the standard test attributes.

    uuid: Volume id to use; defaults to self.VOLUME_UUID.
    """
    volume = DumbVolume()
    volume['provider_location'] = self.TEST_QUOBYTE_VOLUME
    volume['id'] = self.VOLUME_UUID if uuid is None else uuid
    # volume['name'] mirrors format from db/sqlalchemy/models.py
    volume['name'] = 'volume-%s' % volume['id']
    volume['size'] = 10
    volume['status'] = 'available'
    return volume
def test_create_sparsed_volume(self):
drv = self._driver
volume = self._simple_volume()
drv._create_sparsed_file = mock.Mock()
drv._set_rw_permissions_for_all = mock.Mock()
drv._do_create_volume(volume)
drv._create_sparsed_file.assert_called_once_with(mock.ANY, mock.ANY)
drv._set_rw_permissions_for_all.assert_called_once_with(mock.ANY)
def test_create_nonsparsed_volume(self):
drv = self._driver
volume = self._simple_volume()
old_value = self._configuration.quobyte_sparsed_volumes
self._configuration.quobyte_sparsed_volumes = False
drv._create_regular_file = mock.Mock()
drv._set_rw_permissions_for_all = mock.Mock()
drv._do_create_volume(volume)
drv._create_regular_file.assert_called_once_with(mock.ANY, mock.ANY)
drv._set_rw_permissions_for_all.assert_called_once_with(mock.ANY)
self._configuration.quobyte_sparsed_volumes = old_value
def test_create_qcow2_volume(self):
drv = self._driver
volume = self._simple_volume()
old_value = self._configuration.quobyte_qcow2_volumes
self._configuration.quobyte_qcow2_volumes = True
drv._execute = mock.Mock()
hashed = drv._get_hash_str(volume['provider_location'])
path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
hashed,
self.VOLUME_UUID)
drv._do_create_volume(volume)
assert_calls = [mock.call('qemu-img', 'create', '-f', 'qcow2',
'-o', 'preallocation=metadata', path,
str(volume['size'] * units.Gi),
run_as_root=self._driver._execute_as_root),
mock.call('chmod', 'ugo+rw', path,
run_as_root=self._driver._execute_as_root)]
drv._execute.assert_has_calls(assert_calls)
self._configuration.quobyte_qcow2_volumes = old_value
def test_create_volume_should_ensure_quobyte_mounted(self):
"""create_volume ensures shares provided in config are mounted."""
drv = self._driver
drv.LOG = mock.Mock()
drv._find_share = mock.Mock()
drv._do_create_volume = mock.Mock()
drv._ensure_shares_mounted = mock.Mock()
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
drv.create_volume(volume)
drv._find_share.assert_called_once_with(mock.ANY)
drv._do_create_volume.assert_called_once_with(volume)
drv._ensure_shares_mounted.assert_called_once_with()
def test_create_volume_should_return_provider_location(self):
"""create_volume should return provider_location with found share."""
drv = self._driver
drv.LOG = mock.Mock()
drv._ensure_shares_mounted = mock.Mock()
drv._do_create_volume = mock.Mock()
drv._find_share = mock.Mock(return_value=self.TEST_QUOBYTE_VOLUME)
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
result = drv.create_volume(volume)
self.assertEqual(self.TEST_QUOBYTE_VOLUME, result['provider_location'])
drv._do_create_volume.assert_called_once_with(volume)
drv._ensure_shares_mounted.assert_called_once_with()
drv._find_share.assert_called_once_with(self.TEST_SIZE_IN_GB)
def test_create_cloned_volume(self):
drv = self._driver
drv._create_snapshot = mock.Mock()
drv._copy_volume_from_snapshot = mock.Mock()
drv._delete_snapshot = mock.Mock()
volume = self._simple_volume()
src_vref = self._simple_volume()
src_vref['id'] = '375e32b2-804a-49f2-b282-85d1d5a5b9e1'
src_vref['name'] = 'volume-%s' % src_vref['id']
volume_ref = {'id': volume['id'],
'name': volume['name'],
'status': volume['status'],
'provider_location': volume['provider_location'],
'size': volume['size']}
snap_ref = {'volume_name': src_vref['name'],
'name': 'clone-snap-%s' % src_vref['id'],
'size': src_vref['size'],
'volume_size': src_vref['size'],
'volume_id': src_vref['id'],
'id': 'tmp-snap-%s' % src_vref['id'],
'volume': src_vref}
drv.create_cloned_volume(volume, src_vref)
drv._create_snapshot.assert_called_once_with(snap_ref)
drv._copy_volume_from_snapshot.assert_called_once_with(snap_ref,
volume_ref,
volume['size'])
drv._delete_snapshot.assert_called_once_with(mock.ANY)
@mock.patch('cinder.openstack.common.fileutils.delete_if_exists')
def test_delete_volume(self, mock_delete_if_exists):
    """Deleting removes the active image file and its '.info' companion."""
    volume = self._simple_volume()
    volume_filename = 'volume-%s' % self.VOLUME_UUID
    volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume_filename)
    info_file = volume_path + '.info'

    with mock.patch.object(self._driver, '_ensure_share_mounted') as \
            mock_ensure_share_mounted, \
            mock.patch.object(self._driver, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(self._driver,
                              'get_active_image_from_info') as \
            mock_active_image_from_info, \
            mock.patch.object(self._driver, '_execute') as \
            mock_execute, \
            mock.patch.object(self._driver, '_local_path_volume') as \
            mock_local_path_volume, \
            mock.patch.object(self._driver, '_local_path_volume_info') as \
            mock_local_path_volume_info:
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_active_image_from_info.return_value = volume_filename
        mock_local_path_volume.return_value = volume_path
        mock_local_path_volume_info.return_value = info_file

        self._driver.delete_volume(volume)

        mock_ensure_share_mounted.assert_called_once_with(
            volume['provider_location'])
        mock_local_volume_dir.assert_called_once_with(volume)
        mock_active_image_from_info.assert_called_once_with(volume)
        # the image itself is removed with 'rm -f' ...
        mock_execute.assert_called_once_with('rm', '-f', volume_path,
                                             run_as_root=
                                             self._driver._execute_as_root)
        mock_local_path_volume_info.assert_called_once_with(volume)
        mock_local_path_volume.assert_called_once_with(volume)
        # ... and both bookkeeping files are cleaned up afterwards
        mock_delete_if_exists.assert_any_call(volume_path)
        mock_delete_if_exists.assert_any_call(info_file)
def test_delete_should_ensure_share_mounted(self):
    """delete_volume must mount the share the volume lives on first."""
    drv = self._driver
    volume = DumbVolume()
    volume['name'] = 'volume-123'
    volume['provider_location'] = self.TEST_QUOBYTE_VOLUME

    drv._ensure_share_mounted = mock.Mock()
    drv._execute = mock.Mock()

    drv.delete_volume(volume)

    drv._ensure_share_mounted.assert_called_once_with(
        self.TEST_QUOBYTE_VOLUME)
    drv._execute.assert_called_once_with(
        'rm', '-f', mock.ANY, run_as_root=False)
def test_delete_should_not_delete_if_provider_location_not_provided(self):
    """delete_volume shouldn't delete if provider_location missed."""
    drv = self._driver
    drv._ensure_share_mounted = mock.Mock()
    drv._execute = mock.Mock()

    volume = DumbVolume()
    volume['name'] = 'volume-123'
    volume['provider_location'] = None

    drv.delete_volume(volume)

    # Use unittest assertions instead of bare ``assert`` statements:
    # bare asserts are stripped under ``python -O`` (silently passing)
    # and produce no diagnostic message on failure.
    self.assertFalse(drv._ensure_share_mounted.called)
    self.assertFalse(drv._execute.called)
def test_extend_volume(self):
    """extend_volume resizes the active image file to the requested size."""
    drv = self._driver
    volume = self._simple_volume()

    volume_path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
                                       drv._get_hash_str(
                                           self.TEST_QUOBYTE_VOLUME),
                                       self.VOLUME_UUID)

    qemu_img_info_output = """image: volume-%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 473K
""" % self.VOLUME_UUID
    img_info = imageutils.QemuImgInfo(qemu_img_info_output)

    # Patch through context managers so the module-level image_utils
    # attributes are restored when the test ends; the previous direct
    # ``image_utils.X = mock.Mock()`` assignments were never undone and
    # leaked the mocks into every test that ran afterwards.
    with mock.patch.object(drv, 'get_active_image_from_info',
                           return_value=volume['name']) as \
            mock_active_image, \
            mock.patch.object(image_utils, 'qemu_img_info',
                              return_value=img_info) as mock_img_info, \
            mock.patch.object(image_utils, 'resize_image') as mock_resize:
        drv.extend_volume(volume, 3)

    mock_active_image.assert_called_once_with(volume)
    mock_img_info.assert_called_once_with(volume_path)
    mock_resize.assert_called_once_with(volume_path, 3)
def test_copy_volume_from_snapshot(self):
    """_copy_volume_from_snapshot converts the snapshot chain to a raw copy."""
    drv = self._driver
    # lots of test vars to be prepared at first
    dest_volume = self._simple_volume(
        'c1073000-0000-0000-0000-0000000c1073')
    src_volume = self._simple_volume()

    vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
                           drv._get_hash_str(self.TEST_QUOBYTE_VOLUME))
    src_vol_path = os.path.join(vol_dir, src_volume['name'])
    dest_vol_path = os.path.join(vol_dir, dest_volume['name'])
    info_path = os.path.join(vol_dir, src_volume['name']) + '.info'

    snapshot = {'volume_name': src_volume['name'],
                'name': 'clone-snap-%s' % src_volume['id'],
                'size': src_volume['size'],
                'volume_size': src_volume['size'],
                'volume_id': src_volume['id'],
                'id': 'tmp-snap-%s' % src_volume['id'],
                'volume': src_volume}

    snap_file = dest_volume['name'] + '.' + snapshot['id']
    snap_path = os.path.join(vol_dir, snap_file)
    size = dest_volume['size']

    qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (snap_file, src_volume['name'])
    img_info = imageutils.QemuImgInfo(qemu_img_output)

    # mocking and testing starts here.  Patch via context managers so
    # the module-level image_utils functions are restored on exit; the
    # previous direct ``image_utils.X = mock.Mock()`` assignments leaked
    # the mocks into subsequent tests.
    with mock.patch.object(image_utils, 'convert_image') as \
            mock_convert_image, \
            mock.patch.object(image_utils, 'qemu_img_info',
                              return_value=img_info) as mock_img_info, \
            mock.patch.object(drv, '_read_info_file',
                              return_value={'active': snap_file,
                                            snapshot['id']: snap_file}) as \
            mock_read_info_file, \
            mock.patch.object(drv, '_set_rw_permissions_for_all') as \
            mock_set_rw_permissions:
        drv._copy_volume_from_snapshot(snapshot, dest_volume, size)

    mock_read_info_file.assert_called_once_with(info_path)
    mock_img_info.assert_called_once_with(snap_path)
    mock_convert_image.assert_called_once_with(
        src_vol_path,
        dest_vol_path,
        'raw',
        run_as_root=self._driver._execute_as_root)
    mock_set_rw_permissions.assert_called_once_with(dest_vol_path)
def test_create_volume_from_snapshot_status_not_available(self):
    """Expect an error when the snapshot's status is not 'available'."""
    drv = self._driver
    src_volume = self._simple_volume()
    # A snapshot stuck in 'error' state must be rejected outright.
    snap_ref = {'volume_name': src_volume['name'],
                'name': 'clone-snap-%s' % src_volume['id'],
                'size': src_volume['size'],
                'volume_size': src_volume['size'],
                'volume_id': src_volume['id'],
                'id': 'tmp-snap-%s' % src_volume['id'],
                'volume': src_volume,
                'status': 'error'}

    new_volume = DumbVolume()
    new_volume['size'] = snap_ref['size']

    with self.assertRaises(exception.InvalidSnapshot):
        drv.create_volume_from_snapshot(new_volume, snap_ref)
def test_create_volume_from_snapshot(self):
    """Creating from a snapshot provisions the volume, then copies data in."""
    drv = self._driver
    src_volume = self._simple_volume()
    snap_ref = dict(volume_name=src_volume['name'],
                    name='clone-snap-%s' % src_volume['id'],
                    size=src_volume['size'],
                    volume_size=src_volume['size'],
                    volume_id=src_volume['id'],
                    id='tmp-snap-%s' % src_volume['id'],
                    volume=src_volume,
                    status='available')

    new_volume = DumbVolume()
    new_volume['size'] = snap_ref['size']

    # Stub out every collaborator create_volume_from_snapshot touches.
    drv._ensure_shares_mounted = mock.Mock()
    drv._find_share = mock.Mock(return_value=self.TEST_QUOBYTE_VOLUME)
    drv._do_create_volume = mock.Mock()
    drv._copy_volume_from_snapshot = mock.Mock()

    drv.create_volume_from_snapshot(new_volume, snap_ref)

    drv._ensure_shares_mounted.assert_called_once_with()
    drv._find_share.assert_called_once_with(new_volume['size'])
    drv._do_create_volume.assert_called_once_with(new_volume)
    drv._copy_volume_from_snapshot.assert_called_once_with(
        snap_ref, new_volume, new_volume['size'])
def test_initialize_connection(self):
    """initialize_connection reports format, name and mount point base."""
    drv = self._driver
    volume = self._simple_volume()
    vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
                           drv._get_hash_str(self.TEST_QUOBYTE_VOLUME))
    vol_path = os.path.join(vol_dir, volume['name'])

    qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
    img_info = imageutils.QemuImgInfo(qemu_img_output)

    # Patch via context managers so image_utils.qemu_img_info is restored
    # after the test; the previous direct assignment of a mock onto the
    # shared image_utils module was never undone and leaked into later
    # tests.
    with mock.patch.object(drv, 'get_active_image_from_info',
                           return_value=volume['name']) as \
            mock_active_image, \
            mock.patch.object(image_utils, 'qemu_img_info',
                              return_value=img_info) as mock_img_info:
        conn_info = drv.initialize_connection(volume, None)

    mock_active_image.assert_called_once_with(volume)
    mock_img_info.assert_called_once_with(vol_path)
    self.assertEqual(conn_info['data']['format'], 'raw')
    self.assertEqual(conn_info['driver_volume_type'], 'quobyte')
    self.assertEqual(conn_info['data']['name'], volume['name'])
    self.assertEqual(conn_info['mount_point_base'],
                     self.TEST_MNT_POINT_BASE)
def test_copy_volume_to_image_raw_image(self):
    """A raw image is uploaded directly from the volume file, unconverted."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # raw file: uploaded in place, no temporary conversion needed
        upload_path = volume_path

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_once_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
def test_copy_volume_to_image_qcow2_image(self):
    """Upload a qcow2 image file which has to be converted to raw first."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'convert_image') as \
            mock_convert_image, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        qemu_img_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # qcow2: converted to raw into the temp file, which is uploaded
        upload_path = self.TEST_TMP_FILE

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_convert_image.assert_called_once_with(
            volume_path, upload_path, 'raw')
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
def test_copy_volume_to_image_snapshot_exists(self):
    """Upload an active snapshot which has to be converted to raw first."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/volume-%s' % (self.TEST_MNT_POINT, self.VOLUME_UUID)
    volume_filename = 'volume-%s' % self.VOLUME_UUID
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'convert_image') as \
            mock_convert_image, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        # the active image has a backing file, i.e. a snapshot exists
        qemu_img_output = """image: volume-%s.%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (self.VOLUME_UUID, self.SNAP_UUID, volume_filename)
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # snapshot chain: flattened to raw in the temp file for upload
        upload_path = self.TEST_TMP_FILE

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_convert_image.assert_called_once_with(
            volume_path, upload_path, 'raw')
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
| {
"content_hash": "0fd14d49c019a11cb1bb9c26c15d4a06",
"timestamp": "",
"source": "github",
"line_count": 924,
"max_line_length": 79,
"avg_line_length": 41.5,
"alnum_prop": 0.5677776039221822,
"repo_name": "julianwang/cinder",
"id": "cdd4f005b5ad1fb66113c95db8d47ec5b95c2ee5",
"size": "39012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/test_quobyte.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PLpgSQL",
"bytes": "2511"
},
{
"name": "Python",
"bytes": "10718052"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
from .._vendor import _convert_request
# ``Literal`` moved into the stdlib ``typing`` module in Python 3.8;
# fall back to ``typing_extensions`` on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
else:
    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
# Signature of the optional ``cls`` response-transformer callback accepted
# by every operation: (pipeline_response, deserialized, response_headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Module-wide serializer used when building request URLs/headers; request
# building does not re-validate client-side.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(**kwargs: Any) -> HttpRequest:
    """Build the GET request for listing the Attestation provider operations."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Resolve negotiated values, falling back to the service defaults.
    api_version = kwargs.pop("api_version", params.pop("api-version", "2020-10-01"))  # type: Literal["2020-10-01"]
    accept = headers.pop("Accept", "application/json")

    url = kwargs.pop("template_url", "/providers/Microsoft.Attestation/operations")

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.attestation.AttestationManagementClient`'s
        :attr:`operations` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs):
        # Accepts (client, config, serializer, deserializer) positionally
        # or by keyword, as wired up by the generated service client.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(self, **kwargs: Any) -> _models.OperationList:
        """Lists all of the available Azure attestation operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: OperationList or the result of cls(response)
        :rtype: ~azure.mgmt.attestation.models.OperationList
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map well-known HTTP failure codes to typed azure-core exceptions;
        # callers may extend or override the mapping via ``error_map``.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", self._config.api_version)
        )  # type: Literal["2020-10-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.OperationList]

        request = build_list_request(
            api_version=api_version,
            template_url=self.list.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        # Send synchronously through the client's HTTP pipeline.
        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("OperationList", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    list.metadata = {"url": "/providers/Microsoft.Attestation/operations"}  # type: ignore
| {
"content_hash": "e145f8a59fed1ce6674b129a3bba161f",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 116,
"avg_line_length": 37.959349593495936,
"alnum_prop": 0.6682373099164703,
"repo_name": "Azure/azure-sdk-for-python",
"id": "ef9392e72d7af66fa39710d534c2de9b09b42a42",
"size": "5169",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/attestation/azure-mgmt-attestation/azure/mgmt/attestation/operations/_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import unittest
import responses
import mobilvest
import urlparse
import datetime
from hashlib import md5
class TestsMobilVest(unittest.TestCase):
    """Tests for the MobilVest SMS HTTP API wrapper.

    All HTTP traffic is intercepted with the ``responses`` library.
    NOTE(review): this is Python 2 code — it uses the ``urlparse`` module
    and feeds ``str`` directly to ``md5``; it will not run unmodified on
    Python 3.
    """

    def setUp(self):
        # Every API call first fetches a server timestamp, so each test
        # needs this endpoint stubbed.
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/timestamp.php',
                      body='1432359515')
        self.valid_api_key = '123'
        self.mapi = mobilvest.MobilVestApi('user', self.valid_api_key)

    @responses.activate
    def test_request_params(self):
        def check_request(request):
            # Flatten parse_qs's list values to single values.
            params = urlparse.parse_qs(urlparse.urlparse(
                request.path_url).query)
            params = {k: params[k][0] for k in params}
            self.assertIn('login', params,
                          "Parameter 'login' wasn't passed")
            self.assertIn('timestamp', params,
                          "Parameter 'timestamp' wasn't passed")
            self.assertIn('signature', params,
                          "Parameter 'signature' wasn't passed")
            # Signature scheme: md5 of all remaining parameter values
            # concatenated in sorted-key order, followed by the API key.
            recieved_signature = params.pop('signature')
            params_as_string = ''.join(str(params[k]) for k in sorted(params))
            valid_signature = md5(
                params_as_string + self.valid_api_key).hexdigest()
            self.assertEqual(recieved_signature, valid_signature)
            return (200, {}, {})
        responses.add_callback(
            responses.GET, 'http://online.mobilvest.ru/get/balance.php',
            callback=check_request,
            content_type='application/json',
        )
        self.mapi.get_balance()

    @responses.activate
    def test_rise_exception(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/balance.php',
                      body='{"error": 7}')
        self.assertRaises(mobilvest.ServerResponsedWithError,
                          self.mapi.get_balance)

    @responses.activate
    def test_empty_response(self):
        """If the response is empty, the server answers with error code 19.
        In that case the wrapper is expected to simply return None
        instead of raising an exception.
        """
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/balance.php',
                      body='{"error": 19}')
        balance = self.mapi.get_balance()
        self.assertIsNone(balance)

    @responses.activate
    def test_get_balance(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/balance.php',
                      body='{"money" : "69573.1","currency" : "RUR"}')
        balance_json = self.mapi.get_balance()
        self.assertIn('money', balance_json)

    @responses.activate
    def test_get_base(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/base.php',
                      body='''{
"125452": {
"name": "Valuable clients",
"time_birth": "12:00:00",
"day_before": "0",
"local_time": "1",
"birth_sender": "",
"birth_text": "",
"on_birth": "0",
"count": "2255",
"pages": "23"
}
}''')
        base_json = self.mapi.get_base()
        for base in base_json.values():
            self.assertIn('name', base)
            self.assertIn('count', base)

    @responses.activate
    def test_get_senders(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/senders.php',
                      body='{"smstest":"completed","smstest2":"completed"}')
        senders = self.mapi.get_senders()
        self.assertIsNotNone(senders)

    @responses.activate
    def test_get_phone(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/phone.php',
                      body='{"error": 19}')
        base_id = "125452"
        page = 1
        phones_json = self.mapi.get_phone(base_id, page)
        self.assertIsNone(phones_json)

    @responses.activate
    def test_get_status(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/status.php',
                      body='''
{
"4091297100348873330001" : "not_deliver"
}
''')
        sms_id = "4091297100348873330001"
        statuses_json = self.mapi.get_status(sms_id)
        self.assertIn(sms_id, statuses_json)

    @responses.activate
    def test_get_multiple_statuses(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/status.php',
                      body='''
{
"4091297100348873330001" : "not_deliver",
"4091297100348880230003" : "not_deliver"
}
''')
        sms_ids = ["4091297100348873330001", "4091297100348880230003"]
        statuses_json = self.mapi.get_status(sms_ids)
        for i in sms_ids:
            self.assertIn(i, statuses_json)

    @responses.activate
    def test_send_sms(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/send.php',
                      body='''
{
"79029134225": {
"error": "0",
"id_sms": "4092112510348380960001",
"cost": "0.5",
"count_sms": "1"
}
}''')
        phone = "79029134225"
        text = "Hello world!"
        sender = "web.web"
        result_json = self.mapi.send_sms(phone, text, sender)
        self.assertIn(phone, result_json)
        self.assertIn('id_sms', result_json[phone])

    @responses.activate
    def test_send_multiple_sms(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/send.php',
                      body='''
{
"79029134225": {
"error": "0",
"id_sms": "4092112510348380960001",
"cost": "0.5",
"count_sms": "1"
},
"79029134226": {
"error": "0",
"id_sms": "4092112510348380970001",
"cost": "0.5",
"count_sms": "1"
}
}''')
        phones = ["79029134225", "79029134226"]
        text = "Hello world!"
        sender = "web.web"
        result_json = self.mapi.send_sms(phones, text, sender)
        for p in phones:
            self.assertIn(p, result_json)

    @responses.activate
    def test_find_on_stop(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/find_on_stop.php',
                      body='{"error": 19}')
        # the requested number is not in the stop list
        on_stop_json = self.mapi.find_on_stop('79324354123')
        self.assertIsNone(on_stop_json)

    @responses.activate
    def test_add_to_stop(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/add2stop.php',
                      body='{"id" : "4419373"}')
        response = self.mapi.add_to_stop('79324354123')
        self.assertIn('id', response)

    @responses.activate
    def test_get_template(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/template.php',
                      body="""{
"test": {
"template": "text",
"up_time": "2014-08-28 15:22:25"
}
}""")
        templates = self.mapi.get_template()
        for i in templates.values():
            self.assertIn('template', i)

    @responses.activate
    def test_add_template(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/add_template.php',
                      body='{"id" : "4419373"}')
        name = 'template_1'
        text = "Hello, World!"
        response = self.mapi.add_template(name, text)
        self.assertIn('id', response)

    @responses.activate
    def test_stat_by_month(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/stat_by_month.php',
                      body='{"error": 19}')
        # empty statistics for the current month
        now = datetime.datetime.now()
        response = self.mapi.stat_by_month(now)
        self.assertIsNone(response)

    @responses.activate
    def test_get_operator(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/operator.php',
                      body='{"operator" : "AT&T"}')
        phone = '7821345312'
        response = self.mapi.get_operator(phone)
        self.assertIn('operator', response)

    @responses.activate
    def test_get_incoming(self):
        responses.add(responses.GET,
                      'http://online.mobilvest.ru/get/incoming.php',
                      body='''
{
"5597": {
"date": "2014-10-27 05:47:24",
"sender": "79022754620",
"prefix": "51632",
"text": "51632 TEST"
}
}
''')
        now = datetime.datetime.now()
        incomings = self.mapi.get_incoming(now)
        for i in incomings.values():
            self.assertIn('sender', i)
            self.assertIn('text', i)
| {
"content_hash": "844e84f61940a62f6e65410e552af562",
"timestamp": "",
"source": "github",
"line_count": 281,
"max_line_length": 78,
"avg_line_length": 37.03914590747331,
"alnum_prop": 0.4608954650269024,
"repo_name": "ron8mcr/python-mobilvest",
"id": "2eb0256867e89bf7d807419bb3f743d1c0a5f490",
"size": "10610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mobilvest/test_mobilvest.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "23661"
}
],
"symlink_target": ""
} |
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
"""
import sys
import os
from .models import *
from groupdocs.FileStream import FileStream
from groupdocs.ApiClient import ApiException
class ComparisonApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
self.__basePath = "https://dev-api.groupdocs.com/v2.0"
@property
def basePath(self):
return self.__basePath
@basePath.setter
def basePath(self, value):
self.__basePath = value
def Compare(self, userId, sourceFileId, targetFileId, callbackUrl, **kwargs):
"""Compare
Args:
userId, str: User GUID (required)
sourceFileId, str: Source File GUID (required)
targetFileId, str: Target File GUID (required)
callbackUrl, str: Callback Url (required)
Returns: CompareResponse
"""
if( userId == None or sourceFileId == None or targetFileId == None or callbackUrl == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'sourceFileId', 'targetFileId', 'callbackUrl']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method Compare" % key)
params[key] = val
del params['kwargs']
resourcePath = '/comparison/{userId}/comparison/compare?source={sourceFileId}&target={targetFileId}&callback={callbackUrl}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('sourceFileId' in params):
queryParams['source'] = self.apiClient.toPathValue(params['sourceFileId'])
if ('targetFileId' in params):
queryParams['target'] = self.apiClient.toPathValue(params['targetFileId'])
if ('callbackUrl' in params):
queryParams['callback'] = self.apiClient.toPathValue(params['callbackUrl'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CompareResponse')
return responseObject
def GetChanges(self, userId, resultFileId, **kwargs):
"""Get changes
Args:
userId, str: User GUID (required)
resultFileId, str: Comparison result file GUID (required)
Returns: ChangesResponse
"""
if( userId == None or resultFileId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'resultFileId']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetChanges" % key)
params[key] = val
del params['kwargs']
resourcePath = '/comparison/{userId}/comparison/changes?resultFileId={resultFileId}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('resultFileId' in params):
queryParams['resultFileId'] = self.apiClient.toPathValue(params['resultFileId'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'ChangesResponse')
return responseObject
def UpdateChanges(self, userId, resultFileId, body, **kwargs):
"""Update changes
Args:
userId, str: User GUID (required)
resultFileId, str: Comparison result file GUID (required)
body, List[ChangeInfo]: Comparison changes to update (accept or reject) (required)
Returns: ChangesResponse
"""
if( userId == None or resultFileId == None or body == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'resultFileId', 'body']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method UpdateChanges" % key)
params[key] = val
del params['kwargs']
resourcePath = '/comparison/{userId}/comparison/changes?resultFileId={resultFileId}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('resultFileId' in params):
queryParams['resultFileId'] = self.apiClient.toPathValue(params['resultFileId'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'ChangesResponse')
return responseObject
def GetDocumentDetails(self, userId, guid, **kwargs):
"""Get document details
Args:
userId, str: User GUID (required)
guid, str: Document GUID (required)
Returns: DocumentDetailsResponse
"""
if( userId == None or guid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'guid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetDocumentDetails" % key)
params[key] = val
del params['kwargs']
resourcePath = '/comparison/{userId}/comparison/document?guid={guid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('guid' in params):
queryParams['guid'] = self.apiClient.toPathValue(params['guid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'DocumentDetailsResponse')
return responseObject
def DownloadResult(self, userId, resultFileId, **kwargs):
    """Download comparison result file

    Args:
        userId, str: User GUID (required)
        resultFileId, str: Comparison result file GUID (required)
        format, str: Comparison result file format (optional)

    Returns: stream
    """
    # Both identifiers are mandatory.
    if userId == None or resultFileId == None:
        raise ApiException(400, "missing required parameters")

    # Collect the recognised parameters into a single dict; reject any
    # keyword argument this endpoint does not know about.
    allParams = ['userId', 'resultFileId', 'format']
    params = {'userId': userId, 'resultFileId': resultFileId}
    for key, val in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method DownloadResult" % key)
        params[key] = val

    # Build the resource path: the query-string part of the template is
    # stripped — those values travel in queryParams instead.
    resourcePath = '/comparison/{userId}/comparison/download?resultFileId={resultFileId}&format={format}'.replace('*', '')
    resourcePath = resourcePath.split("?", 1)[0]
    resourcePath = resourcePath.replace('{format}', 'json')
    method = 'GET'

    queryParams = {}
    headerParams = {}
    if 'resultFileId' in params:
        queryParams['resultFileId'] = self.apiClient.toPathValue(params['resultFileId'])
    if 'format' in params:
        queryParams['format'] = self.apiClient.toPathValue(params['format'])
    if 'userId' in params:
        resourcePath = resourcePath.replace(
            '{' + 'userId' + '}', str(self.apiClient.toPathValue(params['userId'])))

    postData = params.get('body')
    # The raw file stream is returned to the caller — no deserialization.
    return self.apiClient.callAPI(self.basePath, resourcePath, method,
                                  queryParams, postData, headerParams, FileStream)
| {
"content_hash": "b7f2c4f72bce7dab253c21e39290e17a",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 148,
"avg_line_length": 39.417857142857144,
"alnum_prop": 0.5840355168977077,
"repo_name": "liosha2007/temporary-groupdocs-python3-sdk",
"id": "08476b5f45119ce15096034c20f9a487fa3d38f3",
"size": "11059",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "groupdocs/ComparisonApi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "992590"
}
],
"symlink_target": ""
} |
from geojsontosvg import transform_to_json
from parser import load_matrices
from collections import OrderedDict
import json
res = transform_to_json()
matrix, reversed_matrix,csv_code_to_name = load_matrices()
from_csv = csv_code_to_name.values()
from_json = res.keys()
#print len(from_csv),from_csv
from_csv = [unicode(name,errors='ignore') for name in from_csv]
unmapped_csv_stats = sorted(list(set(from_csv) - set(from_json)))
csv_name_to_code=OrderedDict()
for code in csv_code_to_name:
csv_name_to_code[unicode(csv_code_to_name[code],errors="ignore")]=code
print "name_to_code keys", sorted(csv_name_to_code.keys())
json_names = sorted(from_json)
import sys
#line = sys.stdin.readline()
res = OrderedDict()
entered_letter = ""
while unmapped_csv_stats:
unmapped_stat = unmapped_csv_stats[0]
print "Match for :", unmapped_stat , ("or beginning letter")
i=1
print "0 : not found"
if entered_letter !="":
show_list= filter(lambda x:x.lower().startswith(entered_letter),json_names)
else:
show_list = json_names
for name in show_list :
print i, ":", name ,
i+=1
print
value = sys.stdin.readline()
try:
print "value ", value
value=int(value)
print "value entered" , value
entered_letter=""
if i==0:
code = "NOTFOUND"
else :
print csv_name_to_code
code= csv_name_to_code[unmapped_stat]
res[code]=show_list[value-1]
print "res"
for r in res :
print r, csv_code_to_name[r], res[r]
print "-------------"
f = open('reconciliation.json','w')
f.write(json.dumps(res))
f.close()
unmapped_csv_stats = unmapped_csv_stats[1:]
except ValueError, e:
print "the exception : ", type(e)
entered_letter=value.strip()
print "letter entered", entered_letter
#print code_to_name
#print line
| {
"content_hash": "f72a73dec3b8a17f33f1517696d230bb",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 83,
"avg_line_length": 29.424242424242426,
"alnum_prop": 0.6189495365602472,
"repo_name": "madewulf/MigrationsMap.net",
"id": "4665a444ac116cd2dc4a19e46a8805521a8bbb1b",
"size": "1942",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "associate_map_to_db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33260"
},
{
"name": "HTML",
"bytes": "388345"
},
{
"name": "JavaScript",
"bytes": "179466"
},
{
"name": "Python",
"bytes": "10019"
}
],
"symlink_target": ""
} |
"""
Project version:
>>> print(VERSION_INFO)
VersionInfo(major=0, minor=3, patch=0)
>>> print(VERSION)
0.3.0
>>>
"""
import collections
__author__ = "Simone Campagna"
__copyright__ = 'Copyright (c) 2016 Simone Campagna'
__license__ = 'Apache License Version 2.0'
__all__ = (
'VersionInfo',
'VERSION_INFO',
'VERSION',
)
VersionInfo = collections.namedtuple('VersionInfo', (
'major',
'minor',
'patch',
))
VERSION_INFO = VersionInfo(major=0, minor=3, patch=0)
VERSION = '.'.join(str(v) for v in VERSION_INFO)
| {
"content_hash": "e3bca2fe14b68ecb2553879f9506a780",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 53,
"avg_line_length": 15.882352941176471,
"alnum_prop": 0.6277777777777778,
"repo_name": "simone-campagna/toxins",
"id": "24650d26bc15de06d65e374647ccc6f3a1ed2830",
"size": "1148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toxins/version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "26379"
}
],
"symlink_target": ""
} |
import importlib
import os
import pickle
import sys
import traceback
def main():
    """Run one user-data LCM operation described by a pickled dict on stdin.

    The dict carries the request, VNF instance, grant request/response and
    the unpacked CSAR directory. The user-data class named in the request's
    additionalParams is imported from that directory, the method matching the
    grant operation is invoked, and the resulting stack dict is pickled to
    stdout.
    """
    payload = pickle.load(sys.stdin.buffer)

    request = payload['request']
    vnf_instance = payload['vnf_instance']
    grant_request = payload['grant_request']
    grant_response = payload['grant_response']
    csar_dir = payload['tmp_csar_dir']

    extra = request['additionalParams']
    # The CSAR ships the user-data module; make it importable, then turn the
    # relative file path into a dotted module name.
    sys.path.append(csar_dir)
    module_name = os.path.splitext(
        extra['lcm-operation-user-data'].lstrip('./'))[0].replace('/', '.')
    userdata_cls = getattr(importlib.import_module(module_name),
                           extra['lcm-operation-user-data-class'])

    # Rollback requests dispatch to the "<operation>_rollback" method.
    op_name = grant_request['operation'].lower()
    if payload['is_rollback']:
        op_name = op_name + '_rollback'
    handler = getattr(userdata_cls, op_name)

    stack_dict = handler(request, vnf_instance, grant_request,
                         grant_response, csar_dir)

    pickle.dump(stack_dict, sys.stdout.buffer)
    sys.stdout.flush()
if __name__ == "__main__":
try:
main()
os._exit(0)
except Exception:
sys.stderr.write(traceback.format_exc())
sys.stderr.flush()
os._exit(1)
| {
"content_hash": "faa0c97ee180250d73003ca9a3a3bafb",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 71,
"avg_line_length": 28.25,
"alnum_prop": 0.6468222043443282,
"repo_name": "openstack/tacker",
"id": "f3d2b0de87385ee20de5285ef8632bbdbdf2c033",
"size": "1905",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tacker/sol_refactored/infra_drivers/openstack/userdata_main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "10809"
},
{
"name": "Mako",
"bytes": "1046"
},
{
"name": "Python",
"bytes": "7648075"
},
{
"name": "Ruby",
"bytes": "2841"
},
{
"name": "Shell",
"bytes": "61750"
},
{
"name": "Smarty",
"bytes": "3624"
}
],
"symlink_target": ""
} |
'''Logging from Celery both to logstash and structured log (JSON) file.'''
# You need to install python-logstash package. Also have logstash agent and
# elasticsearch running.
# Example logstash configuration (logstash.conf):
# input {
# udp => {
# codec => json
# }
# }
# output {
# elasticsearch => {
# host => localhost # Point to your host
# }
# }
# Then run "logstash/bin/logstash -f logstash.conf
from celery.utils.log import get_task_logger
import celery
import logstash
from logging.handlers import TimedRotatingFileHandler
from os import makedirs
from os.path import expanduser, isdir, join
import json
import logging
# Default logstash agent endpoint (the example logstash.conf above expects a
# UDP JSON input) and default directory for the JSON log files.
# Both can be overridden via configure().
_logstash = {'host': 'localhost', 'port': 5959}
_logdir = expanduser('~/.local/log')
class CeleryAdapter(logging.LoggerAdapter):
    '''Adapter to add current task context to "extra" log fields'''

    def process(self, msg, kwargs):
        task = celery.current_task
        # Outside of a task there is nothing to attach.
        if not task:
            return msg, kwargs
        # Work on a copy so the caller's kwargs are left untouched.
        kwargs = dict(kwargs)
        extra = kwargs.setdefault('extra', {})
        extra['celery'] = vars(task.request)
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that serializes the entire LogRecord as one JSON object."""

    def format(self, record):
        # BUG FIX: vars() returns the record's *live* __dict__; the fixups
        # below used to mutate the record itself, corrupting msg/exc_info
        # for any other handler or formatter. Work on a copy instead.
        obj = dict(vars(record))
        # msg might be any Python object, make sure json doesn't blow up
        try:
            json.dumps(obj['msg'])
        except TypeError:
            obj['msg'] = repr(obj['msg'])
        # json can't dump exc_info, use default format as string
        if obj['exc_info']:
            obj['exc_info'] = self.formatException(obj['exc_info'])
        return json.dumps(obj)
def configure(logstash=None, logdir=None):
    '''Configuration settings.

    logstash is a dictionary of {'host': ..., 'port': ...}
    logdir is path to where log files are stored.

    At least one of the two must be given; unspecified settings keep their
    module-level defaults.
    '''
    global _logdir
    if not (logstash or logdir):
        raise ValueError('you must specify at least logstash or logdir')
    # BUG FIX: dict.update(None) raises TypeError, so calling
    # configure(logdir=...) alone used to crash. Only merge when a
    # logstash override was actually supplied.
    if logstash:
        _logstash.update(logstash)
    _logdir = logdir or _logdir
    init_logdir(_logdir)  # We do it here so we'll fail close to the definition
def init_logdir(logdir):
    '''Create the log directory (and any missing parents) if it is absent.'''
    if isdir(logdir):
        return
    makedirs(logdir)
def new_logger(name):
    '''Return new logger which will log both to logstash and to file in JSON
    format.

    Log files are stored in <logdir>/name.json
    '''
    log = get_task_logger(name)

    # Ship records to the logstash agent.
    log.addHandler(logstash.LogstashHandler(_logstash['host'], _logstash['port']))

    # Mirror everything into a midnight-rotated JSON file.
    init_logdir(_logdir)
    file_handler = TimedRotatingFileHandler(
        '%s.json' % join(_logdir, name),
        when='midnight',
        utc=True,
    )
    file_handler.setFormatter(JSONFormatter())
    log.addHandler(file_handler)

    # Wrap so that celery task context is attached to every record.
    return CeleryAdapter(log, {})
| {
"content_hash": "39f0049299ceca99749ee42b3fc9eeef",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 79,
"avg_line_length": 26.134615384615383,
"alnum_prop": 0.6423841059602649,
"repo_name": "tebeka/pythonwise",
"id": "e1d2146f5c712d171b62b0940216acc1006dd607",
"size": "2718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "celery_logstash.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "419"
},
{
"name": "Assembly",
"bytes": "130"
},
{
"name": "Awk",
"bytes": "94"
},
{
"name": "C",
"bytes": "3348"
},
{
"name": "CSS",
"bytes": "7156"
},
{
"name": "Dockerfile",
"bytes": "691"
},
{
"name": "Go",
"bytes": "17160"
},
{
"name": "HTML",
"bytes": "28603"
},
{
"name": "JavaScript",
"bytes": "75641"
},
{
"name": "Jupyter Notebook",
"bytes": "542450"
},
{
"name": "Makefile",
"bytes": "2242"
},
{
"name": "Mako",
"bytes": "795"
},
{
"name": "Python",
"bytes": "1039734"
},
{
"name": "Shell",
"bytes": "23126"
},
{
"name": "TeX",
"bytes": "257"
},
{
"name": "Vim script",
"bytes": "785"
}
],
"symlink_target": ""
} |
# NOTE(review): multiprocessing is imported but unused — presumably the old
# workaround for the atexit crash in "python setup.py test"
# (http://bugs.python.org/issue15881); confirm before removing.
import multiprocessing
import setuptools

# All real packaging metadata lives in setup.cfg; pbr reads it at build time.
setuptools.setup(
    setup_requires=['pbr!=2.1.0,>=2.0.0'],
    pbr=True)
| {
"content_hash": "ca51ea9dd20bcfb80c4d792bf6ba4bf5",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 16.857142857142858,
"alnum_prop": 0.6864406779661016,
"repo_name": "Flav35/registryctl",
"id": "ae202b3dda1ef179b027d84990e84c330fd268c3",
"size": "118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "211"
},
{
"name": "Python",
"bytes": "8562"
}
],
"symlink_target": ""
} |
"""
Solution to Day 1 - Puzzle 2 of the Advent Of Code 2015 series of challenges.
--- Day 1: Not Quite Lisp ---
An opening parenthesis represents an increase in floor and a closing parenthesis represents a decrease in floor.
After taking a 7000 character long input string of assorted parenthesis, determine the first time that Santa arrives
at a specified floor.
-----------------------------
Author: Luke "rookuu" Roberts
"""
inputData = raw_input("Puzzle Input: ")
floor = 0
index = 0
floorRequired = int(raw_input("What floor are we looking for? "))
# Used to check the length of the input string.
# print len(inputData)
for char in inputData:
if char == "(":
floor += 1
elif char == ")":
floor -= 1
index += 1
if floor == floorRequired:
print "The first time Santa visits floor " + str(floorRequired) + " is on instruction number " + str(index)
break
| {
"content_hash": "8de0b5259c4cc6d2b6ed3ad7df1792af",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 116,
"avg_line_length": 26.88235294117647,
"alnum_prop": 0.6597374179431073,
"repo_name": "rookuu/AdventOfCode-2015",
"id": "de14ffaa31c53a560fa65c555d1949d71a04f45f",
"size": "937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Day 1/Puzzle 2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35584"
}
],
"symlink_target": ""
} |
"""Code for supporting compatibility across python versions."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
try:
from base64 import decodebytes, encodebytes
except ImportError:
from base64 import encodestring as encodebytes
from base64 import decodestring as decodebytes
| {
"content_hash": "12e85173002fd9d2b415c6dd60407bf4",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 63,
"avg_line_length": 34.2,
"alnum_prop": 0.7865497076023392,
"repo_name": "sserrot/champion_relationships",
"id": "5e5861916ea06c92bd9de7ebd88f9020d8f0bb50",
"size": "342",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/nbformat/_compat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "128"
},
{
"name": "HTML",
"bytes": "18324224"
},
{
"name": "Jupyter Notebook",
"bytes": "9131072"
},
{
"name": "Python",
"bytes": "10702"
}
],
"symlink_target": ""
} |
"""This example gets all browsers available to target from the Browser table.
Other tables include 'Bandwidth_Group', 'Browser_Language',
'Device_Capability', 'Operating_System', etc...
A full list of available criteria tables can be found at
https://developers.google.com/doubleclick-publishers/docs/reference/v201811/PublisherQueryLanguageService
"""
import tempfile
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize a report downloader.
report_downloader = client.GetDataDownloader(version='v201811')
with tempfile.NamedTemporaryFile(
prefix='browser_data_',
suffix='.csv', mode='w', delete=False) as browser_data_file:
browser_pql_query = ('SELECT Id, BrowserName, MajorVersion, MinorVersion '
'FROM Browser '
'ORDER BY BrowserName ASC')
# Downloads the response from PQL select statement to the specified file
report_downloader.DownloadPqlResultToCsv(
browser_pql_query, browser_data_file)
print 'Saved browser data to... %s' % browser_data_file.name
if __name__ == '__main__':
  # Initialize client object.
  # Credentials and network settings are read from the default
  # googleads.yaml storage file.
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client)
| {
"content_hash": "c6dd03f840d84c87821bf5a70f291756",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 105,
"avg_line_length": 32.743589743589745,
"alnum_prop": 0.7180892717306187,
"repo_name": "Aloomaio/googleads-python-lib",
"id": "5a0c7eddb173ff310b220d92e86ddc4b81e767ca",
"size": "1899",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/ad_manager/v201811/publisher_query_language_service/get_all_browsers.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "491015"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: creates the CommitmentReadability
    # model as a one-to-one extension of Commitment — the O2O field doubles
    # as the primary key, so each Commitment has at most one readability row.

    dependencies = [
        ('commitments', '0004_auto_20170227_0318'),
    ]

    operations = [
        migrations.CreateModel(
            name='CommitmentReadability',
            fields=[
                ('commitment', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='commitments.Commitment')),
            ],
        ),
    ]
| {
"content_hash": "8f593af4bdcc279d3d7340fba1597fbb",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 162,
"avg_line_length": 27.3,
"alnum_prop": 0.6373626373626373,
"repo_name": "vinay-pad/commit_service",
"id": "c238010d13603343260bddc8c49a9e7e92d5dbae",
"size": "617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/commitments/migrations/0005_commitmentreadability.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32106"
}
],
"symlink_target": ""
} |
import json
from pathlib import Path
from unittest.mock import Mock
import pytest
import textwrap
import responses as rsps
import click
from ruamel.yaml import YAML
from archery.bot import (
CommentBot, CommandError, CrossbowCommentFormatter, group
)
@pytest.fixture
def responses():
    # Provide an active RequestsMock for the duration of each test.
    mock = rsps.RequestsMock()
    with mock:
        yield mock
def load_fixture(name):
    """Load a fixture by file name, parsing .json/.yaml files automatically."""
    fixture_path = Path(__file__).parent / 'fixtures' / name
    with fixture_path.open('r') as stream:
        if name.endswith('.json'):
            return json.load(stream)
        if name.endswith('.yaml'):
            return YAML().load(stream)
        return stream.read()
def github_url(path):
    # Build the absolute GitHub API URL for an endpoint path.
    endpoint = path.strip('/')
    return 'https://api.github.com:443/' + endpoint
# A small click-based command group used as the bot handler in the tests
# below. Each command returns a value so the tests can observe parsing.
@group()
def custom_handler():
    pass


@custom_handler.command()
@click.pass_obj
def extra(obj):
    # Echo the context object back so tests can verify **kwargs plumbing.
    return obj


@custom_handler.command()
@click.option('--force', '-f', is_flag=True)
def build(force):
    # Return the flag value; exercises boolean option parsing.
    return force


@custom_handler.command()
@click.option('--name', required=True)
def benchmark(name):
    # Return the mandatory option; exercises required-option validation.
    return name
def test_click_based_commands():
    # flag option: absent -> False, present -> True
    assert custom_handler('build') is False
    assert custom_handler('build -f') is True
    # a required option is parsed and returned to the caller
    assert custom_handler('benchmark --name strings') == 'strings'
    # ...and omitting it raises CommandError
    with pytest.raises(CommandError):
        custom_handler('benchmark')
    # extra keyword arguments are forwarded to the command via the context
    assert custom_handler('extra', extra='data') == {'extra': 'data'}
def test_crossbow_comment_formatter():
    # Render a fixture job and compare against the canned success message.
    job = load_fixture('crossbow-job.yaml')
    template = load_fixture('crossbow-success-message.md')

    formatter = CrossbowCommentFormatter(crossbow_repo='ursa-labs/crossbow')
    rendered = formatter.render(job)

    expected = template.format(
        repo='ursa-labs/crossbow',
        branch='ursabot-1',
        revision='f766a1d615dd1b7ee706d05102e579195951a61c',
        status='has been succeeded.'
    )
    assert rendered == textwrap.dedent(expected).strip()
@pytest.mark.parametrize('fixture_name', [
    # the bot is not mentioned, nothing to do
    'event-issue-comment-not-mentioning-ursabot.json',
    # don't respond to itself, it prevents recursive comment storms!
    'event-issue-comment-by-ursabot.json',
    # non-authorized user sent the comment, do not respond
    'event-issue-comment-by-non-authorized-user.json',
])
def test_noop_events(fixture_name):
    # Events the bot must ignore entirely: the handler is never invoked.
    event = load_fixture(fixture_name)
    handler = Mock()

    bot = CommentBot(name='ursabot', token='', handler=handler)
    bot.handle('issue_comment', event)

    handler.assert_not_called()
def test_issue_comment_without_pull_request(responses):
    # Issue #19 exists but its pull-request endpoint 404s, so the bot should
    # answer with a hint comment instead of dispatching the command.
    # NOTE: registration order matters — responses.calls[2] below relies on it.
    responses.add(
        responses.GET,
        github_url('/repositories/169101701/issues/19'),
        json=load_fixture('issue-19.json'),
        status=200
    )
    responses.add(
        responses.GET,
        github_url('repos/ursa-labs/ursabot/pulls/19'),
        json={},
        status=404
    )
    responses.add(
        responses.POST,
        github_url('/repos/ursa-labs/ursabot/issues/19/comments'),
        json={}
    )

    def handler(command, **kwargs):
        # No-op: the bot must never get as far as executing a command.
        pass

    payload = load_fixture('event-issue-comment-without-pull-request.json')

    bot = CommentBot(name='ursabot', token='', handler=handler)
    bot.handle('issue_comment', payload)

    # Third recorded call is the POST; verify the comment body it sent.
    post = responses.calls[2]
    assert json.loads(post.request.body) == {
        'body': "The comment bot only listens to pull request comments!"
    }
def test_respond_with_usage(responses):
    # When the handler raises CommandError, the bot must reply with the
    # error text wrapped in a fenced code block.
    # NOTE: registration order matters — responses.calls[3] below relies on it.
    responses.add(
        responses.GET,
        github_url('/repositories/169101701/issues/26'),
        json=load_fixture('issue-26.json'),
        status=200
    )
    responses.add(
        responses.GET,
        github_url('/repos/ursa-labs/ursabot/pulls/26'),
        json=load_fixture('pull-request-26.json'),
        status=200
    )
    responses.add(
        responses.GET,
        github_url('/repos/ursa-labs/ursabot/issues/comments/480243811'),
        json=load_fixture('issue-comment-480243811.json')
    )
    responses.add(
        responses.POST,
        github_url('/repos/ursa-labs/ursabot/issues/26/comments'),
        json={}
    )

    def handler(command, **kwargs):
        # Simulate a handler that rejects the (empty) command with usage text.
        raise CommandError('test-usage')

    payload = load_fixture('event-issue-comment-with-empty-command.json')

    bot = CommentBot(name='ursabot', token='', handler=handler)
    bot.handle('issue_comment', payload)

    # Fourth recorded call is the POST with the rendered usage message.
    post = responses.calls[3]
    assert json.loads(post.request.body) == {'body': '```\ntest-usage\n```'}
@pytest.mark.parametrize(('command', 'reaction'), [
    ('@ursabot build', '+1'),
    ('@ursabot listen', '-1'),
])
def test_issue_comment_with_commands(responses, command, reaction):
    # A recognised command earns a +1 reaction, an unknown one a -1.
    # NOTE: registration order matters — responses.calls[3] below relies on it.
    responses.add(
        responses.GET,
        github_url('/repositories/169101701/issues/26'),
        json=load_fixture('issue-26.json'),
        status=200
    )
    responses.add(
        responses.GET,
        github_url('/repos/ursa-labs/ursabot/pulls/26'),
        json=load_fixture('pull-request-26.json'),
        status=200
    )
    responses.add(
        responses.GET,
        github_url('/repos/ursa-labs/ursabot/issues/comments/480248726'),
        json=load_fixture('issue-comment-480248726.json')
    )
    responses.add(
        responses.POST,
        github_url(
            '/repos/ursa-labs/ursabot/issues/comments/480248726/reactions'
        ),
        json={}
    )

    def handler(command, **kwargs):
        # Only `build` succeeds; anything else raises and yields a -1.
        if command == 'build':
            return True
        else:
            raise ValueError('Only `build` command is supported.')

    payload = load_fixture('event-issue-comment-build-command.json')
    # Overwrite the fixture's comment body with the parametrized command.
    payload["comment"]["body"] = command

    bot = CommentBot(name='ursabot', token='', handler=handler)
    bot.handle('issue_comment', payload)

    # Fourth recorded call is the POST adding the reaction.
    post = responses.calls[3]
    assert json.loads(post.request.body) == {'content': reaction}
# TODO(kszucs): properly mock it
# def test_crossbow_submit():
# from click.testing import CliRunner
# runner = CliRunner()
# result = runner.invoke(
# bot, ['crossbow', 'submit', '-g', 'wheel', '--dry-run']
# )
# assert result.exit_code == 0
| {
"content_hash": "2a12f7ef0d4347837873fc6290a9d752",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 76,
"avg_line_length": 27.275555555555556,
"alnum_prop": 0.6356525989897344,
"repo_name": "xhochy/arrow",
"id": "9b2d023bc112c7565bf5c8ce0f3d8eed4a40b5eb",
"size": "6923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dev/archery/archery/tests/test_bot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "73655"
},
{
"name": "Awk",
"bytes": "3709"
},
{
"name": "Batchfile",
"bytes": "24676"
},
{
"name": "C",
"bytes": "881567"
},
{
"name": "C#",
"bytes": "699719"
},
{
"name": "C++",
"bytes": "14541996"
},
{
"name": "CMake",
"bytes": "560347"
},
{
"name": "Dockerfile",
"bytes": "100165"
},
{
"name": "Emacs Lisp",
"bytes": "1916"
},
{
"name": "FreeMarker",
"bytes": "2244"
},
{
"name": "Go",
"bytes": "848212"
},
{
"name": "HTML",
"bytes": "6152"
},
{
"name": "Java",
"bytes": "4713332"
},
{
"name": "JavaScript",
"bytes": "102300"
},
{
"name": "Julia",
"bytes": "235105"
},
{
"name": "Lua",
"bytes": "8771"
},
{
"name": "M4",
"bytes": "11095"
},
{
"name": "MATLAB",
"bytes": "36600"
},
{
"name": "Makefile",
"bytes": "57687"
},
{
"name": "Meson",
"bytes": "48356"
},
{
"name": "Objective-C",
"bytes": "17680"
},
{
"name": "Objective-C++",
"bytes": "12128"
},
{
"name": "PLpgSQL",
"bytes": "56995"
},
{
"name": "Perl",
"bytes": "3799"
},
{
"name": "Python",
"bytes": "3135304"
},
{
"name": "R",
"bytes": "533584"
},
{
"name": "Ruby",
"bytes": "1084485"
},
{
"name": "Rust",
"bytes": "3969176"
},
{
"name": "Shell",
"bytes": "380070"
},
{
"name": "Thrift",
"bytes": "142033"
},
{
"name": "TypeScript",
"bytes": "1157087"
}
],
"symlink_target": ""
} |
import time
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from MasterSeer import MasterSeer
import math
import itertools
import csv
from sklearn.feature_selection import SelectPercentile, f_classif, SelectFromModel
from sklearn.linear_model import LinearRegression, Lasso, Ridge
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import MultinomialNB, BernoulliNB, GaussianNB
from sklearn.cross_validation import train_test_split, KFold
from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn import preprocessing
from sklearn.metrics import classification_report, f1_score, accuracy_score, precision_score, recall_score, precision_recall_fscore_support
class ModelSeer(MasterSeer):
def __init__(self, path=r'./data/', testMode=False, verbose=True, sample_size=5000, where="DATE_yr < 2008"):
    """Open the SEER database and store run-time options.

    Args:
        path: directory holding the SEER data files; a trailing '/' is
            appended when missing.
        testMode: import one file, 500 records and return.
        verbose: print status messages.
        sample_size: number of rows to pull for testing.
        where: SQL filter applied when loading data.

    Raises:
        TypeError: if path is not a string.
    """
    # user supplied parameters
    self.testMode = testMode
    self.verbose = verbose
    self.sample_size = sample_size
    self.where = where

    # isinstance (rather than `type(path) != str`) also accepts str
    # subclasses, the idiomatic Python type check.
    if not isinstance(path, str):
        raise TypeError('path must be a string')
    if path[-1] != '/':
        path += '/'  # if path does not end with a slash, add one
    self.path = path

    # open connection to the database
    super().__init__(path, False, verbose=verbose)
    self.db_conn, self.db_cur = super().init_database(False)
def __del__(self):
    # Delegate teardown to MasterSeer — presumably closes the database
    # connection opened in __init__; confirm in MasterSeer.__del__.
    super().__del__()
def prepare_test_train_sets(self, source, dependent, test_pct=.20, return_one_df=False, cols=None, dependent_cutoffs=(60,)):
    """ prepare_test_train_sets(source, dependent):
        params: source - table name in seer database, defaults to 'breast'
                dependent - name of field we are testing for, need to remove from X and assign to Y
                test_pct - percentage of sample to reserve for testing, defaults to .20
                return_one_df - return one big X, and y set for cross validation of entire sample
                cols - columns to pull from sql database (the caller's list is
                       no longer mutated; the dependent column is appended to
                       an internal copy)
                dependent_cutoffs - sequence of month cutoffs used to bucket
                       the dependent variable; default (60,) creates two
                       buckets (one <60 and one >=60)
        returns: X_train, X_test, y_train, y_test, cols
                 X_train and X_test are pd.DataFrames, y_train and y_test are np.arrays
                 cols is a list of column names
                 if return_one_df, return one X, y
    """
    # BUG FIX: the old signature used the mutable default `cols=[]` and then
    # mutated it with cols.append(dependent) — the shared default grew by one
    # 'SRV_TIME_MON' on every call, and lists passed by callers were mutated
    # too. Copy before appending instead.
    cols = [] if cols is None else list(cols)
    cols.append(dependent)

    # pull specified fields from database using random rows.
    df = super().load_data(source, cols, cond=self.where, sample_size=self.sample_size)
    df, dependent = self.clean_recode_data(df, dependent_cutoffs)

    # drop dependent column from feature arrays
    # (keyword form: the positional-axis drop() was removed in pandas 2.0)
    y = df[dependent].values
    df = df.drop(columns=dependent)

    if return_one_df:
        return df, y

    X_train, X_test, y_train, y_test = train_test_split(df, y, test_size=test_pct, random_state=0)
    return X_train, X_test, y_train, y_test, X_train.columns
def test_models(self,
                source = 'breast',
                styles = [MultinomialNB, BernoulliNB, LinearRegression, KNeighborsRegressor, Lasso, Ridge],
                num_features = 3,
                cols = ['YR_BRTH','AGE_DX','RACE','ORIGIN','LATERAL','RADIATN','HISTREC','ERSTATUS',
                        'PRSTATUS','BEHANAL','HST_STGA','NUMPRIMS'],
                dependent_cutoffs=[60]):
    """ test_models(source = 'BREAST'):
        params: source - table name in seer database, defaults to 'breast'
                styles - list of classes to use to test the data i.e. [LinearRegression, LogisticRegression]
                         make sure to import modules containing the routines to test
                num_features - number of features to test at one time, set to 99 to test all features in one run.
                cols - candidate feature columns to pull from the database
                dependent_cutoffs - month cutoffs used to bucket survival time
        returns: n/a

        test various models against every combination of num_features
        features and save the f1 scores (best first) to an excel file named:
        source+'_seer_models.xlsx'
    """
    # Copy the (shared, mutable) default list so downstream code cannot
    # grow it across repeated calls.
    cols = list(cols)

    # name of excel file to dump results
    xls_name = source + '_seer_models.xlsx'
    # variable to predict
    dependent = 'SRV_TIME_MON'

    # get sets to train and test (80/20 split)
    X_train, X_test, y_train, y_test, cols = self.prepare_test_train_sets(source, dependent, test_pct = .20, cols = cols)
    col_cnt = len(cols)
    # make sure features to test is not greater than number of columns
    num_features = min(num_features, col_cnt)

    # formula for number of combinations: nCr = n! / r! (n - r)!
    # (integer division keeps the count exact for large factorials)
    tot_cnt = (math.factorial(col_cnt) // (math.factorial(num_features) * math.factorial(col_cnt - num_features))) * len(styles)
    print("Processing: {0} tests.".format(int(tot_cnt)))

    res = []
    counter = 0
    for style_fnc in styles:
        model = style_fnc()
        print("Testing: {0} ".format(style_fnc.__name__))
        for combo in itertools.combinations(cols, num_features):
            try:
                # train this model on the chosen feature subset
                model.fit(X_train[list(combo)], y_train)
                # now test and score it; round regression output to the
                # nearest class label before scoring
                y_pred_test = np.rint(model.predict(X_test[list(combo)]))
                # BUG FIX: np.int was removed in NumPy 1.24; use the builtin.
                y_pred_test = y_pred_test.astype(int)
                f1 = f1_score(y_test, y_pred_test)
                res.append([f1, style_fnc.__name__, list(combo)])
                counter += 1
                if counter % 100 == 0:
                    # BUG FIX: flush=True belongs to print(); it was being
                    # passed (and silently ignored) as a str.format() kwarg.
                    print("Completed: {0}".format(counter), end='\r', flush=True)
            except Exception as err:
                # some models reject certain inputs; count and move on
                counter += 1
                if self.verbose:
                    print(err)
        del model

    # store trial results to excel, best f1 scores first
    res = sorted(res, reverse=True)
    res_df = pd.DataFrame(res)
    # BUG FIX: ExcelWriter.save() was removed in pandas 2.0; the context
    # manager saves the workbook on exit.
    with pd.ExcelWriter(xls_name) as exc:
        res_df.to_excel(exc)

    print("\nAll Completed: {0} Results stored in: {1}".format(counter, xls_name))
def clean_recode_data(self, df, dependent_cutoffs):
    """ clean_recode_data(df)
        params: df - dataframe of seer data to clean
                dependent_cutoffs - ascending sequence of month cutoffs used
                                    to bucket SRV_TIME_MON
        returns: cleaned dataframe, and name of new coded dependent variable
                 ('SRV_BUCKET')

        Each recode step is wrapped in its own try/except so that steps
        referring to columns absent from this extract are skipped silently;
        pick and choose steps once the study variables are finalized.
    """
    # drop all rows that have invalid or missing data
    try:
        df = df.dropna(subset=['YR_BRTH'])   # add column names here as needed
    except Exception:
        pass

    try:
        # LATERAL collapsed to: 1 = one site, 2 = paired site
        df.LATERAL = df.LATERAL.replace([0, 1, 2, 3], 1)
        df.LATERAL = df.LATERAL.replace([4, 5, 9], 2)
    except Exception:
        pass

    try:
        # keep only O_DTH_CLASS == 0 rows
        # NOTE(review): presumably "not dead of other causes" — confirm
        # against the SEER data dictionary.
        df = df[df.O_DTH_CLASS == 0]
    except Exception:
        pass

    try:
        # BEHANAL: 0-benign, 1-borderline, 2-in situ, 3-malignant
        df = df[df.BEHANAL != 5]
        df.BEHANAL = df.BEHANAL.replace([3, 4, 6], 3)
    except Exception:
        pass

    try:
        # HST_STGA: drop unknown/unstaged codes
        df = df[df.HST_STGA != 8]
        df = df[df.HST_STGA != 9]
    except Exception:
        pass

    try:
        # ERSTATUS recoded to: 0-negative, 1-borderline, 2-positive
        df = df[df.ERSTATUS != 4]
        df = df[df.ERSTATUS != 9]
        df.ERSTATUS = df.ERSTATUS.replace(2, 0)
        df.ERSTATUS = df.ERSTATUS.replace(1, 2)
        df.ERSTATUS = df.ERSTATUS.replace(3, 1)
    except Exception:
        pass

    try:
        # PRSTATUS recoded to: 0-negative, 1-borderline, 2-positive
        df = df[df.PRSTATUS != 4]
        df = df[df.PRSTATUS != 9]
        df.PRSTATUS = df.PRSTATUS.replace(2, 0)
        df.PRSTATUS = df.PRSTATUS.replace(1, 2)
        df.PRSTATUS = df.PRSTATUS.replace(3, 1)
    except Exception:
        pass

    try:
        # RADIATN: fold code 7 into 0, drop remaining codes >= 7
        df.RADIATN = df.RADIATN.replace(7, 0)
        df = df[df.RADIATN < 7]
    except Exception:
        pass

    try:
        # NUMPRIMS: code as 1, or 2 = more than one primary
        df.NUMPRIMS = df.NUMPRIMS.replace(list(range(2, 37)), 2)
    except Exception:
        pass

    # Create the dependent column SRV_BUCKET from dependent_cutoffs.
    # Example dependent_cutoffs=[60, 120]:
    #   SRV_TIME_MON <  60        -> bucket 0
    #   60 <= SRV_TIME_MON < 120  -> bucket 1
    #   SRV_TIME_MON >= 120       -> bucket 2
    df['SRV_BUCKET'] = np.nan          # np.NaN alias was removed in NumPy 2.0
    last_cut = 0
    for x, cut in enumerate(dependent_cutoffs):
        df.loc[(df.SRV_TIME_MON >= last_cut) & (df.SRV_TIME_MON < cut), 'SRV_BUCKET'] = x
        last_cut = cut
    # assign all values larger than the last cutoff to the final bucket
    # (assignment form avoids the deprecated chained inplace fillna)
    df['SRV_BUCKET'] = df['SRV_BUCKET'].fillna(len(dependent_cutoffs))

    # BUG FIX: DataFrame.replace is not in-place; the original discarded the
    # result, so +/-inf values survived into the model inputs. Assign it
    # back, then zero-fill remaining NaNs. (The unconditional debug Excel
    # dump to clean1.xlsx was removed.)
    df = df.replace([np.inf, -np.inf], np.nan)
    df = df.fillna(0)

    return df, 'SRV_BUCKET'
def one_hot_data(self, data, cols):
    """One-hot encode the requested columns of a dataframe.

    Columns listed in `cols` that are not present in `data` are silently
    skipped.  Returns a new dataframe in which each encoded column has been
    replaced by dummy/indicator columns (prefixed with the column name).

    See the following for explanation:
    http://stackoverflow.com/questions/17469835/one-hot-encoding-for-machine-learning
    """
    # Only encode the requested columns that actually exist in the data.
    present = []
    for column_name in cols:
        if column_name in data:
            present.append(column_name)
    return pd.get_dummies(data, columns=present, prefix=present)
def cross_val_model(self, model, features, source='breast', sample_size=5000, num_folds=5, dependent_cutoffs=None):
    """Cross-validate `model` on SEER data and plot the last fold's predictions.

    Params:
        model             - scikit-learn model class (not an instance)
        features          - list of feature (column) names to use
        source            - table name in the seer database, defaults to 'breast'
        sample_size       - kept for interface compatibility; not referenced in
                            this method (NOTE(review): possibly consumed by
                            prepare_test_train_sets elsewhere — confirm)
        num_folds         - number of folds for cross validation
        dependent_cutoffs - list of month cutoffs used to bucket the dependent
                            variable; default [60] creates two buckets
                            (one < 60 and one >= 60)

    Prints a classification report per fold and shows a scatter plot of the
    final fold's predictions against the actual bucket values.
    """
    # Avoid a mutable default argument; [60] is the documented default.
    if dependent_cutoffs is None:
        dependent_cutoffs = [60]
    mdl = model()
    model_name = model.__name__
    # variable to predict
    dependent = 'SRV_TIME_MON'
    # get all of the data; we split to test/train using scikit's KFold routine
    X, y = self.prepare_test_train_sets(source, dependent, return_one_df=True, cols=features, dependent_cutoffs=dependent_cutoffs)
    X = np.array(X, dtype=np.float16)
    # use the builtin int: np.int was deprecated and removed in newer numpy
    y = y.astype(int)
    kf = KFold(len(X), n_folds=num_folds, shuffle=True)
    # per-fold precision / recall / f1 scores (one entry per fold)
    scores = {'precision': [], 'recall': [], 'f1': []}
    for training, testing in kf:
        # Fit a model for this fold, then apply it to the held-out fold.
        Xtrn = X[training]
        mdl.fit(Xtrn, y[training])
        Xtst = X[testing]
        y_pred_test = mdl.predict(Xtst)
        # round regression-style outputs to the nearest integer bucket
        y_pred_test = np.rint(y_pred_test)
        y_pred_test = y_pred_test.astype(int)
        # last fold's data is reused for plotting after the loop
        y_test = y[testing]
        classificationReport = classification_report(y_test, y_pred_test)
        print("Report For: {0}".format(model_name))
        print(classificationReport)
        # Append per-class scores for this fold (removed an unused
        # precision_score() call whose result was never read).
        p, r, f, _ = precision_recall_fscore_support(y_test, y_pred_test)
        scores['precision'].append(p)
        scores['recall'].append(r)
        scores['f1'].append(f)
    # sort the y test data and keep the y_pred_test array in sync
    # so the graph is more informative
    y_test, y_pred_test = zip(*sorted(zip(y_test, y_pred_test)))
    # plot last fold's results
    plt.plot([x for x in range(len(y_test))], y_pred_test, 'x', label="prediction")
    plt.plot([x for x in range(len(y_test))], y_test, 'o', label="data")
    plt.legend(loc='best')
    plt.title(model_name)
    # crop outliers so graph is more meaningful
    plt.ylim(0, 6)
    plt.show()
def show_hist(self, df, cols):
    """Display a histogram for each of the named dataframe columns."""
    # One figure per column; plt.show() blocks until each window is closed.
    for column_name in cols:
        df.hist(column_name)
        plt.show()
if __name__ == '__main__':
    # Time the whole run for the elapsed-time report printed at the end.
    t0 = time.perf_counter()
    # NOTE(review): the where clause appears to restrict to pre-2008 diagnoses
    # with O_DTH_CLASS = 0 — confirm the field semantics against the SEER docs.
    seer = ModelSeer(sample_size=20000, where="DATE_yr < 2008 AND O_DTH_CLASS = 0")
    ################
    # these three lines are used to display a histogram for the selected columns
    #_, df = seer.describe_data()
    #df = df[df.SRV_TIME_MON <= 360]
    #seer.show_hist(df, ['SRV_TIME_MON'])
    ################
    # this line will run the selected models
    #seer.test_models(styles = [RandomForestClassifier, KNeighborsRegressor, Lasso, Ridge],
    #          cols = ['YR_BRTH','AGE_DX','RACE','ORIGIN','LATERAL','RADIATN','HISTREC','ERSTATUS','PRSTATUS','BEHANAL','HST_STGA','NUMPRIMS'], num_features=3, dependent_cutoffs=[60, 120])
    ################
    # used to cross-validate and plot a specific model and feature set.
    seer.cross_val_model(LogisticRegression, ['YR_BRTH','AGE_DX','RACE','ORIGIN','LATERAL',
        'RADIATN','HISTREC','ERSTATUS','PRSTATUS','BEHANAL','HST_STGA','NUMPRIMS'], dependent_cutoffs=[60, 120])
    ################
    del seer
    print('\nModelSeer Module Elapsed Time: {0:.2f}'.format(time.perf_counter() - t0))
| {
"content_hash": "bcd423615a1435761cfe8f0ee5a7495a",
"timestamp": "",
"source": "github",
"line_count": 423,
"max_line_length": 195,
"avg_line_length": 39.657210401891255,
"alnum_prop": 0.5614307004470939,
"repo_name": "georgetown-analytics/envirohealth",
"id": "2e5aebfa886d11e1381eb06ee2a4960a237f7a58",
"size": "16777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CapstoneSEER/ModelSeer1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "12585"
},
{
"name": "Jupyter Notebook",
"bytes": "851030"
},
{
"name": "Python",
"bytes": "96933"
}
],
"symlink_target": ""
} |
import pandas as pd
from sklearn import datasets
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Column labels applied to the raw iris CSV (four measurements + class label).
names = ['min', 'max', 'mean', 'sd', 'class']
data = pd.read_csv('iris.data', names=names)
array = data.values
i = 0  # counter used only by the disabled label-encoding loop below
# Disabled: manual label encoding of the class column.
# for x in data['class']:
#     if x == 'Iris-setosa':
#         data['class'][i] = 0
#     elif x == 'Iris-versicolor':
#         data['class'][i] = 1
#     else:
#         data['class'][i] = 2
#     i += 1
print(array)
# First three columns as inputs, fourth column as the value to plot against.
x = array[:, 0:3]
y = array[:, 3]
plt.plot(x, y)
# Disabled: train/test split and linear-regression scoring.
# x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=7)
# model = LinearRegression()
# model.fit(x_train, y_train)
# result = model.score(x_test, y_test)
# print("Accuracy: ", result.mean()*100.0, result.std()*100)
| {
"content_hash": "295c59103e06bb7bb5041abde3199bbe",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 89,
"avg_line_length": 23.11764705882353,
"alnum_prop": 0.6539440203562341,
"repo_name": "Swaraj1998/MyCode",
"id": "e360673b7571f3189c51fcd8d685fdc9eae857bc",
"size": "786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ML-Workshop/iris_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "303"
},
{
"name": "C",
"bytes": "26252"
},
{
"name": "C++",
"bytes": "44759"
},
{
"name": "HTML",
"bytes": "6910"
},
{
"name": "Java",
"bytes": "25674"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "71899"
},
{
"name": "Shell",
"bytes": "983"
}
],
"symlink_target": ""
} |
import sys
import argparse
import os
import os.path
import tarfile
import shutil
from pyskeleton import __version__
from pkg_resources import resource_filename
class Parser(argparse.ArgumentParser):
    """Thin wrapper around argparse.ArgumentParser.

    Currently forwards everything to the base class unchanged; exists as a
    hook for project-specific parser behaviour.
    """

    def __init__(self, **kwargs):
        # Python 3 zero-argument super() — equivalent to super(Parser, self).
        super().__init__(**kwargs)
# Help text shown by argparse for this command-line tool.
DESCRIPTION = """
will create a python module skeleton
"""
def main():
    """Create a new python project skeleton.

    Creates a directory named after the project, unpacks the bundled
    skeleton tarball into it, and seeds src/module_name with a template
    __init__.py.  Exits with status 1 if the target directory already exists.
    """
    parser = Parser(description=DESCRIPTION)
    # fixed typo in user-facing help text ('proejct' -> 'project')
    parser.add_argument('name', help='the target project name')
    parser.add_argument('-v', '--version',
                        action='version', version=__version__)
    args = parser.parse_args()

    project_name = args.name
    try:
        os.mkdir(project_name)
        os.chdir(project_name)
    except FileExistsError:
        # Refuse to clobber an existing directory; say which one.
        print('file exists error: {0} already exists'.format(project_name))
        sys.exit(1)

    # Unpack the bundled skeleton files into the new project directory.
    # NOTE(review): extractall() on a bundled (trusted) archive; if the
    # archive source ever becomes untrusted, pass a filter (CVE-2007-4559).
    with tarfile.open(
            resource_filename("pyskeleton", "pyskeleton.tar.gz")) as tar:
        tar.extractall()

    # Seed the source package with a template __init__.py.
    os.makedirs('src/module_name')
    pyfile = resource_filename("pyskeleton", "__init__.py")
    shutil.copy(pyfile, 'src/module_name')

    print('great, create {0} succeed'.format(project_name))
| {
"content_hash": "dcc0cb991adb2c29df57b4e36c5d1d7e",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 73,
"avg_line_length": 23.916666666666668,
"alnum_prop": 0.64198606271777,
"repo_name": "a358003542/skeleton",
"id": "6feede6f3723db9bdae129bdb0ed758b545e24e7",
"size": "1192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pyskeleton/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "661"
},
{
"name": "Python",
"bytes": "2280"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.