text stringlengths 4 1.02M | meta dict |
|---|---|
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import annotation
from ddb_ngsflow import pipeline
from ddb_ngsflow.align import bwa
from ddb_ngsflow.variation import variation
from ddb_ngsflow.variation import haplotypecaller
if __name__ == "__main__":
    # Command-line interface: sample sheet and runtime configuration files,
    # plus the standard Toil workflow options.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
    arg_parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
    Job.Runner.addToilOptions(arg_parser)
    options = arg_parser.parse_args()
    options.logLevel = "INFO"

    sys.stdout.write("Parsing configuration data\n")
    run_config = configuration.configure_runtime(options.configuration)

    sys.stdout.write("Parsing sample data\n")
    library_data = configuration.configure_samples(options.samples_file, run_config)
    sample_data = configuration.merge_library_configs_samples(library_data)

    # Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
    root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)

    project = run_config['project']
    snpeff_job = Job.wrapJobFn(annotation.snpeff, run_config, project,
                               "{}.filtered.vcf".format(project),
                               cores=int(run_config['snpeff']['num_cores']),
                               memory="{}G".format(run_config['snpeff']['max_mem']))

    # root_job.addFollowOn(joint_call_job)
    root_job.addFollowOn(snpeff_job)

    # Start workflow execution
    Job.Runner.startToil(root_job, options)
| {
"content_hash": "260ede2eb3bd29b5ac5de995e006a04d",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 117,
"avg_line_length": 37.72727272727273,
"alnum_prop": 0.6993975903614458,
"repo_name": "dgaston/ddb-scripts",
"id": "b59ab3aa366d3403652f58f0d23071208b7ea7a4",
"size": "1703",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "defunct/workflow-var_annotate_exome_multi_lib.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52529"
}
],
"symlink_target": ""
} |
"""Sensors for National Weather Service (NWS)."""
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
LENGTH_METERS,
LENGTH_MILES,
PERCENTAGE,
PRESSURE_INHG,
PRESSURE_PA,
SPEED_KILOMETERS_PER_HOUR,
SPEED_MILES_PER_HOUR,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.distance import convert as convert_distance
from homeassistant.util.dt import utcnow
from homeassistant.util.pressure import convert as convert_pressure
from homeassistant.util.speed import convert as convert_speed
from . import base_unique_id, device_info
from .const import (
ATTRIBUTION,
CONF_STATION,
COORDINATOR_OBSERVATION,
DOMAIN,
NWS_DATA,
OBSERVATION_VALID_TIME,
SENSOR_TYPES,
NWSSensorEntityDescription,
)
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the NWS weather platform."""
    hass_data = hass.data[DOMAIN][entry.entry_id]
    station = entry.data[CONF_STATION]
    # One sensor entity per described observation type.
    sensors = [
        NWSSensor(
            hass=hass,
            entry_data=entry.data,
            hass_data=hass_data,
            description=sensor_description,
            station=station,
        )
        for sensor_description in SENSOR_TYPES
    ]
    async_add_entities(sensors)
class NWSSensor(CoordinatorEntity, SensorEntity):
    """An NWS Sensor Entity."""

    entity_description: NWSSensorEntityDescription
    _attr_extra_state_attributes = {ATTR_ATTRIBUTION: ATTRIBUTION}

    def __init__(
        self,
        hass: HomeAssistant,
        entry_data,
        hass_data,
        description: NWSSensorEntityDescription,
        station,
    ):
        """Initialise the platform with a data instance."""
        super().__init__(hass_data[COORDINATOR_OBSERVATION])
        self._nws = hass_data[NWS_DATA]
        self._latitude = entry_data[CONF_LATITUDE]
        self._longitude = entry_data[CONF_LONGITUDE]
        self.entity_description = description
        self._attr_name = f"{station} {description.name}"
        # Non-metric installations report in the converted (imperial) unit.
        if not hass.config.units.is_metric:
            self._attr_native_unit_of_measurement = description.unit_convert

    @property
    def native_value(self):
        """Return the state."""
        raw = self._nws.observation.get(self.entity_description.key)
        if raw is None:
            return None
        # Convert from the API's metric units when an imperial unit is active.
        unit = self.native_unit_of_measurement
        if unit == SPEED_MILES_PER_HOUR:
            mph = convert_speed(raw, SPEED_KILOMETERS_PER_HOUR, SPEED_MILES_PER_HOUR)
            return round(mph)
        if unit == LENGTH_MILES:
            miles = convert_distance(raw, LENGTH_METERS, LENGTH_MILES)
            return round(miles)
        if unit == PRESSURE_INHG:
            inhg = convert_pressure(raw, PRESSURE_PA, PRESSURE_INHG)
            return round(inhg, 2)
        if unit == TEMP_CELSIUS:
            return round(raw, 1)
        if unit == PERCENTAGE:
            return round(raw)
        return raw

    @property
    def unique_id(self):
        """Return a unique_id for this entity."""
        return f"{base_unique_id(self._latitude, self._longitude)}_{self.entity_description.key}"

    @property
    def available(self):
        """Return if state is available."""
        if self.coordinator.last_update_success:
            return True
        # Fall back to treating a recent-enough prior success as available.
        last_time = self.coordinator.last_update_success_time
        if not last_time:
            return False
        return utcnow() - last_time < OBSERVATION_VALID_TIME

    @property
    def entity_registry_enabled_default(self) -> bool:
        """Return if the entity should be enabled when first added to the entity registry."""
        return False

    @property
    def device_info(self) -> DeviceInfo:
        """Return device info."""
        return device_info(self._latitude, self._longitude)
| {
"content_hash": "facc507b000959b0b580a687055bcfe0",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 97,
"avg_line_length": 33.1953125,
"alnum_prop": 0.6528594963520828,
"repo_name": "jawilson/home-assistant",
"id": "35bbcef838ddbe5cc09dc2a4368e758e438aa0b6",
"size": "4249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homeassistant/components/nws/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
"""Utilities for sorting jpgs using a Tensorflow image classifier.
This package provides libraries and command line tools for consuming
directories of images and sorting them into directories corresponding
to their classification given a tensorflow image classification model."""
| {
"content_hash": "5352ea30469c7e629975d7f1d0b448c9",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 73,
"avg_line_length": 56.2,
"alnum_prop": 0.8362989323843416,
"repo_name": "lukejduncan/tfimgtools",
"id": "8aad5c4f13a8d7dbdf281428dc50d64f9b5a7a02",
"size": "281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfimgsort/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "210"
},
{
"name": "Python",
"bytes": "17914"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
# Flag to indicate if this migration is too risky
# to run online and needs to be coordinated for offline
is_dangerous = False
    def forwards(self, orm):
        """Apply the migration: move `config` and `default_auth_id` from
        Integration onto OrganizationIntegration, add Integration.metadata,
        and switch ProjectIntegration.config to an encrypted JSON field.
        """
        # Adding field 'OrganizationIntegration.config'
        db.add_column('sentry_organizationintegration', 'config',
                      self.gf('sentry.db.models.fields.encrypted.EncryptedJsonField')(default={}),
                      keep_default=False)

        # Adding field 'OrganizationIntegration.default_auth_id'
        db.add_column('sentry_organizationintegration', 'default_auth_id',
                      self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(
                          null=True, db_index=True),
                      keep_default=False)

        # Deleting field 'Integration.default_auth_id'
        db.delete_column(u'sentry_integration', 'default_auth_id')

        # Deleting field 'Integration.config'
        db.delete_column(u'sentry_integration', 'config')

        # Adding field 'Integration.metadata'
        db.add_column('sentry_integration', 'metadata',
                      self.gf('sentry.db.models.fields.encrypted.EncryptedJsonField')(default={}),
                      keep_default=False)

        # Changing field 'ProjectIntegration.config'
        db.alter_column('sentry_projectintegration', 'config', self.gf(
            'sentry.db.models.fields.encrypted.EncryptedJsonField')())
    def backwards(self, orm):
        """Revert the schema changes applied by forwards()."""
        # Deleting field 'OrganizationIntegration.config'
        db.delete_column('sentry_organizationintegration', 'config')

        # Deleting field 'OrganizationIntegration.default_auth_id'
        db.delete_column('sentry_organizationintegration', 'default_auth_id')

        # Adding field 'Integration.default_auth_id'
        db.add_column(u'sentry_integration', 'default_auth_id',
                      self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(
                          null=True, db_index=True),
                      keep_default=False)

        # Adding field 'Integration.config'
        # NOTE: restored as a plain JSONField (the pre-migration type), not
        # the EncryptedJsonField this migration introduces going forward.
        db.add_column(u'sentry_integration', 'config',
                      self.gf('jsonfield.fields.JSONField')(default={}),
                      keep_default=False)

        # Deleting field 'Integration.metadata'
        db.delete_column('sentry_integration', 'metadata')

        # Changing field 'ProjectIntegration.config'
        db.alter_column('sentry_projectintegration', 'config',
                        self.gf('jsonfield.fields.JSONField')())
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apiapplication': {
'Meta': {'object_name': 'ApiApplication'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "'83b0161cf56443df93ce0327caf32b940f6d970ad53f4fea82450af500288d3c'", 'unique': 'True', 'max_length': '64'}),
'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'0d309c5d47804767a2645f62495fabdcdc298ced6ae5492f96c4e7d51a2e917f'"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Open Teal'", 'max_length': '64', 'blank': 'True'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.apiauthorization': {
'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apigrant': {
'Meta': {'object_name': 'ApiGrant'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
'code': ('django.db.models.fields.CharField', [], {'default': "'c5afedc036844c7682a185ba9365a5ad'", 'max_length': '64', 'db_index': 'True'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 9, 20, 0, 0)', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 10, 20, 0, 0)', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'cae6be73f1974c2399281a059da18ee649a605a736274e4ca5b2d8c56f63376a'", 'max_length': '64', 'unique': 'True', 'null': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'e31c8d80e5c34724bf8ac6926002f819bde970faaf27426b84c16f4a17034387'", 'unique': 'True', 'max_length': '64'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authenticator': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 9, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.commit': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.commitfilechange': {
'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sentry.counter': {
'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {'object_name': 'Deploy'},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.distribution': {
'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.dsymapp': {
'Meta': {'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp'},
'app_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'sync_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
},
'sentry.email': {
'Meta': {'object_name': 'Email'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.environment': {
'Meta': {'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))", 'object_name': 'Environment'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
},
'sentry.environmentproject': {
'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
},
'sentry.eventtag': {
'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.featureadoption': {
'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption'},
'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcommitresolution': {
'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {'object_name': 'GroupRedirect'},
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
},
'sentry.grouprelease': {
'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'state': ('jsonfield.fields.JSONField', [], {'null': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.groupsubscription': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey'},
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptombstone': {
'Meta': {'object_name': 'GroupTombstone'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.integration': {
'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration'},
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationavatar': {
'Meta': {'object_name': 'OrganizationAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
},
'sentry.organizationintegration': {
'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationonboardingtask': {
'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectbookmark': {
'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.projectdsymfile': {
'Meta': {'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'sentry.projectintegration': {
'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.release': {
'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasecommit': {
'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseenvironment': {
'Meta': {'unique_together': "(('organization_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseheadcommit': {
'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.repository': {
'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.reprocessingreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.scheduleddeletion': {
'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion'},
'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 10, 20, 0, 0)'}),
'guid': ('django.db.models.fields.CharField', [], {'default': "'02a089f8773f422787a58a636dbf1721'", 'unique': 'True', 'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.scheduledjob': {
'Meta': {'object_name': 'ScheduledJob'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'payload': ('jsonfield.fields.JSONField', [], {'default': '{}'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useravatar': {
'Meta': {'object_name': 'UserAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
},
'sentry.useremail': {
'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'zswb3cbp1NyLwrSqnkA6RZtBJK1wJjxC'", 'max_length': '32'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.versiondsymfile': {
'Meta': {'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile'},
'build': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dsym_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymApp']"}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '32'})
}
}
complete_apps = ['sentry']
| {
"content_hash": "caa39318211b5fca3f39d13a67a65192",
"timestamp": "",
"source": "github",
"line_count": 949,
"max_line_length": 233,
"avg_line_length": 89.65753424657534,
"alnum_prop": 0.5803843215607921,
"repo_name": "ifduyue/sentry",
"id": "5acb2ac74d7230fcc386818222f77077bc4c1985",
"size": "85109",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/sentry/south_migrations/0355_auto__add_field_organizationintegration_config__add_field_organization.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "301292"
},
{
"name": "HTML",
"bytes": "241298"
},
{
"name": "JavaScript",
"bytes": "3295572"
},
{
"name": "Lua",
"bytes": "65795"
},
{
"name": "Makefile",
"bytes": "6892"
},
{
"name": "Python",
"bytes": "36910084"
},
{
"name": "Ruby",
"bytes": "217"
},
{
"name": "Shell",
"bytes": "5701"
}
],
"symlink_target": ""
} |
' 数据库操作 '
__author__ = 'Ellery'
from app import db, models
from datetime import datetime
def db_insert(model):
    """Stage *model* for insertion; persisted on the next ``db_commit``."""
    session = db.session
    session.add(model)
def db_delete(model):
    """Stage *model* for deletion; persisted on the next ``db_commit``."""
    session = db.session
    session.delete(model)
def db_commit():
    """Commit all pending session changes (adds/deletes) to the database."""
    db.session.commit() | {
"content_hash": "9838b44e942f3f50b61c5a9383244748",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 29,
"avg_line_length": 15.533333333333333,
"alnum_prop": 0.6824034334763949,
"repo_name": "allotory/basilinna",
"id": "47bf9e76174d3b4ecdfe096f923294f8682ae62b",
"size": "268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/main/db_service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "623771"
},
{
"name": "Groff",
"bytes": "574014"
},
{
"name": "HTML",
"bytes": "270816"
},
{
"name": "JavaScript",
"bytes": "14532"
},
{
"name": "Python",
"bytes": "85241"
}
],
"symlink_target": ""
} |
import numpy as np
from numpy.testing import assert_allclose, assert_raises
from skimage.feature.register_translation import (register_translation,
_upsampled_dft)
from skimage.data import camera
from scipy.ndimage.fourier import fourier_shift
def test_correlation():
    """Whole-pixel translations are recovered exactly from Fourier input."""
    ref_freq = np.fft.fftn(camera())
    shift = (-7, 12)
    moved_freq = fourier_shift(ref_freq, shift)

    # Pixel-precision registration (default upsample factor of 1).
    shifts, error, diffphase = register_translation(ref_freq, moved_freq,
                                                    space="fourier")
    assert_allclose(shifts[:2], -np.array(shift))
def test_subpixel_precision():
    """Sub-pixel shifts are recovered via upsampled cross-correlation."""
    ref_freq = np.fft.fftn(camera())
    shift = (-2.4, 1.32)
    moved_freq = fourier_shift(ref_freq, shift)

    # 100x upsampling resolves the shift to roughly 1/100 of a pixel.
    shifts, error, diffphase = register_translation(ref_freq, moved_freq, 100,
                                                    space="fourier")
    assert_allclose(shifts[:2], -np.array(shift), atol=0.05)
def test_real_input():
    """Real-space (non-Fourier) images are accepted and transformed internally."""
    reference = camera()
    shift = (-2.4, 1.32)
    # Build the shifted image in Fourier space, then return to real space.
    moved = np.fft.ifftn(fourier_shift(np.fft.fftn(reference), shift))

    shifts, error, diffphase = register_translation(reference, moved, 100)
    assert_allclose(shifts[:2], -np.array(shift), atol=0.05)
def test_size_one_dimension_input():
    """A singleton dimension does not break sub-pixel registration."""
    # A single image column gives one axis of length 1.
    ref_freq = np.fft.fftn(camera()[:, 15]).reshape((-1, 1))
    shift = (-2.4, 4)
    moved_freq = fourier_shift(ref_freq, shift)

    shifts, error, diffphase = register_translation(ref_freq, moved_freq, 100,
                                                    space="fourier")
    # No displacement can be detected along the singleton axis.
    assert_allclose(shifts[:2], -np.array((-2.4, 0)), atol=0.05)
def test_3d_input():
    """Placeholder for a 3-D registration test pending a volumetric data set."""
    # TODO: this test case is waiting on a Phantom data set to be added to the
    # data module.
    # pixel precision
    # result, error, diffphase = register_translation(ref_image, shifted_image)
    # assert_allclose(np.array(result[:2]), np.array(shift))
    pass
def test_unknown_space_input():
    """An unrecognised ``space`` keyword raises ValueError."""
    img = np.ones((5, 5))
    assert_raises(ValueError, register_translation, img, img, space="frank")
def test_wrong_input():
    """Mismatched or unsupported input shapes raise the appropriate errors."""
    # Dimensionality mismatch between the two images.
    assert_raises(ValueError, register_translation,
                  np.ones((5, 5)), np.ones((5, 5, 1)))

    # Sub-pixel precision is not implemented beyond 2 dimensions.
    # (TODO: should support 3D at some point.)
    assert_raises(NotImplementedError, register_translation,
                  np.ones((5, 5, 5)), np.ones((5, 5, 5)), 2)

    # Images of different sizes.
    assert_raises(ValueError, register_translation,
                  np.ones((4, 4)), np.ones((5, 5)))
def test_mismatch_upsampled_region_size():
    """A region-size sequence longer than the data rank is rejected."""
    data = np.ones((4, 4))
    assert_raises(ValueError, _upsampled_dft, data,
                  upsampled_region_size=[3, 2, 1, 4])
def test_mismatch_offsets_size():
    """An axis-offsets sequence longer than the data rank is rejected."""
    data = np.ones((4, 4))
    assert_raises(ValueError, _upsampled_dft, data, 3,
                  axis_offsets=[3, 2, 1, 4])
if __name__ == "__main__":
from numpy import testing
testing.run_module_suite()
| {
"content_hash": "53fb966e2f09d9fbab1072d1a336327c",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 79,
"avg_line_length": 34.716981132075475,
"alnum_prop": 0.5970108695652174,
"repo_name": "bennlich/scikit-image",
"id": "6494630e84eb0bda73a2bde7bfb75e4f22399d38",
"size": "3680",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "skimage/feature/tests/test_register_translation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "76670"
},
{
"name": "Makefile",
"bytes": "449"
},
{
"name": "Python",
"bytes": "2182144"
}
],
"symlink_target": ""
} |
"""
Copyright 2013 Load Impact
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
import json
import sys
import unittest
from loadimpact.clients import Client
from loadimpact.fields import IntegerField
from loadimpact.resources import (
DataStore, LoadZone, Resource, Test, TestConfig, TestResult,
_TestResultStream, UserScenario, UserScenarioValidation,
_UserScenarioValidationResultStream)
class MockRequestsResponse(object):
    """Minimal stand-in for ``requests.Response`` used by the client tests.

    Every extra keyword argument is exposed both as an attribute and as the
    decoded JSON body returned by :meth:`json`.
    """

    def __init__(self, status_code=200, **kwargs):
        self.url = 'http://example.com/'
        self.status_code = status_code
        self.text = ''
        self.kwargs = kwargs
        # Mirror arbitrary response fields as attributes for easy asserts.
        for key in kwargs:
            setattr(self, key, kwargs[key])

    def json(self):
        """Return the extra keyword arguments as the decoded response body."""
        return self.kwargs
class MockClient(Client):
    """Client that records the last request and serves a canned response.

    ``response_body`` (dict or JSON string) in *kwargs* becomes the body of
    every mocked response; ``response_status_code`` sets its HTTP status.
    """

    def __init__(self, response_status_code=200, **kwargs):
        super(MockClient, self).__init__()
        self.response_status_code = response_status_code
        self.kwargs = kwargs
        self.last_request_method = None
        self.last_request_args = None
        self.last_request_kwargs = None

    def _requests_request(self, method, *args, **kwargs):
        # Record the call so tests can assert on what was sent.
        self.last_request_method = method
        self.last_request_args = args
        self.last_request_kwargs = kwargs
        payload = kwargs.get('data')
        if isinstance(payload, str):
            # Decode JSON payloads for convenient dict comparisons in tests.
            self.last_request_kwargs['data'] = json.loads(payload)
        response_kwargs = {}
        body = self.kwargs.get('response_body')
        if body:
            if isinstance(body, dict):
                response_kwargs = body
            elif isinstance(body, str):
                response_kwargs = json.loads(body)
        return MockRequestsResponse(status_code=self.response_status_code,
                                    **response_kwargs)
class MockResource(Resource):
    """Resource subclass whose single field type is chosen at construction."""

    fields = {}
    resource_name = 'resource'

    def __init__(self, client, field_cls, field_value=None):
        # Register the field class before delegating so the base initializer
        # can resolve the 'field' keyword against it.
        type(self).fields['field'] = field_cls
        super(MockResource, self).__init__(client, field=field_value)
class TestResourcesResource(unittest.TestCase):
    """Tests for the generic ``Resource`` base class behaviour."""

    def test___getattr__(self):
        resource = MockResource(None, IntegerField, 0)

        def access_unknown():
            resource.field2

        # Accessing an undeclared field must raise AttributeError.
        self.assertRaises(AttributeError, access_unknown)

    def test__path(self):
        name = MockResource.resource_name
        self.assertEqual(MockResource._path(), name)
        self.assertEqual(MockResource._path(resource_id=None), name)
        self.assertEqual(MockResource._path(resource_id=0),
                         '%s/%s' % (name, 0))
        self.assertEqual(MockResource._path(resource_id=1),
                         '%s/%s' % (name, 1))
        self.assertEqual(MockResource._path(resource_id=1, action='action'),
                         '%s/%s/%s' % (name, 1, 'action'))
class TestResourcesDataStore(unittest.TestCase):
    """Tests for ``DataStore`` conversion-status helpers."""

    def setUp(self):
        self.client = MockClient()

    def test_has_conversion_finished(self):
        ds = DataStore(self.client)
        self.assertFalse(ds.has_conversion_finished())
        # Checking the status performs a synchronous GET against the API.
        self.assertEqual(self.client.last_request_method, 'get')

    def test_has_conversion_finished_status_queued(self):
        self._check_has_conversion_finished(DataStore.STATUS_QUEUED, False)

    def test_has_conversion_finished_status_converting(self):
        self._check_has_conversion_finished(DataStore.STATUS_CONVERTING, False)

    def test_has_conversion_finished_status_finished(self):
        self._check_has_conversion_finished(DataStore.STATUS_FINISHED, True)

    def test_has_conversion_finished_status_failed(self):
        self._check_has_conversion_finished(DataStore.STATUS_FAILED, True)

    def test_status_code_to_text(self):
        self.assertEqual(
            DataStore.status_code_to_text(DataStore.STATUS_QUEUED), 'queued')
        self.assertEqual(
            DataStore.status_code_to_text(DataStore.STATUS_CONVERTING),
            'converting')
        self.assertEqual(
            DataStore.status_code_to_text(DataStore.STATUS_FINISHED),
            'finished')
        self.assertEqual(
            DataStore.status_code_to_text(DataStore.STATUS_FAILED), 'failed')
        # Fixed copy-paste defect: the original asserted via
        # Test.status_code_to_text here, but this class exercises DataStore.
        self.assertEqual(
            DataStore.status_code_to_text(0xffffffff),
            'unknown')

    def _check_has_conversion_finished(self, status, expected):
        # Helper: force a status value and verify the finished predicate.
        ds = DataStore(self.client)
        ds.status = status
        self.assertEqual(ds.has_conversion_finished(), expected)
        self.assertEqual(self.client.last_request_method, 'get')
class TestResourcesLoadZone(unittest.TestCase):
    """Tests for load-zone name to numeric ID translation."""

    def test_name_to_id(self):
        # Table-driven check of every known zone-name/ID pair.
        expected_ids = {
            LoadZone.AGGREGATE_WORLD: 1,
            LoadZone.AMAZON_US_ASHBURN: 11,
            LoadZone.AMAZON_US_PALOALTO: 12,
            LoadZone.AMAZON_IE_DUBLIN: 13,
            LoadZone.AMAZON_SG_SINGAPORE: 14,
            LoadZone.AMAZON_JP_TOKYO: 15,
            LoadZone.AMAZON_US_PORTLAND: 22,
            LoadZone.AMAZON_BR_SAOPAULO: 23,
            LoadZone.AMAZON_AU_SYDNEY: 25,
            LoadZone.RACKSPACE_US_CHICAGO: 26,
            LoadZone.RACKSPACE_US_DALLAS: 27,
            LoadZone.RACKSPACE_UK_LONDON: 28,
            LoadZone.RACKSPACE_AU_SYDNEY: 29,
        }
        for name, zone_id in expected_ids.items():
            self.assertEqual(LoadZone.name_to_id(name), zone_id)

    def test_name_to_id_exception(self):
        # Unknown zone names must raise ValueError.
        self.assertRaises(ValueError, LoadZone.name_to_id, 'unknown')
class TestResourcesTest(unittest.TestCase):
    """Tests for the Test resource: aborting, status polling and streaming."""

    def setUp(self):
        self.client = MockClient()

    def test_abort(self):
        test = Test(self.client)
        result = test.abort()
        # Aborting issues a POST; a 200-class response reports success.
        self.assertEqual(self.client.last_request_method, 'post')
        self.assertTrue(result)

    def test_abort_failed_409(self):
        # HTTP 409 Conflict from the API means the abort was not accepted.
        client = MockClient(response_status_code=409)
        test = Test(client)
        result = test.abort()
        self.assertEqual(client.last_request_method, 'post')
        self.assertFalse(result)

    def test_is_done(self):
        test = Test(self.client)
        self.assertFalse(test.is_done())
        # Polling the status performs a synchronous GET against the API.
        self.assertEqual(self.client.last_request_method, 'get')

    def test_is_done_status_created(self):
        self._check_is_done(Test.STATUS_CREATED, False)

    def test_is_done_status_queued(self):
        self._check_is_done(Test.STATUS_QUEUED, False)

    def test_is_done_status_initializing(self):
        self._check_is_done(Test.STATUS_INITIALIZING, False)

    def test_is_done_status_running(self):
        self._check_is_done(Test.STATUS_RUNNING, False)

    def test_is_done_status_finished(self):
        self._check_is_done(Test.STATUS_FINISHED, True)

    def test_is_done_status_timed_out(self):
        self._check_is_done(Test.STATUS_TIMED_OUT, True)

    def test_is_done_status_aborting_user(self):
        self._check_is_done(Test.STATUS_ABORTING_USER, False)

    def test_is_done_status_aborted_user(self):
        self._check_is_done(Test.STATUS_ABORTED_USER, True)

    def test_is_done_status_aborting_system(self):
        self._check_is_done(Test.STATUS_ABORTING_SYSTEM, False)

    def test_is_done_status_aborted_system(self):
        self._check_is_done(Test.STATUS_ABORTED_SYSTEM, True)

    def test_result_stream(self):
        # The default stream tracks user load time and active users for the
        # world-aggregate load zone.
        test = Test(self.client)
        result_stream = test.result_stream()
        self.assertTrue(isinstance(
            result_stream, _TestResultStream))
        self.assertEqual(result_stream.test, test)
        self.assertEqual(result_stream.result_ids, [
            TestResult.result_id_from_name(
                TestResult.USER_LOAD_TIME,
                load_zone_id=LoadZone.name_to_id(LoadZone.AGGREGATE_WORLD)),
            TestResult.result_id_from_name(
                TestResult.ACTIVE_USERS,
                load_zone_id=LoadZone.name_to_id(LoadZone.AGGREGATE_WORLD))
        ])

    def test_status_code_to_text(self):
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_CREATED), 'created')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_QUEUED), 'queued')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_INITIALIZING), 'initializing')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_RUNNING), 'running')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_FINISHED), 'finished')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_TIMED_OUT), 'timed out')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_ABORTING_USER),
            'aborting (by user)')
        self.assertEqual(Test.status_code_to_text(Test.STATUS_ABORTED_USER),
                         'aborted (by user)')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_ABORTING_SYSTEM),
            'aborting (by system)')
        self.assertEqual(
            Test.status_code_to_text(Test.STATUS_ABORTED_SYSTEM),
            'aborted (by system)')
        self.assertEqual(
            Test.status_code_to_text(0xffffffff),
            'unknown')

    def _check_is_done(self, status, expected):
        # Helper: force a status value and verify the is_done predicate.
        test = Test(self.client)
        test.status = status
        self.assertEqual(test.is_done(), expected)
        self.assertEqual(self.client.last_request_method, 'get')
class TestResourcesTestResult(unittest.TestCase):
    """Tests for ``TestResult`` result-ID construction helpers."""

    @staticmethod
    def _md5_hex(value):
        # hashlib requires bytes on Python 3; mirror the library's UTF-8
        # encoding so expected hashes line up with production behaviour.
        if sys.version_info >= (3, 0) and isinstance(value, str):
            value = value.encode('utf-8')
        return hashlib.md5(value).hexdigest()

    def test_result_id_from_name_with_name(self):
        result_id = TestResult.result_id_from_name('__li_user_load_time')
        self.assertEqual(result_id, '__li_user_load_time')

    def test_result_id_from_name_with_name_load_zone(self):
        result_id = TestResult.result_id_from_name('__li_user_load_time',
                                                   load_zone_id=1)
        self.assertEqual(result_id, '__li_user_load_time:1')

    def test_result_id_from_name_with_name_load_zone_user_scenario(self):
        result_id = TestResult.result_id_from_name('__li_user_load_time',
                                                   load_zone_id=1,
                                                   user_scenario_id=1)
        self.assertEqual(result_id, '__li_user_load_time:1:1')

    def test_result_id_from_custom_metric_name(self):
        result_id = TestResult.result_id_from_custom_metric_name('my metric',
                                                                 1, 1)
        self.assertEqual(result_id,
                         '__custom_%s:1:1' % self._md5_hex('my metric'))

    def test_result_id_for_page(self):
        result_id = TestResult.result_id_for_page('my page', 1, 1)
        self.assertEqual(result_id,
                         '__li_page_%s:1:1' % self._md5_hex('my page'))

    def test_result_id_for_url(self):
        result_id = TestResult.result_id_for_url('http://example.com/', 1, 1,
                                                 method='GET',
                                                 status_code=200)
        self.assertEqual(result_id,
                         '__li_url_%s:1:1:200:GET'
                         % self._md5_hex('http://example.com/'))
class TestResourcesTestConfig(unittest.TestCase):
    """Tests for the TestConfig resource."""

    def setUp(self):
        self.client = MockClient()

    def _assert_put_with_name(self, client, resource, expected_name):
        # Shared checks: update() issues a JSON PUT carrying the name and
        # the resource reflects the server's response.
        self.assertEqual(client.last_request_method, 'put')
        self.assertEqual(client.last_request_kwargs['data']['name'],
                         expected_name)
        self.assertEqual(
            client.last_request_kwargs['headers']['Content-Type'],
            'application/json')
        self.assertEqual(resource.name, expected_name)

    def test_user_type_enums(self):
        self.assertEqual(TestConfig.SBU, 'sbu')
        self.assertEqual(TestConfig.VU, 'vu')

    def test_user_type_getter(self):
        config = TestConfig(self.client)
        self.assertEqual(config.user_type, TestConfig.SBU)

    def test_user_type_setter(self):
        config = TestConfig(self.client)
        config.user_type = TestConfig.VU
        self.assertEqual(config.user_type, TestConfig.VU)

    def test_user_type_setter_valueerror(self):
        config = TestConfig(self.client)
        with self.assertRaises(ValueError):
            config.user_type = 'something bad'

    def test_clone(self):
        name = 'Cloned Test Config'
        clone = TestConfig(self.client).clone(name)
        self.assertEqual(self.client.last_request_method, 'post')
        self.assertEqual(self.client.last_request_kwargs['data']['name'],
                         name)
        self.assertIsInstance(clone, TestConfig)

    def test_update_with_dict(self):
        name_change = 'Test Config'
        client = MockClient(response_body={'name': name_change})
        test_config = TestConfig(client)
        test_config.update({'name': name_change})
        self._assert_put_with_name(client, test_config, name_change)

    def test_update_with_attribute(self):
        name_change = 'Test Config'
        test_config = TestConfig(self.client)
        test_config.name = name_change
        test_config.update()
        self._assert_put_with_name(self.client, test_config, name_change)
class TestResourcesUserScenario(unittest.TestCase):
    """Tests for the UserScenario resource."""

    def setUp(self):
        self.client = MockClient()

    def _assert_put_with_name(self, client, resource, expected_name):
        # Shared checks: update() issues a JSON PUT carrying the name and
        # the resource reflects the server's response.
        self.assertEqual(client.last_request_method, 'put')
        self.assertEqual(client.last_request_kwargs['data']['name'],
                         expected_name)
        self.assertEqual(
            client.last_request_kwargs['headers']['Content-Type'],
            'application/json')
        self.assertEqual(resource.name, expected_name)

    def test_get(self):
        client = MockClient(response_body={
            'data_stores': [{'id': 1}, {'id': 2}]
        })
        scenario = client.get_user_scenario(1)
        self.assertEqual(client.last_request_method, 'get')
        # Nested data-store objects are flattened to their ids.
        self.assertEqual(scenario.data_stores, [1, 2])

    def test_clone(self):
        name = 'Cloned User Scenario'
        clone = UserScenario(self.client).clone(name)
        self.assertEqual(self.client.last_request_method, 'post')
        self.assertEqual(self.client.last_request_kwargs['data']['name'],
                         name)
        self.assertIsInstance(clone, UserScenario)

    def test_update_with_dict(self):
        name_change = 'Test User Scenario'
        client = MockClient(response_body={'name': name_change})
        scenario = UserScenario(client)
        scenario.update({'name': name_change})
        self._assert_put_with_name(client, scenario, name_change)

    def test_update_with_attribute(self):
        name_change = 'Test User Scenario'
        scenario = UserScenario(self.client)
        scenario.name = name_change
        scenario.update()
        self._assert_put_with_name(self.client, scenario, name_change)
class TestResourcesUserScenarioValidation(unittest.TestCase):
    """Tests for the UserScenarioValidation resource."""

    def setUp(self):
        self.client = MockClient()

    def _check_is_done(self, status, expected):
        """Helper: a validation in *status* must report
        is_done() == *expected* and have probed the API with a GET."""
        validation = UserScenarioValidation(self.client)
        validation.status = status
        self.assertEqual(validation.is_done(), expected)
        self.assertEqual(self.client.last_request_method, 'get')

    def test_is_done(self):
        validation = UserScenarioValidation(self.client)
        self.assertFalse(validation.is_done())
        self.assertEqual(self.client.last_request_method, 'get')

    def test_is_done_status_queued(self):
        self._check_is_done(UserScenarioValidation.STATUS_QUEUED, False)

    def test_is_done_status_initializing(self):
        self._check_is_done(UserScenarioValidation.STATUS_INITIALIZING, False)

    def test_is_done_status_running(self):
        self._check_is_done(UserScenarioValidation.STATUS_RUNNING, False)

    def test_is_done_status_finished(self):
        self._check_is_done(UserScenarioValidation.STATUS_FINISHED, True)

    def test_is_done_status_failed(self):
        self._check_is_done(UserScenarioValidation.STATUS_FAILED, True)

    def test_status_code_to_text(self):
        """Each known status code maps to its label; anything else to
        'unknown'."""
        expected = [
            (UserScenarioValidation.STATUS_QUEUED, 'queued'),
            (UserScenarioValidation.STATUS_INITIALIZING, 'initializing'),
            (UserScenarioValidation.STATUS_RUNNING, 'running'),
            (UserScenarioValidation.STATUS_FINISHED, 'finished'),
            (UserScenarioValidation.STATUS_FAILED, 'failed'),
            (0xffffffff, 'unknown'),
        ]
        for code, text in expected:
            self.assertEqual(
                UserScenarioValidation.status_code_to_text(code), text)

    def test_result_stream(self):
        validation = UserScenarioValidation(self.client)
        stream = validation.result_stream()
        self.assertIsInstance(stream, _UserScenarioValidationResultStream)
        self.assertEqual(stream.validation, validation)
| {
"content_hash": "1d0980fcfea03c3f18cd4ed8abb808b3",
"timestamp": "",
"source": "github",
"line_count": 461,
"max_line_length": 84,
"avg_line_length": 40.84598698481562,
"alnum_prop": 0.6369622942113649,
"repo_name": "loadimpact/loadimpact-sdk-python",
"id": "d59819838861a138fc5136cb4283fc4d92e8f55c",
"size": "18846",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "test/test_resources.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1057"
},
{
"name": "Python",
"bytes": "100036"
}
],
"symlink_target": ""
} |
from jsonrpc import ServiceProxy
import sys
import string
import getpass
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:51020")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:51020")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "encryptwallet":
try:
pwd = getpass.getpass(prompt="Enter passphrase: ")
pwd2 = getpass.getpass(prompt="Repeat passphrase: ")
if pwd == pwd2:
access.encryptwallet(pwd)
print "\n---Wallet encrypted. Server stopping, restart to run with encrypted wallet---\n"
else:
print "\n---Passphrases do not match---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = getpass.getpass(prompt="Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = getpass.getpass(prompt="Enter old wallet passphrase: ")
pwd2 = getpass.getpass(prompt="Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| {
"content_hash": "8c84d0b8650d331cd525e56724f0022f",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 101,
"avg_line_length": 28.679525222551927,
"alnum_prop": 0.5685463010863943,
"repo_name": "CoinProjects/AmsterdamCoin-v4",
"id": "8afc1d4d582a2caab7ec650ff06daaa6bc41a522",
"size": "9665",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/bitrpc/bitrpc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "3651089"
},
{
"name": "C++",
"bytes": "5099739"
},
{
"name": "CSS",
"bytes": "44109"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "160500"
},
{
"name": "Makefile",
"bytes": "98386"
},
{
"name": "Objective-C",
"bytes": "2023"
},
{
"name": "Objective-C++",
"bytes": "7350"
},
{
"name": "Python",
"bytes": "253473"
},
{
"name": "QMake",
"bytes": "26864"
},
{
"name": "Roff",
"bytes": "18307"
},
{
"name": "Shell",
"bytes": "54176"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, Dict, List, Optional, Type, TypeVar
import json
from .utils import parse_yaml, dump_json
from .cache import Cacheable
R = TypeVar('R', bound='Resource')
class Resource (Cacheable):
    """
    A resource that's part of the overall Ambassador configuration world. This is
    the base class for IR resources, Ambassador-config resources, etc.

    Elements in a Resource:
    - rkey is a short identifier that is used as the primary key for _all_ the
    Ambassador classes to identify this single specific resource. It should be
    something like "ambassador-default.1" or the like: very specific, doesn't
    have to be fun for humans.

    - location is a more human-readable string describing where the human should
    go to find the source of this resource. "Service ambassador, namespace default,
    object 1". This isn't really used by the Config class, but the Diagnostics class
    makes heavy use of it.

    - kind (keyword-only) is what kind of Ambassador resource this is.

    - serialization (keyword-only) is the _original input serialization_, if we have
    it, of the object. If we don't have it, this should be None -- don't just serialize
    the object to no purpose.

    - any additional keyword arguments are saved in the Resource.

    :param rkey: unique identifier for this source, should be short
    :param location: where should a human go to find the source of this resource?
    :param kind: what kind of thing is this?
    :param serialization: original input serialization of obj, if we have it
    :param kwargs: key-value pairs that form the data object for this resource
    """

    rkey: str
    location: str
    kind: str
    serialization: Optional[str]

    # _errors: List[RichStatus]
    _errored: bool
    _referenced_by: Dict[str, 'Resource']

    def __init__(self, rkey: str, location: str, *,
                 kind: str,
                 serialization: Optional[str]=None,
                 **kwargs) -> None:
        # rkey and kind are mandatory; everything else flows into the
        # Cacheable (dict-backed) base class as item storage.
        if not rkey:
            raise Exception("Resource requires rkey")

        if not kind:
            raise Exception("Resource requires kind")

        # print("Resource __init__ (%s %s)" % (kind, name))

        super().__init__(rkey=rkey, location=location,
                         kind=kind, serialization=serialization,
                         # _errors=[],
                         _referenced_by={},
                         **kwargs)

    def sourced_by(self, other: 'Resource'):
        """Adopt *other*'s rkey and location, re-sourcing this resource."""
        self.rkey = other.rkey
        self.location = other.location

    def referenced_by(self, other: 'Resource') -> None:
        """Record that *other* references this resource, keyed by its location."""
        # print("%s %s REF BY %s %s" % (self.kind, self.name, other.kind, other.rkey))
        self._referenced_by[other.location] = other

    def is_referenced_by(self, other_location) -> Optional['Resource']:
        """Return the referencing Resource recorded for *other_location*, or None."""
        return self._referenced_by.get(other_location, None)

    def __getattr__(self, key: str) -> Any:
        # Fall back to dict lookup so `res.key` mirrors `res['key']`.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key)

    def __setattr__(self, key: str, value: Any) -> None:
        # Mirror attribute writes into the underlying dict storage.
        self[key] = value

    def __str__(self) -> str:
        return("<%s %s>" % (self.kind, self.rkey))

    def as_dict(self) -> Dict[str, Any]:
        """Return a plain dict of the resource data, minus bookkeeping fields."""
        ad = dict(self)

        ad.pop('rkey', None)
        ad.pop('serialization', None)
        ad.pop('location', None)
        ad.pop('_referenced_by', None)
        ad.pop('_errored', None)

        return ad

    def as_json(self):
        """Serialize as_dict() to pretty-printed JSON."""
        return dump_json(self.as_dict(), pretty=True)

    @classmethod
    def from_resource(cls: Type[R], other: R,
                      rkey: Optional[str]=None,
                      location: Optional[str]=None,
                      kind: Optional[str]=None,
                      serialization: Optional[str]=None,
                      **kwargs) -> R:
        """
        Create a Resource by copying another Resource, possibly overriding elements
        along the way.

        NOTE WELL: if you pass in kwargs, we assume that any values are safe to use as-is
        and DO NOT COPY THEM. Otherwise, we SHALLOW COPY other.attrs for the new Resource.

        :param other: the base Resource we're copying
        :param rkey: optional new rkey
        :param location: optional new location
        :param kind: optional new kind
        :param serialization: optional new original input serialization
        :param kwargs: optional new key-value pairs -- see discussion about copying above!
        """

        # rkey and location are required positional arguments. Fine.
        new_rkey = rkey or other.rkey
        new_location = location or other.location

        # Make a shallow-copied dict that we can muck with...
        new_attrs = dict(kwargs) if kwargs else dict(other)

        # Don't include kind unless it comes in on this call.
        if kind:
            new_attrs['kind'] = kind
        else:
            new_attrs.pop('kind', None)

        # Don't include serialization at all if we don't have one.
        if serialization:
            new_attrs['serialization'] = serialization
        elif other.serialization:
            new_attrs['serialization'] = other.serialization

        # Make sure that things that shouldn't propagate are gone...
        new_attrs.pop('rkey', None)
        new_attrs.pop('location', None)
        new_attrs.pop('_errors', None)
        new_attrs.pop('_errored', None)
        new_attrs.pop('_referenced_by', None)

        # ...and finally, use new_attrs for all the keyword args when we set up
        # the new instance.
        return cls(new_rkey, new_location, **new_attrs)

    @classmethod
    def from_dict(cls: Type[R], rkey: str, location: str, serialization: Optional[str], attrs: Dict) -> R:
        """
        Create a Resource or subclass thereof from a dictionary. The new Resource's rkey
        and location must be handed in explicitly.

        The difference between this and simply intializing a Resource object is that
        from_dict will introspect the attrs passed in and create whatever kind of Resource
        matches attrs['kind'] -- so for example, if kind is "Mapping", this method will
        return a Mapping rather than a Resource.

        :param rkey: unique identifier for this source, should be short
        :param location: where should a human go to find the source of this resource?
        :param serialization: original input serialization of obj
        :param attrs: dictionary from which to initialize the new object
        """

        # So this is a touch odd but here we go. We want to use the Kind here to find
        # the correct type.
        ambassador = sys.modules['ambassador']

        # Try the kind as-is first, then with an "AC" prefix; fall back to
        # this class when neither name exists in the ambassador module.
        resource_class: Type[R] = getattr(ambassador, attrs['kind'], None)

        if not resource_class:
            resource_class = getattr(ambassador, 'AC' + attrs[ 'kind' ], cls)

        # print("%s.from_dict: %s => %s" % (cls, attrs['kind'], resource_class))

        return resource_class(rkey, location=location, serialization=serialization, **attrs)

    @classmethod
    def from_yaml(cls: Type[R], rkey: str, location: str, serialization: str) -> R:
        """
        Create a Resource from a YAML serialization. The new Resource's rkey
        and location must be handed in explicitly, and of course in this case the
        serialization is mandatory.

        Raises an exception if the serialization is not parseable.

        :param rkey: unique identifier for this source, should be short
        :param location: where should a human go to find the source of this resource?
        :param serialization: original input serialization of obj
        """

        attrs = parse_yaml(serialization)

        return cls.from_dict(rkey, location, serialization, attrs)
| {
"content_hash": "8c5ded23c4195f44d1e3ddc79fb6e1b9",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 106,
"avg_line_length": 37.32057416267943,
"alnum_prop": 0.6276923076923077,
"repo_name": "datawire/ambassador",
"id": "01e3b1c4837f1d57b427a1886674238c7153f19f",
"size": "7800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ambassador/resource.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "20990"
},
{
"name": "Go",
"bytes": "564752"
},
{
"name": "HTML",
"bytes": "25150"
},
{
"name": "JavaScript",
"bytes": "32368"
},
{
"name": "Makefile",
"bytes": "113905"
},
{
"name": "Python",
"bytes": "1158187"
},
{
"name": "Shell",
"bytes": "188832"
}
],
"symlink_target": ""
} |
import sys
import os
import logging
import traceback
import ed25519
from nacl.public import PrivateKey, PublicKey
from nacl.encoding import HexEncoder
from .crypt4gh import encrypt, decrypt, reencrypt
from .cli import parse_args
from .keys import generate_ec, generate_signing
LOG = logging.getLogger(__name__)

# Default key locations, taken from the environment; each can be overridden
# by the corresponding command-line option. Key files are hex-encoded.
DEFAULT_PK = os.getenv('C4GH_PUBLIC_KEY', None)
DEFAULT_SK = os.getenv('C4GH_SECRET_KEY', None)
DEFAULT_SIGK = os.getenv('C4GH_SIGNING_KEY', None)
def run(args):
    """Execute one crypt4gh CLI command: encrypt, decrypt, reencrypt or generate.

    :param args: raw argument vector (e.g. ``sys.argv[1:]``), parsed with
                 :func:`parse_args`. Key paths default to the ``C4GH_*``
                 environment variables; key files are hex-encoded.
    :raises ValueError: when a required key is missing or its file not found.
    """
    # Parse CLI arguments
    args = parse_args(args)

    def _signing_key():
        """Load the optional ed25519 signing key (hex file), or return None."""
        path = args['--signing_key'] or DEFAULT_SIGK
        if not path:
            return None
        with open(os.path.expanduser(path), 'rt') as f:  # hex format
            return ed25519.SigningKey(bytes.fromhex(f.read()))

    def _required_path(path, what):
        """Expand *path* and ensure it exists; *what* names the key in errors."""
        if not path:
            raise ValueError('{} not specified'.format(what))
        path = os.path.expanduser(path)
        if not os.path.exists(path):
            raise ValueError('{} not found'.format(what))
        return path

    #####################################
    ## For Encryption
    #####################################
    if args['encrypt']:
        # If we want to sign the header
        signing_key = _signing_key()
        # The public key is optional here (encrypt handles a None recipient).
        pubkey = args['--pk'] or DEFAULT_PK
        if pubkey:
            with open(os.path.expanduser(pubkey), 'rt') as f:  # hex format
                pubkey = PublicKey(f.read(), HexEncoder)
        encrypt(sys.stdin.buffer, sys.stdout.buffer, pubkey,
                signing_key=signing_key)

    #####################################
    ## For Decryption
    #####################################
    if args['decrypt']:
        seckey = _required_path(args['--sk'] or DEFAULT_SK, 'Secret key')
        with open(seckey, 'rt') as skfile:  # hex format
            seckey = PrivateKey(skfile.read(), HexEncoder)
        # NOTE(review): secret keys are read unencrypted from disk; passphrase
        # unlocking is not implemented yet.
        decrypt(seckey, sys.stdin.buffer, process_output=sys.stdout.buffer.write)

    #####################################
    ## For ReEncryption
    #####################################
    if args['reencrypt']:
        signing_key = _signing_key()
        # BUG FIX: expanduser() used to run before the None check, so a missing
        # public key crashed with TypeError instead of raising ValueError.
        pubkey = _required_path(args['--pk'] or DEFAULT_PK, 'Public key')
        seckey = _required_path(args['--sk'] or DEFAULT_SK, 'Secret key')
        with open(seckey, 'rt') as skfile, open(pubkey, 'rt') as pkfile:  # hex format
            seckey = PrivateKey(skfile.read(), HexEncoder)
            pubkey = PublicKey(pkfile.read(), HexEncoder)
        # Same thing, unlock the key
        reencrypt(pubkey, seckey, sys.stdin.buffer,
                  signing_key=signing_key,
                  process_output=sys.stdout.buffer.write)

    #####################################
    ## For Keys Generation
    #####################################
    if args['generate']:
        seckey = os.path.expanduser(args['-o'])
        if os.path.isfile(seckey):
            # Never overwrite an existing key without explicit confirmation.
            yn = input(f'{seckey} already exists. Do you want to overwrite it? (y/n) ')
            if yn != 'y':
                print('Ok. Fair enough. Exiting.')
                return
        pubkey = os.path.expanduser(args['-o'] + '.pub')
        passphrase = args['-P']
        if args['--signing']:
            generate_signing(seckey, pubkey, passphrase=passphrase)
            print('Signing key created in', seckey, file=sys.stderr)
        else:
            generate_ec(seckey, pubkey, passphrase=passphrase)
            print('EC key created in', seckey, file=sys.stderr)
def main(args=None):
    """Console entry point: run the CLI and exit quietly on Ctrl-C.

    :param args: argument vector; defaults to ``sys.argv[1:]`` evaluated at
                 call time (the previous default froze whatever ``sys.argv``
                 held at import time).
    """
    if args is None:
        args = sys.argv[1:]
    try:
        run(args)
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()
| {
"content_hash": "e42eb6085f009097ca99f2ffc9fefc64",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 120,
"avg_line_length": 35.934306569343065,
"alnum_prop": 0.5567743245988218,
"repo_name": "AlexanderSenf/Crypt4GH",
"id": "a95cafb1f3ef9dc1dafbef71603439e814422a93",
"size": "4970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crypt4gh/__main__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "54195"
}
],
"symlink_target": ""
} |
import phial
@phial.page
def homepage():
    """Serve the site's only page as index.htm."""
    return phial.file(content="Hello World!", name="index.htm")
| {
"content_hash": "6b3cb5f10bf83d7993eb45f819928f0e",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 63,
"avg_line_length": 17.833333333333332,
"alnum_prop": 0.7009345794392523,
"repo_name": "brownhead/phial",
"id": "c0153ef69b8bcd18a88969a2769f4f69bb3a0917",
"size": "107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phial/examples/super_simple_site/app.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "34"
},
{
"name": "HTML",
"bytes": "673"
},
{
"name": "JavaScript",
"bytes": "8"
},
{
"name": "Python",
"bytes": "53328"
},
{
"name": "Shell",
"bytes": "2246"
}
],
"symlink_target": ""
} |
import time
from oslo_log import log
from tempest_lib.common.utils import data_utils
import testtools
from ec2api.tests.functional import base
from ec2api.tests.functional import config
CONF = config.CONF
LOG = log.getLogger(__name__)
class SecurityGroupBaseTest(base.EC2TestCase):
    """Shared machinery for security-group rule tests (EC2-classic and VPC)."""

    def _test_rules(self, add_func, del_func, field, vpc_id=None):
        """Authorize two rules, verify them, revoke them, and check re-revoke.

        :param add_func: client call that authorizes rules (ingress or egress)
        :param del_func: client call that revokes the same rules
        :param field: security-group attribute holding the rules
                      (e.g. 'IpPermissions' or 'IpPermissionsEgress')
        :param vpc_id: restrict to this VPC; None means EC2-classic groups
        """
        kwargs = dict()
        if vpc_id:
            kwargs['Filters'] = [{'Name': 'vpc-id', 'Values': [vpc_id]}]
        data = self.client.describe_security_groups(*[], **kwargs)
        security_groups = data['SecurityGroups']
        if not vpc_id:
            # TODO(andrey-mp): remove this once filtering by a None vpc-id
            # is supported server-side.
            security_groups = [sg for sg in security_groups
                               if sg.get('VpcId') is None]
        # The first existing group is used as the referenced peer group below.
        default_group = security_groups[0]

        name = data_utils.rand_name('sgName')
        desc = data_utils.rand_name('sgDesc')
        kwargs = {'GroupName': name, 'Description': desc}
        if vpc_id:
            kwargs['VpcId'] = vpc_id
        data = self.client.create_security_group(*[], **kwargs)
        group_id = data['GroupId']
        res_clean = self.addResourceCleanUp(self.client.delete_security_group,
                                            GroupId=group_id)
        # Give the backend a moment to make the new group visible.
        time.sleep(2)

        # Baseline rule count before adding ours (VPC groups get defaults).
        data = self.client.describe_security_groups(GroupIds=[group_id])
        count = len(data['SecurityGroups'][0][field])

        # One CIDR-based ICMP rule plus one group-reference TCP rule.
        kwargs = {
            'GroupId': group_id,
            'IpPermissions': [{
                'IpProtocol': 'icmp',
                'FromPort': -1,
                'ToPort': -1,
                'IpRanges': [{
                    'CidrIp': '10.0.0.0/8'
                }],
            }, {
                'UserIdGroupPairs': [{'GroupId': default_group['GroupId']}],
                'ToPort': 65535,
                'IpProtocol': 'tcp',
                'FromPort': 1
            }]
        }
        add_func(*[], **kwargs)

        # Both rules must now be present, with exactly the values we sent.
        data = self.client.describe_security_groups(GroupIds=[group_id])
        self.assertEqual(1, len(data['SecurityGroups']))
        self.assertEqual(count + 2, len(data['SecurityGroups'][0][field]))
        found = 0
        for perm in data['SecurityGroups'][0][field]:
            cidrs = [v['CidrIp'] for v in perm.get('IpRanges', [])]
            if (perm.get('FromPort') == -1 and
                    perm.get('ToPort') == -1 and
                    perm.get('IpProtocol') == 'icmp' and
                    len(perm.get('IpRanges')) == 1 and
                    '10.0.0.0/8' in cidrs):
                found = found + 1
            elif (perm.get('FromPort') == 1 and
                    perm.get('ToPort') == 65535 and
                    perm.get('IpProtocol') == 'tcp' and
                    len(perm.get('UserIdGroupPairs')) == 1 and
                    perm.get('UserIdGroupPairs')[0]['GroupId']
                    == default_group['GroupId']):
                found = found + 1
        self.assertEqual(2, found)

        # Revoke the rules and confirm the count is back to the baseline.
        del_func(*[], **kwargs)
        data = self.client.describe_security_groups(GroupIds=[group_id])
        self.assertEqual(1, len(data['SecurityGroups']))
        self.assertEqual(count, len(data['SecurityGroups'][0][field]))

        # Revoking again: VPC groups report the missing permission; classic
        # groups accept the call silently.
        if vpc_id:
            self.assertRaises('InvalidPermission.NotFound', del_func, **kwargs)
        else:
            del_func(*[], **kwargs)

        self.client.delete_security_group(GroupId=group_id)
        self.cancelResourceCleanUp(res_clean)
class SecurityGroupInVPCTest(SecurityGroupBaseTest):
VPC_CIDR = '10.10.0.0/20'
vpc_id = None
    @classmethod
    @base.safe_setup
    def setUpClass(cls):
        """Create a dedicated VPC for the class; skip if VPC is disabled."""
        super(SecurityGroupInVPCTest, cls).setUpClass()
        if not base.TesterStateHolder().get_vpc_enabled():
            raise cls.skipException('VPC is disabled')
        data = cls.client.create_vpc(CidrBlock=cls.VPC_CIDR)
        cls.vpc_id = data['Vpc']['VpcId']
        # Register static cleanup so the VPC is removed even on failures.
        cls.addResourceCleanUpStatic(cls.client.delete_vpc, VpcId=cls.vpc_id)
        cls.get_vpc_waiter().wait_available(cls.vpc_id)
def test_create_delete_security_group(self):
name = data_utils.rand_name('sgName')
desc = data_utils.rand_name('sgDesc')
data = self.client.create_security_group(VpcId=self.vpc_id,
GroupName=name,
Description=desc)
group_id = data['GroupId']
res_clean = self.addResourceCleanUp(self.client.delete_security_group,
GroupId=group_id)
time.sleep(2)
self.client.delete_security_group(GroupId=group_id)
self.cancelResourceCleanUp(res_clean)
self.assertRaises('InvalidGroup.NotFound',
self.client.describe_security_groups,
GroupIds=[group_id])
self.assertRaises('InvalidGroup.NotFound',
self.client.delete_security_group,
GroupId=group_id)
def test_create_duplicate_security_group(self):
name = data_utils.rand_name('sgName')
desc = data_utils.rand_name('sgDesc')
data = self.client.create_security_group(VpcId=self.vpc_id,
GroupName=name,
Description=desc)
group_id = data['GroupId']
res_clean = self.addResourceCleanUp(self.client.delete_security_group,
GroupId=group_id)
time.sleep(2)
self.assertRaises('InvalidGroup.Duplicate',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName=name, Description=desc)
data = self.client.delete_security_group(GroupId=group_id)
self.cancelResourceCleanUp(res_clean)
def test_create_duplicate_security_group_in_another_vpc(self):
name = data_utils.rand_name('sgName')
desc = data_utils.rand_name('sgDesc')
data = self.client.create_security_group(VpcId=self.vpc_id,
GroupName=name,
Description=desc)
group_id = data['GroupId']
res_clean = self.addResourceCleanUp(self.client.delete_security_group,
GroupId=group_id)
time.sleep(2)
data = self.client.create_vpc(CidrBlock=self.VPC_CIDR)
vpc_id = data['Vpc']['VpcId']
dv_clean = self.addResourceCleanUp(self.client.delete_vpc,
VpcId=vpc_id)
data = self.client.create_security_group(VpcId=vpc_id,
GroupName=name,
Description=desc)
time.sleep(2)
self.client.delete_security_group(GroupId=data['GroupId'])
self.client.delete_vpc(VpcId=vpc_id)
self.cancelResourceCleanUp(dv_clean)
self.get_vpc_waiter().wait_delete(vpc_id)
data = self.client.delete_security_group(GroupId=group_id)
self.cancelResourceCleanUp(res_clean)
@testtools.skipUnless(CONF.aws.run_incompatible_tests,
"MismatchError: 'InvalidParameterValue' != 'ValidationError'")
def test_create_invalid_name_desc(self):
valid = data_utils.rand_name('sgName')
invalid = 'name%"'
self.assertRaises('InvalidParameterValue',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName=invalid,
Description=valid)
self.assertRaises('InvalidParameterValue',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName=valid,
Description=invalid)
self.assertRaises('InvalidParameterValue',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName='default',
Description='default')
self.assertRaises('MissingParameter',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName=valid, Description='')
self.assertRaises('MissingParameter',
self.client.create_security_group,
VpcId=self.vpc_id, GroupName='', Description=valid)
def test_ingress_rules(self):
self._test_rules(self.client.authorize_security_group_ingress,
self.client.revoke_security_group_ingress,
'IpPermissions', self.vpc_id)
def test_egress_rules(self):
self._test_rules(self.client.authorize_security_group_egress,
self.client.revoke_security_group_egress,
'IpPermissionsEgress', self.vpc_id)
class SecurityGroupEC2ClassicTest(SecurityGroupBaseTest):
    """Security group CRUD and rule behaviour for EC2-Classic (no VPC)."""
    def test_create_delete_security_group(self):
        """A group is visible both by name and by id, then deleted by name."""
        name = data_utils.rand_name('sgName')
        desc = data_utils.rand_name('sgDesc')
        data = self.client.create_security_group(GroupName=name,
                                                 Description=desc)
        group_id = data['GroupId']
        res_clean = self.addResourceCleanUp(self.client.delete_security_group,
                                            GroupId=group_id)
        # NOTE(review): fixed sleep presumably covers backend propagation
        # delay after create -- confirm whether a waiter could be used.
        time.sleep(2)
        data = self.client.describe_security_groups(GroupNames=[name])
        self.assertEqual(1, len(data['SecurityGroups']))
        self.assertEqual(group_id, data['SecurityGroups'][0]['GroupId'])
        data = self.client.describe_security_groups(GroupIds=[group_id])
        self.assertEqual(1, len(data['SecurityGroups']))
        self.assertEqual(name, data['SecurityGroups'][0]['GroupName'])
        self.client.delete_security_group(GroupName=name)
        self.cancelResourceCleanUp(res_clean)
    def test_create_duplicate_security_group(self):
        """Creating a second group with the same name must fail."""
        name = data_utils.rand_name('sgName')
        desc = data_utils.rand_name('sgDesc')
        data = self.client.create_security_group(GroupName=name,
                                                 Description=desc)
        group_id = data['GroupId']
        res_clean = self.addResourceCleanUp(self.client.delete_security_group,
                                            GroupId=group_id)
        time.sleep(2)
        self.assertRaises('InvalidGroup.Duplicate',
                          self.client.create_security_group,
                          GroupName=name, Description=desc)
        self.client.delete_security_group(GroupId=group_id)
        self.cancelResourceCleanUp(res_clean)
    @testtools.skipUnless(CONF.aws.run_incompatible_tests,
                          "MismatchError: 'MissingParameter' != 'ValidationError'")
    def test_create_invalid_name_desc(self):
        """Missing or reserved names/descriptions are rejected."""
        valid = data_utils.rand_name('sgName')
        self.assertRaises('MissingParameter',
                          self.client.create_security_group,
                          GroupName=valid, Description='')
        self.assertRaises('MissingParameter',
                          self.client.create_security_group,
                          GroupName='', Description=valid)
        self.assertRaises('InvalidGroup.Reserved',
                          self.client.create_security_group,
                          GroupName='default', Description='default')
    def test_ingress_rules(self):
        """Exercise ingress rule add/revoke via the shared helper."""
        self._test_rules(self.client.authorize_security_group_ingress,
                         self.client.revoke_security_group_ingress,
                         'IpPermissions')
    def test_egress_rules(self):
        """The egress-rule helper must fail here (presumably egress rules
        are VPC-only in EC2-Classic -- confirm against the API docs)."""
        def _test():
            self._test_rules(
                self.client.authorize_security_group_egress,
                self.client.revoke_security_group_egress,
                'IpPermissionsEgress')
        self.assertRaises('InvalidParameterValue', _test)
| {
"content_hash": "f5749cc450cc99d0a5d38690d593dbd2",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 79,
"avg_line_length": 40.95172413793104,
"alnum_prop": 0.5595318288986191,
"repo_name": "vishnu-kumar/ec2-api",
"id": "7525ff72799e992e9ee2e91bf4d3f6203b9ea929",
"size": "12512",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ec2api/tests/functional/api/test_security_groups.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1702647"
},
{
"name": "Shell",
"bytes": "29444"
}
],
"symlink_target": ""
} |
"""
This inline script utilizes harparser.HAR from
https://github.com/JustusW/harparser to generate a HAR log object.
"""
import six
import sys
import pytz
from harparser import HAR
from datetime import datetime
class _HARLog(HAR.log):
    """HAR.log subclass tracking a page list, a page counter, and a map of
    URL -> page id used to group entries into HAR pages.

    The attributes need to be registered here for them to actually be
    available later via self. This is due to HAREncodable linking __getattr__
    to __getitem__. Anything that is set only in __init__ will just be added
    as key/value pair to self.__classes__.
    """
    __page_list__ = []
    __page_count__ = 0
    __page_ref__ = {}

    def __init__(self, page_list=None):
        # BUG FIX: the original default was a mutable list literal, which
        # would be shared across every _HARLog instance. Use None as the
        # sentinel and create a fresh list per instance instead.
        self.__page_list__ = page_list if page_list is not None else []
        self.__page_count__ = 0
        self.__page_ref__ = {}
        HAR.log.__init__(self, {"version": "1.2",
                                "creator": {"name": "MITMPROXY HARExtractor",
                                            "version": "0.1",
                                            "comment": ""},
                                "pages": [],
                                "entries": []})

    def reset(self):
        """Re-initialize the log, keeping the configured page list."""
        self.__init__(self.__page_list__)

    def add(self, obj):
        """Append a HAR.pages or HAR.entries object to its collection."""
        if isinstance(obj, HAR.pages):
            self['pages'].append(obj)
        if isinstance(obj, HAR.entries):
            self['entries'].append(obj)

    def create_page_id(self):
        """Return a fresh auto-generated page id ("autopage_N")."""
        self.__page_count__ += 1
        return "autopage_%s" % str(self.__page_count__)

    def set_page_ref(self, page, ref):
        """Remember the page id assigned to *page* (a URL)."""
        self.__page_ref__[page] = ref

    def get_page_ref(self, page):
        """Return the page id recorded for *page*, or None."""
        return self.__page_ref__.get(page, None)

    def get_page_list(self):
        """Return the configured list of page URLs."""
        return self.__page_list__
def start(context):
    """
    On start we create a HARLog instance. You will have to adapt this to
    suit your actual needs of HAR generation. As it will probably be
    necessary to cluster logs by IPs or reset them from time to time.
    """
    context.dump_file = None
    # Guard clause: a destination filename is mandatory.
    if len(sys.argv) <= 1:
        raise ValueError(
            'Usage: -s "har_extractor.py filename" '
            '(- will output to stdout, filenames ending with .zhar '
            'will result in compressed har)'
        )
    context.dump_file = sys.argv[1]
    context.HARLog = _HARLog()
    # Connections already accounted for, so connect/ssl timings are only
    # reported once per server connection.
    context.seen_server = set()
def response(context, flow):
    """
    Called when a server response has been received. At the time of this
    message both a request and a response are present and completely done.

    Builds one HAR entry (and possibly a new HAR page) from the flow and
    appends it to context.HARLog.
    """
    # Values are converted from float seconds to int milliseconds later.
    ssl_time = -.001
    connect_time = -.001
    if flow.server_conn not in context.seen_server:
        # Calculate the connect_time for this server_conn. Afterwards add it to
        # seen list, in order to avoid the connect_time being present in entries
        # that use an existing connection.
        connect_time = (flow.server_conn.timestamp_tcp_setup -
                        flow.server_conn.timestamp_start)
        context.seen_server.add(flow.server_conn)
        if flow.server_conn.timestamp_ssl_setup is not None:
            # Get the ssl_time for this server_conn as the difference between
            # the start of the successful tcp setup and the successful ssl
            # setup. If no ssl setup has been made it is left as -1 since it
            # doesn't apply to this connection.
            ssl_time = (flow.server_conn.timestamp_ssl_setup -
                        flow.server_conn.timestamp_tcp_setup)
    # Calculate the raw timings from the different timestamps present in the
    # request and response object. For lack of a way to measure it dns timings
    # can not be calculated. The same goes for HAR blocked: MITMProxy will open
    # a server connection as soon as it receives the host and port from the
    # client connection. So the time spent waiting is actually spent waiting
    # between request.timestamp_end and response.timestamp_start thus it
    # correlates to HAR wait instead.
    timings_raw = {
        'send': flow.request.timestamp_end - flow.request.timestamp_start,
        'wait': flow.response.timestamp_start - flow.request.timestamp_end,
        'receive': flow.response.timestamp_end - flow.response.timestamp_start,
        'connect': connect_time,
        'ssl': ssl_time
    }
    # HAR timings are integers in ms, so we have to re-encode the raw timings to
    # that format.
    timings = dict([(k, int(1000 * v)) for k, v in six.iteritems(timings_raw)])
    # The full_time is the sum of all timings.
    # Timings set to -1 will be ignored as per spec.
    full_time = sum(v for v in timings.values() if v > -1)
    started_date_time = datetime.utcfromtimestamp(
        flow.request.timestamp_start).replace(tzinfo=pytz.timezone("UTC")).isoformat()
    # NOTE(review): this iterates flow.request.query directly and unpacks
    # (name, value) pairs; that matches mitmproxy's ODict-style query, but a
    # plain dict would yield keys only -- verify against the mitmproxy
    # version in use.
    request_query_string = [{"name": k, "value": v}
                            for k, v in flow.request.query or {}]
    response_body_size = len(flow.response.content)
    response_body_decoded_size = len(flow.response.get_decoded_content())
    # Per HAR spec, "compression" is bytes saved on the wire; negative when
    # the decoded body is larger than the transferred body.
    response_body_compression = response_body_decoded_size - response_body_size
    entry = HAR.entries({
        "startedDateTime": started_date_time,
        "time": full_time,
        "request": {
            "method": flow.request.method,
            "url": flow.request.url,
            "httpVersion": flow.request.http_version,
            "cookies": format_cookies(flow.request.cookies),
            "headers": format_headers(flow.request.headers),
            "queryString": request_query_string,
            "headersSize": len(str(flow.request.headers)),
            "bodySize": len(flow.request.content),
        },
        "response": {
            "status": flow.response.status_code,
            "statusText": flow.response.reason,
            "httpVersion": flow.response.http_version,
            "cookies": format_cookies(flow.response.cookies),
            "headers": format_headers(flow.response.headers),
            "content": {
                "size": response_body_size,
                "compression": response_body_compression,
                "mimeType": flow.response.headers.get('Content-Type', '')
            },
            "redirectURL": flow.response.headers.get('Location', ''),
            "headersSize": len(str(flow.response.headers)),
            "bodySize": response_body_size,
        },
        "cache": {},
        "timings": timings,
    })
    # If the current url is in the page list of context.HARLog or
    # does not have a referrer, we add it as a new pages object.
    is_new_page = (
        flow.request.url in context.HARLog.get_page_list() or
        flow.request.headers.get('Referer') is None
    )
    if is_new_page:
        page_id = context.HARLog.create_page_id()
        context.HARLog.add(
            HAR.pages({
                "startedDateTime": entry['startedDateTime'],
                "id": page_id,
                "title": flow.request.url,
                "pageTimings": {}
            })
        )
        context.HARLog.set_page_ref(flow.request.url, page_id)
        entry['pageref'] = page_id
    # Lookup the referer in the page_ref of context.HARLog to point this entries
    # pageref attribute to the right pages object, then set it as a new
    # reference to build a reference tree.
    elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:
        entry['pageref'] = context.HARLog.get_page_ref(
            flow.request.headers['Referer']
        )
        context.HARLog.set_page_ref(
            flow.request.headers['Referer'], entry['pageref']
        )
    context.HARLog.add(entry)
def done(context):
    """
    Called once on script shutdown, after any other events.

    Writes the collected HAR log to the destination given on the command
    line: pretty-printed to the mitmproxy event log for '-', compressed for
    filenames ending in '.zhar', plain JSON otherwise, then logs size and
    compression statistics.
    """
    import pprint
    import json
    json_dump = context.HARLog.json()
    compressed_json_dump = context.HARLog.compress()
    if context.dump_file == '-':
        context.log(pprint.pformat(json.loads(json_dump)))
    elif context.dump_file.endswith('.zhar'):
        # BUG FIX: the original used the Python-2-only `file()` builtin and
        # never closed the handle. `open` in a context manager is portable
        # and guarantees the file is flushed and closed. Compressed output
        # is written in binary mode.
        with open(context.dump_file, "wb") as f:
            f.write(compressed_json_dump)
    else:
        with open(context.dump_file, "w") as f:
            f.write(json_dump)
    context.log(
        "HAR log finished with %s bytes (%s bytes compressed)" % (
            len(json_dump), len(compressed_json_dump)
        )
    )
    context.log(
        "Compression rate is %s%%" % str(
            100. * len(compressed_json_dump) / len(json_dump)
        )
    )
def format_cookies(obj):
    """Render a cookie mapping as a list of HAR cookie dicts.

    Each value is expected to be a sequence whose first element is the
    cookie value. Falsy input yields an empty string (original contract).
    """
    if not obj:
        return ""
    cookies = []
    for name, value in obj.items():
        cookies.append({"name": name.strip(), "value": value[0]})
    return cookies
def format_headers(obj):
    """Render a header container (exposing .fields name/value pairs) as a
    list of HAR header dicts. Falsy input yields an empty string."""
    if not obj:
        return ""
    headers = []
    for name, value in obj.fields:
        headers.append({"name": name, "value": value})
    return headers
def print_attributes(obj, filter_string=None, hide_privates=False):
    """
    Useful helper method to quickly get all attributes of an object and its
    values.
    """
    for name in dir(obj):
        skip_private = hide_privates and "__" in name
        skip_filtered = filter_string is not None and filter_string not in name
        if skip_private or skip_filtered:
            continue
        # Read the attribute once so property getters run a single time.
        attr_value = getattr(obj, name)
        print("%s.%s" % ('obj', name), attr_value, type(attr_value))
| {
"content_hash": "0235dbe8bc2b34b4ca6030c4ebb103cf",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 86,
"avg_line_length": 36.72289156626506,
"alnum_prop": 0.5931758530183727,
"repo_name": "tdickers/mitmproxy",
"id": "d6b50c21311a622a16d42f35cd1a9e725f10c8b0",
"size": "9144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/har_extractor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "405"
},
{
"name": "CSS",
"bytes": "194361"
},
{
"name": "HTML",
"bytes": "2878"
},
{
"name": "JavaScript",
"bytes": "127316"
},
{
"name": "Python",
"bytes": "1248282"
},
{
"name": "Shell",
"bytes": "4087"
}
],
"symlink_target": ""
} |
"""Defines interface for DB access.
The underlying driver is loaded as a :class:`LazyPluggable`.
**Related Flags**
:db_backend: string to lookup in the list of LazyPluggable backends.
`sqlalchemy` is the only supported backend right now.
:sql_connection: string specifying the sqlalchemy connection to use, like:
`sqlite:///var/lib/nova/nova.sqlite`.
:enable_new_services: when adding a new service to the database, is it in the
pool of available hardware (Default: True)
"""
from nova import exception
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('db_backend', 'sqlalchemy',
                    'The backend to use for db')
flags.DEFINE_boolean('enable_new_services', True,
                     'Services to be added to the available pool on create')
flags.DEFINE_string('instance_name_template', 'instance-%08x',
                    'Template string to be used to generate instance names')
# BUG FIX: the help text below said "instance names" (copy-paste from the
# flag above); this flag generates volume names.
flags.DEFINE_string('volume_name_template', 'volume-%08x',
                    'Template string to be used to generate volume names')
flags.DEFINE_string('snapshot_name_template', 'snapshot-%08x',
                    'Template string to be used to generate snapshot names')
flags.DEFINE_string('vsa_name_template', 'vsa-%08x',
                    'Template string to be used to generate VSA names')

# Lazily-loaded database driver; resolved on first attribute access.
IMPL = utils.LazyPluggable(FLAGS['db_backend'],
                           sqlalchemy='nova.db.sqlalchemy.api')
class NoMoreBlades(exception.Error):
    """Raised when no more blades are available."""
class NoMoreNetworks(exception.Error):
    """Raised when no more networks are available."""
class NoMoreTargets(exception.Error):
    """No more available targets."""
    pass
###################
def service_destroy(context, instance_id):
    """Destroy the service or raise if it does not exist.

    NOTE(review): the parameter is named ``instance_id`` but it is forwarded
    to IMPL as the service id; a rename to ``service_id`` would be clearer.
    """
    return IMPL.service_destroy(context, instance_id)
def service_get(context, service_id):
    """Get a service or raise if it does not exist."""
    return IMPL.service_get(context, service_id)
def service_get_by_host_and_topic(context, host, topic):
    """Get a service by host it's on and topic it listens to."""
    return IMPL.service_get_by_host_and_topic(context, host, topic)
def service_get_all(context, disabled=None):
    """Get all services, optionally filtered by the disabled flag."""
    return IMPL.service_get_all(context, disabled)
def service_get_all_by_topic(context, topic):
    """Get all services for a given topic."""
    return IMPL.service_get_all_by_topic(context, topic)
def service_get_all_by_host(context, host):
    """Get all services for a given host."""
    return IMPL.service_get_all_by_host(context, host)
def service_get_all_compute_by_host(context, host):
    """Get all compute services for a given host."""
    return IMPL.service_get_all_compute_by_host(context, host)
def service_get_all_compute_sorted(context):
    """Get all compute services sorted by instance count.

    :returns: a list of (Service, instance_count) tuples.
    """
    return IMPL.service_get_all_compute_sorted(context)
def service_get_all_network_sorted(context):
    """Get all network services sorted by network count.

    :returns: a list of (Service, network_count) tuples.
    """
    return IMPL.service_get_all_network_sorted(context)
def service_get_all_volume_sorted(context):
    """Get all volume services sorted by volume count.

    :returns: a list of (Service, volume_count) tuples.
    """
    return IMPL.service_get_all_volume_sorted(context)
def service_get_by_args(context, host, binary):
    """Get the state of a service by node name and binary."""
    return IMPL.service_get_by_args(context, host, binary)
def service_create(context, values):
    """Create a service from the values dictionary."""
    return IMPL.service_create(context, values)
def service_update(context, service_id, values):
    """Set the given properties on a service and update it.

    Raises NotFound if service does not exist.
    """
    return IMPL.service_update(context, service_id, values)
###################
def compute_node_get(context, compute_id, session=None):
    """Get a computeNode or raise if it does not exist.

    NOTE(review): ``session`` is accepted but never forwarded to IMPL --
    confirm whether it can be removed or should be passed through.
    """
    return IMPL.compute_node_get(context, compute_id)
def compute_node_create(context, values):
    """Create a computeNode from the values dictionary."""
    return IMPL.compute_node_create(context, values)
def compute_node_update(context, compute_id, values):
    """Set the given properties on a computeNode and update it.

    Raises NotFound if computeNode does not exist.
    """
    return IMPL.compute_node_update(context, compute_id, values)
###################
def certificate_create(context, values):
    """Create a certificate from the values dictionary."""
    return IMPL.certificate_create(context, values)
def certificate_destroy(context, certificate_id):
    """Destroy the certificate or raise if it does not exist."""
    return IMPL.certificate_destroy(context, certificate_id)
def certificate_get_all_by_project(context, project_id):
    """Get all certificates for a project."""
    return IMPL.certificate_get_all_by_project(context, project_id)
def certificate_get_all_by_user(context, user_id):
    """Get all certificates for a user."""
    return IMPL.certificate_get_all_by_user(context, user_id)
def certificate_get_all_by_user_and_project(context, user_id, project_id):
    """Get all certificates for a user and project."""
    return IMPL.certificate_get_all_by_user_and_project(context,
                                                        user_id,
                                                        project_id)
def certificate_update(context, certificate_id, values):
    """Set the given properties on a certificate and update it.

    Raises NotFound if certificate does not exist.
    """
    return IMPL.certificate_update(context, certificate_id, values)
###################
def floating_ip_get(context, id):
    """Get a floating ip by id."""
    return IMPL.floating_ip_get(context, id)
def floating_ip_allocate_address(context, project_id):
    """Allocate free floating ip and return the address.

    Raises if one is not available.
    """
    return IMPL.floating_ip_allocate_address(context, project_id)
def floating_ip_create(context, values):
    """Create a floating ip from the values dictionary."""
    return IMPL.floating_ip_create(context, values)
def floating_ip_count_by_project(context, project_id):
    """Count floating ips used by project."""
    return IMPL.floating_ip_count_by_project(context, project_id)
def floating_ip_deallocate(context, address):
    """Deallocate a floating ip by address."""
    return IMPL.floating_ip_deallocate(context, address)
def floating_ip_destroy(context, address):
    """Destroy the floating_ip or raise if it does not exist."""
    return IMPL.floating_ip_destroy(context, address)
def floating_ip_disassociate(context, address):
    """Disassociate a floating ip from a fixed ip by address.

    :returns: the address of the existing fixed ip.
    """
    return IMPL.floating_ip_disassociate(context, address)
def floating_ip_fixed_ip_associate(context, floating_address,
                                   fixed_address, host):
    """Associate a floating ip to a fixed_ip by address."""
    return IMPL.floating_ip_fixed_ip_associate(context,
                                               floating_address,
                                               fixed_address,
                                               host)
def floating_ip_get_all(context):
    """Get all floating ips."""
    return IMPL.floating_ip_get_all(context)
def floating_ip_get_all_by_host(context, host):
    """Get all floating ips by host."""
    return IMPL.floating_ip_get_all_by_host(context, host)
def floating_ip_get_all_by_project(context, project_id):
    """Get all floating ips by project."""
    return IMPL.floating_ip_get_all_by_project(context, project_id)
def floating_ip_get_by_address(context, address):
    """Get a floating ip by address or raise if it doesn't exist."""
    return IMPL.floating_ip_get_by_address(context, address)
def floating_ip_update(context, address, values):
    """Update a floating ip by address or raise if it doesn't exist."""
    return IMPL.floating_ip_update(context, address, values)
def floating_ip_set_auto_assigned(context, address):
    """Set auto_assigned flag to floating ip."""
    return IMPL.floating_ip_set_auto_assigned(context, address)
####################
def migration_update(context, id, values):
    """Update a migration instance."""
    return IMPL.migration_update(context, id, values)
def migration_create(context, values):
    """Create a migration record."""
    return IMPL.migration_create(context, values)
def migration_get(context, migration_id):
    """Finds a migration by the id."""
    return IMPL.migration_get(context, migration_id)
def migration_get_by_instance_and_status(context, instance_uuid, status):
    """Finds a migration by the uuid of the instance it is migrating."""
    return IMPL.migration_get_by_instance_and_status(context, instance_uuid,
                                                     status)
####################
def fixed_ip_associate(context, address, instance_id, network_id=None,
                       reserved=False):
    """Associate fixed ip to instance.

    Raises if fixed ip is not available.
    """
    return IMPL.fixed_ip_associate(context, address, instance_id, network_id,
                                   reserved)
def fixed_ip_associate_pool(context, network_id, instance_id=None, host=None):
    """Find free ip in network and associate it to instance or host.

    Raises if one is not available.
    """
    return IMPL.fixed_ip_associate_pool(context, network_id,
                                        instance_id, host)
def fixed_ip_create(context, values):
    """Create a fixed ip from the values dictionary."""
    return IMPL.fixed_ip_create(context, values)
def fixed_ip_disassociate(context, address):
    """Disassociate a fixed ip from an instance by address."""
    return IMPL.fixed_ip_disassociate(context, address)
def fixed_ip_disassociate_all_by_timeout(context, host, time):
    """Disassociate old fixed ips from host."""
    return IMPL.fixed_ip_disassociate_all_by_timeout(context, host, time)
def fixed_ip_get_all(context):
    """Get all defined fixed ips."""
    return IMPL.fixed_ip_get_all(context)
def fixed_ip_get_all_by_instance_host(context, host):
    """Get all allocated fixed ips filtered by instance host."""
    return IMPL.fixed_ip_get_all_by_instance_host(context, host)
def fixed_ip_get_by_address(context, address):
    """Get a fixed ip by address or raise if it does not exist."""
    return IMPL.fixed_ip_get_by_address(context, address)
def fixed_ip_get_by_instance(context, instance_id):
    """Get fixed ips by instance or raise if none exist."""
    return IMPL.fixed_ip_get_by_instance(context, instance_id)
def fixed_ip_get_by_network_host(context, network_id, host):
    """Get fixed ip for a host in a network."""
    return IMPL.fixed_ip_get_by_network_host(context, network_id, host)
def fixed_ip_get_by_virtual_interface(context, vif_id):
    """Get fixed ips by virtual interface or raise if none exist."""
    return IMPL.fixed_ip_get_by_virtual_interface(context, vif_id)
def fixed_ip_get_network(context, address):
    """Get a network for a fixed ip by address."""
    return IMPL.fixed_ip_get_network(context, address)
def fixed_ip_update(context, address, values):
    """Update a fixed ip by address with the values dictionary."""
    return IMPL.fixed_ip_update(context, address, values)
####################
def virtual_interface_create(context, values):
    """Create a virtual interface record in the database."""
    return IMPL.virtual_interface_create(context, values)
def virtual_interface_update(context, vif_id, values):
    """Update a virtual interface record in the database."""
    return IMPL.virtual_interface_update(context, vif_id, values)
def virtual_interface_get(context, vif_id):
    """Gets a virtual interface from the table."""
    return IMPL.virtual_interface_get(context, vif_id)
def virtual_interface_get_by_address(context, address):
    """Gets a virtual interface from the table filtering on address."""
    return IMPL.virtual_interface_get_by_address(context, address)
def virtual_interface_get_by_uuid(context, vif_uuid):
    """Gets a virtual interface from the table filtering on vif uuid."""
    return IMPL.virtual_interface_get_by_uuid(context, vif_uuid)
def virtual_interface_get_by_fixed_ip(context, fixed_ip_id):
    """Gets the virtual interface fixed_ip is associated with."""
    return IMPL.virtual_interface_get_by_fixed_ip(context, fixed_ip_id)
def virtual_interface_get_by_instance(context, instance_id):
    """Gets all virtual_interfaces for instance."""
    return IMPL.virtual_interface_get_by_instance(context, instance_id)
def virtual_interface_get_by_instance_and_network(context, instance_id,
                                                  network_id):
    """Gets virtual interfaces for instance filtered by network."""
    return IMPL.virtual_interface_get_by_instance_and_network(context,
                                                              instance_id,
                                                              network_id)
def virtual_interface_get_by_network(context, network_id):
    """Gets all virtual interfaces on network."""
    return IMPL.virtual_interface_get_by_network(context, network_id)
def virtual_interface_delete(context, vif_id):
    """Delete virtual interface record from the database."""
    return IMPL.virtual_interface_delete(context, vif_id)
def virtual_interface_delete_by_instance(context, instance_id):
    """Delete virtual interface records associated with instance."""
    return IMPL.virtual_interface_delete_by_instance(context, instance_id)
####################
def instance_create(context, values):
    """Create an instance from the values dictionary."""
    return IMPL.instance_create(context, values)
def instance_data_get_for_project(context, project_id):
    """Get (instance_count, total_cores, total_ram) for project."""
    return IMPL.instance_data_get_for_project(context, project_id)
def instance_destroy(context, instance_id):
    """Destroy the instance or raise if it does not exist."""
    return IMPL.instance_destroy(context, instance_id)
def instance_stop(context, instance_id):
    """Stop the instance or raise if it does not exist."""
    return IMPL.instance_stop(context, instance_id)
def instance_get_by_uuid(context, uuid):
    """Get an instance by uuid or raise if it does not exist."""
    return IMPL.instance_get_by_uuid(context, uuid)
def instance_get(context, instance_id):
    """Get an instance or raise if it does not exist."""
    return IMPL.instance_get(context, instance_id)
def instance_get_all(context):
    """Get all instances."""
    return IMPL.instance_get_all(context)
def instance_get_all_by_filters(context, filters):
    """Get all instances that match all filters."""
    return IMPL.instance_get_all_by_filters(context, filters)
def instance_get_active_by_window(context, begin, end=None, project_id=None):
    """Get instances active during a certain time window.

    Specifying a project_id will filter for a certain project.
    """
    return IMPL.instance_get_active_by_window(context, begin, end, project_id)
def instance_get_active_by_window_joined(context, begin, end=None,
                                         project_id=None):
    """Get instances and joins active during a certain time window.

    Specifying a project_id will filter for a certain project.
    """
    return IMPL.instance_get_active_by_window_joined(context, begin, end,
                                                     project_id)
def instance_get_all_by_user(context, user_id):
    """Get all instances belonging to a user."""
    return IMPL.instance_get_all_by_user(context, user_id)
def instance_get_all_by_project(context, project_id):
    """Get all instances belonging to a project."""
    return IMPL.instance_get_all_by_project(context, project_id)
def instance_get_all_by_host(context, host):
    """Get all instances belonging to a host."""
    return IMPL.instance_get_all_by_host(context, host)
def instance_get_all_by_reservation(context, reservation_id):
    """Get all instances belonging to a reservation."""
    return IMPL.instance_get_all_by_reservation(context, reservation_id)
def instance_get_by_fixed_ip(context, address):
    """Get an instance for a fixed ip by address."""
    return IMPL.instance_get_by_fixed_ip(context, address)
def instance_get_by_fixed_ipv6(context, address):
    """Get an instance for a fixed ip by IPv6 address."""
    return IMPL.instance_get_by_fixed_ipv6(context, address)
def instance_get_fixed_addresses(context, instance_id):
    """Get the fixed ip address of an instance."""
    return IMPL.instance_get_fixed_addresses(context, instance_id)
def instance_get_fixed_addresses_v6(context, instance_id):
    """Get the fixed IPv6 addresses of an instance."""
    return IMPL.instance_get_fixed_addresses_v6(context, instance_id)
def instance_get_floating_address(context, instance_id):
    """Get the first floating ip address of an instance."""
    return IMPL.instance_get_floating_address(context, instance_id)
def instance_get_project_vpn(context, project_id):
    """Get a vpn instance by project or return None."""
    return IMPL.instance_get_project_vpn(context, project_id)
def instance_set_state(context, instance_id, state, description=None):
    """Set the state of an instance."""
    return IMPL.instance_set_state(context, instance_id, state, description)
def instance_update(context, instance_id, values):
    """Set the given properties on an instance and update it.

    Raises NotFound if instance does not exist.
    """
    return IMPL.instance_update(context, instance_id, values)
def instance_add_security_group(context, instance_id, security_group_id):
    """Associate the given security group with the given instance."""
    return IMPL.instance_add_security_group(context, instance_id,
                                            security_group_id)
def instance_remove_security_group(context, instance_id, security_group_id):
    """Disassociate the given security group from the given instance."""
    return IMPL.instance_remove_security_group(context, instance_id,
                                               security_group_id)
def instance_action_create(context, values):
    """Create an instance action from the values dictionary."""
    return IMPL.instance_action_create(context, values)
def instance_get_actions(context, instance_id):
    """Get instance actions by instance id."""
    return IMPL.instance_get_actions(context, instance_id)
###################
def key_pair_create(context, values):
    """Create a key_pair from the values dictionary."""
    return IMPL.key_pair_create(context, values)
def key_pair_destroy(context, user_id, name):
    """Destroy the key_pair or raise if it does not exist."""
    return IMPL.key_pair_destroy(context, user_id, name)
def key_pair_destroy_all_by_user(context, user_id):
    """Destroy all key_pairs by user."""
    return IMPL.key_pair_destroy_all_by_user(context, user_id)
def key_pair_get(context, user_id, name):
    """Get a key_pair or raise if it does not exist."""
    return IMPL.key_pair_get(context, user_id, name)
def key_pair_get_all_by_user(context, user_id):
    """Get all key_pairs by user."""
    return IMPL.key_pair_get_all_by_user(context, user_id)
####################
def network_associate(context, project_id, force=False):
"""Associate a free network to a project."""
return IMPL.network_associate(context, project_id, force)
def network_count(context):
"""Return the number of networks."""
return IMPL.network_count(context)
def network_count_allocated_ips(context, network_id):
"""Return the number of allocated non-reserved ips in the network."""
return IMPL.network_count_allocated_ips(context, network_id)
def network_count_available_ips(context, network_id):
"""Return the number of available ips in the network."""
return IMPL.network_count_available_ips(context, network_id)
def network_count_reserved_ips(context, network_id):
"""Return the number of reserved ips in the network."""
return IMPL.network_count_reserved_ips(context, network_id)
def network_create_safe(context, values):
"""Create a network from the values dict.
The network is only returned if the create succeeds. If the create violates
constraints because the network already exists, no exception is raised.
"""
return IMPL.network_create_safe(context, values)
def network_delete_safe(context, network_id):
"""Delete network with key network_id.
This method assumes that the network is not associated with any project
"""
return IMPL.network_delete_safe(context, network_id)
def network_create_fixed_ips(context, network_id, num_vpn_clients):
"""Create the ips for the network, reserving sepecified ips."""
return IMPL.network_create_fixed_ips(context, network_id, num_vpn_clients)
def network_disassociate(context, network_id):
"""Disassociate the network from project or raise if it does not exist."""
return IMPL.network_disassociate(context, network_id)
def network_disassociate_all(context):
"""Disassociate all networks from projects."""
return IMPL.network_disassociate_all(context)
def network_get(context, network_id):
"""Get an network or raise if it does not exist."""
return IMPL.network_get(context, network_id)
def network_get_all(context):
"""Return all defined networks."""
return IMPL.network_get_all(context)
def network_get_all_by_uuids(context, network_uuids, project_id=None):
"""Return networks by ids."""
return IMPL.network_get_all_by_uuids(context, network_uuids, project_id)
# pylint: disable=C0103
def network_get_associated_fixed_ips(context, network_id):
"""Get all network's ips that have been associated."""
return IMPL.network_get_associated_fixed_ips(context, network_id)
def network_get_by_bridge(context, bridge):
"""Get a network by bridge or raise if it does not exist."""
return IMPL.network_get_by_bridge(context, bridge)
def network_get_by_uuid(context, uuid):
"""Get a network by uuid or raise if it does not exist."""
return IMPL.network_get_by_uuid(context, uuid)
def network_get_by_cidr(context, cidr):
"""Get a network by cidr or raise if it does not exist"""
return IMPL.network_get_by_cidr(context, cidr)
def network_get_by_instance(context, instance_id):
"""Get a network by instance id or raise if it does not exist."""
return IMPL.network_get_by_instance(context, instance_id)
def network_get_all_by_instance(context, instance_id):
"""Get all networks by instance id or raise if none exist."""
return IMPL.network_get_all_by_instance(context, instance_id)
def network_get_all_by_host(context, host):
"""All networks for which the given host is the network host."""
return IMPL.network_get_all_by_host(context, host)
def network_get_index(context, network_id):
"""Get non-conflicting index for network."""
return IMPL.network_get_index(context, network_id)
def network_get_vpn_ip(context, network_id):
"""Get non-conflicting index for network."""
return IMPL.network_get_vpn_ip(context, network_id)
def network_set_cidr(context, network_id, cidr):
"""Set the Classless Inner Domain Routing for the network."""
return IMPL.network_set_cidr(context, network_id, cidr)
def network_set_host(context, network_id, host_id):
"""Safely set the host for network."""
return IMPL.network_set_host(context, network_id, host_id)
def network_update(context, network_id, values):
"""Set the given properties on an network and update it.
Raises NotFound if network does not exist.
"""
return IMPL.network_update(context, network_id, values)
###################
def queue_get_for(context, topic, physical_node_id):
"""Return a channel to send a message to a node with a topic."""
return IMPL.queue_get_for(context, topic, physical_node_id)
###################
def export_device_count(context):
"""Return count of export devices."""
return IMPL.export_device_count(context)
def export_device_create_safe(context, values):
"""Create an export_device from the values dictionary.
The device is not returned. If the create violates the unique
constraints because the shelf_id and blade_id already exist,
no exception is raised.
"""
return IMPL.export_device_create_safe(context, values)
###################
def iscsi_target_count_by_host(context, host):
"""Return count of export devices."""
return IMPL.iscsi_target_count_by_host(context, host)
def iscsi_target_create_safe(context, values):
"""Create an iscsi_target from the values dictionary.
The device is not returned. If the create violates the unique
constraints because the iscsi_target and host already exist,
no exception is raised.
"""
return IMPL.iscsi_target_create_safe(context, values)
###############
def auth_token_destroy(context, token_id):
"""Destroy an auth token."""
return IMPL.auth_token_destroy(context, token_id)
def auth_token_get(context, token_hash):
"""Retrieves a token given the hash representing it."""
return IMPL.auth_token_get(context, token_hash)
def auth_token_update(context, token_hash, values):
"""Updates a token given the hash representing it."""
return IMPL.auth_token_update(context, token_hash, values)
def auth_token_create(context, token):
"""Creates a new token."""
return IMPL.auth_token_create(context, token)
###################
def quota_create(context, project_id, resource, limit):
"""Create a quota for the given project and resource."""
return IMPL.quota_create(context, project_id, resource, limit)
def quota_get(context, project_id, resource):
"""Retrieve a quota or raise if it does not exist."""
return IMPL.quota_get(context, project_id, resource)
def quota_get_all_by_project(context, project_id):
"""Retrieve all quotas associated with a given project."""
return IMPL.quota_get_all_by_project(context, project_id)
def quota_update(context, project_id, resource, limit):
"""Update a quota or raise if it does not exist."""
return IMPL.quota_update(context, project_id, resource, limit)
def quota_destroy(context, project_id, resource):
"""Destroy the quota or raise if it does not exist."""
return IMPL.quota_destroy(context, project_id, resource)
def quota_destroy_all_by_project(context, project_id):
    """Destroy all quotas associated with a given project."""
    # BUG FIX: this wrapper previously delegated to
    # IMPL.quota_get_all_by_project, which only *reads* the quotas, so
    # calling this function silently destroyed nothing. Delegate to the
    # matching destroy implementation instead.
    return IMPL.quota_destroy_all_by_project(context, project_id)
###################
def volume_allocate_shelf_and_blade(context, volume_id):
    """Atomically allocate a free shelf and blade from the pool."""
    return IMPL.volume_allocate_shelf_and_blade(context, volume_id)


def volume_allocate_iscsi_target(context, volume_id, host):
    """Atomically allocate a free iscsi_target from the pool."""
    return IMPL.volume_allocate_iscsi_target(context, volume_id, host)


def volume_attached(context, volume_id, instance_id, mountpoint):
    """Ensure that a volume is set as attached."""
    return IMPL.volume_attached(context, volume_id, instance_id, mountpoint)


def volume_create(context, values):
    """Create a volume from the values dictionary."""
    return IMPL.volume_create(context, values)


def volume_data_get_for_project(context, project_id):
    """Get (volume_count, gigabytes) for project."""
    return IMPL.volume_data_get_for_project(context, project_id)


def volume_destroy(context, volume_id):
    """Destroy the volume or raise if it does not exist."""
    return IMPL.volume_destroy(context, volume_id)


def volume_detached(context, volume_id):
    """Ensure that a volume is set as detached."""
    return IMPL.volume_detached(context, volume_id)


def volume_get(context, volume_id):
    """Get a volume or raise if it does not exist."""
    return IMPL.volume_get(context, volume_id)


def volume_get_all(context):
    """Get all volumes."""
    return IMPL.volume_get_all(context)


def volume_get_all_by_host(context, host):
    """Get all volumes belonging to a host."""
    return IMPL.volume_get_all_by_host(context, host)


def volume_get_all_by_instance(context, instance_id):
    """Get all volumes belonging to an instance."""
    return IMPL.volume_get_all_by_instance(context, instance_id)


def volume_get_all_by_project(context, project_id):
    """Get all volumes belonging to a project."""
    return IMPL.volume_get_all_by_project(context, project_id)


def volume_get_by_ec2_id(context, ec2_id):
    """Get a volume by ec2 id."""
    return IMPL.volume_get_by_ec2_id(context, ec2_id)


def volume_get_instance(context, volume_id):
    """Get the instance that a volume is attached to."""
    return IMPL.volume_get_instance(context, volume_id)


def volume_get_shelf_and_blade(context, volume_id):
    """Get the shelf and blade allocated to the volume."""
    return IMPL.volume_get_shelf_and_blade(context, volume_id)


def volume_get_iscsi_target_num(context, volume_id):
    """Get the target num (tid) allocated to the volume."""
    return IMPL.volume_get_iscsi_target_num(context, volume_id)


def volume_update(context, volume_id, values):
    """Set the given properties on a volume and update it.

    Raises NotFound if volume does not exist.
    """
    return IMPL.volume_update(context, volume_id, values)


####################


def snapshot_create(context, values):
    """Create a snapshot from the values dictionary."""
    return IMPL.snapshot_create(context, values)


def snapshot_destroy(context, snapshot_id):
    """Destroy the snapshot or raise if it does not exist."""
    return IMPL.snapshot_destroy(context, snapshot_id)


def snapshot_get(context, snapshot_id):
    """Get a snapshot or raise if it does not exist."""
    return IMPL.snapshot_get(context, snapshot_id)


def snapshot_get_all(context):
    """Get all snapshots."""
    return IMPL.snapshot_get_all(context)


def snapshot_get_all_by_project(context, project_id):
    """Get all snapshots belonging to a project."""
    return IMPL.snapshot_get_all_by_project(context, project_id)


def snapshot_update(context, snapshot_id, values):
    """Set the given properties on a snapshot and update it.

    Raises NotFound if snapshot does not exist.
    """
    return IMPL.snapshot_update(context, snapshot_id, values)


####################


def block_device_mapping_create(context, values):
    """Create an entry of block device mapping."""
    return IMPL.block_device_mapping_create(context, values)


def block_device_mapping_update(context, bdm_id, values):
    """Update an entry of block device mapping."""
    return IMPL.block_device_mapping_update(context, bdm_id, values)


def block_device_mapping_update_or_create(context, values):
    """Update an entry of block device mapping.

    If it does not exist, create a new entry.
    """
    return IMPL.block_device_mapping_update_or_create(context, values)


def block_device_mapping_get_all_by_instance(context, instance_id):
    """Get all block device mappings belonging to an instance."""
    return IMPL.block_device_mapping_get_all_by_instance(context, instance_id)


def block_device_mapping_destroy(context, bdm_id):
    """Destroy the block device mapping."""
    return IMPL.block_device_mapping_destroy(context, bdm_id)


def block_device_mapping_destroy_by_instance_and_volume(context, instance_id,
                                                        volume_id):
    """Destroy the block device mapping or raise if it does not exist."""
    return IMPL.block_device_mapping_destroy_by_instance_and_volume(
        context, instance_id, volume_id)


####################


def security_group_get_all(context):
    """Get all security groups."""
    return IMPL.security_group_get_all(context)


def security_group_get(context, security_group_id):
    """Get security group by its id."""
    return IMPL.security_group_get(context, security_group_id)


def security_group_get_by_name(context, project_id, group_name):
    """Returns a security group with the specified name from a project."""
    return IMPL.security_group_get_by_name(context, project_id, group_name)


def security_group_get_by_project(context, project_id):
    """Get all security groups belonging to a project."""
    return IMPL.security_group_get_by_project(context, project_id)


def security_group_get_by_instance(context, instance_id):
    """Get security groups to which the instance is assigned."""
    return IMPL.security_group_get_by_instance(context, instance_id)


def security_group_exists(context, project_id, group_name):
    """Indicates if a group name exists in a project."""
    return IMPL.security_group_exists(context, project_id, group_name)


def security_group_create(context, values):
    """Create a new security group."""
    return IMPL.security_group_create(context, values)


def security_group_destroy(context, security_group_id):
    """Deletes a security group."""
    return IMPL.security_group_destroy(context, security_group_id)


def security_group_destroy_all(context):
    """Deletes all security groups."""
    return IMPL.security_group_destroy_all(context)


####################


def security_group_rule_create(context, values):
    """Create a new security group rule."""
    return IMPL.security_group_rule_create(context, values)


def security_group_rule_get_by_security_group(context, security_group_id):
    """Get all rules for a given security group."""
    return IMPL.security_group_rule_get_by_security_group(context,
                                                          security_group_id)


def security_group_rule_get_by_security_group_grantee(context,
                                                      security_group_id):
    """Get all rules that grant access to the given security group."""
    return IMPL.security_group_rule_get_by_security_group_grantee(context,
                                                         security_group_id)


def security_group_rule_destroy(context, security_group_rule_id):
    """Deletes a security group rule."""
    return IMPL.security_group_rule_destroy(context, security_group_rule_id)


def security_group_rule_get(context, security_group_rule_id):
    """Gets a security group rule."""
    return IMPL.security_group_rule_get(context, security_group_rule_id)


###################


def provider_fw_rule_create(context, rule):
    """Add a firewall rule at the provider level (all hosts & instances)."""
    return IMPL.provider_fw_rule_create(context, rule)


def provider_fw_rule_get_all(context):
    """Get all provider-level firewall rules."""
    return IMPL.provider_fw_rule_get_all(context)


def provider_fw_rule_get_all_by_cidr(context, cidr):
    """Get all provider-level firewall rules for the given cidr."""
    return IMPL.provider_fw_rule_get_all_by_cidr(context, cidr)


def provider_fw_rule_destroy(context, rule_id):
    """Delete a provider firewall rule from the database."""
    return IMPL.provider_fw_rule_destroy(context, rule_id)
###################
def user_get(context, id):
    """Get user by id."""
    return IMPL.user_get(context, id)


def user_get_by_uid(context, uid):
    """Get user by uid."""
    return IMPL.user_get_by_uid(context, uid)


def user_get_by_access_key(context, access_key):
    """Get user by access key."""
    return IMPL.user_get_by_access_key(context, access_key)


def user_create(context, values):
    """Create a new user."""
    return IMPL.user_create(context, values)


def user_delete(context, id):
    """Delete a user."""
    return IMPL.user_delete(context, id)


def user_get_all(context):
    """Get all users."""
    # NOTE(review): previous docstring said "Create a new user."
    # (copy-paste from user_create); reworded to match the delegate.
    return IMPL.user_get_all(context)


def user_add_role(context, user_id, role):
    """Add another global role for user."""
    return IMPL.user_add_role(context, user_id, role)


def user_remove_role(context, user_id, role):
    """Remove global role from user."""
    return IMPL.user_remove_role(context, user_id, role)


def user_get_roles(context, user_id):
    """Get global roles for user."""
    return IMPL.user_get_roles(context, user_id)


def user_add_project_role(context, user_id, project_id, role):
    """Add project role for user."""
    return IMPL.user_add_project_role(context, user_id, project_id, role)


def user_remove_project_role(context, user_id, project_id, role):
    """Remove project role from user."""
    return IMPL.user_remove_project_role(context, user_id, project_id, role)


def user_get_roles_for_project(context, user_id, project_id):
    """Return list of roles a user holds on project."""
    return IMPL.user_get_roles_for_project(context, user_id, project_id)


def user_update(context, user_id, values):
    """Update user."""
    return IMPL.user_update(context, user_id, values)


###################


def project_get(context, id):
    """Get project by id."""
    return IMPL.project_get(context, id)


def project_create(context, values):
    """Create a new project."""
    return IMPL.project_create(context, values)


def project_add_member(context, project_id, user_id):
    """Add user to project."""
    return IMPL.project_add_member(context, project_id, user_id)


def project_get_all(context):
    """Get all projects."""
    return IMPL.project_get_all(context)


def project_get_by_user(context, user_id):
    """Get all projects of which the given user is a member."""
    return IMPL.project_get_by_user(context, user_id)


def project_remove_member(context, project_id, user_id):
    """Remove the given user from the given project."""
    return IMPL.project_remove_member(context, project_id, user_id)


def project_update(context, project_id, values):
    """Update the given project with the given properties."""
    # NOTE(review): previous docstring ("Update Remove the given user from
    # the given project.") was garbled copy-paste; reworded.
    return IMPL.project_update(context, project_id, values)


def project_delete(context, project_id):
    """Delete project."""
    return IMPL.project_delete(context, project_id)


def project_get_networks(context, project_id, associate=True):
    """Return the network associated with the project.

    If associate is true, it will attempt to associate a new
    network if one is not found, otherwise it returns None.
    """
    return IMPL.project_get_networks(context, project_id, associate)


def project_get_networks_v6(context, project_id):
    """IPv6 counterpart of project_get_networks (delegates to IMPL)."""
    return IMPL.project_get_networks_v6(context, project_id)


###################


def console_pool_create(context, values):
    """Create console pool."""
    return IMPL.console_pool_create(context, values)


def console_pool_get(context, pool_id):
    """Get a console pool."""
    return IMPL.console_pool_get(context, pool_id)


def console_pool_get_by_host_type(context, compute_host, proxy_host,
                                  console_type):
    """Fetch a console pool for a given proxy host, compute host, and type."""
    return IMPL.console_pool_get_by_host_type(context,
                                              compute_host,
                                              proxy_host,
                                              console_type)


def console_pool_get_all_by_host_type(context, host, console_type):
    """Fetch all pools for given proxy host and type."""
    return IMPL.console_pool_get_all_by_host_type(context,
                                                  host,
                                                  console_type)


def console_create(context, values):
    """Create a console."""
    return IMPL.console_create(context, values)


def console_delete(context, console_id):
    """Delete a console."""
    return IMPL.console_delete(context, console_id)


def console_get_by_pool_instance(context, pool_id, instance_id):
    """Get console entry for a given instance and pool."""
    return IMPL.console_get_by_pool_instance(context, pool_id, instance_id)


def console_get_all_by_instance(context, instance_id):
    """Get consoles for a given instance."""
    return IMPL.console_get_all_by_instance(context, instance_id)


def console_get(context, console_id, instance_id=None):
    """Get a specific console (possibly on a given instance)."""
    return IMPL.console_get(context, console_id, instance_id)
##################
def instance_type_create(context, values):
    """Create a new instance type."""
    return IMPL.instance_type_create(context, values)


def instance_type_get_all(context, inactive=False):
    """Get all instance types."""
    return IMPL.instance_type_get_all(context, inactive)


def instance_type_get(context, id):
    """Get instance type by id."""
    return IMPL.instance_type_get(context, id)


def instance_type_get_by_name(context, name):
    """Get instance type by name."""
    return IMPL.instance_type_get_by_name(context, name)


def instance_type_get_by_flavor_id(context, id):
    """Get instance type by flavor id."""
    # NOTE(review): previous docstring said "by name" (copy-paste from
    # instance_type_get_by_name); reworded to match the delegate.
    return IMPL.instance_type_get_by_flavor_id(context, id)


def instance_type_destroy(context, name):
    """Delete a instance type."""
    return IMPL.instance_type_destroy(context, name)


def instance_type_purge(context, name):
    """Purges (removes) an instance type from DB.

    Use instance_type_destroy for most cases
    """
    return IMPL.instance_type_purge(context, name)


####################


def zone_create(context, values):
    """Create a new child Zone entry."""
    return IMPL.zone_create(context, values)


def zone_update(context, zone_id, values):
    """Update a child Zone entry."""
    return IMPL.zone_update(context, zone_id, values)


def zone_delete(context, zone_id):
    """Delete a child Zone."""
    return IMPL.zone_delete(context, zone_id)


def zone_get(context, zone_id):
    """Get a specific child Zone."""
    return IMPL.zone_get(context, zone_id)


def zone_get_all(context):
    """Get all child Zones."""
    return IMPL.zone_get_all(context)


####################


def instance_metadata_get(context, instance_id):
    """Get all metadata for an instance."""
    return IMPL.instance_metadata_get(context, instance_id)


def instance_metadata_delete(context, instance_id, key):
    """Delete the given metadata item."""
    IMPL.instance_metadata_delete(context, instance_id, key)


def instance_metadata_update(context, instance_id, metadata, delete):
    """Update metadata if it exists, otherwise create it."""
    IMPL.instance_metadata_update(context, instance_id, metadata, delete)


####################


def agent_build_create(context, values):
    """Create a new agent build entry."""
    return IMPL.agent_build_create(context, values)


def agent_build_get_by_triple(context, hypervisor, os, architecture):
    """Get agent build by hypervisor/OS/architecture triple."""
    return IMPL.agent_build_get_by_triple(context, hypervisor, os,
                                          architecture)


def agent_build_get_all(context):
    """Get all agent builds."""
    return IMPL.agent_build_get_all(context)


def agent_build_destroy(context, agent_update_id):
    """Destroy agent build entry."""
    IMPL.agent_build_destroy(context, agent_update_id)


def agent_build_update(context, agent_build_id, values):
    """Update agent build entry."""
    IMPL.agent_build_update(context, agent_build_id, values)


####################


def instance_type_extra_specs_get(context, instance_type_id):
    """Get all extra specs for an instance type."""
    return IMPL.instance_type_extra_specs_get(context, instance_type_id)


def instance_type_extra_specs_delete(context, instance_type_id, key):
    """Delete the given extra specs item."""
    IMPL.instance_type_extra_specs_delete(context, instance_type_id, key)


def instance_type_extra_specs_update_or_create(context, instance_type_id,
                                               extra_specs):
    """Create or update instance type extra specs. This adds or modifies the
    key/value pairs specified in the extra specs dict argument"""
    IMPL.instance_type_extra_specs_update_or_create(context, instance_type_id,
                                                    extra_specs)


##################


def volume_metadata_get(context, volume_id):
    """Get all metadata for a volume."""
    return IMPL.volume_metadata_get(context, volume_id)


def volume_metadata_delete(context, volume_id, key):
    """Delete the given metadata item."""
    IMPL.volume_metadata_delete(context, volume_id, key)


def volume_metadata_update(context, volume_id, metadata, delete):
    """Update metadata if it exists, otherwise create it."""
    IMPL.volume_metadata_update(context, volume_id, metadata, delete)


##################


def volume_type_create(context, values):
    """Create a new volume type."""
    return IMPL.volume_type_create(context, values)


def volume_type_get_all(context, inactive=False):
    """Get all volume types."""
    return IMPL.volume_type_get_all(context, inactive)


def volume_type_get(context, id):
    """Get volume type by id."""
    return IMPL.volume_type_get(context, id)


def volume_type_get_by_name(context, name):
    """Get volume type by name."""
    return IMPL.volume_type_get_by_name(context, name)


def volume_type_destroy(context, name):
    """Delete a volume type."""
    return IMPL.volume_type_destroy(context, name)


def volume_type_purge(context, name):
    """Purges (removes) a volume type from DB.

    Use volume_type_destroy for most cases
    """
    return IMPL.volume_type_purge(context, name)


####################


def volume_type_extra_specs_get(context, volume_type_id):
    """Get all extra specs for a volume type."""
    return IMPL.volume_type_extra_specs_get(context, volume_type_id)


def volume_type_extra_specs_delete(context, volume_type_id, key):
    """Delete the given extra specs item."""
    IMPL.volume_type_extra_specs_delete(context, volume_type_id, key)


def volume_type_extra_specs_update_or_create(context, volume_type_id,
                                             extra_specs):
    """Create or update volume type extra specs. This adds or modifies the
    key/value pairs specified in the extra specs dict argument"""
    IMPL.volume_type_extra_specs_update_or_create(context, volume_type_id,
                                                  extra_specs)


####################


def vsa_create(context, values):
    """Creates Virtual Storage Array record."""
    return IMPL.vsa_create(context, values)


def vsa_update(context, vsa_id, values):
    """Updates Virtual Storage Array record."""
    return IMPL.vsa_update(context, vsa_id, values)


def vsa_destroy(context, vsa_id):
    """Deletes Virtual Storage Array record."""
    return IMPL.vsa_destroy(context, vsa_id)


def vsa_get(context, vsa_id):
    """Get Virtual Storage Array record by ID."""
    return IMPL.vsa_get(context, vsa_id)


def vsa_get_all(context):
    """Get all Virtual Storage Array records."""
    return IMPL.vsa_get_all(context)


def vsa_get_all_by_project(context, project_id):
    """Get all Virtual Storage Array records by project ID."""
    return IMPL.vsa_get_all_by_project(context, project_id)
| {
"content_hash": "b114939591320b10ab2fd64f3da11c97",
"timestamp": "",
"source": "github",
"line_count": 1555,
"max_line_length": 79,
"avg_line_length": 30.54469453376206,
"alnum_prop": 0.6787797124028886,
"repo_name": "xushiwei/nova",
"id": "05d81d8b2aa8ff501b36ef90c92ca9894a5ddee3",
"size": "48274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/db/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7412"
},
{
"name": "Python",
"bytes": "4170357"
},
{
"name": "Shell",
"bytes": "33002"
}
],
"symlink_target": ""
} |
import numpy as np
from sklearn import metrics
import pandas
import tensorflow as tf
from tensorflow.models.rnn import rnn, rnn_cell
import skflow
### Training data
# Download dbpedia_csv.tar.gz from
# https://drive.google.com/folderview?id=0Bz8a_Dbh9Qhbfll6bVpmNUtUcFdjYmF2SEpmZUZUcVNiMUw1TWN6RDV3a0JHT3kxLVhVR2M
# Unpack: tar -xvf dbpedia_csv.tar.gz
# NOTE(review): the comment above refers to the DBpedia dataset, but the
# paths below load the better_reykjavik categories dataset -- confirm which
# dataset this experiment is meant to use.
print('Reading files')
# CSV layout: column 0 = class label, column 1 = document text (no header).
train = pandas.read_csv('../../../exporters/datasets/better_reykjavik/categories/train.csv', header=None)
X_train, y_train = train[1], train[0]
test = pandas.read_csv('../../../exporters/datasets/better_reykjavik/categories/test.csv', header=None)
X_test, y_test = test[1], test[0]
### Process vocabulary
print('Process vocabulary')
# Each document is mapped to a fixed-length sequence of word ids.
MAX_DOCUMENT_LENGTH = 100
vocab_processor = skflow.preprocessing.VocabularyProcessor(MAX_DOCUMENT_LENGTH)
# fit_transform learns the vocabulary from the training set; the test set is
# transformed with the same vocabulary (no further fitting).
X_train = np.array(list(vocab_processor.fit_transform(X_train)))
X_test = np.array(list(vocab_processor.transform(X_test)))
n_words = len(vocab_processor.vocabulary_)
print('Total words: %d' % n_words)
print(X_train)
### Models
# Dimensionality of the learned word embeddings (also the GRU hidden size).
EMBEDDING_SIZE = 50
print('Models')
def average_model(X, y):
    """Bag-of-words baseline: embed words, pool over the sequence dimension
    with tf.reduce_max, then logistic regression over classes.

    NOTE(review): despite the name, this takes the element-wise *maximum*
    over the sequence, not the average. It is defined but not used below
    (the classifier is built from rnn_model).
    """
    word_vectors = skflow.ops.categorical_variable(X, n_classes=n_words,
        embedding_size=EMBEDDING_SIZE, name='words')
    features = tf.reduce_max(word_vectors, reduction_indices=1)
    return skflow.models.logistic_regression(features, y)
def rnn_model(X, y):
    """Recurrent neural network model to predict from a sequence of word ids
    to a class (GRU over embeddings, logistic regression on final state)."""
    # Convert indexes of words into embeddings.
    # This creates an embeddings matrix of [n_words, EMBEDDING_SIZE] and then
    # maps word indexes of the sequence into [batch_size, sequence_length,
    # EMBEDDING_SIZE].
    word_vectors = skflow.ops.categorical_variable(X, n_classes=n_words,
        embedding_size=EMBEDDING_SIZE, name='words')
    # Split into a list of embeddings per word, removing the doc-length dim.
    # word_list is a list of MAX_DOCUMENT_LENGTH tensors, each
    # [batch_size, EMBEDDING_SIZE].
    word_list = skflow.ops.split_squeeze(1, MAX_DOCUMENT_LENGTH, word_vectors)
    # Create a Gated Recurrent Unit cell with hidden size of EMBEDDING_SIZE.
    cell = rnn_cell.GRUCell(EMBEDDING_SIZE)
    # Create an unrolled Recurrent Neural Network of length
    # MAX_DOCUMENT_LENGTH and pass word_list as inputs for each unit.
    _, encoding = rnn.rnn(cell, word_list, dtype=tf.float32)
    # Given the encoding of the RNN, take the final state (hidden state after
    # the last step) and pass it as features for logistic regression over
    # the output classes.
    return skflow.models.logistic_regression(encoding, y)
# GRU-based classifier; continue_training=True makes each fit() call resume
# from the previous weights rather than reinitializing.
classifier = skflow.TensorFlowEstimator(model_fn=rnn_model, n_classes=14,
    steps=1200, optimizer='Adam', learning_rate=0.01, continue_training=True)
# Train for `steps` (1200) more steps per iteration, evaluate on the test
# set, and checkpoint the model.
# NOTE(review): this loop never terminates on its own -- interrupt manually.
while True:
    classifier.fit(X_train, y_train, logdir='tf_temp_summaries/word_rnn_test_1')
    score = metrics.accuracy_score(y_test, classifier.predict(X_test))
    print('Accuracy: {0:f}'.format(score))
    classifier.save('tf_temp_models/word_rnn_test_1/')
| {
"content_hash": "c0b0887e68f52838261c983dc2f1265e",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 113,
"avg_line_length": 40.75,
"alnum_prop": 0.7332902809170164,
"repo_name": "rbjarnason/active-citizen",
"id": "95f4bec92dc6491c1c07ef142b937df013b5e974",
"size": "3718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "engine/classifications/experiments/skflow_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "17643"
},
{
"name": "HTML",
"bytes": "39428"
},
{
"name": "JavaScript",
"bytes": "226804"
},
{
"name": "Python",
"bytes": "42611"
}
],
"symlink_target": ""
} |
from django.contrib.auth.views import login
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
@sensitive_post_parameters()
@csrf_protect
@never_cache
def do_login(request):
    """Wrap Django's login view, forwarding the 'trust_root' request
    parameter into the template context.
    """
    # NOTE(review): request.REQUEST (merged GET+POST) is deprecated in
    # Django 1.7 and removed in 1.9 -- migrate to explicit
    # request.POST / request.GET lookups when upgrading.
    trust_root = request.REQUEST.get('trust_root', '')
    return login(request, extra_context={'trust_root':trust_root})
| {
"content_hash": "183fcbe41e7dea040905a3d265efa36c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 67,
"avg_line_length": 37.81818181818182,
"alnum_prop": 0.7980769230769231,
"repo_name": "RaduGatej/SensibleData-Platform",
"id": "29e46cf61c0d2d981b29bc3ec53e2d106f16dc4b",
"size": "416",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sensible_data_platform/accounts/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "174929"
},
{
"name": "HTML",
"bytes": "102473"
},
{
"name": "JavaScript",
"bytes": "166349"
},
{
"name": "Python",
"bytes": "226082"
},
{
"name": "Shell",
"bytes": "418"
}
],
"symlink_target": ""
} |
from random import choice
from shapeworld import util
from shapeworld.captions import Predicate, EntityType
class PragmaticalPredication(object):
    def __init__(self, agreeing, ambiguous=None, disagreeing=None, sub_predications=None):
        """Partition of entities by how they relate to a predication.

        agreeing: entities the predication holds for.
        ambiguous: entities whose agreement is undecided.
        disagreeing: entities the predication does not hold for.
        sub_predications: nested PragmaticalPredication instances.
        """
        # Defensive copies: callers' lists are never aliased.
        self.agreeing = list(agreeing)
        self.ambiguous = list() if ambiguous is None else list(ambiguous)
        self.disagreeing = list() if disagreeing is None else list(disagreeing)
        self.sub_predications = list() if sub_predications is None else list(sub_predications)
        # Derived views, sorted by entity id for deterministic ordering.
        self.entities = sorted((self.agreeing + self.ambiguous + self.disagreeing), key=(lambda e: e.id))
        self.not_disagreeing = sorted((self.agreeing + self.ambiguous), key=(lambda e: e.id))
    def __str__(self):
        # Summary of the partition sizes only, not the entities themselves.
        return '{{agreeing: {}, ambiguous: {}, disagreeing: {}}}'.format(len(self.agreeing), len(self.ambiguous), len(self.disagreeing))

    @property
    def num_entities(self):
        # Total number of entities across all three partitions.
        return len(self.entities)

    @property
    def num_agreeing(self):
        # Number of entities the predication holds for.
        return len(self.agreeing)

    @property
    def num_not_disagreeing(self):
        # Agreeing plus ambiguous entities.
        return len(self.not_disagreeing)
    def copy(self, reset=False, exclude_sub_predications=False):
        """Return a copy of this predication.

        reset: return a fresh predication with *all* entities agreeing
            (partition information is discarded).
        exclude_sub_predications: copy the partition but drop sub-predications.
        Default: copy the partition and recursively copy sub-predications
        (each recursive copy keeps its own sub-predications).
        """
        if reset:
            return PragmaticalPredication(agreeing=self.entities)
        elif exclude_sub_predications:
            return PragmaticalPredication(agreeing=self.agreeing, ambiguous=self.ambiguous, disagreeing=self.disagreeing)
        else:
            return PragmaticalPredication(agreeing=self.agreeing, ambiguous=self.ambiguous, disagreeing=self.disagreeing, sub_predications=[predication.copy(exclude_sub_predications=False) for predication in self.sub_predications])

    def empty(self):
        # "Empty" means no ambiguity and no disagreement; agreeing entities
        # may still be present.
        return len(self.ambiguous) == 0 and len(self.disagreeing) == 0
def implies(self, predicate, **kwargs):
assert isinstance(predicate, Predicate)
return util.all_and_any(predicate.pred_agreement(entity=entity, **kwargs) for entity in self.agreeing) and all(not predicate.pred_disagreement(entity=entity, **kwargs) for entity in self.ambiguous)
def implied_by(self, predicate, **kwargs):
assert isinstance(predicate, Predicate)
return len(self.agreeing) > 0 and any(predicate.pred_agreement(entity=entity, **kwargs) for entity in self.entities) and util.all_and_any(predicate.pred_disagreement(entity=entity, **kwargs) for entity in self.disagreeing) and all(not predicate.pred_agreement(entity=entity, **kwargs) for entity in self.ambiguous)
def tautological(self, predicate, **kwargs):
assert isinstance(predicate, Predicate)
return util.all_and_any(predicate.pred_agreement(entity=entity, **kwargs) for entity in self.agreeing) and \
all(predicate.pred_disagreement(entity=entity, predication=self, **kwargs) for entity in self.disagreeing)
def contradictory(self, predicate, **kwargs):
assert isinstance(predicate, Predicate)
return util.all_and_any(predicate.pred_disagreement(entity=entity, **kwargs) for entity in self.agreeing)
def get_sub_predications(self):
for predication in self.sub_predications:
yield predication
yield from predication.get_sub_predications()
# def redundant_sub_predications(self):
# for m in range(len(self.sub_predications)):
# if self.equals(other=self.sub_predications[m]):
# return True
# # not recursive
# # if self.sub_predications[m].redundant_sub_predications():
# # return True
# for n in range(m + 1, len(self.sub_predications)):
# if self.sub_predications[m].equals(other=self.sub_predications[n]):
# return True
# return False
def apply(self, predicate, **kwargs):
assert isinstance(predicate, Predicate)
assert not isinstance(predicate, EntityType) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Insert in sorted order ideally !!!
for n in reversed(range(len(self.agreeing))):
if predicate.pred_disagreement(entity=self.agreeing[n], **kwargs):
entity = self.agreeing.pop(n)
self.not_disagreeing.remove(entity)
self.disagreeing.append(entity)
elif not predicate.pred_agreement(entity=self.agreeing[n], **kwargs):
entity = self.agreeing.pop(n)
self.ambiguous.append(entity)
for n in reversed(range(len(self.ambiguous))):
if predicate.pred_disagreement(entity=self.ambiguous[n], **kwargs):
entity = self.ambiguous.pop(n)
self.not_disagreeing.remove(entity)
self.disagreeing.append(entity)
assert self.num_agreeing <= self.num_not_disagreeing <= self.num_entities
assert self.agreeing == sorted(self.agreeing, key=(lambda e: e.id))
def sub_predication(self, reset=False, predication=None):
assert not reset or predication is None
if predication is None:
predication = self.copy(reset=reset)
self.sub_predications.append(predication)
return predication
def get_sub_predication(self, index):
if index < len(self.sub_predications):
return self.sub_predications[index]
else:
return None
def __eq__(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
return all(entity1 == entity2 for entity1, entity2 in zip(self.agreeing, other.agreeing)) and all(entity in other.ambiguous for entity in self.ambiguous) and all(entity in self.ambiguous for entity in other.ambiguous) # since ambiguous not in order
def __le__(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
return all(entity in other.agreeing for entity in self.agreeing) and all(entity in other.not_disagreeing for entity in self.ambiguous)
def __ge__(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
return all(entity in self.agreeing for entity in other.agreeing) and all(entity in self.not_disagreeing for entity in other.ambiguous)
def disjoint(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
return (len(self.agreeing) == 0 or len(other.agreeing) == 0) or all(entity not in other.not_disagreeing for entity in self.agreeing) and all(entity not in self.not_disagreeing for entity in other.agreeing)
def equals(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
return (len(self.agreeing) > 0 or len(other.agreeing) > 0) and all(entity in other.not_disagreeing for entity in self.agreeing) and all(entity in self.not_disagreeing for entity in other.agreeing)
def union(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
agreeing = list()
ambiguous = list()
disagreeing = list()
for entity in self.entities:
if entity in self.agreeing or entity in other.agreeing:
agreeing.append(entity)
elif entity in self.disagreeing and entity in other.disagreeing:
disagreeing.append(entity)
else:
ambiguous.append(entity)
return PragmaticalPredication(agreeing=agreeing, ambiguous=ambiguous, disagreeing=disagreeing)
def intersect(self, other):
assert all(entity1 == entity2 for entity1, entity2 in zip(self.entities, other.entities))
agreeing = list()
ambiguous = list()
disagreeing = list()
for entity in self.entities:
if entity in self.agreeing and entity in other.agreeing:
agreeing.append(entity)
elif entity in self.disagreeing or entity in other.disagreeing:
disagreeing.append(entity)
else:
ambiguous.append(entity)
return PragmaticalPredication(agreeing=agreeing, ambiguous=ambiguous, disagreeing=disagreeing)
| {
"content_hash": "450f862b3d71dce40df665a78a8d67ca",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 322,
"avg_line_length": 52.35668789808917,
"alnum_prop": 0.6684914841849149,
"repo_name": "AlexKuhnle/ShapeWorld",
"id": "2fb52a87a4913c463ce4a602f755b7ae13c04753",
"size": "8220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shapeworld/captions/pragmatical_predication.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "587098"
},
{
"name": "Shell",
"bytes": "8728"
}
],
"symlink_target": ""
} |
class FieldError(Exception):
    """Raised for errors involving model fields."""
class ResourceNotFoundError(Exception):
    """Raised when a requested resource cannot be found."""
class DynamoPreconditionError(Exception):
    """Raised when a DynamoDB precondition is not met."""
class ItemAttributeTypeError(Exception):
    """Raised when an item attribute has an unexpected type."""
class NotUniqueError(Exception):
    """Raised when a uniqueness constraint is violated."""
class InvalidQueryError(Exception):
    """Raised when a query is malformed or unsupported."""
class ValidationError(Exception):
    """Raised when validation of a value or document fails."""
class DoesNotExist(Exception):
    """Raised when a lookup matches no existing object."""
class AttributeIsRequired(Exception):
    """Raised when a required attribute is missing."""
| {
"content_hash": "2b2cdf6d50ae6ce3a46778ceb2b6e233",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 41,
"avg_line_length": 12.382352941176471,
"alnum_prop": 0.7482185273159145,
"repo_name": "jcoc611/DynamoEngine",
"id": "608b5e7d1f3a2af130aa2237129ad124218f2588",
"size": "443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dynamoengine/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1032"
},
{
"name": "Python",
"bytes": "27888"
}
],
"symlink_target": ""
} |
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
class NetworkInterface(resource.Resource):
    """AWS::EC2::NetworkInterface resource implemented on a Neutron port."""

    # Template property names exposed by this resource.
    PROPERTIES = (
        DESCRIPTION, GROUP_SET, PRIVATE_IP_ADDRESS, SOURCE_DEST_CHECK,
        SUBNET_ID, TAGS,
    ) = (
        'Description', 'GroupSet', 'PrivateIpAddress', 'SourceDestCheck',
        'SubnetId', 'Tags',
    )

    # Keys of each entry in the Tags list property.
    _TAG_KEYS = (
        TAG_KEY, TAG_VALUE,
    ) = (
        'Key', 'Value',
    )

    # Resolvable attribute names.
    ATTRIBUTES = (
        PRIVATE_IP_ADDRESS_ATTR,
    ) = (
        'PrivateIpAddress',
    )

    properties_schema = {
        DESCRIPTION: properties.Schema(
            properties.Schema.STRING,
            _('Description for this interface.')
        ),
        GROUP_SET: properties.Schema(
            properties.Schema.LIST,
            _('List of security group IDs associated with this interface.'),
            update_allowed=True
        ),
        PRIVATE_IP_ADDRESS: properties.Schema(
            properties.Schema.STRING
        ),
        SOURCE_DEST_CHECK: properties.Schema(
            properties.Schema.BOOLEAN,
            _('Flag indicating if traffic to or from instance is validated.'),
            implemented=False
        ),
        SUBNET_ID: properties.Schema(
            properties.Schema.STRING,
            _('Subnet ID to associate with this interface.'),
            required=True,
            constraints=[
                constraints.CustomConstraint('neutron.subnet')
            ]
        ),
        TAGS: properties.Schema(
            properties.Schema.LIST,
            schema=properties.Schema(
                properties.Schema.MAP,
                _('List of tags associated with this interface.'),
                schema={
                    TAG_KEY: properties.Schema(
                        properties.Schema.STRING,
                        required=True
                    ),
                    TAG_VALUE: properties.Schema(
                        properties.Schema.STRING,
                        required=True
                    ),
                },
                implemented=False,
            )
        ),
    }

    attributes_schema = {
        # NOTE(review): keyed by PRIVATE_IP_ADDRESS rather than
        # PRIVATE_IP_ADDRESS_ATTR; both resolve to 'PrivateIpAddress', so the
        # resulting schema is identical.
        PRIVATE_IP_ADDRESS: attributes.Schema(
            _('Private IP address of the network interface.'),
            type=attributes.Schema.STRING
        ),
    }

    default_client_name = 'neutron'

    @staticmethod
    def network_id_from_subnet_id(neutronclient, subnet_id):
        """Returns the network id that owns the given subnet.

        NOTE(review): handle_create calls the client plugin's
        network_id_from_subnet_id instead of this staticmethod -- presumably
        equivalent; confirm whether this helper is still needed.
        """
        subnet_info = neutronclient.show_subnet(subnet_id)
        return subnet_info['subnet']['network_id']

    def __init__(self, name, json_snippet, stack):
        super(NetworkInterface, self).__init__(name, json_snippet, stack)
        # Cache for the port's first fixed IP, filled lazily on attribute
        # resolution (see _get_fixed_ip_address).
        self.fixed_ip_address = None

    def handle_create(self):
        """Creates the Neutron port backing this interface."""
        subnet_id = self.properties[self.SUBNET_ID]
        network_id = self.client_plugin().network_id_from_subnet_id(
            subnet_id)

        fixed_ip = {'subnet_id': subnet_id}
        # Pin the requested private IP when one was given.
        if self.properties[self.PRIVATE_IP_ADDRESS]:
            fixed_ip['ip_address'] = self.properties[self.PRIVATE_IP_ADDRESS]

        props = {
            'name': self.physical_resource_name(),
            'admin_state_up': True,
            'network_id': network_id,
            'fixed_ips': [fixed_ip]
        }

        # if without group_set, don't set the 'security_groups' property,
        # neutron will create the port with the 'default' securityGroup,
        # if has the group_set and the value is [], which means to create the
        # port without securityGroup(same as the behavior of neutron)
        if self.properties[self.GROUP_SET] is not None:
            sgs = self.client_plugin().get_secgroup_uuids(
                self.properties.get(self.GROUP_SET))
            props['security_groups'] = sgs
        port = self.client().create_port({'port': props})['port']
        self.resource_id_set(port['id'])

    def handle_delete(self):
        """Deletes the backing port, tolerating an already-deleted port."""
        if self.resource_id is None:
            return

        try:
            self.client().delete_port(self.resource_id)
        except Exception as ex:
            self.client_plugin().ignore_not_found(ex)

    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        """Updates the port's security groups when GroupSet changed."""
        if prop_diff:
            update_props = {}
            if self.GROUP_SET in prop_diff:
                group_set = prop_diff.get(self.GROUP_SET)
                # update should keep the same behavior as creation,
                # if without the GroupSet in update template, we should
                # update the security_groups property to referent
                # the 'default' security group
                if group_set is not None:
                    sgs = self.client_plugin().get_secgroup_uuids(group_set)
                else:
                    sgs = self.client_plugin().get_secgroup_uuids(['default'])
                update_props['security_groups'] = sgs

                self.client().update_port(self.resource_id,
                                          {'port': update_props})

    def _get_fixed_ip_address(self, ):
        """Lazily fetches and caches the port's first fixed IP address."""
        if self.fixed_ip_address is None:
            try:
                port = self.client().show_port(self.resource_id)['port']
                if port['fixed_ips'] and len(port['fixed_ips']) > 0:
                    self.fixed_ip_address = port['fixed_ips'][0]['ip_address']
            except Exception as ex:
                # Port vanished: leave the cache empty rather than fail.
                self.client_plugin().ignore_not_found(ex)

        return self.fixed_ip_address

    def _resolve_attribute(self, name):
        """Resolves the PrivateIpAddress attribute; other names yield None."""
        if name == self.PRIVATE_IP_ADDRESS:
            return self._get_fixed_ip_address()
def resource_mapping():
    """Maps the AWS resource type name to its implementing class."""
    return {'AWS::EC2::NetworkInterface': NetworkInterface}
| {
"content_hash": "e5c710a6507337e9f962ed8add65e558",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 78,
"avg_line_length": 34.467065868263475,
"alnum_prop": 0.5533356497567755,
"repo_name": "cryptickp/heat",
"id": "a4d7caa5decd23265f9d58dda81f9c6717361f66",
"size": "6331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/engine/resources/aws/ec2/network_interface.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6610494"
},
{
"name": "Shell",
"bytes": "33316"
}
],
"symlink_target": ""
} |
"""Utilities for metrics on Hamiltonian."""
import tensorflow as tf
from qhbmlib.inference import qnn_utils
from qhbmlib.inference import ebm_utils
from qhbmlib.models import hamiltonian
def density_matrix(model: hamiltonian.Hamiltonian):
  r"""Returns the thermal state of the given modular Hamiltonian.

  For $K_{\theta\phi} = U_\phi K_\theta U_\phi^\dagger$, the thermal state is
  $$
  \rho = Z_\theta^{-1} U_\phi e^{-K_\theta} U_\phi^\dagger
       = U_\phi P_\theta U_\phi^\dagger,
  $$
  where $P_\theta$ is the diagonal matrix with entries
  $\langle x|P_\theta|x\rangle = p_\theta(x)$.  Written entrywise,
  $$
  \rho_{ij} = \sum_k p_\theta(k)\,\langle i|U_\phi|k\rangle
              \langle k|U_\phi^\dagger|j\rangle,
  $$
  which is exactly the einsum evaluated below.

  Args:
    model: Modular Hamiltonian whose corresponding thermal state is the density
      matrix to be calculated.
  """
  u_phi = qnn_utils.unitary(model.circuit)
  u_phi_dagger = tf.linalg.adjoint(u_phi)
  diagonal = tf.cast(ebm_utils.probabilities(model.energy), tf.complex64)
  return tf.einsum("k,ik,kj->ij", diagonal, u_phi, u_phi_dagger)
def fidelity(model: hamiltonian.Hamiltonian, sigma: tf.Tensor):
  r"""Calculate the fidelity between a QHBM and a density matrix.

  The fidelity between two quantum states $\rho$ and $\sigma$ is
  $$
  F(\rho, \sigma) = \left(\text{tr}\sqrt{\sqrt{\rho}\sigma\sqrt{\rho}}\right)^2.
  $$
  With $\rho = U_\phi K_\theta U_\phi^\dagger$ represented by the QHBM, the
  unitaries can be pulled out of the matrix square root (definition of matrix
  functions) and cancelled under the trace (cyclicity), leaving
  $$
  F(\rho, \sigma) = \left(\text{tr}\sqrt{\omega}\right)^2, \qquad
  \omega = \sqrt{K_\theta}\,U_\phi^\dagger\sigma U_\phi\sqrt{K_\theta}.
  $$
  $\omega$ has the form $ABA$ with $A, B$ Hermitian, hence is itself
  Hermitian, so a Hermitian eigensolver applies.  If $D$ holds the
  eigenvalues of $\omega$, then
  $$
  F(\rho, \sigma) = \left(\sum_i\sqrt{D_{ii}}\right)^2.
  $$

  Args:
    model: Modular Hamiltonian whose corresponding thermal state is to be
      compared to `sigma`, as the left density matrix in fidelity.
    sigma: 2-D `tf.Tensor` of a numeric dtype representing the right
      density matrix in the fidelity calculation.

  Returns:
    A scalar `tf.Tensor` which is the fidelity between the density matrix
    represented by this QHBM and `sigma`.
  """
  sigma_complex = tf.cast(sigma, tf.complex64)
  probs = tf.cast(ebm_utils.probabilities(model.energy), tf.complex64)
  sqrt_probs = tf.sqrt(probs)
  u_phi = qnn_utils.unitary(model.circuit)
  # omega[a, d] = sqrt(K)[a] * (U^dagger sigma U)[a, d] * sqrt(K)[d]
  omega = tf.einsum("a,ab,bc,cd,d->ad", sqrt_probs, tf.linalg.adjoint(u_phi),
                    sigma_complex, u_phi, sqrt_probs)
  eigenvalues = tf.linalg.eigvalsh(omega)
  trace_of_sqrt = tf.math.reduce_sum(tf.math.sqrt(eigenvalues))
  return trace_of_sqrt**2
| {
"content_hash": "029a1702b16e96dda102763584e17596",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 80,
"avg_line_length": 39.8235294117647,
"alnum_prop": 0.6361398325947809,
"repo_name": "google/qhbm-library",
"id": "24461b2032b42eff8194f2ed42244a39bbf38639",
"size": "4754",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "qhbmlib/inference/qhbm_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "332789"
},
{
"name": "Shell",
"bytes": "2991"
}
],
"symlink_target": ""
} |
import os.path
# Django
from django.test import TestCase
# Third-party apps
from lxml import etree # http://lxml.de/
# Internal
from .models import Menu, Meal, Music
from xmlmapping.models import Mapping
# Raw contents of the restaurant.xml fixture shipped next to this package.
XML = open(os.path.join(os.path.dirname(__file__), '../restaurant.xml')).read()
# Parsed lxml element tree of the same fixture.
DOC = etree.fromstring(XML)
class LoadXMLTestCase(TestCase):
    """Verifies that Mapping.load_xml imports the restaurant XML fixture."""

    def setUp(self):
        # Fetch the mapping under test from the fixture data.
        self.map = Mapping.objects.get(label='Restaurant Mapping')

    def test_load_xml(self):
        self.map.load_xml(XML)
        # Row counts per mapped model after the import.
        for model, expected_count in ((Menu, 2), (Meal, 2), (Music, 3)):
            self.assertEqual(model.objects.count(), expected_count)
        # Spot-check individual imported values.
        self.assertEqual(Menu.objects.get(pk=2).label, u'French Toast $4.50')
        self.assertEqual(Meal.objects.get(pk=1).title, u'Belgian Waffles')
        self.assertEqual(Music.objects.get(pk=1).singer, u'Bryan Adams')
| {
"content_hash": "442eaa55a9b190a5a7a4e07fdb05471d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 79,
"avg_line_length": 28.515151515151516,
"alnum_prop": 0.6546227417640808,
"repo_name": "YAmikep/django-xmlmapping",
"id": "aa721dde625d7d12a615b03eb3c3bea19a5fd1d2",
"size": "958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/restaurant/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "53575"
}
],
"symlink_target": ""
} |
import os
from django.conf import settings
from django.test import TestCase
import shutil
from filebrowser.functions import get_version_path, version_generator
__author__ = 'andriy'  # module author metadata
class FunctionsTest(TestCase):
    """Tests for filebrowser.functions get_version_path/version_generator."""

    def __init__(self, methodName='runTest'):
        # Fixed: super() must be given the class being defined, not its base;
        # super(TestCase, self) started the MRO search *after* TestCase and
        # skipped its initializer.
        super(FunctionsTest, self).__init__(methodName)

    def setUp(self):
        # Recreate MEDIA_ROOT/2 and seed it with a fresh copy of rss.gif.
        shutil.rmtree(os.path.join(settings.MEDIA_ROOT, "2"), True)
        os.mkdir(os.path.join(settings.MEDIA_ROOT, "2"))
        shutil.copy(os.path.join(settings.MEDIA_ROOT, "rss.gif"),
                    os.path.join(settings.MEDIA_ROOT, "2"))

    def test_get_version_path(self):
        """Maps originals to version paths and back when files exist."""
        # Generate the version files first: by default get_version_path
        # checks the filesystem.
        for version in settings.FILEBROWSER_VERSIONS:
            version_generator("2/rss.gif", version)

        for version in settings.FILEBROWSER_VERSIONS:
            path = get_version_path("2/rss.gif", version)
            ends = "rss_" + version + ".gif"
            print("Path [%s] have to ends with [%s], version [%s]\n" % (path, ends, version))
            self.assertTrue(path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (path, ends,))
            # Converting from one version to another must also work.
            for version2 in settings.FILEBROWSER_VERSIONS:
                path2 = get_version_path(path, version2)
                ends = "rss_" + version2 + ".gif"
                print("Path [%s] have to ends with [%s], version [%s->%s]\n" % (path2, ends, version, version2))
                self.assertTrue(path2.endswith(ends),
                                "Path [%s] is not ends with [%s]" % (path2, ends,))
            # Mapping a version path back to the original file.
            ends = "rss.gif"
            orig_path = get_version_path(path, None)
            print("Path [%s] have to ends with [%s], version [%s->]\n" % (orig_path, ends, version))
            # Fixed: failure message used to interpolate the stale inner-loop
            # variable path2 instead of orig_path.
            self.assertTrue(orig_path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (orig_path, ends,))
            orig_path = get_version_path(path)
            print("Path [%s] have to ends with [%s], version [%s->]\n" % (orig_path, ends, version))
            self.assertTrue(orig_path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (orig_path, ends,))

    def test_get_version_path_do_not_check_file(self):
        """With check_file=False paths are computed without touching disk."""
        for version in settings.FILEBROWSER_VERSIONS:
            path = get_version_path("3/rss.gif", version, check_file=False)
            ends = "3/rss_" + version + ".gif"
            print("Path [%s] have to ends with [%s], version [%s]\n" % (path, ends, version))
            self.assertTrue(path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (path, ends,))
            for version2 in settings.FILEBROWSER_VERSIONS:
                path2 = get_version_path(path, version2, check_file=False)
                ends = "3/rss_" + version2 + ".gif"
                print("Path [%s] have to ends with [%s], version [%s->%s]\n" % (path2, ends, version, version2))
                self.assertTrue(path2.endswith(ends),
                                "Path [%s] is not ends with [%s]" % (path2, ends,))
            ends = "3/rss.gif"
            orig_path = get_version_path(path, version_prefix='', check_file=False)
            print("Path [%s] have to ends with [%s], version [%s->]\n" % (orig_path, ends, version))
            # Fixed: failure message interpolates orig_path (was path2).
            self.assertTrue(orig_path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (orig_path, ends,))
            orig_path = get_version_path(path, check_file=False)
            print("Path [%s] have to ends with [%s], version [%s->]\n" % (orig_path, ends, version))
            self.assertTrue(orig_path.endswith(ends),
                            "Path [%s] is not ends with [%s]" % (orig_path, ends,))
| {
"content_hash": "290da1680f9f60d7bdc7b11ac01c27e9",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 112,
"avg_line_length": 50.32876712328767,
"alnum_prop": 0.5416439847577572,
"repo_name": "agushuley/gu-django-filebrowser-no-grappelli-test",
"id": "35ea636a6176acfead6d9dad8133d0d5c4ab08a6",
"size": "3674",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/tests/tests_functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9054"
}
],
"symlink_target": ""
} |
# Use setuptools when available; fall back to distutils on minimal installs.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Distribution metadata for the olleh ucloud biz API client package.
setup(
    name = 'ucloudapi',
    version = '0.1',
    description = "olleh ucloud biz API Client",
    long_description = "Python interface ucloud biz API",
    author = "ChangBum Hong",
    author_email = "hongiiv@gmail.com",
    url = "https://github.com/hongiiv/CloudStack_for_ucloudbiz",
    packages = [ 'ucloudapi' ],
    license = 'MIT',
    platforms = 'Posix; MacOS X; Windows',
)
| {
"content_hash": "bb0f496da1445c327b36198e65aee369",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 77,
"avg_line_length": 33.23529411764706,
"alnum_prop": 0.5663716814159292,
"repo_name": "hongiiv/CloudStack_for_ucloudbiz",
"id": "bff1badcfcfc54fb800658ae0a1bee68d436f529",
"size": "1684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "421968"
},
{
"name": "Shell",
"bytes": "1048"
}
],
"symlink_target": ""
} |
"""Tests for adding tags via CLI"""
import logging
from housekeeper.cli.add import tag_cmd
def test_add_tags_no_args(populated_context, cli_runner, caplog):
    """Invoking the tag command with no tag names aborts with exit code 1."""
    caplog.set_level(logging.DEBUG)
    # GIVEN a populated store context and an empty argument list
    # WHEN invoking the tag command without any tags
    res = cli_runner.invoke(tag_cmd, [], obj=populated_context)
    # THEN the command aborts
    assert res.exit_code == 1
    # THEN the reason is logged
    assert "No tags provided" in caplog.text
def test_add_two_tags(populated_context, cli_runner, caplog):
    """Two new tag names are created and stored in the database."""
    caplog.set_level(logging.DEBUG)
    # GIVEN a populated store context and two new tag names
    first_tag, second_tag = "new-tag", "other-tag"
    # WHEN adding both tags via the CLI
    res = cli_runner.invoke(tag_cmd, [first_tag, second_tag], obj=populated_context)
    # THEN the command succeeds
    assert res.exit_code == 0
    # THEN both tag names appear in the log output
    assert first_tag in caplog.text
    assert second_tag in caplog.text
    # THEN both tags were persisted to the database
    stored_tags = {tag.name for tag in populated_context["store"].tags()}
    assert stored_tags.intersection({first_tag, second_tag})
def test_add_existing_tag_existing_file(populated_context, cli_runner, caplog):
    """Re-adding a tag a file already carries is reported, not duplicated."""
    caplog.set_level(logging.DEBUG)
    store = populated_context["store"]
    # GIVEN an existing file and one of its current tags
    file_id = 1
    existing_tag = store.File.get(file_id).tags[0].name
    # WHEN adding that same tag to the same file
    res = cli_runner.invoke(tag_cmd, [existing_tag, "-f", str(file_id)], obj=populated_context)
    # THEN the command still succeeds
    assert res.exit_code == 0
    # THEN the duplicate is reported in the log
    assert f"{existing_tag}: tag already added" in caplog.text
def test_add_tag_existing_file(populated_context, cli_runner, caplog):
    """A brand-new tag applied to an existing file gets created."""
    caplog.set_level(logging.DEBUG)
    store = populated_context["store"]
    # GIVEN an existing file
    file_id = 1
    assert store.File.get(file_id)
    # GIVEN a tag name not yet in the store
    fresh_tag = "new-tag"
    # WHEN adding the new tag to the file
    res = cli_runner.invoke(tag_cmd, [fresh_tag, "-f", str(file_id)], obj=populated_context)
    # THEN the command succeeds
    assert res.exit_code == 0
    # THEN tag creation is logged
    assert f"{fresh_tag}: tag created" in caplog.text
def test_add_tag_non_existing_file(populated_context, cli_runner, caplog):
    """Tagging a missing file id fails and logs an error."""
    caplog.set_level(logging.DEBUG)
    store = populated_context["store"]
    # GIVEN a file id with no matching file
    missing_file_id = 42
    assert not store.File.get(missing_file_id)
    # WHEN adding a tag to the missing file
    res = cli_runner.invoke(
        tag_cmd, ["new-tag", "-f", str(missing_file_id)], obj=populated_context
    )
    # THEN the command aborts
    assert res.exit_code == 1
    # THEN the error is logged
    assert "unable to find file" in caplog.text
| {
"content_hash": "88bfede8c1aa15a39b745661a8285b51",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 89,
"avg_line_length": 38.47959183673469,
"alnum_prop": 0.6817820206841687,
"repo_name": "Clinical-Genomics/housekeeper",
"id": "192bbd69d3fd9d7f6443b504bbdb060adc718c97",
"size": "3771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cli/add/test_cli_add_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "59717"
},
{
"name": "Shell",
"bytes": "1624"
}
],
"symlink_target": ""
} |
def make_param_entries(model_name, prepend, feat_dict, cfg):
    """Copies one model's parameter entries into a feature dict.

    For each single-key dict in ``cfg[model_name]``, adds an entry
    ``"<prepend>__<param_key>": value`` to ``feat_dict``.  When ``model_name``
    is absent from ``cfg``, ``feat_dict`` is returned unchanged.

    Args:
        model_name: Key into ``cfg`` naming the model.
        prepend: Prefix joined to each parameter key with ``'__'``.
        feat_dict: Dict updated in place (and returned).
        cfg: Mapping of model name to a list of one-key parameter dicts.

    Returns:
        The (mutated) ``feat_dict``.
    """
    if model_name in cfg:
        for param in cfg[model_name]:
            # BUG FIX: param.keys()[0] fails on Python 3 because dict views
            # are not indexable; next(iter(...)) works on both 2 and 3.
            param_key = next(iter(param))
            feat_dict[prepend + '__' + param_key] = param[param_key]
    return feat_dict
| {
"content_hash": "b4c04c5f734de3b54e6a434b463c8e33",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 60,
"avg_line_length": 37.75,
"alnum_prop": 0.5629139072847682,
"repo_name": "joelmpiper/bill_taxonomy",
"id": "bbacb9f3eece6d73411a4ea1c2e7df61cb769fac",
"size": "303",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/wrangle/make_params.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "597360"
},
{
"name": "HTML",
"bytes": "507685"
},
{
"name": "JavaScript",
"bytes": "1452"
},
{
"name": "Jupyter Notebook",
"bytes": "1163363"
},
{
"name": "Python",
"bytes": "51203"
}
],
"symlink_target": ""
} |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import division
from __future__ import print_function
import array
import difflib
import distutils.dir_util
import filecmp
import io
import operator
import os
import re
import shutil
import struct
import subprocess
import sys
import tempfile
import uuid
from functools import reduce
def ZapTimestamp(filename):
  """Normalizes timestamps and version strings in a midl.exe output file.

  midl.exe embeds the timestamp 2147483647 (2^31 - 1, rendered in the local
  timezone) and its own version number into its outputs.  To make outputs
  deterministic, rewrite those fields in place to fixed strings.  Fix: file
  handles are now closed deterministically via context managers.

  Args:
    filename: Path to a midl.exe output file (.tlb or a generated source
      file); the file is rewritten in place.
  """
  with open(filename, 'rb') as f:
    contents = f.read()
  # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
  # outputs, but using the local timezone.  To make the output timezone-
  # independent, replace that date with a fixed string of the same length.
  # Also blank out the minor version number.
  if filename.endswith('.tlb'):
    # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
    # a fairly complete description of the .tlb binary format.
    # TLB files start with a 54 byte header.  Offset 0x20 stores how many types
    # are defined in the file, and the header is followed by that many uint32s.
    # After that, 15 section headers appear.  Each section header is 16 bytes,
    # starting with offset and length uint32s.
    # Section 12 in the file contains custom() data.  custom() data has a type
    # (int, string, etc).  Each custom data chunk starts with a uint16_t
    # describing its type.  Type 8 is string data, consisting of a uint32_t
    # len, followed by that many data bytes, followed by 'W' bytes to pad to a
    # 4 byte boundary.  Type 0x13 is uint32 data, followed by 4 data bytes,
    # followed by two 'W' to pad to a 4 byte boundary.
    # The custom block always starts with one string containing "Created by
    # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
    # followed by another uint32 containing the MIDL compiler version (e.g.
    # 0x0801026e for v8.1.622 -- 0x26e == 622).  These 3 fields take 0x54 bytes.
    # There might be more custom data after that, but these 3 blocks are always
    # there for file-level metadata.
    # All data is little-endian in the file.
    assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
    ntypes, = struct.unpack_from('<I', contents, 0x20)
    custom_off, custom_len = struct.unpack_from(
        '<II', contents, 0x54 + 4*ntypes + 11*16)
    assert custom_len >= 0x54
    # First: Type string (0x8), followed by 0x3e characters.
    assert contents[custom_off:custom_off + 6] == b'\x08\x00\x3e\x00\x00\x00'
    assert re.match(
        br'Created by MIDL version 8\.\d\d\.\d{4} at ... Jan 1. ..:..:.. 2038\n',
        contents[custom_off + 6:custom_off + 6 + 0x3e])
    # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
    assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
        b'\x13\x00\xff\xff\xff\x7f\x57\x57'
    # Third: Type uint32 (0x13) storing MIDL compiler version.
    assert contents[custom_off + 6 + 0x3e + 8:custom_off + 6 + 0x3e + 8 +
                    2] == b'\x13\x00'
    # Replace "Created by" string with fixed string, and fixed MIDL version
    # with 8.1.622 always.
    contents = (
        contents[0:custom_off + 6] +
        b'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' +
        # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW
        b'\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' +
        contents[custom_off + 0x54:])
  else:
    # Generated source files carry the same metadata in comments; scrub it
    # with regexes instead of binary patching.
    contents = re.sub(
        br'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n'
        br'/\* at ... Jan 1. ..:..:.. 2038',
        br'File created by MIDL compiler version 8.xx.xxxx */\r\n'
        br'/* at a redacted point in time', contents)
    contents = re.sub(
        br' Oicf, W1, Zp8, env=(.....) \(32b run\), '
        br'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}',
        br' Oicf, W1, Zp8, env=\1 (32b run), target_arch=\2 8.xx.xxxx',
        contents)
    # TODO(thakis): If we need more hacks than these, try to verify checked-in
    # outputs when we're using the hermetic toolchain.
    # midl.exe older than 8.1.622 omit '//' after #endif, fix that:
    contents = contents.replace(b'#endif !_MIDL_USE_GUIDDEF_',
                                b'#endif // !_MIDL_USE_GUIDDEF_')
    # midl.exe puts the midl version into code in one place.  To have
    # predictable output, lie about the midl version if it's not 8.1.622.
    # This is unfortunate, but remember that there's beauty too in
    # imperfection.
    contents = contents.replace(b'0x801026c, /* MIDL Version 8.1.620 */',
                                b'0x801026e, /* MIDL Version 8.1.622 */')
  with open(filename, 'wb') as f:
    f.write(contents)
def overwrite_cls_guid_h(h_file, dynamic_guid):
  """Replaces the coclass GUID in a midl-generated header, in place.

  Fix: the file is now opened via context managers so handles close
  deterministically.

  Args:
    h_file: Path to the generated .h file.
    dynamic_guid: uuid.UUID whose text form replaces the GUID inside the
      DECLSPEC_UUID("...") macro.
  """
  with open(h_file, 'rb') as f:
    contents = f.read()
  contents = re.sub(br'class DECLSPEC_UUID\("[^"]*"\)',
                    br'class DECLSPEC_UUID("%s")' % str(dynamic_guid).encode(),
                    contents)
  with open(h_file, 'wb') as f:
    f.write(contents)
def overwrite_cls_guid_iid(iid_file, dynamic_guid):
  """Replaces the CLSID constant in a midl-generated _i.c file, in place.

  Rewrites the numeric fields of the MIDL_DEFINE_GUID(CLSID, name, ...)
  invocation with the fields of `dynamic_guid`.  Fix: the file is now opened
  via context managers so handles close deterministically.

  Args:
    iid_file: Path to the generated _i.c file.
    dynamic_guid: uuid.UUID to write into the CLSID definition.
  """
  with open(iid_file, 'rb') as f:
    contents = f.read()
  # GUID layout in the macro: 0xAAAAAAAA,0xBBBB,0xCCCC followed by the 8
  # trailing bytes as 0xDD each.
  hexuuid = '0x%08x,0x%04x,0x%04x,' % dynamic_guid.fields[0:3]
  # dynamic_guid.bytes is a bytestring in Py3, but a normal string in Py2.
  if sys.version_info.major == 2:
    hexuuid += ','.join('0x%02x' % ord(b) for b in dynamic_guid.bytes[8:])
  else:
    hexuuid += ','.join('0x%02x' % b for b in dynamic_guid.bytes[8:])
  contents = re.sub(br'MIDL_DEFINE_GUID\(CLSID, ([^,]*),[^)]*\)',
                    br'MIDL_DEFINE_GUID(CLSID, \1,%s)' % hexuuid.encode(),
                    contents)
  with open(iid_file, 'wb') as f:
    f.write(contents)
def overwrite_cls_guid_tlb(tlb_file, dynamic_guid):
    """Patch the coclass GUID stored inside a MIDL-generated .tlb file.

    Rewrites the GUID record of the first type (asserted to be a coclass)
    and then rebuilds the GUID hash table so GUID lookups still resolve.
    """
    # See ZapTimestamp() for a short overview of the .tlb format. The 1st
    # section contains type descriptions, and the first type should be our
    # coclass. It points to the type's GUID in section 6, the GUID section.
    contents = open(tlb_file, 'rb').read()
    # 'MSFT' magic followed by the expected version words.
    assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
    ntypes, = struct.unpack_from('<I', contents, 0x20)
    # Per-type offsets start at 0x54; the section table follows them.
    type_off, type_len = struct.unpack_from('<II', contents, 0x54 + 4*ntypes)
    # contents is a bytestring in Python 3, but a normal string in Py2.
    if sys.version_info.major == 2:
        coclass = ord(contents[type_off])
    else:
        coclass = contents[type_off]
    # 0x25 is the type-kind byte expected for a coclass here.
    assert coclass == 0x25, "expected coclass"
    # Offset of this type's GUID record, relative to the GUID section.
    guidind = struct.unpack_from('<I', contents, type_off + 0x2c)[0]
    guid_off, guid_len = struct.unpack_from(
        '<II', contents, 0x54 + 4*ntypes + 5*16)
    assert guidind + 14 <= guid_len
    # Switch to a mutable byte buffer so we can patch in place.
    contents = array.array('B', contents)
    struct.pack_into('<IHH8s', contents, guid_off + guidind,
                     *(dynamic_guid.fields[0:3] + (dynamic_guid.bytes[8:],)))
    # The GUID is correct now, but there's also a GUID hashtable in section 5.
    # Need to recreate that too. Since the hash table uses chaining, it's
    # easiest to recompute it from scratch rather than trying to patch it up.
    hashtab = [0xffffffff] * (0x80 // 4)
    for guidind in range(guid_off, guid_off + guid_len, 24):
        # Each 24-byte record: 16-byte GUID, owner-type offset, chain link.
        guidbytes, typeoff, nextguid = struct.unpack_from(
            '<16sII', contents, guidind)
        words = struct.unpack('<8H', guidbytes)
        # midl seems to use the following simple hash function for GUIDs:
        guidhash = reduce(operator.xor, [w for w in words]) % (0x80 // 4)
        # Chain insertion: new record becomes the bucket head, pointing at
        # the previous head (the unpacked nextguid value is discarded).
        nextguid = hashtab[guidhash]
        struct.pack_into('<I', contents, guidind + 0x14, nextguid)
        hashtab[guidhash] = guidind - guid_off
    hash_off, hash_len = struct.unpack_from(
        '<II', contents, 0x54 + 4*ntypes + 4*16)
    for i, hashval in enumerate(hashtab):
        struct.pack_into('<I', contents, hash_off + 4*i, hashval)
    open(tlb_file, 'wb').write(contents)
def overwrite_cls_guid(h_file, iid_file, tlb_file, dynamic_guid):
    """Propagate dynamic_guid into the .h, _i.c and .tlb outputs.

    This currently assumes that there's only one coclass in the idl file,
    and that that's the type with the dynamic type.
    """
    for patcher, path in ((overwrite_cls_guid_h, h_file),
                          (overwrite_cls_guid_iid, iid_file),
                          (overwrite_cls_guid_tlb, tlb_file)):
        patcher(path, dynamic_guid)
def main(arch, gendir, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl,
         *flags):
    """Copy checked-in midl outputs into place; on Windows, verify them.

    Args:
        arch: path to an environment-block file named '<name>.<cpu>'; the
            part after the first '.' ('x86'/'x64') selects the source
            subdirectory, and the file body is a CreateProcess-style
            environment block used when running midl.exe.
        gendir: directory holding the checked-in midl outputs.
        outdir: destination directory for the outputs.
        dynamic_guid: 'none', or a GUID string substituted for the coclass.
        tlb, h, dlldata, iid, proxy: output filenames passed to midl.exe.
        idl: the input .idl file.
        *flags: extra midl.exe command-line flags.

    Returns 0 on success or midl.exe's exit code; calls sys.exit(1) when
    the regenerated outputs differ from the checked-in ones.
    """
    # Copy checked-in outputs to final location.
    source = gendir
    if os.path.isdir(os.path.join(source, os.path.basename(idl))):
        source = os.path.join(source, os.path.basename(idl))
    source = os.path.join(source, arch.split('.')[1])  # Append 'x86' or 'x64'.
    source = os.path.normpath(source)
    distutils.dir_util.copy_tree(source, outdir, preserve_times=False)
    if dynamic_guid != 'none':
        overwrite_cls_guid(os.path.join(outdir, h),
                           os.path.join(outdir, iid),
                           os.path.join(outdir, tlb),
                           uuid.UUID(dynamic_guid))
    # On non-Windows, that's all we can do.
    if sys.platform != 'win32':
        return 0
    # On Windows, run midl.exe on the input and check that its outputs are
    # identical to the checked-in outputs (after possibly replacing their main
    # class guid).
    tmp_dir = tempfile.mkdtemp()
    delete_tmp_dir = True
    # Read the environment block from the file. This is stored in the format used
    # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
    # trailing vs. separator.
    env_pairs = open(arch).read()[:-2].split('\0')
    env_dict = dict([item.split('=', 1) for item in env_pairs])
    args = ['midl', '/nologo'] + list(flags) + [
        '/out', tmp_dir,
        '/tlb', tlb,
        '/h', h,
        '/dlldata', dlldata,
        '/iid', iid,
        '/proxy', proxy,
        idl]
    try:
        popen = subprocess.Popen(args, shell=True, env=env_dict,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
        out, _ = popen.communicate()
        # Filter junk out of stdout, and write filtered versions. Output we want
        # to filter is pairs of lines that look like this:
        # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
        # objidl.idl
        lines = out.decode('utf-8').splitlines()
        prefixes = ('Processing ', '64 bit Processing ')
        processing = set(os.path.basename(x)
                         for x in lines if x.startswith(prefixes))
        for line in lines:
            if not line.startswith(prefixes) and line not in processing:
                print(line)
        if popen.returncode != 0:
            return popen.returncode
        # Normalize timestamps before the comparison below.
        for f in os.listdir(tmp_dir):
            ZapTimestamp(os.path.join(tmp_dir, f))
        # Now compare the output in tmp_dir to the copied-over outputs.
        diff = filecmp.dircmp(tmp_dir, outdir)
        if diff.diff_files:
            print('midl.exe output different from files in %s, see %s'
                  % (outdir, tmp_dir))
            for f in diff.diff_files:
                if f.endswith('.tlb'): continue
                fromfile = os.path.join(outdir, f)
                tofile = os.path.join(tmp_dir, f)
                print(''.join(
                    difflib.unified_diff(
                        io.open(fromfile).readlines(),
                        io.open(tofile).readlines(), fromfile, tofile)))
            # Keep the temp dir around so the rebaseline command below works.
            delete_tmp_dir = False
            print('To rebaseline:')
            print(r'  copy /y %s\* %s' % (tmp_dir, source))
            sys.exit(1)
        return 0
    finally:
        if os.path.exists(tmp_dir) and delete_tmp_dir:
            shutil.rmtree(tmp_dir)
if __name__ == '__main__':
    # Forward all command-line arguments to main() and exit with its status.
    sys.exit(main(*sys.argv[1:]))
| {
"content_hash": "90e9ccf7032ac531a3f246d5579af62a",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 81,
"avg_line_length": 43.63846153846154,
"alnum_prop": 0.6383747576238322,
"repo_name": "endlessm/chromium-browser",
"id": "6d6fab05a50edc738b24801c8ee0651ad254cf7c",
"size": "11346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/toolchain/win/midl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
# Project URL configuration: static template pages, the Django admin,
# app-level includes, and media-file serving via static().
urlpatterns = [
    url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
    url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
    # Django Admin, use {% url 'admin:index' %}
    url(settings.ADMIN_URL, include(admin.site.urls)),
    # User management
    url(r'^users/', include("apps.users.urls", namespace="users")),
    url(r'^filters/', include("apps.filters.urls", namespace="filters")),
    url(r'^accounts/', include('allauth.urls')),
    # Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception("Bad Request!")}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception("Permission Denied")}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception("Page not Found")}),
        url(r'^500/$', default_views.server_error),
    ]
| {
"content_hash": "9df6afcbff830fc6bdd2cf5909bba800",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 110,
"avg_line_length": 42.22857142857143,
"alnum_prop": 0.6928281461434371,
"repo_name": "WarmongeR1/feedly-filter",
"id": "b1a2788c7a9592771a62847692d46a8b23fde71a",
"size": "1502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41604"
},
{
"name": "HTML",
"bytes": "39910"
},
{
"name": "JavaScript",
"bytes": "4270"
},
{
"name": "Nginx",
"bytes": "1095"
},
{
"name": "Python",
"bytes": "56650"
},
{
"name": "Shell",
"bytes": "6984"
}
],
"symlink_target": ""
} |
from chalice import Chalice, Response
import boto3
import pickle
from urlparse import urlparse, parse_qs
from botocore.vendored import requests
import json
# Chalice application object; route handlers below attach to it.
app = Chalice(app_name='slack-handler')
# Enable Chalice debug mode.
app.debug=True
def __get_alfred_url():
    """Endpoint of the alfred message-processing API."""
    return ("https://4v8uog9pw8.execute-api.us-west-2.amazonaws.com"
            "/api/process/message")
@app.route('/', methods=['POST','GET'], content_types=['application/x-www-form-urlencoded'])
def index():
    """Root endpoint: log the parsed form body and return a canned dict."""
    parsed = parse_qs(app.current_request.raw_body.decode())
    # Python 2 print statement; emits the parsed payload for debugging.
    print "parsed:" + str(parsed)
    # str(parsed)
    return {'hello': 'world'}
'''
This handler merely takes in input from slack and forwards it to alfred
That way, we can have very small commands on slack that translate to
specific commands on alfred.
'''
@app.route('/process/message/{username}', methods=['POST'], content_types=['application/x-www-form-urlencoded'])
def process_message(username):
    """Forward a Slack slash-command to alfred and format the reply.

    The Slack payload's 'text' field carries the command: 'gg' lists all
    goals, 'gg <goal>' shows one goal's steps, anything else is handled
    generically. Returns a Slack message dict with attachment fields.
    NOTE(review): format_generic_output() returns a plain string while
    the 'fields' slot is a list of dicts on the goal paths -- confirm
    Slack renders the generic case as intended.
    """
    # Extract command from body and just pass it to alfred
    parsed_input = parse_qs(app.current_request.raw_body.decode())
    input_text = parsed_input['text'][0]
    print "Making query to alfred using command: " + str(input_text)
    # Make http call to alfred
    headers = {'Content-type': 'application/json'}
    json_request_body = json.dumps(get_json_data_for(username,input_text))
    print "Making query with json body:", json_request_body
    response = requests.post(__get_alfred_url(), headers=headers, data=json_request_body )
    print response.json()
    if "gg" in input_text:
        if "gg" == input_text:
            # Bare 'gg': progress summary for every goal.
            formatted_output = format_output_for_all_goals(response.json())
        else:
            # 'gg <something>': step breakdown for a single goal.
            formatted_output = format_output_for_specific_goal(response.json())
    else:
        formatted_output = format_generic_output(response.json())
    slack_response = {"text":"Command executed: "+input_text, "attachments":[{"title":"Goals","pretext":"Command response:","fields":formatted_output,"short":"true","color": "#3AA3E3"}]}
    return slack_response
def format_generic_output(response):
    """Fallback formatting for commands with no goal-specific rendering.

    The lookup below only validates that the response carries the expected
    nesting (it raises KeyError on a malformed reply); the extracted
    message itself is not included in the returned text.
    """
    message = response["result"]["response_message"]  # structure check only
    return "Command probably succeeded, no error found"
def format_output_for_all_goals(response):
    """Turn alfred's all-goals reply into Slack attachment fields.

    Each goal becomes {"title": <name>, "value": "<progress> <bar>"}.
    """
    goals = response["result"]["response_message"]["goals"]
    return [
        {
            "title": goals[key]["name"],
            "value": goals[key]["progress"] + " " + goals[key]["progress_bar"],
        }
        for key in goals.keys()
    ]
def format_output_for_specific_goal(response):
    """Turn alfred's single-goal reply into Slack attachment fields.

    Each step becomes {"title": <step>, "value": "<cost> <status>"}.
    The goal's "name" key is read only to assert the reply shape.
    """
    goal = response["result"]["response_message"]["goals"]
    _ = goal["name"]  # raises KeyError on a malformed reply; value unused
    fields = []
    for step_name, details in goal["steps"].items():
        fields.append({
            "title": step_name,
            "value": str(details["cost"]) + " " + details["status"],
        })
    return fields
class GoalOutput(object):
    """Value object pairing a goal's name with its progress percentage."""

    def __init__(self, name, progress_percentage):
        # name: goal label; progress: percentage complete.
        self.name = name
        self.progress = progress_percentage
def get_json_data_for(username, input_text):
    """Build the request payload alfred expects for a user command."""
    return {"username": username, "command": input_text}
| {
"content_hash": "1142d56393fdf882dc6ba7bc997e7e37",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 186,
"avg_line_length": 38.642857142857146,
"alnum_prop": 0.670671595810228,
"repo_name": "nakul225/GtaProgressBar",
"id": "58115255a8d530aa83a558ac56d97371652a5e7c",
"size": "3246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slack-handler/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89969"
},
{
"name": "Shell",
"bytes": "3901"
}
],
"symlink_target": ""
} |
"""
*t4mon* - T4 monitoring **test functions** for orchestrator.py
"""
from __future__ import absolute_import
import os
from datetime import datetime as dt
import pandas as pd
from t4mon.df_tools import consolidate_data
from t4mon.arguments import ConfigReadError
from t4mon.orchestrator import Orchestrator
from pandas.util.testing import assert_frame_equal
from .base import TEST_CSV, TEST_PKL, BAD_CONFIG, BaseTestClass
class TestOrchestrator(BaseTestClass):
    """ Set of test functions for orchestrator.py """

    # Fixtures (orchestrator_test, collector_test, test_data) are provided
    # by BaseTestClass -- assumed set up in its setUp; TODO confirm.

    def test_orchestrator(self):
        """ Check that Orchestrator has the correct fields by default """
        self.assertDictEqual(self.orchestrator_test.logs,
                             {'my_sys': 'These are my dummy log results'}
                             )
        # date_time must parse with the expected format and not be in the
        # future relative to today.
        self.assertGreaterEqual(dt.today().toordinal(),
                                dt.strptime(self.orchestrator_test.date_time,
                                            '%d/%m/%Y %H:%M:%S').toordinal())
        # A fresh orchestrator starts with an empty DataFrame.
        assert_frame_equal(pd.DataFrame(),
                           self.orchestrator_test.data)
        self.assertIn('html_template', self.orchestrator_test.__dict__)
        self.assertIn('graphs_definition_file',
                      self.orchestrator_test.__dict__)
        self.assertNotEqual(self.orchestrator_test.store_folder, '')
        self.assertNotEqual(self.orchestrator_test.reports_folder,
                            self.orchestrator_test.store_folder)
        self.assertIsInstance(self.orchestrator_test.__str__(), str)

    def test_clone(self):
        """ Test function for Orchestrator.clone() """
        # A clone must carry over the original's timestamp.
        _orchestrator = self.orchestrator_test.clone()
        self.assertEqual(self.orchestrator_test.date_time,
                         _orchestrator.date_time)

    def test_check_files(self):
        """ Test check_files"""
        _orchestrator = self.orchestrator_test.clone()
        # First of all, check with default settings
        self.assertIsNone(_orchestrator._check_files())
        # Check with wrong settings file
        with self.assertRaises(ConfigReadError):
            _orchestrator.settings_file = BAD_CONFIG
            _orchestrator._check_files()
        # A CSV is not a valid settings file either.
        with self.assertRaises(ConfigReadError):
            _orchestrator.settings_file = TEST_CSV
            _orchestrator._check_files()
        # Check with missing settings file
        with self.assertRaises(ConfigReadError):
            _orchestrator.settings_file = 'test/non_existing.file'
            _orchestrator._check_files()

    def test_check_files_raises_exception_if_bad_settings(self):
        """ Check that if the setting file contains a link to a
        non existing file, init will raise an exception """
        with self.assertRaises(ConfigReadError):
            Orchestrator(settings_file=BAD_CONFIG).check_files()

    def test_reports_generator(self):
        """ Test function for Orchestrator._reports_generator() """
        _orchestrator = self.orchestrator_test.clone()
        _orchestrator.data = self.test_data
        _orchestrator._reports_generator()
        self.assertNotEqual(_orchestrator.reports_written, [])
        for report_file in _orchestrator.reports_written:
            self.assertTrue(os.path.exists(report_file))
        # Test the non-threaded version
        _orchestrator.reports_written = []  # reset the count
        _orchestrator.safe = True
        # Two systems consolidated -> expect exactly two reports below.
        _orchestrator.data = consolidate_data(partial_dataframe=self.test_data,
                                              dataframe=self.test_data,
                                              system='SYS2')
        _orchestrator._reports_generator()
        self.assertNotEqual(_orchestrator.reports_written, [])
        self.assertEqual(len(_orchestrator.reports_written), 2)
        for report_file in _orchestrator.reports_written:
            self.assertTrue(os.path.exists(report_file))

    def test_create_reports_from_local(self):
        """
        Test function for Orchestrator.create_reports_from_local(pkl=True)
        """
        _orchestrator = self.orchestrator_test.clone()
        _orchestrator.create_reports_from_local(TEST_PKL)
        self.assertNotEqual(_orchestrator.reports_written, [])
        for report_file in _orchestrator.reports_written:
            self.assertTrue(os.path.exists(report_file))
        # Non existing file raises error
        with self.assertRaises(IOError):
            _orchestrator.create_reports_from_local('WR0NG')

    def test_create_reports_from_local_csv(self):
        """
        Test function for Orchestrator.create_reports_from_local(pkl=False)
        """
        _orchestrator = self.orchestrator_test.clone()
        _orchestrator.create_reports_from_local(TEST_CSV, pkl=False)
        self.assertNotEqual(_orchestrator.reports_written, [])
        for report_file in _orchestrator.reports_written:
            self.assertTrue(os.path.exists(report_file))
        # Non existing file raises error
        with self.assertRaises(IOError):
            _orchestrator.create_reports_from_local('WR0NG', pkl=False)

    def test_local_store(self):
        """ Test that data can be stored locally in both PKL and CSV formats
        """
        _orchestrator = self.orchestrator_test.clone()
        _collector = self.collector_test.clone()
        _collector.nologs = False
        _collector.data = self.test_data
        _orchestrator._local_store(_collector)
        # One file per format, named after the orchestrator's date tag.
        for extension in ['pkl.gz', 'csv']:
            filename = '{0}/data_{1}.{2}'.format(_orchestrator.store_folder,
                                                 _orchestrator.date_tag(),
                                                 extension)
            self.assertTrue(os.path.exists(filename))
| {
"content_hash": "19b7966bb7731e62a1e801f3f7f9afb6",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 79,
"avg_line_length": 44.651162790697676,
"alnum_prop": 0.6222222222222222,
"repo_name": "fernandezcuesta/pySMSCMon",
"id": "02ab23b94f59eaeb74fb7a1092a5b223039d4944",
"size": "5807",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit_tests/test_orchestrator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "299"
},
{
"name": "CSS",
"bytes": "3402"
},
{
"name": "HTML",
"bytes": "2813"
},
{
"name": "Python",
"bytes": "259293"
}
],
"symlink_target": ""
} |
import unittest
class TestHello(unittest.TestCase):  # [test description]
    """Minimal test case skeleton.

    Inherits unittest.TestCase so that unittest.main() actually discovers
    and runs testHello(); the original bare class was never collected by
    the unittest runner, so the suite silently ran zero tests.
    """

    def testHello(self):
        # Placeholder: passes unconditionally.
        pass
if __name__ == '__main__':
    # Discover and run all TestCase classes defined in this module.
    unittest.main()
| {
"content_hash": "cc432cc03fe4f61480deb2ca5f1bbc9f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 40,
"avg_line_length": 13.727272727272727,
"alnum_prop": 0.5629139072847682,
"repo_name": "talapus/Ophidian",
"id": "93f4444534bc91d91670e8f7325bd06089432c00",
"size": "175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Testing/Pytest/test_1.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "154649"
},
{
"name": "JavaScript",
"bytes": "3364"
},
{
"name": "Python",
"bytes": "314611"
},
{
"name": "Shell",
"bytes": "16809"
}
],
"symlink_target": ""
} |
import os
from cloudbaseinit.osutils import factory as osutils_factory
from cloudbaseinit.utils import classloader
def get_storage_manager():
    """Instantiate the storage manager suitable for the current platform.

    On Windows, loads the WSM-based manager on Nano Server and the
    VDS-based one otherwise; raises NotImplementedError elsewhere.
    """
    class_paths = {
        "VDS": "cloudbaseinit.utils.windows.storage.vds_storage_manager."
               "VDSStorageManager",
        "WSM": "cloudbaseinit.utils.windows.storage.wsm_storage_manager."
               "WSMStorageManager",
    }
    osutils = osutils_factory.get_os_utils()
    cl = classloader.ClassLoader()
    if os.name != "nt":
        raise NotImplementedError(
            "No storage manager available for this platform")
    # VDS is not available on Nano Server;
    # WSM supersedes VDS since Windows Server 2012 / Windows 8.
    key = "WSM" if osutils.is_nano_server() else "VDS"
    return cl.load_class(class_paths[key])()
| {
"content_hash": "b596cc8af7697ac462b773389ec3cca7",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 33.8,
"alnum_prop": 0.659171597633136,
"repo_name": "alexpilotti/cloudbase-init",
"id": "0f2488954dbe6f05703ab8461ceef9228a99ab64",
"size": "1461",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cloudbaseinit/utils/windows/storage/factory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1230739"
}
],
"symlink_target": ""
} |
import pathlib
import matplotlib.pyplot as plt
import meep as mp
import numpy as np
import pandas as pd
import pydantic
from tqdm.auto import tqdm
from gdsfactory.simulation.modes.find_modes import find_modes_coupler
from gdsfactory.types import Optional, PathType
def coupling_length(
    neff1: float,
    neff2: float,
    power_ratio: float = 1.0,
    wavelength: float = 1.55,
) -> float:
    """Return the directional-coupler length (um) reaching power_ratio.

    Args:
        neff1: effective index of the even supermode.
        neff2: effective index of the odd supermode.
        power_ratio: target p2/p1 power transfer (1.0 means 100% transfer).
        wavelength: operating wavelength in um.
    """
    delta_n = (neff1 - neff2).real
    beat = wavelength / (np.pi * delta_n)
    return beat * np.arcsin(np.sqrt(power_ratio))
@pydantic.validate_arguments
def find_coupling(
    gap: float = 0.2, power_ratio: float = 1.0, wavelength: float = 1.55, **kwargs
) -> float:
    """Return the coupler length (um) reaching power_ratio at a single gap.

    Args:
        gap: waveguide gap in um.
        power_ratio: target p2/p1 power transfer (1.0 means 100% transfer).
        wavelength: operating wavelength in um.

    Keyword Args:
        nmodes: number of modes.
        parity: for symmetries.
    """
    modes = find_modes_coupler(gaps=(gap,), wavelength=wavelength, **kwargs)
    even = modes[1].neff
    odd = modes[2].neff
    return coupling_length(
        neff1=even, neff2=odd, power_ratio=power_ratio, wavelength=wavelength
    )
@pydantic.validate_arguments
def find_coupling_vs_gap(
    gap1: float = 0.2,
    gap2: float = 0.4,
    steps: int = 12,
    nmodes: int = 4,
    wavelength: float = 1.55,
    parity=mp.NO_PARITY,
    filepath: Optional[PathType] = None,
    overwrite: bool = False,
    **kwargs
) -> pd.DataFrame:
    """Sweep the coupler gap and tabulate supermode splitting vs gap.

    Returns a DataFrame with columns ``gap``, ``ne`` (even supermode index),
    ``no`` (odd supermode index), ``lc`` (100% coupling length, um) and
    ``dn`` (ne - no).

    NOTE(review): nmodes, wavelength and parity are accepted but not
    forwarded to find_modes_coupler (only **kwargs reaches the solver) --
    confirm whether they should be passed through.

    Args:
        gap1: starting gap in um.
        gap2: end gap in um.
        steps: number of steps.
        nmodes: number of modes.
        wavelength: wavelength (um).
        parity: for symmetries.
        filepath: optional filepath to cache results on disk.
        overwrite: overwrites results even if found on disk.

    Keyword Args:
        forwarded to find_modes_coupler (wg_width, wg_widths, gaps,
        wg_thickness, slab_thickness, ncore, nclad, nslab, ymargin, sz,
        resolution, sidewall_angles, ...).
    """
    # Serve cached results unless an overwrite was requested.
    if filepath and not overwrite and pathlib.Path(filepath).exists():
        return pd.read_csv(filepath)

    rows = []
    for gap in tqdm(np.linspace(gap1, gap2, steps)):
        modes = find_modes_coupler(gaps=(gap,), **kwargs)
        even = modes[1].neff
        odd = modes[2].neff
        rows.append(
            dict(
                gap=gap,
                ne=even,
                no=odd,
                lc=coupling_length(even, odd),
                dn=even - odd,
            )
        )
    df = pd.DataFrame(rows)

    if filepath:
        filepath = pathlib.Path(filepath)
        filepath.parent.mkdir(exist_ok=True, parents=True)
        df.to_csv(filepath, index=False)
    return df
def plot_coupling_vs_gap(df: pd.DataFrame, **kwargs) -> None:
    """Plot 100% coupling length vs gap from a find_coupling_vs_gap frame.

    Expects columns ``gap`` and ``lc``. ``**kwargs`` is currently unused --
    NOTE(review): confirm whether it should be forwarded to plt.plot.
    """
    plt.plot(df.gap, df.lc, ".-")
    plt.ylabel("100% coupling length (um)")
    plt.xlabel("gap (um)")
    plt.show()
if __name__ == "__main__":
    # Smoke run: 3-point sweep cached to CSV, then plotted.
    df = find_coupling_vs_gap(steps=3, filepath="coupling_vs_gap.csv", overwrite=True)
    plot_coupling_vs_gap(df)
    plt.show()
| {
"content_hash": "a25b9499de9bdc8bba17e40e74715663",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 125,
"avg_line_length": 29.453237410071942,
"alnum_prop": 0.6328773815339521,
"repo_name": "gdsfactory/gdsfactory",
"id": "00a053f4f5e2c201598523a8f5be5a09f97d7605",
"size": "4094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gdsfactory/simulation/modes/find_coupling_vs_gap.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "605"
},
{
"name": "Dockerfile",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "4572"
},
{
"name": "Python",
"bytes": "2471982"
},
{
"name": "Shell",
"bytes": "671"
},
{
"name": "XS",
"bytes": "10045"
}
],
"symlink_target": ""
} |
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Holds the metadata and resource data sources used for packaging.

    Internally keeps an OrderedDict with up to three slots:
    'metadata' (a GspreadDataSource), 'resource' (a RasterDataSource)
    and 'uri' (the uri passed with the most recently added source).
    """

    def __init__(self, debug=False):
        """Create an empty manager; debug=True enables DEBUG-level logging."""
        # Set up logging
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Attach a data source to the manager.

        Requires kwargs['type'] ('gspread' or 'raster'); remaining args and
        kwargs are forwarded to the data source constructor.
        NOTE(review): kwargs['uri'] is read unconditionally at the end, so a
        missing 'uri' raises KeyError -- confirm whether it should be
        validated explicitly like 'type'.

        Raises:
            TypeError: if 'type' is missing or has an unknown value.
        """
        if 'type' not in kwargs:
            raise TypeError("Missing require keyword argument: 'type")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw OrderedDict of registered sources ('metadata'/'resource'/'uri').
        return self._data

    def find_name(self, resource_name):
        """
        Simple method to find out the name of the dataset based on a resource name (essentially raster file name).
        If no match is found, return a None.

        :param resource_name: String raster file name as stated in the data source.
        :return: String name of the dataset.
        """
        # Linear scan; if several items match, the last one wins.
        name = None
        for item in self.data['metadata'].data.items():
            if item[1]['resource_name'] == resource_name:
                name = item[1]['name']
        return name

    def get_metadata_value(self, key, value):
        # Delegate lookup to the metadata (gspread) source.
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        # Delegate lookup to the resource (raster) source.
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Parsed data of the metadata (gspread) source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Parsed data of the resource (raster) source.
        return self.data['resource'].data
| {
"content_hash": "659faedce0cd75c161d23238b9765348",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 114,
"avg_line_length": 32.927536231884055,
"alnum_prop": 0.6095950704225352,
"repo_name": "VUEG/egpackager",
"id": "5410fcef07609b866ba63e5a91521d3b57fec6f6",
"size": "2272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "egpackager/datamanager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9757"
}
],
"symlink_target": ""
} |
from test_support import verbose, verify
from types import TupleType, StringType, IntType
import __future__
# Legal values for the release-level field of a version 5-tuple.
GOOD_SERIALS = ("alpha", "beta", "candidate", "final")

features = __future__.all_feature_names

# Verify that all_feature_names appears correct.
# Every _Feature attribute of the module must be listed exactly once.
given_feature_names = features[:]
for name in dir(__future__):
    obj = getattr(__future__, name, None)
    if obj is not None and isinstance(obj, __future__._Feature):
        verify(name in given_feature_names,
               "%r should have been in all_feature_names" % name)
        given_feature_names.remove(name)

# Anything left over was listed but is not an actual _Feature.
verify(len(given_feature_names) == 0,
       "all_feature_names has too much: %r" % given_feature_names)
del given_feature_names
# Each feature must expose well-formed optional/mandatory release
# 5-tuples (major, minor, micro, level, serial) and a compiler flag.
for feature in features:
    value = getattr(__future__, feature)
    if verbose:
        print "Checking __future__ ", feature, "value", value
    optional = value.getOptionalRelease()
    mandatory = value.getMandatoryRelease()

    verify(type(optional) is TupleType, "optional isn't tuple")
    verify(len(optional) == 5, "optional isn't 5-tuple")
    major, minor, micro, level, serial = optional
    verify(type(major) is IntType, "optional major isn't int")
    verify(type(minor) is IntType, "optional minor isn't int")
    verify(type(micro) is IntType, "optional micro isn't int")
    verify(type(level) is StringType, "optional level isn't string")
    verify(level in GOOD_SERIALS,
           "optional level string has unknown value")
    verify(type(serial) is IntType, "optional serial isn't int")

    # The mandatory release may be None (feature not yet mandatory).
    verify(type(mandatory) is TupleType or
           mandatory is None, "mandatory isn't tuple or None")
    if mandatory is not None:
        verify(len(mandatory) == 5, "mandatory isn't 5-tuple")
        major, minor, micro, level, serial = mandatory
        verify(type(major) is IntType, "mandatory major isn't int")
        verify(type(minor) is IntType, "mandatory minor isn't int")
        verify(type(micro) is IntType, "mandatory micro isn't int")
        verify(type(level) is StringType, "mandatory level isn't string")
        verify(level in GOOD_SERIALS,
               "mandatory serial string has unknown value")
        verify(type(serial) is IntType, "mandatory serial isn't int")
        # A feature must become optional before it becomes mandatory.
        verify(optional < mandatory,
               "optional not less than mandatory, and mandatory not None")

    verify(hasattr(value, "compiler_flag"),
           "feature is missing a .compiler_flag attr")
    verify(type(getattr(value, "compiler_flag")) is IntType,
           ".compiler_flag isn't int")
| {
"content_hash": "79820ce49eab20bfc47327e82c8ec098",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 74,
"avg_line_length": 43.672413793103445,
"alnum_prop": 0.6667982629293328,
"repo_name": "MalloyPower/parsing-python",
"id": "fa8224f7b02b2e1dd36f74c38f39a8d8b36ede56",
"size": "2556",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.2/Lib/test/test___future__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
"""
WSGI config for rainapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# Point Django at the project settings before building the application.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rainapp.settings")

application = get_wsgi_application()
# Wrap the WSGI app with WhiteNoise so static files are served directly.
application = DjangoWhiteNoise(application)
| {
"content_hash": "c89db7e28a435209b376dcbd3c8ba81c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 78,
"avg_line_length": 26.833333333333332,
"alnum_prop": 0.7908902691511387,
"repo_name": "pjsier/rainapp",
"id": "4b9482c8a1d657abfda87fdeff77a8e1a96f96b9",
"size": "483",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rainapp/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "49748"
},
{
"name": "HTML",
"bytes": "21366"
},
{
"name": "JavaScript",
"bytes": "44221"
},
{
"name": "Python",
"bytes": "29852"
}
],
"symlink_target": ""
} |
"""
A collection of parsers that should match those listed in the config.yml file.
Each parser must be added as an entry point. This allows MSL plugins that only
define parsers.
"""
import simplejson as json
def parser_example(data):
    """Identity parser: return the incoming data unmodified.

    Serves as a template for writing new parsers.
    """
    return data
def _data_zero(data):
    """Debug helper: print the first record and its keys (Python 2 prints)."""
    print 'data[0]:'
    print data[0]
    print 'data[0].keys:'
    print data[0].keys()
def addresses_json_generator(data):
    """Strip names and addresses from this source format.

    Expects a JSON array of records shaped like
    {"name": {"first": ..., "last": ...}, "address": ...} -- TODO confirm
    against the actual feed. Returns ("First Last", "<address>") tuples.
    """
    data = json.loads(data)
    _data_zero(data)  # debug dump of the first record
    data = [('{first} {last}'.format(**x['name']),
             str(x['address']))
            for x in data]
    return data
def addresses_filltext(data):
    """Strip names and addresses from this source format.

    Expects a JSON array of flat records with fname/lname/address/city/
    state/zip keys -- TODO confirm against the actual feed. Returns
    ("First Last", "addr, city, state zip") tuples.
    """
    data = json.loads(data)
    _data_zero(data)  # debug dump of the first record
    data = [('{fname} {lname}'.format(**x),
             '{address}, {city}, {state} {zip}'.format(**x))
            for x in data]
    return data
| {
"content_hash": "d4a507996ef32705e9be4dcf55bd2de9",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 78,
"avg_line_length": 24.875,
"alnum_prop": 0.6221105527638191,
"repo_name": "RichardBronosky/service_layer_config",
"id": "acf504dc21f750a3bf3c2373e15b90f296bc51c7",
"size": "995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service_layer_config/parsers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7373"
}
],
"symlink_target": ""
} |
"""Makes sure that all files contain proper licensing information."""
import optparse
import os.path
import subprocess
import sys
def PrintUsage():
  """Print command-line usage information for this script."""
  # Parenthesised print: with a single argument the output is identical
  # under Python 2, and this also works unchanged under Python 3.
  print("""Usage: python checklicenses.py [--root <root>] [tocheck]
  --root Specifies the repository root. This defaults to "../.." relative
         to the script file. This will be correct given the normal location
         of the script in "<root>/tools/checklicenses".

  --ignore-suppressions Ignores path-specific license whitelist. Useful when
                        trying to remove a suppression/whitelist entry.

  tocheck  Specifies the directory, relative to root, to check. This defaults
           to "." so it checks everything.

Examples:
  python checklicenses.py
  python checklicenses.py --root ~/chromium/src third_party""")
# License strings (as emitted by licensecheck.pl) that are acceptable for
# any file in the tree.
WHITELISTED_LICENSES = [
    'Apache (v2.0)',
    'Apache (v2.0) BSD (2 clause)',
    'Apache (v2.0) GPL (v2)',
    'Apple MIT',  # https://fedoraproject.org/wiki/Licensing/Apple_MIT_License
    'APSL (v2)',
    'APSL (v2) BSD (4 clause)',
    'BSD',
    'BSD (2 clause)',
    'BSD (2 clause) MIT/X11 (BSD like)',
    'BSD (3 clause)',
    'BSD (3 clause) ISC',
    'BSD (3 clause) LGPL (v2.1 or later)',
    'BSD (3 clause) MIT/X11 (BSD like)',
    'BSD (4 clause)',
    'BSD-like',

    # TODO(phajdan.jr): Make licensecheck not print BSD-like twice.
    'BSD-like MIT/X11 (BSD like)',

    'BSL (v1.0)',
    # TODO(phajdan.jr): Make licensecheck not print the comma after 3.1.
    'BSL (v1.0) GPL (v3.1,)',
    'GPL (v3 or later) with Bison parser exception',
    'GPL with Bison parser exception',
    'ISC',
    'LGPL',
    'LGPL (v2)',
    'LGPL (v2 or later)',
    'LGPL (v2.1)',
    'LGPL (v3 or later)',

    # TODO(phajdan.jr): Make licensecheck convert that comma to a dot.
    'LGPL (v2,1 or later)',

    'LGPL (v2.1 or later)',
    'MPL (v1.0) LGPL (v2 or later)',
    'MPL (v1.1)',
    'MPL (v1.1) BSD-like',
    'MPL (v1.1) BSD-like GPL (unversioned/unknown version)',
    'MPL (v1.1) GPL (unversioned/unknown version)',

    # TODO(phajdan.jr): Make licensecheck not print the comma after 1.1.
    'MPL (v1.1,) GPL (unversioned/unknown version) LGPL (v2 or later)',
    'MPL (v1.1,) GPL (unversioned/unknown version) LGPL (v2.1 or later)',

    'MIT/X11 (BSD like)',
    'Ms-PL',
    'Public domain',
    'libpng',
    'zlib/libpng',
    'SGI Free Software License B',
]


# Per-path suppressions: extra license strings tolerated only for files
# whose repository-relative path starts with the given key.  Most entries
# reference a tracking bug for eventually removing the suppression.
PATH_SPECIFIC_WHITELISTED_LICENSES = {
    'base/third_party/icu': [  # http://crbug.com/98087
        'UNKNOWN',
    ],

    # http://code.google.com/p/google-breakpad/issues/detail?id=450
    'breakpad/src': [
        'UNKNOWN',
    ],

    'chrome/common/extensions/docs/examples': [  # http://crbug.com/98092
        'UNKNOWN',
    ],
    'chrome/test/data/layout_tests/LayoutTests': [
        'UNKNOWN',
    ],
    'courgette/third_party/bsdiff_create.cc': [  # http://crbug.com/98095
        'UNKNOWN',
    ],
    'data/mozilla_js_tests': [
        'UNKNOWN',
    ],
    'data/page_cycler': [
        'UNKNOWN',
        'GPL (v2 or later)',
    ],
    'data/tab_switching': [
        'UNKNOWN',
    ],
    'googleurl': [  # http://code.google.com/p/google-url/issues/detail?id=15
        'UNKNOWN',
    ],
    'native_client': [  # http://crbug.com/98099
        'UNKNOWN',
    ],
    'native_client/toolchain': [
        'BSD GPL (v2 or later)',
        'BSD (2 clause) GPL (v2 or later)',
        'BSD (3 clause) GPL (v2 or later)',
        'BSL (v1.0) GPL',
        'GPL',
        'GPL (unversioned/unknown version)',
        'GPL (v2)',
        # TODO(phajdan.jr): Make licensecheck not print the comma after v2.
        'GPL (v2,)',
        'GPL (v2 or later)',
        # TODO(phajdan.jr): Make licensecheck not print the comma after 3.1.
        'GPL (v3.1,)',
        'GPL (v3 or later)',
    ],
    'net/disk_cache/hash.cc': [  # http://crbug.com/98100
        'UNKNOWN',
    ],
    'net/tools/spdyshark': [
        'GPL (v2 or later)',
        'UNKNOWN',
    ],

    # http://crbug.com/98107
    'ppapi/c/documentation/check.sh': [
        'UNKNOWN',
    ],
    'ppapi/cpp/documentation/check.sh': [
        'UNKNOWN',
    ],
    'ppapi/lib/gl/include': [
        'UNKNOWN',
    ],
    'ppapi/native_client/tests/earth/earth_image.inc': [
        'UNKNOWN',
    ],
    'third_party/WebKit': [
        'UNKNOWN',
    ],
    'third_party/WebKit/Source/JavaScriptCore/tests/mozilla': [
        'GPL',
        'GPL (unversioned/unknown version)',
    ],
    'third_party/active_doc': [  # http://crbug.com/98113
        'UNKNOWN',
    ],

    # http://code.google.com/p/angleproject/issues/detail?id=217
    'third_party/angle': [
        'UNKNOWN',
    ],

    'third_party/bsdiff/mbsdiff.cc': [
        'UNKNOWN',
    ],
    'third_party/bzip2': [
        'UNKNOWN',
    ],
    'third_party/cld/encodings/compact_lang_det': [  # http://crbug.com/98120
        'UNKNOWN',
    ],
    'third_party/devscripts': [
        'GPL (v2 or later)',
    ],
    'third_party/expat/files/lib': [  # http://crbug.com/98121
        'UNKNOWN',
    ],
    'third_party/ffmpeg': [
        'GPL',
        'GPL (v2)',
        'GPL (v2 or later)',
        'UNKNOWN',  # http://crbug.com/98123
    ],
    'third_party/gles2_book': [  # http://crbug.com/98130
        'UNKNOWN',
    ],
    'third_party/gles2_conform/GTF_ES': [  # http://crbug.com/98131
        'UNKNOWN',
    ],
    'third_party/harfbuzz': [  # http://crbug.com/98133
        'UNKNOWN',
    ],
    'third_party/hunspell': [  # http://crbug.com/98134
        'UNKNOWN',
    ],
    'third_party/iccjpeg': [  # http://crbug.com/98137
        'UNKNOWN',
    ],
    'third_party/icu': [  # http://crbug.com/98301
        'UNKNOWN',
    ],
    'third_party/jemalloc': [  # http://crbug.com/98302
        'UNKNOWN',
    ],
    'third_party/lcov': [  # http://crbug.com/98304
        'UNKNOWN',
    ],
    'third_party/lcov/contrib/galaxy/genflat.pl': [
        'GPL (v2 or later)',
    ],
    'third_party/lcov-1.9/contrib/galaxy/genflat.pl': [
        'GPL (v2 or later)',
    ],
    'third_party/libevent': [  # http://crbug.com/98309
        'UNKNOWN',
    ],
    'third_party/libjingle/source/talk': [  # http://crbug.com/98310
        'UNKNOWN',
    ],
    'third_party/libjpeg': [  # http://crbug.com/98313
        'UNKNOWN',
    ],
    'third_party/libjpeg_turbo': [  # http://crbug.com/98314
        'UNKNOWN',
    ],
    'third_party/libpng': [  # http://crbug.com/98318
        'UNKNOWN',
    ],

    # The following files lack license headers, but are trivial.
    'third_party/libusb/libusb/os/poll_posix.h': [
        'UNKNOWN',
    ],
    'third_party/libusb/libusb/version.h': [
        'UNKNOWN',
    ],
    'third_party/libusb/autogen.sh': [
        'UNKNOWN',
    ],
    'third_party/libusb/config.h': [
        'UNKNOWN',
    ],
    'third_party/libusb/msvc/config.h': [
        'UNKNOWN',
    ],

    'third_party/libvpx/source': [  # http://crbug.com/98319
        'UNKNOWN',
    ],
    'third_party/libvpx/source/libvpx/examples/includes': [
        'GPL (v2 or later)',
    ],
    'third_party/libwebp': [  # http://crbug.com/98448
        'UNKNOWN',
    ],
    'third_party/libxml': [
        'UNKNOWN',
    ],
    'third_party/libxslt': [
        'UNKNOWN',
    ],
    'third_party/lzma_sdk': [
        'UNKNOWN',
    ],
    'third_party/mesa/MesaLib': [
        'GPL (v2)',
        'GPL (v3 or later)',
        'UNKNOWN',  # http://crbug.com/98450
    ],
    'third_party/modp_b64': [
        'UNKNOWN',
    ],
    'third_party/npapi/npspy/extern/java': [
        'GPL (unversioned/unknown version)',
    ],
    'third_party/openssl': [  # http://crbug.com/98451
        'UNKNOWN',
    ],
    'third_party/ots/tools/ttf-checksum.py': [  # http://code.google.com/p/ots/issues/detail?id=2
        'UNKNOWN',
    ],
    'third_party/molokocacao/NSBezierPath+MCAdditions.h': [  # http://crbug.com/98453
        'UNKNOWN',
    ],
    'third_party/npapi/npspy': [
        'UNKNOWN',
    ],
    'third_party/ocmock/OCMock': [  # http://crbug.com/98454
        'UNKNOWN',
    ],
    'third_party/ply/__init__.py': [
        'UNKNOWN',
    ],
    'third_party/protobuf': [  # http://crbug.com/98455
        'UNKNOWN',
    ],
    'third_party/pylib': [
        'UNKNOWN',
    ],
    'third_party/scons-2.0.1/engine/SCons': [  # http://crbug.com/98462
        'UNKNOWN',
    ],
    'third_party/simplejson': [
        'UNKNOWN',
    ],
    'third_party/skia': [  # http://crbug.com/98463
        'UNKNOWN',
    ],
    'third_party/snappy/src': [  # http://crbug.com/98464
        'UNKNOWN',
    ],
    'third_party/smhasher/src': [  # http://crbug.com/98465
        'UNKNOWN',
    ],
    'third_party/sqlite': [
        'UNKNOWN',
    ],
    'third_party/swig/Lib/linkruntime.c': [  # http://crbug.com/98585
        'UNKNOWN',
    ],
    'third_party/talloc': [
        'GPL (v3 or later)',
        'UNKNOWN',  # http://crbug.com/98588
    ],
    'third_party/tcmalloc': [
        'UNKNOWN',  # http://crbug.com/98589
    ],
    'third_party/tlslite': [
        'UNKNOWN',
    ],
    'third_party/webdriver': [  # http://crbug.com/98590
        'UNKNOWN',
    ],
    'third_party/webrtc': [  # http://crbug.com/98592
        'UNKNOWN',
    ],
    'third_party/xdg-utils': [  # http://crbug.com/98593
        'UNKNOWN',
    ],
    'third_party/yasm/source': [  # http://crbug.com/98594
        'UNKNOWN',
    ],
    'third_party/zlib/contrib/minizip': [
        'UNKNOWN',
    ],
    'third_party/zlib/trees.h': [
        'UNKNOWN',
    ],
    'tools/dromaeo_benchmark_runner/dromaeo_benchmark_runner.py': [
        'UNKNOWN',
    ],
    'tools/emacs': [  # http://crbug.com/98595
        'UNKNOWN',
    ],
    'tools/grit/grit/node/custom/__init__.py': [
        'UNKNOWN',
    ],
    'tools/gyp/test': [
        'UNKNOWN',
    ],
    'tools/histograms': [
        'UNKNOWN',
    ],
    'tools/memory_watcher': [
        'UNKNOWN',
    ],
    'tools/playback_benchmark': [
        'UNKNOWN',
    ],
    'tools/python/google/__init__.py': [
        'UNKNOWN',
    ],
    'tools/site_compare': [
        'UNKNOWN',
    ],
    'tools/stats_viewer/Properties/AssemblyInfo.cs': [
        'UNKNOWN',
    ],
    'tools/symsrc/pefile.py': [
        'UNKNOWN',
    ],
    'v8/test/cctest': [  # http://crbug.com/98597
        'UNKNOWN',
    ],
    'webkit/data/ico_decoder': [
        'UNKNOWN',
    ],
}
def check_licenses(options, args):
  """Run licensecheck.pl over the tree and report non-whitelisted licenses.

  Returns 0 on success, 1 on failure (bad arguments, licensecheck error,
  or at least one file with a non-whitelisted license).
  """
  # Figure out which directory we have to check.
  if len(args) == 0:
    # No directory to check specified, use the repository root.
    start_dir = options.base_directory
  elif len(args) == 1:
    # Directory specified. Start here. It's supposed to be relative to the
    # base directory.
    start_dir = os.path.abspath(os.path.join(options.base_directory, args[0]))
  else:
    # More than one argument, we don't handle this.
    PrintUsage()
    return 1

  print "Using base directory:", options.base_directory
  print "Checking:", start_dir
  print

  # The scanner itself is vendored under third_party/devscripts.
  licensecheck_path = os.path.abspath(os.path.join(options.base_directory,
                                                   'third_party',
                                                   'devscripts',
                                                   'licensecheck.pl'))

  licensecheck = subprocess.Popen([licensecheck_path, '-r', start_dir],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
  stdout, stderr = licensecheck.communicate()
  if options.verbose:
    print '----------- licensecheck stdout -----------'
    print stdout
    print '--------- end licensecheck stdout ---------'
  # Any stderr output (or a non-zero exit) is treated as a hard failure.
  if licensecheck.returncode != 0 or stderr:
    print '----------- licensecheck stderr -----------'
    print stderr
    print '--------- end licensecheck stderr ---------'
    print "\nFAILED\n"
    return 1

  success = True
  # licensecheck output is one "<path>: <license>" line per scanned file.
  for line in stdout.splitlines():
    filename, license = line.split(':', 1)
    filename = os.path.relpath(filename.strip(), options.base_directory)

    # All files in the build output directory are generated one way or another.
    # There's no need to check them.
    if filename.startswith('out/') or filename.startswith('sconsbuild/'):
      continue

    # For now we're just interested in the license.
    license = license.replace('*No copyright*', '').strip()

    # Skip generated files.
    if 'GENERATED FILE' in license:
      continue

    if license in WHITELISTED_LICENSES:
      continue

    # Path-specific suppressions are prefix matches, unless disabled via
    # --ignore-suppressions.
    if not options.ignore_suppressions:
      found_path_specific = False
      for prefix in PATH_SPECIFIC_WHITELISTED_LICENSES:
        if (filename.startswith(prefix) and
            license in PATH_SPECIFIC_WHITELISTED_LICENSES[prefix]):
          found_path_specific = True
          break
      if found_path_specific:
        continue

    print "'%s' has non-whitelisted license '%s'" % (filename, license)
    success = False

  if success:
    print "\nSUCCESS\n"
    return 0
  else:
    print "\nFAILED\n"
    print "Please read",
    print "http://www.chromium.org/developers/adding-3rd-party-libraries"
    print "for more info how to handle the failure."
    print
    print "Please respect OWNERS of checklicenses.py. Changes violating"
    print "this requirement may be reverted."
    return 1
def main():
  """Parse command-line options and run the license check."""
  # Default root is two levels up from this script's directory.
  repo_root = os.path.abspath(
      os.path.join(os.path.dirname(__file__), '..', '..'))
  parser = optparse.OptionParser()
  parser.add_option('--root', default=repo_root,
                    dest='base_directory',
                    help='Specifies the repository root. This defaults '
                         'to "../.." relative to the script file, which '
                         'will normally be the repository root.')
  parser.add_option('-v', '--verbose', action='store_true',
                    default=False, help='Print debug logging')
  parser.add_option('--ignore-suppressions',
                    action='store_true',
                    default=False,
                    help='Ignore path-specific license whitelist.')
  opts, arguments = parser.parse_args()
  return check_licenses(opts, arguments)
if __name__ == '__main__':
  sys.exit(main())
| {
"content_hash": "adb9c0aa8206eced784da288895b2e77",
"timestamp": "",
"source": "github",
"line_count": 508,
"max_line_length": 97,
"avg_line_length": 28.033464566929133,
"alnum_prop": 0.550523137420125,
"repo_name": "ropik/chromium",
"id": "68d03bd5c585b1db62333c7eeb27b6a695eabbf3",
"size": "14430",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tools/checklicenses/checklicenses.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Assembly",
"bytes": "53437"
},
{
"name": "C",
"bytes": "3133406"
},
{
"name": "C++",
"bytes": "105465099"
},
{
"name": "CSS",
"bytes": "472963"
},
{
"name": "Go",
"bytes": "18556"
},
{
"name": "Java",
"bytes": "62764"
},
{
"name": "JavaScript",
"bytes": "8875410"
},
{
"name": "Makefile",
"bytes": "49910"
},
{
"name": "Objective-C",
"bytes": "805816"
},
{
"name": "Objective-C++",
"bytes": "4643562"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "Perl",
"bytes": "63993"
},
{
"name": "Python",
"bytes": "4506305"
},
{
"name": "Rebol",
"bytes": "524"
},
{
"name": "Shell",
"bytes": "271559"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "8497"
}
],
"symlink_target": ""
} |
"""Invokes all the unit tests for the module."""
import unittest
import dir_archive_test
import path2listtest
import pyfib_test
def suite():
    """Returns the suite of unit tests."""
    return unittest.TestSuite([
        dir_archive_test.suite(),
        path2listtest.suite(),
        pyfib_test.suite(),
    ])
if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite())
| {
"content_hash": "7a4a648e2bee0c3d66e70dd538d535ed",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 62,
"avg_line_length": 20.842105263157894,
"alnum_prop": 0.6616161616161617,
"repo_name": "mrwizard82d1/pyutils",
"id": "cd71bef88e27b633308b261923be86d4c03017ac",
"size": "412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "all_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "61490"
},
{
"name": "Shell",
"bytes": "94"
}
],
"symlink_target": ""
} |
import pytest
import os
import shutil
import time
from datetime import datetime
from shapely.geometry import Point
from pygw.base import CloseableIterator
from pygw.store import DataStoreFactory
from pygw.store.rocksdb import RocksDBOptions
from pygw.geotools import SimpleFeatureTypeBuilder
from pygw.geotools import AttributeDescriptor
from pygw.geotools import FeatureDataAdapter
from pygw.geotools import SimpleFeatureBuilder
# "Point" Type
POINT_TYPE_NAME = "TestPointType"
POINT_GEOMETRY_FIELD = "the_geom"
POINT_TIME_FIELD = "date"
POINT_NUMBER_FIELD = "flt"
POINT_COLOR_FIELD = "color"
POINT_SHAPE_FIELD = "shape"
_point_type_builder = SimpleFeatureTypeBuilder()
_point_type_builder.set_name(POINT_TYPE_NAME)
_point_type_builder.add(AttributeDescriptor.point(POINT_GEOMETRY_FIELD))
_point_type_builder.add(AttributeDescriptor.date(POINT_TIME_FIELD))
_point_type_builder.add(AttributeDescriptor.float(POINT_NUMBER_FIELD))
_point_type_builder.add(AttributeDescriptor.string(POINT_COLOR_FIELD))
_point_type_builder.add(AttributeDescriptor.string(POINT_SHAPE_FIELD))
POINT_TYPE = _point_type_builder.build_feature_type()
# "Point" Type Adapter
POINT_TYPE_ADAPTER = FeatureDataAdapter(POINT_TYPE)
# "Point" Feature builder
POINT_FEATURE_BUILDER = SimpleFeatureBuilder(POINT_TYPE)
COLORS = ['RED', 'GREEN', 'BLUE']
SHAPES = ['SQUARE', 'CIRCLE', 'TRIANGLE', 'RECTANGLE']
def _create_feature(fid, geometry, timestamp):
    """Build one test feature whose attributes all derive from *timestamp*."""
    builder = POINT_FEATURE_BUILDER
    builder.set_attr(POINT_GEOMETRY_FIELD, geometry)
    builder.set_attr(POINT_TIME_FIELD, datetime.utcfromtimestamp(timestamp))
    builder.set_attr(POINT_NUMBER_FIELD, timestamp)
    builder.set_attr(POINT_COLOR_FIELD, COLORS[timestamp % 3])
    builder.set_attr(POINT_SHAPE_FIELD, SHAPES[timestamp % 4])
    return builder.build(fid)
def latitude(lon_value):
    """Fold a longitude-like value into a valid latitude via modulo.

    Negative inputs use a negative modulus so the sign is preserved."""
    modulus = -90 if lon_value < 0 else 90
    return lon_value % modulus
# 360 features, one per integer degree of longitude, plus a second set
# offset by half a degree; the loop index doubles as the timestamp.
TEST_DATA = [
    _create_feature(id_, Point(i, latitude(i)), i) for
    id_, i in enumerate(range(-180, 180))]
TEST_DATA_OFFSET = [
    _create_feature(id_, Point(i+0.5, latitude(i+0.5)), i) for
    id_, i in enumerate(range(-180, 180))]

# Test Directory
TEST_DIR = os.path.join(os.getcwd(), "test")
@pytest.fixture
def test_ds():
    """Yield a RocksDB-backed GeoWave data store rooted in TEST_DIR,
    deleting all data and the directory on teardown."""
    os.makedirs(TEST_DIR, exist_ok=True)
    options = RocksDBOptions()
    options.set_geowave_namespace("geowave.tests")
    options.set_directory(os.path.join(TEST_DIR, "datastore"))
    ds = DataStoreFactory.create_data_store(options)
    yield ds
    # teardown here
    ds.delete_all()
    shutil.rmtree(TEST_DIR)
    # Wait until the OS has finished removing the directory so the next
    # test starts from a clean slate.
    while os.path.isdir(TEST_DIR):
        time.sleep(0.01)
def write_test_data_offset(ds, *expected_indices):
    """Write the half-degree-offset feature set through *ds*."""
    write_test_data(ds, *expected_indices, data=TEST_DATA_OFFSET)
def write_test_data(ds, *expected_indices, data=TEST_DATA):
    """Write *data* through *ds*, asserting every feature lands in exactly
    the given indices."""
    writer = ds.create_writer(POINT_TYPE_ADAPTER.get_type_name())
    for feature in data:
        write_results = writer.write(feature)
        assert not write_results.is_empty()
        written = write_results.get_written_index_names()
        assert len(written) == len(expected_indices)
        assert all(idx.get_name() in written for idx in expected_indices)
    writer.close()
def results_as_list(results):
    """Drain a CloseableIterator into a list, closing it afterwards."""
    assert isinstance(results, CloseableIterator)
    collected = list(results)
    results.close()
    return collected
| {
"content_hash": "e870545abb2a2380c2affdf15090edd2",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 90,
"avg_line_length": 32.85436893203884,
"alnum_prop": 0.7310874704491725,
"repo_name": "locationtech/geowave",
"id": "071a1644d4d2381dd0719a60984289b369858304",
"size": "3904",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/src/main/python/pygw/test/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "10168"
},
{
"name": "Dockerfile",
"bytes": "3268"
},
{
"name": "FreeMarker",
"bytes": "2879"
},
{
"name": "Gnuplot",
"bytes": "57750"
},
{
"name": "Java",
"bytes": "11564564"
},
{
"name": "Puppet",
"bytes": "8849"
},
{
"name": "Python",
"bytes": "418256"
},
{
"name": "Scheme",
"bytes": "20491"
},
{
"name": "Shell",
"bytes": "100172"
}
],
"symlink_target": ""
} |
from os.path import basename, dirname, exists, isdir, isfile, join, realpath, split
import glob
from shutil import rmtree
from six import with_metaclass
import hashlib
from re import match
import sh
import shutil
import fnmatch
from os import listdir, unlink, environ, mkdir, curdir, walk
from sys import stdout
import time
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from pythonforandroid.logger import (logger, info, warning, debug, shprint, info_main)
from pythonforandroid.util import (urlretrieve, current_directory, ensure_dir,
BuildInterruptingException)
def import_recipe(module, filename):
    """Load and return the Python module at *filename* under name *module*."""
    import importlib.util
    if hasattr(importlib.util, 'module_from_spec'):
        # Python 3.5+
        spec = importlib.util.spec_from_file_location(module, filename)
        recipe_module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(recipe_module)
        return recipe_module
    # Python 3.3 and 3.4:
    from importlib.machinery import SourceFileLoader
    return SourceFileLoader(module, filename).load_module()
class RecipeMeta(type):
    """Metaclass that renames ``url``/``version`` class attributes to
    ``_url``/``_version`` on every subclass except ``Recipe`` itself, so
    they can be exposed through env-var-overridable properties."""

    def __new__(cls, name, bases, dct):
        if name != 'Recipe':
            for attr in ('url', 'version'):
                if attr in dct:
                    dct['_' + attr] = dct.pop(attr)
        return super().__new__(cls, name, bases, dct)
class Recipe(with_metaclass(RecipeMeta)):
    # Class-level defaults shared by every recipe; subclasses override the
    # ones they need (`url`/`version` are rewritten to `_url`/`_version`
    # by RecipeMeta).
    _url = None
    '''The address from which the recipe may be downloaded. This is not
    essential, it may be omitted if the source is available some other
    way, such as via the :class:`IncludedFilesBehaviour` mixin.

    If the url includes the version, you may (and probably should)
    replace this with ``{version}``, which will automatically be
    replaced by the :attr:`version` string during download.

    .. note:: Methods marked (internal) are used internally and you
              probably don't need to call them, but they are available
              if you want.
    '''

    _version = None
    '''A string giving the version of the software the recipe describes,
    e.g. ``2.0.3`` or ``master``.'''

    md5sum = None
    '''The md5sum of the source from the :attr:`url`. Non-essential, but
    you should try to include this, it is used to check that the download
    finished correctly.
    '''

    depends = []
    '''A list containing the names of any recipes that this recipe depends on.
    '''

    conflicts = []
    '''A list containing the names of any recipes that are known to be
    incompatible with this one.'''

    opt_depends = []
    '''A list of optional dependencies, that must be built before this
    recipe if they are built at all, but whose presence is not essential.'''

    patches = []
    '''A list of patches to apply to the source. Values can be either a string
    referring to the patch file relative to the recipe dir, or a tuple of the
    string patch file and a callable, which will receive the kwargs `arch` and
    `recipe`, which should return True if the patch should be applied.'''

    python_depends = []
    '''A list of pure-Python packages that this package requires. These
    packages will NOT be available at build time, but will be added to the
    list of pure-Python packages to install via pip. If you need these packages
    at build time, you must create a recipe.'''

    archs = ['armeabi']  # Not currently implemented properly

    built_libraries = {}
    """Each recipe that builds a system library (e.g.:libffi, openssl, etc...)
    should contain a dict holding the relevant information of the library. The
    keys should be the generated libraries and the values the relative path of
    the library inside his build folder. This dict will be used to perform
    different operations:
        - copy the library into the right location, depending on if it's shared
          or static)
        - check if we have to rebuild the library

    Here an example of how it would look like for `libffi` recipe:

        - `built_libraries = {'libffi.so': '.libs'}`

    .. note:: in case that the built library resides in recipe's build
              directory, you can set the following values for the relative
              path: `'.', None or ''`
    """

    need_stl_shared = False
    '''Some libraries or python packages may need to be linked with android's
    stl. We can automatically do this for any recipe if we set this property to
    `True`'''

    stl_lib_name = 'c++_shared'
    '''
    The default STL shared lib to use: `c++_shared`.

    .. note:: Android NDK version > 17 only supports 'c++_shared', because
        starting from NDK r18 the `gnustl_shared` lib has been deprecated.
    '''

    stl_lib_source = '{ctx.ndk_dir}/sources/cxx-stl/llvm-libc++'
    '''
    The source directory of the selected stl lib, defined in property
    `stl_lib_name`
    '''
@property
def stl_include_dir(self):
return join(self.stl_lib_source.format(ctx=self.ctx), 'include')
def get_stl_lib_dir(self, arch):
return join(
self.stl_lib_source.format(ctx=self.ctx), 'libs', arch.arch
)
def get_stl_library(self, arch):
return join(
self.get_stl_lib_dir(arch),
'lib{name}.so'.format(name=self.stl_lib_name),
)
def install_stl_lib(self, arch):
if not self.ctx.has_lib(
arch.arch, 'lib{name}.so'.format(name=self.stl_lib_name)
):
self.install_libs(arch, self.get_stl_library(arch))
@property
def version(self):
key = 'VERSION_' + self.name
return environ.get(key, self._version)
@property
def url(self):
key = 'URL_' + self.name
return environ.get(key, self._url)
@property
def versioned_url(self):
'''A property returning the url of the recipe with ``{version}``
replaced by the :attr:`url`. If accessing the url, you should use this
property, *not* access the url directly.'''
if self.url is None:
return None
return self.url.format(version=self.version)
    def download_file(self, url, target, cwd=None):
        """
        (internal) Download an ``url`` to a ``target``.

        http(s) urls are fetched with up to five retries; git-style urls
        are cloned (or updated in place if already cloned).  Returns the
        target path, or None when *url* is falsy.
        """
        if not url:
            return
        info('Downloading {} from {}'.format(self.name, url))

        if cwd:
            target = join(cwd, target)

        parsed_url = urlparse(url)
        if parsed_url.scheme in ('http', 'https'):
            def report_hook(index, blksize, size):
                # urlretrieve progress callback; size <= 0 means the total
                # length is unknown.
                if size <= 0:
                    progression = '{0} bytes'.format(index * blksize)
                else:
                    progression = '{0:.2f}%'.format(
                        index * blksize * 100. / float(size))
                # Avoid spamming CI logs with carriage-return progress lines.
                if "CI" not in environ:
                    stdout.write('- Download {}\r'.format(progression))
                    stdout.flush()

            if exists(target):
                unlink(target)

            # Download item with multiple attempts (for bad connections):
            attempts = 0
            while True:
                try:
                    urlretrieve(url, target, report_hook)
                except OSError:
                    attempts += 1
                    if attempts >= 5:
                        raise
                    stdout.write('Download failed retrying in a second...')
                    time.sleep(1)
                    continue
                break
            return target
        elif parsed_url.scheme in ('git', 'git+file', 'git+ssh', 'git+http', 'git+https'):
            if isdir(target):
                # Existing clone: refresh it rather than recloning.
                with current_directory(target):
                    shprint(sh.git, 'fetch', '--tags')
                    if self.version:
                        shprint(sh.git, 'checkout', self.version)
                        shprint(sh.git, 'pull')
                    shprint(sh.git, 'pull', '--recurse-submodules')
                    shprint(sh.git, 'submodule', 'update', '--recursive')
            else:
                # Strip the 'git+' transport prefix before cloning.
                if url.startswith('git+'):
                    url = url[4:]
                shprint(sh.git, 'clone', '--recursive', url, target)
                if self.version:
                    with current_directory(target):
                        shprint(sh.git, 'checkout', self.version)
                        shprint(sh.git, 'submodule', 'update', '--recursive')
            return target
def apply_patch(self, filename, arch, build_dir=None):
"""
Apply a patch from the current recipe directory into the current
build directory.
.. versionchanged:: 0.6.0
Add ability to apply patch from any dir via kwarg `build_dir`'''
"""
info("Applying patch {}".format(filename))
build_dir = build_dir if build_dir else self.get_build_dir(arch)
filename = join(self.get_recipe_dir(), filename)
shprint(sh.patch, "-t", "-d", build_dir, "-p1",
"-i", filename, _tail=10)
def copy_file(self, filename, dest):
info("Copy {} to {}".format(filename, dest))
filename = join(self.get_recipe_dir(), filename)
dest = join(self.build_dir, dest)
shutil.copy(filename, dest)
def append_file(self, filename, dest):
info("Append {} to {}".format(filename, dest))
filename = join(self.get_recipe_dir(), filename)
dest = join(self.build_dir, dest)
with open(filename, "rb") as fd:
data = fd.read()
with open(dest, "ab") as fd:
fd.write(data)
@property
def name(self):
'''The name of the recipe, the same as the folder containing it.'''
modname = self.__class__.__module__
return modname.split(".", 2)[-1]
@property
def filtered_archs(self):
'''Return archs of self.ctx that are valid build archs
for the Recipe.'''
result = []
for arch in self.ctx.archs:
if not self.archs or (arch.arch in self.archs):
result.append(arch)
return result
def check_recipe_choices(self):
'''Checks what recipes are being built to see which of the alternative
and optional dependencies are being used,
and returns a list of these.'''
recipes = []
built_recipes = self.ctx.recipe_build_order
for recipe in self.depends:
if isinstance(recipe, (tuple, list)):
for alternative in recipe:
if alternative in built_recipes:
recipes.append(alternative)
break
for recipe in self.opt_depends:
if recipe in built_recipes:
recipes.append(recipe)
return sorted(recipes)
def get_opt_depends_in_list(self, recipes):
'''Given a list of recipe names, returns those that are also in
self.opt_depends.
'''
return [recipe for recipe in recipes if recipe in self.opt_depends]
def get_build_container_dir(self, arch):
'''Given the arch name, returns the directory where it will be
built.
This returns a different directory depending on what
alternative or optional dependencies are being built.
'''
dir_name = self.get_dir_name()
return join(self.ctx.build_dir, 'other_builds',
dir_name, '{}__ndk_target_{}'.format(arch, self.ctx.ndk_api))
def get_dir_name(self):
choices = self.check_recipe_choices()
dir_name = '-'.join([self.name] + choices)
return dir_name
def get_build_dir(self, arch):
'''Given the arch name, returns the directory where the
downloaded/copied package will be built.'''
return join(self.get_build_container_dir(arch), self.name)
def get_recipe_dir(self):
"""
Returns the local recipe directory or defaults to the core recipe
directory.
"""
if self.ctx.local_recipes is not None:
local_recipe_dir = join(self.ctx.local_recipes, self.name)
if exists(local_recipe_dir):
return local_recipe_dir
return join(self.ctx.root_dir, 'recipes', self.name)
# Public Recipe API to be subclassed if needed
def download_if_necessary(self):
info_main('Downloading {}'.format(self.name))
user_dir = environ.get('P4A_{}_DIR'.format(self.name.lower()))
if user_dir is not None:
info('P4A_{}_DIR is set, skipping download for {}'.format(
self.name, self.name))
return
self.download()
    def download(self):
        """Fetch this recipe's source into the packages cache, verifying the
        md5sum (from the recipe or a ``#md5=`` url fragment) when known."""
        if self.url is None:
            info('Skipping {} download as no URL is set'.format(self.name))
            return

        url = self.versioned_url
        ma = match(u'^(.+)#md5=([0-9a-f]{32})$', url)
        if ma:                  # fragmented URL?
            if self.md5sum:
                raise ValueError(
                    ('Received md5sum from both the {} recipe '
                     'and its url').format(self.name))
            url = ma.group(1)
            expected_md5 = ma.group(2)
        else:
            expected_md5 = self.md5sum

        shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name))

        with current_directory(join(self.ctx.packages_path, self.name)):
            filename = shprint(sh.basename, url).stdout[:-1].decode('utf-8')

            do_download = True
            marker_filename = '.mark-{}'.format(filename)
            if exists(filename) and isfile(filename):
                if not exists(marker_filename):
                    # Marker missing means a previous download was
                    # interrupted; discard the partial file.
                    shprint(sh.rm, filename)
                elif expected_md5:
                    current_md5 = md5sum(filename)
                    if current_md5 != expected_md5:
                        debug('* Generated md5sum: {}'.format(current_md5))
                        debug('* Expected md5sum: {}'.format(expected_md5))
                        raise ValueError(
                            ('Generated md5sum does not match expected md5sum '
                             'for {} recipe').format(self.name))
                    do_download = False
                else:
                    do_download = False

            # If we got this far, we will download
            if do_download:
                debug('Downloading {} from {}'.format(self.name, url))
                # The marker file records a completed download; remove it
                # before, recreate it after.
                shprint(sh.rm, '-f', marker_filename)
                self.download_file(self.versioned_url, filename)
                shprint(sh.touch, marker_filename)
                if exists(filename) and isfile(filename) and expected_md5:
                    current_md5 = md5sum(filename)
                    if expected_md5 is not None:
                        if current_md5 != expected_md5:
                            debug('* Generated md5sum: {}'.format(current_md5))
                            debug('* Expected md5sum: {}'.format(expected_md5))
                            raise ValueError(
                                ('Generated md5sum does not match expected md5sum '
                                 'for {} recipe').format(self.name))
            else:
                info('{} download already cached, skipping'.format(self.name))
    def unpack(self, arch):
        """Unpack the cached source archive (or copy a source directory /
        ``P4A_<name>_DIR`` override) into the per-arch build dir."""
        info_main('Unpacking {} for {}'.format(self.name, arch))

        build_dir = self.get_build_container_dir(arch)

        user_dir = environ.get('P4A_{}_DIR'.format(self.name.lower()))
        if user_dir is not None:
            info('P4A_{}_DIR exists, symlinking instead'.format(
                self.name.lower()))
            if exists(self.get_build_dir(arch)):
                return
            # Recreate an empty container dir, then copy the user source in.
            shprint(sh.rm, '-rf', build_dir)
            shprint(sh.mkdir, '-p', build_dir)
            shprint(sh.rmdir, build_dir)
            ensure_dir(build_dir)
            shprint(sh.cp, '-a', user_dir, self.get_build_dir(arch))
            return

        if self.url is None:
            info('Skipping {} unpack as no URL is set'.format(self.name))
            return

        filename = shprint(
            sh.basename, self.versioned_url).stdout[:-1].decode('utf-8')
        ma = match(u'^(.+)#md5=([0-9a-f]{32})$', filename)
        if ma:                  # fragmented URL?
            filename = ma.group(1)

        with current_directory(build_dir):
            directory_name = self.get_build_dir(arch)

            if not exists(directory_name) or not isdir(directory_name):
                extraction_filename = join(
                    self.ctx.packages_path, self.name, filename)
                if isfile(extraction_filename):
                    if extraction_filename.endswith('.zip'):
                        try:
                            sh.unzip(extraction_filename)
                        except (sh.ErrorReturnCode_1, sh.ErrorReturnCode_2):
                            # return code 1 means unzipping had
                            # warnings but did complete,
                            # apparently happens sometimes with
                            # github zips
                            pass
                        # Rename the archive's root folder to the expected
                        # build dir name if they differ.
                        import zipfile
                        fileh = zipfile.ZipFile(extraction_filename, 'r')
                        root_directory = fileh.filelist[0].filename.split('/')[0]
                        if root_directory != basename(directory_name):
                            shprint(sh.mv, root_directory, directory_name)
                    elif extraction_filename.endswith(
                            ('.tar.gz', '.tgz', '.tar.bz2', '.tbz2', '.tar.xz', '.txz')):
                        sh.tar('xf', extraction_filename)
                        root_directory = sh.tar('tf', extraction_filename).stdout.decode(
                            'utf-8').split('\n')[0].split('/')[0]
                        if root_directory != basename(directory_name):
                            shprint(sh.mv, root_directory, directory_name)
                    else:
                        raise Exception(
                            'Could not extract {} download, it must be .zip, '
                            '.tar.gz or .tar.bz2 or .tar.xz'.format(extraction_filename))
                elif isdir(extraction_filename):
                    # A plain source directory: copy everything except .git.
                    mkdir(directory_name)
                    for entry in listdir(extraction_filename):
                        if entry not in ('.git',):
                            shprint(sh.cp, '-Rv',
                                    join(extraction_filename, entry),
                                    directory_name)
                else:
                    raise Exception(
                        'Given path is neither a file nor a directory: {}'
                        .format(extraction_filename))
            else:
                info('{} is already unpacked, skipping'.format(self.name))
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
"""Return the env specialized for the recipe
"""
if arch is None:
arch = self.filtered_archs[0]
env = arch.get_env(with_flags_in_cc=with_flags_in_cc)
if self.need_stl_shared:
env['CPPFLAGS'] = env.get('CPPFLAGS', '')
env['CPPFLAGS'] += ' -I{}'.format(self.stl_include_dir)
env['CXXFLAGS'] = env['CFLAGS'] + ' -frtti -fexceptions'
if with_flags_in_cc:
env['CXX'] += ' -frtti -fexceptions'
env['LDFLAGS'] += ' -L{}'.format(self.get_stl_lib_dir(arch))
env['LIBS'] = env.get('LIBS', '') + " -l{}".format(
self.stl_lib_name
)
return env
def prebuild_arch(self, arch):
'''Run any pre-build tasks for the Recipe. By default, this checks if
any prebuild_archname methods exist for the archname of the current
architecture, and runs them if so.'''
prebuild = "prebuild_{}".format(arch.arch.replace('-', '_'))
if hasattr(self, prebuild):
getattr(self, prebuild)()
else:
info('{} has no {}, skipping'.format(self.name, prebuild))
def is_patched(self, arch):
build_dir = self.get_build_dir(arch.arch)
return exists(join(build_dir, '.patched'))
    def apply_patches(self, arch, build_dir=None):
        '''Apply any patches for the Recipe.

        Each entry in ``self.patches`` is either a patch filename template
        (formatted with the recipe version and arch name) or a
        ``(patch, check)`` pair, where ``check(arch=..., recipe=...)``
        decides whether the patch applies to this build.

        A ``.patched`` marker file in the build dir records that patching
        already happened, so repeated calls are no-ops.

        .. versionchanged:: 0.6.0
            Add ability to apply patches from any dir via kwarg `build_dir`'''
        if self.patches:
            info_main('Applying patches for {}[{}]'
                      .format(self.name, arch.arch))
            if self.is_patched(arch):
                info_main('{} already patched, skipping'.format(self.name))
                return
            build_dir = build_dir if build_dir else self.get_build_dir(arch.arch)
            for patch in self.patches:
                if isinstance(patch, (tuple, list)):
                    # Conditional patch: unpack and consult its check callable.
                    patch, patch_check = patch
                    if not patch_check(arch=arch, recipe=self):
                        continue
                self.apply_patch(
                    patch.format(version=self.version, arch=arch.arch),
                    arch.arch, build_dir=build_dir)
            # Drop the marker so a later call sees is_patched() == True.
            shprint(sh.touch, join(build_dir, '.patched'))
def should_build(self, arch):
'''Should perform any necessary test and return True only if it needs
building again. Per default we implement a library test, in case that
we detect so.
'''
if self.built_libraries:
return not all(
exists(lib) for lib in self.get_libraries(arch.arch)
)
return True
def build_arch(self, arch):
'''Run any build tasks for the Recipe. By default, this checks if
any build_archname methods exist for the archname of the current
architecture, and runs them if so.'''
build = "build_{}".format(arch.arch)
if hasattr(self, build):
getattr(self, build)()
def install_libraries(self, arch):
'''This method is always called after `build_arch`. In case that we
detect a library recipe, defined by the class attribute
`built_libraries`, we will copy all defined libraries into the
right location.
'''
if not self.built_libraries:
return
shared_libs = [
lib for lib in self.get_libraries(arch) if lib.endswith(".so")
]
self.install_libs(arch, *shared_libs)
def postbuild_arch(self, arch):
'''Run any post-build tasks for the Recipe. By default, this checks if
any postbuild_archname methods exist for the archname of the
current architecture, and runs them if so.
'''
postbuild = "postbuild_{}".format(arch.arch)
if hasattr(self, postbuild):
getattr(self, postbuild)()
if self.need_stl_shared:
self.install_stl_lib(arch)
def prepare_build_dir(self, arch):
'''Copies the recipe data into a build dir for the given arch. By
default, this unpacks a downloaded recipe. You should override
it (or use a Recipe subclass with different behaviour) if you
want to do something else.
'''
self.unpack(arch)
    def clean_build(self, arch=None):
        '''Deletes all the build information of the recipe.
        If arch is not None, only this arch dir is deleted. Otherwise
        (the default) all builds for all archs are deleted.
        By default, this just deletes the main build dir. If the
        recipe has e.g. object files biglinked, or .so files stored
        elsewhere, you should override this method.
        This method is intended for testing purposes, it may have
        strange results. Rebuild everything if this seems to happen.
        '''
        if arch is None:
            # No arch given: use the recipe's dir under other_builds so the
            # glob below matches every per-arch variant.
            base_dir = join(self.ctx.build_dir, 'other_builds', self.name)
        else:
            base_dir = self.get_build_container_dir(arch)
        # Collect suffixed variant dirs (base_dir-*) as well as the bare dir.
        dirs = glob.glob(base_dir + '-*')
        if exists(base_dir):
            dirs.append(base_dir)
        if not dirs:
            warning('Attempted to clean build for {} but found no existing '
                    'build dirs'.format(self.name))

        for directory in dirs:
            if exists(directory):
                # NOTE: logged before the rmtree actually runs.
                info('Deleting {}'.format(directory))
                shutil.rmtree(directory)

        # Delete any Python distributions to ensure the recipe build
        # doesn't persist in site-packages
        # NOTE(review): unconditional rmtree — raises if
        # python_installs_dir does not exist yet; confirm callers only
        # clean after at least one build has created it.
        shutil.rmtree(self.ctx.python_installs_dir)
def install_libs(self, arch, *libs):
libs_dir = self.ctx.get_libs_dir(arch.arch)
if not libs:
warning('install_libs called with no libraries to install!')
return
args = libs + (libs_dir,)
shprint(sh.cp, *args)
def has_libs(self, arch, *libs):
return all(map(lambda l: self.ctx.has_lib(arch.arch, l), libs))
def get_libraries(self, arch_name, in_context=False):
"""Return the full path of the library depending on the architecture.
Per default, the build library path it will be returned, unless
`get_libraries` has been called with kwarg `in_context` set to
True.
.. note:: this method should be used for library recipes only
"""
recipe_libs = set()
if not self.built_libraries:
return recipe_libs
for lib, rel_path in self.built_libraries.items():
if not in_context:
abs_path = join(self.get_build_dir(arch_name), rel_path, lib)
if rel_path in {".", "", None}:
abs_path = join(self.get_build_dir(arch_name), lib)
else:
abs_path = join(self.ctx.get_libs_dir(arch_name), lib)
recipe_libs.add(abs_path)
return recipe_libs
@classmethod
def recipe_dirs(cls, ctx):
recipe_dirs = []
if ctx.local_recipes is not None:
recipe_dirs.append(realpath(ctx.local_recipes))
if ctx.storage_dir:
recipe_dirs.append(join(ctx.storage_dir, 'recipes'))
recipe_dirs.append(join(ctx.root_dir, "recipes"))
return recipe_dirs
@classmethod
def list_recipes(cls, ctx):
forbidden_dirs = ('__pycache__', )
for recipes_dir in cls.recipe_dirs(ctx):
if recipes_dir and exists(recipes_dir):
for name in listdir(recipes_dir):
if name in forbidden_dirs:
continue
fn = join(recipes_dir, name)
if isdir(fn):
yield name
    @classmethod
    def get_recipe(cls, name, ctx):
        '''Returns the Recipe with the given name, if it exists.

        Lookup is case-insensitive: the folder on disk may differ in case
        from *name*. Loaded recipes are cached on the class, keyed by the
        lowercased name, so each recipe module is imported at most once.

        :raises ValueError: when no recipe folder matches *name*.
        '''
        name = name.lower()
        if not hasattr(cls, "recipes"):
            # Lazily create the class-level cache on first use.
            cls.recipes = {}
        if name in cls.recipes:
            return cls.recipes[name]

        recipe_file = None
        for recipes_dir in cls.recipe_dirs(ctx):
            if not exists(recipes_dir):
                continue
            # Find matching folder (may differ in case):
            for subfolder in listdir(recipes_dir):
                if subfolder.lower() == name:
                    recipe_file = join(recipes_dir, subfolder, '__init__.py')
                    if exists(recipe_file):
                        name = subfolder  # adapt to actual spelling
                        break
                    recipe_file = None
            if recipe_file is not None:
                break

        if not recipe_file:
            raise ValueError('Recipe does not exist: {}'.format(name))

        mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
        if len(logger.handlers) > 1:
            # NOTE(review): importing a recipe module appears able to attach
            # an extra handler to the logger; drop it to avoid duplicated
            # log output — confirm against import_recipe's behaviour.
            logger.removeHandler(logger.handlers[1])
        recipe = mod.recipe
        recipe.ctx = ctx
        cls.recipes[name.lower()] = recipe
        return recipe
class IncludedFilesBehaviour(object):
    '''Recipe mixin class that will automatically unpack files included in
    the recipe directory.'''

    # Name of the source file/dir inside the recipe directory; recipes
    # using this mixin must set it.
    src_filename = None

    def prepare_build_dir(self, arch):
        '''Wipe any previous build dir for *arch* and repopulate it with a
        fresh copy of the sources shipped alongside the recipe.'''
        if self.src_filename is None:
            raise BuildInterruptingException(
                'IncludedFilesBehaviour failed: no src_filename specified')
        build_dir = self.get_build_dir(arch)
        source = join(self.get_recipe_dir(), self.src_filename)
        shprint(sh.rm, '-rf', build_dir)
        shprint(sh.cp, '-a', source, build_dir)
class BootstrapNDKRecipe(Recipe):
    '''A recipe class for recipes built in an Android project jni dir with
    an Android.mk. These are not cached separately, but built in the
    bootstrap's own building directory.

    To build an NDK project which is not part of the bootstrap, see
    :class:`~pythonforandroid.recipe.NDKRecipe`.

    To link with python, call the method :meth:`get_recipe_env`
    with the kwarg *with_python=True*.
    '''

    # The name of the recipe build folder in the jni dir
    dir_name = None

    def get_build_container_dir(self, arch):
        # All bootstrap NDK recipes share the bootstrap's jni dir.
        return self.get_jni_dir()

    def get_build_dir(self, arch):
        if self.dir_name is None:
            raise ValueError(
                '{} recipe doesn\'t define a dir_name, but '
                'this is necessary'.format(self.name))
        return join(self.get_build_container_dir(arch), self.dir_name)

    def get_jni_dir(self):
        return join(self.ctx.bootstrap.build_dir, 'jni')

    def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=False):
        env = super().get_recipe_env(arch, with_flags_in_cc)
        if not with_python:
            return env

        python_recipe = self.ctx.python_recipe
        env['PYTHON_INCLUDE_ROOT'] = python_recipe.include_root(arch.arch)
        env['PYTHON_LINK_ROOT'] = python_recipe.link_root(arch.arch)
        ldlibs = ' -lpython{}'.format(python_recipe.major_minor_version_string)
        if 'python3' in python_recipe.name:
            # Link the "m" (pymalloc) flavoured libpython.
            ldlibs += 'm'
        env['EXTRA_LDLIBS'] = ldlibs
        return env
class NDKRecipe(Recipe):
    '''A recipe class for any NDK project not included in the bootstrap.'''

    # Library filenames expected under the per-arch obj/local dir.
    generated_libraries = []

    def should_build(self, arch):
        '''Rebuild whenever any expected output library is missing.'''
        lib_dir = self.get_lib_dir(arch)
        return any(
            not exists(join(lib_dir, lib))
            for lib in self.generated_libraries)

    def get_lib_dir(self, arch):
        return join(self.get_build_dir(arch.arch), 'obj', 'local', arch.arch)

    def get_jni_dir(self, arch):
        return join(self.get_build_dir(arch.arch), 'jni')

    def build_arch(self, arch, *extra_args):
        super().build_arch(arch)

        env = self.get_recipe_env(arch)
        debug_flag = '1' if self.ctx.build_as_debuggable else '0'
        with current_directory(self.get_build_dir(arch.arch)):
            shprint(
                sh.ndk_build,
                'V=1',
                'NDK_DEBUG=' + debug_flag,
                'APP_PLATFORM=android-' + str(self.ctx.ndk_api),
                'APP_ABI=' + arch.arch,
                *extra_args, _env=env
            )
class PythonRecipe(Recipe):
    site_packages_name = None
    '''The name of the module's folder when installed in the Python
    site-packages (e.g. for pyjnius it is 'jnius')'''

    call_hostpython_via_targetpython = True
    '''If True, tries to install the module using the hostpython binary
    copied to the target (normally arm) python build dir. However, this
    will fail if the module tries to import e.g. _io.so. Set this to False
    to call hostpython from its own build dir, installing the module in
    the right place via arguments to setup.py. However, this may not set
    the environment correctly and so False is not the default.'''

    install_in_hostpython = False
    '''If True, additionally installs the module in the hostpython build
    dir. This will make it available to other recipes if
    call_hostpython_via_targetpython is False.
    '''

    install_in_targetpython = True
    '''If True, installs the module in the targetpython installation dir.
    This is almost always what you want to do.'''

    setup_extra_args = []
    '''List of extra arguments to pass to setup.py'''

    depends = ['python3']
    '''
    .. note:: it's important to keep this depends as a class attribute outside
        `__init__` because sometimes we only initialize the class, so the
        `__init__` call won't be called and the deps would be missing
        (which breaks the dependency graph computation)
    .. warning:: don't forget to call `super().__init__()` in any recipe's
        `__init__`, or otherwise it may not be ensured that it depends
        on python2 or python3 which can break the dependency graph
    '''

    def __init__(self, *args, **kwargs):
        """Initialise and make sure 'python3' is among the dependencies."""
        super().__init__(*args, **kwargs)
        if 'python3' not in self.depends:
            # We ensure here that the recipe depends on python even it overrode
            # `depends`. We only do this if it doesn't already depend on any
            # python, since some recipes intentionally don't depend on/work
            # with all python variants
            depends = self.depends
            depends.append('python3')
            depends = list(set(depends))
            self.depends = depends

    def clean_build(self, arch=None):
        """Clean the build dirs (base class) and additionally remove this
        recipe's folder from every Python install's site-packages."""
        super().clean_build(arch=arch)
        name = self.folder_name
        python_install_dirs = glob.glob(join(self.ctx.python_installs_dir, '*'))
        for python_install in python_install_dirs:
            site_packages_dir = glob.glob(join(python_install, 'lib', 'python*',
                                               'site-packages'))
            if site_packages_dir:
                build_dir = join(site_packages_dir[0], name)
                if exists(build_dir):
                    # NOTE(review): logged before the deletion actually runs.
                    info('Deleted {}'.format(build_dir))
                    rmtree(build_dir)

    @property
    def real_hostpython_location(self):
        """Path of the real host python: the hostpython3 recipe's built
        interpreter when in use, otherwise a bare 'python<version>' name
        expected to resolve on the host."""
        host_name = 'host{}'.format(self.ctx.python_recipe.name)
        if host_name == 'hostpython3':
            python_recipe = Recipe.get_recipe(host_name, self.ctx)
            return python_recipe.python_exe
        else:
            python_recipe = self.ctx.python_recipe
            return 'python{}'.format(python_recipe.version)

    @property
    def hostpython_location(self):
        """The host python binary to invoke for this recipe: the
        target-copied hostpython unless call_hostpython_via_targetpython
        is False."""
        if not self.call_hostpython_via_targetpython:
            return self.real_hostpython_location
        return self.ctx.hostpython

    @property
    def folder_name(self):
        '''The name of the build folders containing this recipe.'''
        name = self.site_packages_name
        if name is None:
            name = self.name
        return name

    def get_recipe_env(self, arch=None, with_flags_in_cc=True):
        """Extend the base env with Python-specific settings (and, when
        building against the real hostpython, the target CPython include,
        link and PYTHONPATH configuration)."""
        env = super().get_recipe_env(arch, with_flags_in_cc)
        env['PYTHONNOUSERSITE'] = '1'
        # Set the LANG, this isn't usually important but is a better default
        # as it occasionally matters how Python e.g. reads files
        env['LANG'] = "en_GB.UTF-8"
        if not self.call_hostpython_via_targetpython:
            python_name = self.ctx.python_recipe.name
            env['CFLAGS'] += ' -I{}'.format(
                self.ctx.python_recipe.include_root(arch.arch)
            )
            env['LDFLAGS'] += ' -L{} -lpython{}'.format(
                self.ctx.python_recipe.link_root(arch.arch),
                self.ctx.python_recipe.major_minor_version_string,
            )
            if python_name == 'python3':
                # Appends directly to the -lpython flag above, i.e. link
                # against the "m" (pymalloc) flavoured libpython.
                env['LDFLAGS'] += 'm'
            # Build PYTHONPATH from the hostpython's Lib, its site-packages,
            # and any build/* subdirectories that exist.
            hppath = []
            hppath.append(join(dirname(self.hostpython_location), 'Lib'))
            hppath.append(join(hppath[0], 'site-packages'))
            builddir = join(dirname(self.hostpython_location), 'build')
            if exists(builddir):
                hppath += [join(builddir, d) for d in listdir(builddir)
                           if isdir(join(builddir, d))]
            if len(hppath) > 0:
                if 'PYTHONPATH' in env:
                    env['PYTHONPATH'] = ':'.join(hppath + [env['PYTHONPATH']])
                else:
                    env['PYTHONPATH'] = ':'.join(hppath)
        return env

    def should_build(self, arch):
        """Skip the build when the package is already in site-packages."""
        name = self.folder_name
        if self.ctx.has_package(name):
            info('Python package already exists in site-packages')
            return False
        info('{} apparently isn\'t already in site-packages'.format(name))
        return True

    def build_arch(self, arch):
        '''Install the Python module by calling setup.py install with
        the target Python dir.'''
        super().build_arch(arch)
        self.install_python_package(arch)

    def install_python_package(self, arch, name=None, env=None, is_dir=True):
        '''Automate the installation of a Python package (or a cython
        package where the cython components are pre-built).'''
        # arch = self.filtered_archs[0]  # old kivy-ios way
        # NOTE(review): `is_dir` is accepted but never used here — confirm
        # whether any subclass relies on the parameter.
        if name is None:
            name = self.name
        if env is None:
            env = self.get_recipe_env(arch)

        info('Installing {} into site-packages'.format(self.name))

        hostpython = sh.Command(self.hostpython_location)
        hpenv = env.copy()
        with current_directory(self.get_build_dir(arch.arch)):
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=.',
                    _env=hpenv, *self.setup_extra_args)

            # If asked, also install in the hostpython build dir
            if self.install_in_hostpython:
                self.install_hostpython_package(arch)

    def get_hostrecipe_env(self, arch):
        """Environment for running the real host python, with PYTHONPATH
        pointing at the host interpreter's site-packages."""
        env = environ.copy()
        env['PYTHONPATH'] = join(dirname(self.real_hostpython_location), 'Lib', 'site-packages')
        return env

    def install_hostpython_package(self, arch):
        """Install this package into the host python's own site-packages."""
        env = self.get_hostrecipe_env(arch)
        real_hostpython = sh.Command(self.real_hostpython_location)
        shprint(real_hostpython, 'setup.py', 'install', '-O2',
                '--root={}'.format(dirname(self.real_hostpython_location)),
                '--install-lib=Lib/site-packages',
                _env=env, *self.setup_extra_args)
class CompiledComponentsPythonRecipe(PythonRecipe):
    """PythonRecipe variant that builds compiled components with setup.py
    before installing the package."""

    pre_build_ext = False

    # setup.py subcommand used to build the compiled components.
    build_cmd = 'build_ext'

    def build_arch(self, arch):
        '''Build any cython components, then install the Python module by
        calling setup.py install with the target Python dir.
        '''
        Recipe.build_arch(self, arch)
        self.build_compiled_components(arch)
        self.install_python_package(arch)

    def build_compiled_components(self, arch):
        """Run `setup.py <build_cmd>` in the recipe build dir, then strip
        the resulting object files."""
        info('Building compiled components in {}'.format(self.name))

        env = self.get_recipe_env(arch)
        hostpython = sh.Command(self.hostpython_location)
        with current_directory(self.get_build_dir(arch.arch)):
            if self.install_in_hostpython:
                # Start clean so the later host rebuild is not polluted.
                shprint(hostpython, 'setup.py', 'clean', '--all', _env=env)
            shprint(hostpython, 'setup.py', self.build_cmd, '-v',
                    _env=env, *self.setup_extra_args)
            build_dir = glob.glob('build/lib.*')[0]
            # NOTE(review): sh invokes find without a shell, so '"*.o"' is
            # passed with literal double quotes and matches names containing
            # quotes — confirm the intended pattern is plain *.o.
            shprint(sh.find, build_dir, '-name', '"*.o"', '-exec',
                    env['STRIP'], '{}', ';', _env=env)

    def install_hostpython_package(self, arch):
        """Rebuild the compiled components for the host before installing
        into the host python."""
        env = self.get_hostrecipe_env(arch)
        self.rebuild_compiled_components(arch, env)
        super().install_hostpython_package(arch)

    def rebuild_compiled_components(self, arch, env):
        # Clean and rebuild with the real host python so host-compatible
        # binaries replace the target ones.
        info('Rebuilding compiled components in {}'.format(self.name))

        hostpython = sh.Command(self.real_hostpython_location)
        shprint(hostpython, 'setup.py', 'clean', '--all', _env=env)
        shprint(hostpython, 'setup.py', self.build_cmd, '-v', _env=env,
                *self.setup_extra_args)
class CppCompiledComponentsPythonRecipe(CompiledComponentsPythonRecipe):
    """ Extensions that require the cxx-stl """
    # Build with the real host python rather than the target-copied one.
    call_hostpython_via_targetpython = False
    # Recipe.get_recipe_env adds the shared STL include/link flags when set.
    need_stl_shared = True
class CythonRecipe(PythonRecipe):
    """PythonRecipe for distributions that ship Cython (.pyx) sources.

    The first ``setup.py build_ext`` run is attempted as-is; when it fails
    (expected if the sources were never cythonized) the .pyx files are
    cythonized manually and the build is retried.
    """

    pre_build_ext = False
    # Whether to run Cython over the sources at all; recipes may disable it.
    cythonize = True
    # Extra command-line arguments forwarded to Cython.Build.Cythonize.
    cython_args = []
    call_hostpython_via_targetpython = False

    def build_arch(self, arch):
        '''Build any cython components, then install the Python module by
        calling setup.py install with the target Python dir.
        '''
        Recipe.build_arch(self, arch)
        self.build_cython_components(arch)
        self.install_python_package(arch)

    def build_cython_components(self, arch):
        """Run the try / fail / cythonize / retry build described on the
        class, then strip the objects unless debug symbols were requested."""
        info('Cythonizing anything necessary in {}'.format(self.name))

        env = self.get_recipe_env(arch)

        with current_directory(self.get_build_dir(arch.arch)):
            hostpython = sh.Command(self.ctx.hostpython)
            shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
            debug('cwd is {}'.format(realpath(curdir)))
            info('Trying first build of {} to get cython files: this is '
                 'expected to fail'.format(self.name))

            manually_cythonise = False
            try:
                shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                        *self.setup_extra_args)
            except sh.ErrorReturnCode_1:
                print()
                info('{} first build failed (as expected)'.format(self.name))
                manually_cythonise = True

            if manually_cythonise:
                self.cythonize_build(env=env)
                shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                        _tail=20, _critical=True, *self.setup_extra_args)
            else:
                # Bug fix: the original string concatenation was missing a
                # space and logged 'manualcythonising'.
                info('First build appeared to complete correctly, skipping '
                     'manual cythonising.')

            if not self.ctx.with_debug_symbols:
                self.strip_object_files(arch, env)

    def strip_object_files(self, arch, env, build_dir=None):
        """Strip unneeded symbols from every .so under *build_dir*
        (defaults to this recipe's build dir for *arch*)."""
        if build_dir is None:
            build_dir = self.get_build_dir(arch.arch)
        with current_directory(build_dir):
            info('Stripping object files')
            # First pass only echoes the matched files (visibility aid).
            shprint(sh.find, '.', '-iname', '*.so', '-exec',
                    '/usr/bin/echo', '{}', ';', _env=env)
            shprint(sh.find, '.', '-iname', '*.so', '-exec',
                    env['STRIP'].split(' ')[0], '--strip-unneeded',
                    # '/usr/bin/strip', '--strip-unneeded',
                    '{}', ';', _env=env)

    def cythonize_file(self, env, build_dir, filename):
        """Run Cython over a single file, with PYTHONPATH adjusted so that
        the host's Cython installation is the one used."""
        short_filename = filename
        if filename.startswith(build_dir):
            short_filename = filename[len(build_dir) + 1:]
        info(u"Cythonize {}".format(short_filename))
        cyenv = env.copy()
        # Prefer an explicit CYTHONPATH; otherwise make sure the recipe's
        # PYTHONPATH does not shadow the host Cython.
        if 'CYTHONPATH' in cyenv:
            cyenv['PYTHONPATH'] = cyenv['CYTHONPATH']
        elif 'PYTHONPATH' in cyenv:
            del cyenv['PYTHONPATH']
        if 'PYTHONNOUSERSITE' in cyenv:
            cyenv.pop('PYTHONNOUSERSITE')
        python_command = sh.Command("python{}".format(
            self.ctx.python_recipe.major_minor_version_string.split(".")[0]
        ))
        shprint(python_command, "-m", "Cython.Build.Cythonize",
                filename, *self.cython_args, _env=cyenv)

    def cythonize_build(self, env, build_dir="."):
        """Cythonize every .pyx file below the current directory."""
        if not self.cythonize:
            info('Running cython cancelled per recipe setting')
            return
        info('Running cython where appropriate')
        for root, dirnames, filenames in walk("."):
            for filename in fnmatch.filter(filenames, "*.pyx"):
                self.cythonize_file(env, build_dir, join(root, filename))

    def get_recipe_env(self, arch, with_flags_in_cc=True):
        env = super().get_recipe_env(arch, with_flags_in_cc)
        # NOTE(review): the nested .format calls below produce one (oddly
        # spaced but valid) ' -L... -L...  -L... ' addition; kept as-is to
        # avoid changing the linker search order.
        env['LDFLAGS'] = env['LDFLAGS'] + ' -L{} '.format(
            self.ctx.get_libs_dir(arch.arch) +
            ' -L{} '.format(self.ctx.libs_dir) +
            ' -L{}'.format(join(self.ctx.bootstrap.build_dir, 'obj', 'local',
                                arch.arch)))
        env['LDSHARED'] = env['CC'] + ' -shared'
        # shprint(sh.whereis, env['LDSHARED'], _env=env)
        env['LIBLINK'] = 'NOTNONE'
        env['NDKPLATFORM'] = self.ctx.ndk_platform
        if self.ctx.copy_libs:
            env['COPYLIBS'] = '1'

        # Every recipe uses its own liblink path, object files are
        # collected and biglinked later
        liblink_path = join(self.get_build_container_dir(arch.arch),
                            'objects_{}'.format(self.name))
        env['LIBLINK_PATH'] = liblink_path
        ensure_dir(liblink_path)

        return env
class TargetPythonRecipe(Recipe):
    '''Class for target python recipes. Sets ctx.python_recipe to point to
    itself, so as to know later what kind of Python was built or used.'''

    def __init__(self, *args, **kwargs):
        self._ctx = None
        super().__init__(*args, **kwargs)

    def prebuild_arch(self, arch):
        super().prebuild_arch(arch)
        # Record the python recipe in use so other recipes can query it.
        self.ctx.python_recipe = self

    def include_root(self, arch):
        '''The root directory from which to include headers.'''
        raise NotImplementedError('Not implemented in TargetPythonRecipe')

    def link_root(self, arch=None):
        '''The root directory containing the target libpython.

        Fixed signature: callers such as BootstrapNDKRecipe.get_recipe_env
        invoke ``link_root(arch.arch)``, so the abstract method accepts the
        arch name too (with a default so legacy zero-argument calls still
        reach the NotImplementedError rather than a TypeError).
        '''
        raise NotImplementedError('Not implemented in TargetPythonRecipe')

    @property
    def major_minor_version_string(self):
        '''E.g. ``'3.8'`` for version ``'3.8.1'``.'''
        # NOTE(review): distutils is deprecated (PEP 632); this should
        # eventually migrate off LooseVersion.
        from distutils.version import LooseVersion
        return '.'.join([str(v) for v in LooseVersion(self.version).version[:2]])

    def create_python_bundle(self, dirn, arch):
        """
        Create a packaged python bundle in the target directory, by
        copying all the modules and standard library to the right
        place.
        """
        raise NotImplementedError('{} does not implement create_python_bundle'.format(self))

    def reduce_object_file_names(self, dirn):
        """Recursively renames all files named XXX.cpython-...-linux-gnu.so"
        to "XXX.so", i.e. removing the erroneous architecture name
        coming from the local system.
        """
        py_so_files = shprint(sh.find, dirn, '-iname', '*.so')
        filens = py_so_files.stdout.decode('utf-8').split('\n')[:-1]
        for filen in filens:
            file_dirname, file_basename = split(filen)
            parts = file_basename.split('.')
            if len(parts) <= 2:
                # Already a plain NAME.so: nothing to strip.
                continue
            # Keep only the leading name component plus the .so suffix.
            shprint(sh.mv, filen, join(file_dirname, parts[0] + '.so'))
def md5sum(filen):
    '''Calculate the md5sum of a file.

    Reads the file in fixed-size chunks and feeds them to the hash
    incrementally, so arbitrarily large files do not need to fit in
    memory at once (the previous version read the whole file in one go).
    '''
    md5 = hashlib.md5()
    with open(filen, 'rb') as fileh:
        for chunk in iter(lambda: fileh.read(1024 * 1024), b''):
            md5.update(chunk)
    return md5.hexdigest()
| {
"content_hash": "9e03d03b8c90736b665167e3399d2830",
"timestamp": "",
"source": "github",
"line_count": 1195,
"max_line_length": 96,
"avg_line_length": 39.25439330543933,
"alnum_prop": 0.5698480035814023,
"repo_name": "rnixx/python-for-android",
"id": "3933dd7824a0d5e890e563d5fe2b21767b6156f7",
"size": "46909",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pythonforandroid/recipe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "70942"
},
{
"name": "C++",
"bytes": "491"
},
{
"name": "CMake",
"bytes": "250"
},
{
"name": "CSS",
"bytes": "3487"
},
{
"name": "Dockerfile",
"bytes": "4440"
},
{
"name": "HTML",
"bytes": "11631"
},
{
"name": "Java",
"bytes": "517112"
},
{
"name": "Makefile",
"bytes": "27307"
},
{
"name": "Python",
"bytes": "1359684"
},
{
"name": "Shell",
"bytes": "5340"
}
],
"symlink_target": ""
} |
import analysis.event
import analysis.hitfinding
import analysis.beamline
import plotting.line
import plotting.image
import plotting.correlation
import simulation.base
# Simulate diffraction data
sim = simulation.base.Simulation()
# Fraction of simulated events that are hits.
sim.hitrate = 0.5
# sim.sigma presumably controls the simulated noise width — confirm
# against simulation.base.
sim.sigma = 1

# Specify the facility
state = {}
state['Facility'] = 'Dummy'

# Create a dummy facility
state['Dummy'] = {
    # The event repetition rate of the dummy facility [Hz]
    'Repetition Rate' : 10,
    # Specify simulation
    'Simulation': sim,
    # Dictionary of data sources
    'Data Sources': {
        # Data from a virtual diffraction detector
        'CCD': {
            # Fetch diffraction data from the simulation
            'data': sim.get_pattern,
            'unit': 'ADU',
            'type': 'photonPixelDetectors'
        },
        # Data from a virtual pulse energy detector
        'pulseEnergy': {
            # Fetch pulse energy values from the simulation
            'data': sim.get_pulse_energy,
            'unit': 'J',
            'type': 'pulseEnergies'
        },
        # Data from a virtual injector motor
        'injectorX': {
            # Fetch injector motor values (x) from the simulation
            'data': sim.get_injector_x,
            'unit': 'm',
            'type': 'parameters'
        },
        # Data from a virtual injector motor
        'injectorY': {
            # Fetch injector motor values (y) from the simulation
            'data': sim.get_injector_y,
            'unit': 'm',
            'type': 'parameters'
        }
    }
}

# Configuration for hitrate meanmap plot: axis limits for the injector
# position (its unit is 'm' above) and a 10x10 binning grid.
hitmapParams = {
    'xmin':0,
    'xmax':1e-6,
    'ymin':0,
    'ymax':1e-6,
    'xbins':10,
    'ybins':10
}
# This function is called for every single event
# following the given recipe of analysis
def onEvent(evt):
    """Per-event analysis: hit finding, hitrate history, and live plots."""
    # Processing rate [Hz]
    analysis.event.printProcessingRate()

    # Simple hit finding (counting the number of lit pixels)
    analysis.hitfinding.countLitPixels(evt, evt["photonPixelDetectors"]["CCD"],
                                       aduThreshold=10, hitscoreThreshold=100)

    #analysis.beamline.averagePulseEnergy(evt, evt["pulseEnergies"])

    # Extract boolean (hit or miss)
    hit = evt["analysis"]["litpixel: isHit"].data

    # Compute the hitrate over the most recent 1000 events
    analysis.hitfinding.hitrate(evt, hit, history=1000)

    # Plot history of pulse energy
    plotting.line.plotHistory(evt['pulseEnergies']['pulseEnergy'])

    # Plot scatter of pulse energy vs. hitscore
    plotting.correlation.plotScatter(evt['pulseEnergies']['pulseEnergy'],
                                     evt["analysis"]["litpixel: hitscore"],
                                     xlabel='Pulse energy [J]', ylabel='Hitscore')

    # Plot heat map of hitrate as function of injector position
    plotting.correlation.plotMeanMap(evt["parameters"]['injectorX'], evt["parameters"]['injectorY'],
                                     evt["analysis"]["hitrate"].data, name='hitrateMeanMap', **hitmapParams)
| {
"content_hash": "c9b27c55ede99a7880064cd65adf063b",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 108,
"avg_line_length": 31.479166666666668,
"alnum_prop": 0.600595632031767,
"repo_name": "SPIhub/hummingbird",
"id": "ed7def0a02ae168c34bd3e7544b07bdf829ff786",
"size": "3068",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/basic/correlation.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "624263"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import Commit, Release, ReleaseCommit, Repository
from sentry.testutils import APITestCase
class ReleaseCommitsListTest(APITestCase):
    def test_simple(self):
        """Commits linked to a release are returned in release order,
        identified by their commit keys."""
        project = self.create_project(name='foo')

        release = Release.objects.create(
            organization_id=project.organization_id,
            version='1',
        )
        release.add_project(project)

        repo = Repository.objects.create(
            organization_id=project.organization_id,
            name=project.name,
        )

        first_commit = Commit.objects.create(
            organization_id=project.organization_id,
            repository_id=repo.id,
            key='a' * 40,
        )
        second_commit = Commit.objects.create(
            organization_id=project.organization_id,
            repository_id=repo.id,
            key='b' * 40,
        )

        # Release order is deliberately the reverse of creation order.
        ReleaseCommit.objects.create(
            organization_id=project.organization_id,
            release=release,
            commit=first_commit,
            order=1,
        )
        ReleaseCommit.objects.create(
            organization_id=project.organization_id,
            release=release,
            commit=second_commit,
            order=0,
        )

        url = reverse(
            'sentry-api-0-project-release-commits',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
                'version': release.version,
            },
        )

        self.login_as(user=self.user)
        response = self.client.get(url)

        assert response.status_code == 200, response.content
        assert len(response.data) == 2
        assert response.data[0]['id'] == second_commit.key
        assert response.data[1]['id'] == first_commit.key
| {
"content_hash": "398307b508b35742db203730e028755b",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 70,
"avg_line_length": 31.724137931034484,
"alnum_prop": 0.5853260869565218,
"repo_name": "JackDanger/sentry",
"id": "71d3204336bddc053a06ac20f0e3f827dc574099",
"size": "1840",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/sentry/api/endpoints/test_project_release_commits.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "583430"
},
{
"name": "HTML",
"bytes": "319622"
},
{
"name": "JavaScript",
"bytes": "624672"
},
{
"name": "Makefile",
"bytes": "2660"
},
{
"name": "Python",
"bytes": "6279717"
}
],
"symlink_target": ""
} |
class Module:
    """Empire module that lists users found in Active Directory by driving
    the `ldapsearch` command-line tool from a generated Python payload."""

    def __init__(self, mainMenu, params=[]):
        # NOTE(review): mutable default `params=[]` — it is only iterated
        # here, but a shared default list is a classic pitfall; confirm
        # callers always pass their own list.

        # metadata info about the module, not modified during runtime
        self.info = {
            # name for the module that will appear in module menus
            'Name': 'Get Users',

            # list of one or more authors for the module
            'Author': ['@424f424f'],

            # more verbose multi-line description of the module
            'Description': 'This module list users found in Active Directory',

            # True if the module needs to run in the background
            'Background' : False,

            # File extension to save the file as
            'OutputExtension' : "",

            # if the module needs administrative privileges
            'NeedsAdmin' : False,

            # True if the method doesn't touch disk/is reasonably opsec safe
            'OpsecSafe' : True,

            # the module language
            'Language' : 'python',

            # the minimum language version needed
            'MinLanguageVersion' : '2.6',

            # list of any references/other comments
            'Comments': ['']
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                # The 'Agent' option is the only one that MUST be in a module
                'Description' : 'Agent to grab run on.',
                'Required' : True,
                'Value' : ''
            },
            'LDAPAddress' : {
                # IP or hostname of the LDAP server / domain controller
                'Description' : 'LDAP IP/Hostname',
                'Required' : True,
                'Value' : ''
            },
            'BindDN' : {
                # Bind DN in UPN form, e.g. user@domain.tld.
                # NOTE(review): the description string is an example value
                # rather than prose.
                'Description' : 'user@penlab.local',
                'Required' : True,
                'Value' : ''
            },
            'Password' : {
                # NOTE(review): marked not required, but the generated
                # ldapsearch command always passes -w — confirm an empty
                # password works as intended.
                'Description' : 'Password to connect to LDAP',
                'Required' : False,
                'Value' : ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # During instantiation, any settable option parameters
        # are passed as an object set to the module and the
        # options dictionary is automatically set. This is mostly
        # in case options are passed on the command line
        if params:
            for param in params:
                # parameter format is [Name, Value]
                option, value = param
                if option in self.options:
                    self.options[option]['Value'] = value

    def generate(self):
        """Build and return the Python payload that enumerates AD users.

        The payload derives the LDAP base DN (dc=...,dc=...) from the
        BindDN's domain part, runs ldapsearch, and prints each
        sAMAccountName (skipping machine accounts containing '$').
        """
        LDAPAddress = self.options['LDAPAddress']['Value']
        BindDN = self.options['BindDN']['Value']
        password = self.options['Password']['Value']

        # the Python script itself, with the command to invoke
        # for execution appended to the end. Scripts should output
        # everything to the pipeline for proper parsing.
        #
        # the script should be stripped of comments, with a link to any
        # original reference script included in the comments.
        #
        # NOTE: the embedded payload is Python 2 syntax (print statements),
        # matching the module's MinLanguageVersion of '2.6'.
        # NOTE(review): option values are interpolated into a shell=True
        # command inside the payload, so they are shell-injectable on the
        # target; also assumes a single-label domain suffix (user@x.y).
        script = """
import sys, os, subprocess, re
BindDN = "%s"
LDAPAddress = "%s"
password = "%s"
regex = re.compile('.+@([^.]+)\..+')
global tld
match = re.match(regex, BindDN)
tld = match.group(1)
global ext
ext = BindDN.split('.')[1]
cmd = \"""ldapsearch -x -h {} -b "dc={},dc={}" -D {} -w {} "objectclass=user" sAMAccountName""\".format(LDAPAddress, tld, ext, BindDN, password)
output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, bufsize=1)
with output.stdout:
    print ""
    for line in iter(output.stdout.readline, b''):
        if ("sAMAccountName:") in line:
            if '$' not in line:
                m = re.search(r'[^sAMAccountName:].*$', line)
                print m.group(0).lstrip()
    output.wait()
print ""
""" % (BindDN, LDAPAddress, password)
        return script
| {
"content_hash": "9a5b4dcbaa004ccef1816be896577487",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 144,
"avg_line_length": 36.09090909090909,
"alnum_prop": 0.5335470574765285,
"repo_name": "Hackplayers/Empire-mod-Hpys-tests",
"id": "0188ddd6a65f403b72ccf81a7b8eb8cc787c005c",
"size": "4367",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/modules/python/situational_awareness/network/active_directory/get_users.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1966"
},
{
"name": "Java",
"bytes": "496"
},
{
"name": "Objective-C",
"bytes": "2664"
},
{
"name": "PHP",
"bytes": "2041"
},
{
"name": "PowerShell",
"bytes": "16200977"
},
{
"name": "Python",
"bytes": "2675256"
},
{
"name": "Shell",
"bytes": "3603"
}
],
"symlink_target": ""
} |
import base64
from collections import Counter
import sys
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
from azure.cli.core._profile import Profile
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.cli.command_modules.botservice.web_app_operations import WebAppOperations
from azure.cli.command_modules.botservice.kudu_client import KuduClient
class BotJsonFormatter:  # pylint:disable=too-few-public-methods
    """Builds the JSON description of an Azure Bot resource.

    Recovers the MSA app password either directly from the bot web app's
    application settings or by decrypting the bot's .bot file fetched from
    Kudu.
    """

    @staticmethod
    def create_bot_json(cmd, client, resource_group_name, resource_name, logger, app_password=None,  # pylint:disable=too-many-locals
                        raw_bot_properties=None, password_only=True):
        """
        :param cmd:
        :param client:
        :param resource_group_name:
        :param resource_name:
        :param logger:
        :param app_password:
        :param raw_bot_properties:
        :param password_only: when decrypting a .bot file, return only the app password
        :return: Dictionary
        """
        if not raw_bot_properties:
            raw_bot_properties = client.bots.get(
                resource_group_name=resource_group_name,
                resource_name=resource_name
            )

        # Initialize names bot_file and bot_file_secret to capture botFilePath
        # and botFileSecret values from the application's settings.
        bot_file = None
        bot_file_secret = None
        profile = Profile(cli_ctx=cmd.cli_ctx)
        if not app_password:
            # First try to read MicrosoftAppPassword straight from the bot's
            # web-app application settings.
            site_name = WebAppOperations.get_bot_site_name(raw_bot_properties.properties.endpoint)
            app_settings = WebAppOperations.get_app_settings(
                cmd=cmd,
                resource_group_name=resource_group_name,
                name=site_name
            )

            app_password_values = [item['value'] for item in app_settings if item['name'] == 'MicrosoftAppPassword']
            app_password = app_password_values[0] if app_password_values else None
            if not app_password:
                # No plain password available: fall back to the (possibly
                # encrypted) .bot file referenced by the app settings.
                bot_file_values = [item['value'] for item in app_settings if item['name'] == 'botFilePath']
                bot_file = bot_file_values[0] if bot_file_values else None
                bot_file_secret_values = [item['value'] for item in app_settings if item['name'] == 'botFileSecret']
                bot_file_secret = bot_file_secret_values[0] if bot_file_secret_values else None

        if not bot_file and not app_password:
            # Nothing retrievable automatically: point the user at the portal
            # settings page and at Kudu for manual retrieval.
            bot_site_name = WebAppOperations.get_bot_site_name(raw_bot_properties.properties.endpoint)
            scm_url = WebAppOperations.get_scm_url(cmd,
                                                   resource_group_name,
                                                   bot_site_name,
                                                   None)

            # TODO: Reevaluate "Public-or-Gov" Azure logic.
            is_public_azure = ('azurewebsites.net' in raw_bot_properties.properties.endpoint or
                               '.net' in raw_bot_properties.properties.endpoint or
                               '.com' in raw_bot_properties.properties.endpoint)
            host = 'https://portal.azure.com/' if is_public_azure else 'https://portal.azure.us/'
            subscription_id = get_subscription_id(cmd.cli_ctx)
            tenant_id = profile.get_subscription(subscription=client.config.subscription_id)['tenantId']
            settings_url = host + '#@{}/resource/subscriptions/{}/resourceGroups/{}/providers/Microsoft.BotService/botServices/{}/app_settings'.format(tenant_id, subscription_id, resource_group_name, resource_name)  # pylint: disable=line-too-long

            # Lazy %-style args instead of eager string interpolation.
            logger.warning('"MicrosoftAppPassword" and "botFilePath" not found in application settings')
            logger.warning('To see your bot\'s application settings, visit %s', settings_url)
            logger.warning('To visit your deployed bot\'s code on Azure, visit Kudu for your bot at %s', scm_url)

        elif not app_password and bot_file:
            # We have the information we need to obtain the MSA App app password via bot file data from Kudu.
            kudu_client = KuduClient(cmd, resource_group_name, resource_name, raw_bot_properties, logger)
            bot_file_data = kudu_client.get_bot_file(bot_file)
            app_password = BotJsonFormatter.__decrypt_bot_file(bot_file_data, bot_file_secret, logger, password_only)

        return {
            'type': 'abs',
            'id': raw_bot_properties.name,
            'name': raw_bot_properties.properties.display_name,
            'appId': raw_bot_properties.properties.msa_app_id,
            'appPassword': app_password,
            'endpoint': raw_bot_properties.properties.endpoint,
            'resourceGroup': str(resource_group_name),
            'tenantId': profile.get_subscription(subscription=client.config.subscription_id)['tenantId'],
            'subscriptionId': client.config.subscription_id,
            'serviceName': resource_name
        }

    @staticmethod
    def __decrypt_bot_file(bot_file_data, bot_file_secret, logger, password_only=True):
        """Decrypt .bot file retrieved from Kudu.

        When *password_only* is true, return the single unique decrypted
        appPassword found among 'endpoint' services (or '' if none/ambiguous).
        Otherwise decrypt every known secret field in-place and return the
        list of services.

        :param bot_file_data:
        :param bot_file_secret:
        :param logger:
        :param password_only:
        :return: str (password) or list (services)
        """
        services = bot_file_data['services']
        # Python 2 and 3 need different bytes/str handling for base64.
        if sys.version_info.major >= 3:
            decrypt = BotJsonFormatter.__decrypt_py3
        else:
            decrypt = BotJsonFormatter.__decrypt_py2

        if password_only:
            # Get all endpoints that have potentially valid appPassword values
            endpoints = [service for service in services
                         if service.get('type') == 'endpoint' and service.get('appPassword')]
            # Reduce the retrieved endpoints to just their passwords
            app_passwords = [e['appPassword'] for e in endpoints]
            if len(app_passwords) == 1:
                return decrypt(bot_file_secret, app_passwords[0], logger)
            if len(app_passwords) > 1:
                logger.info('More than one Microsoft App Password found in bot file. Evaluating if more than one '
                            'unique App Password exists.')
                app_passwords = [decrypt(bot_file_secret, pw, logger) for pw in app_passwords]
                unique_passwords = list(Counter(app_passwords))  # pylint:disable=too-many-function-args
                if len(unique_passwords) == 1:
                    logger.info('One unique Microsoft App Password found, returning password.')
                    return unique_passwords[0]
                logger.warning('More than one unique Microsoft App Password found in the bot file, please '
                               'manually retrieve your bot file from Kudu to retrieve this information.')
                logger.warning('No Microsoft App Password returned.')
                return ''
            logger.warning('No Microsoft App Passwords found in bot file.')
            return ''

        for service in services:
            # For Azure Blob Storage
            if service.get('connectionString'):
                service['connectionString'] = decrypt(bot_file_secret, service['connectionString'], logger)
            # For LUIS and Dispatch
            if service.get('authoringKey'):
                service['authoringKey'] = decrypt(bot_file_secret, service['authoringKey'], logger)
            # For LUIS and QnA Maker
            if service.get('subscriptionKey'):
                service['subscriptionKey'] = decrypt(bot_file_secret, service['subscriptionKey'], logger)
            # For QnA Maker
            if service.get('endpointKey'):
                service['endpointKey'] = decrypt(bot_file_secret, service['endpointKey'], logger)
            # For connecting to the bot
            if service.get('appPassword'):
                service['appPassword'] = decrypt(bot_file_secret, service['appPassword'], logger)
            # For Application Insights
            if service.get('instrumentationKey'):
                service['instrumentationKey'] = decrypt(bot_file_secret, service['instrumentationKey'], logger)
            if service.get('apiKeys'):
                for apiKey in service['apiKeys']:
                    service['apiKeys'][apiKey] = decrypt(bot_file_secret, service['apiKeys'][apiKey], logger)
            # For Cosmos DB
            if service.get('key'):
                service['key'] = decrypt(bot_file_secret, service['key'], logger)
            # For generic services
            if service.get('configuration') and isinstance(service.get('configuration'), dict):
                for key in service['configuration']:
                    service['configuration'][key] = decrypt(bot_file_secret, service['configuration'][key], logger)

        return services

    @staticmethod
    def __decrypt_py3(secret, encrypted_value, logger):
        """AES-CBC decrypt '<b64 iv>!<b64 ciphertext>' with base64 *secret* (Python 3).

        Returns the input unchanged (with a warning) whenever the value is not
        in the expected format or the IV/secret lengths are invalid.
        """
        # If the string length is 0 or no secret was passed in, return the empty string.
        if not encrypted_value or not secret:
            return encrypted_value

        parts = encrypted_value.split("!")
        if len(parts) != 2:
            # logger.warn is deprecated (removed in Python 3.13) -> warning.
            logger.warning('Encrypted value "%s" not in standard encrypted format, decryption skipped.',
                           encrypted_value)
            return encrypted_value

        iv_text = parts[0]
        encrypted_text = parts[1]
        iv_bytes = base64.standard_b64decode(str.encode(iv_text))
        secret_bytes = base64.standard_b64decode(str.encode(secret))

        if len(iv_bytes) != 16:
            logger.warning('Initialization Vector for "%s" not valid, decryption skipped.', encrypted_value)
            return encrypted_value
        if len(secret_bytes) != 32:
            logger.warning('Passed in secret length is invalid, decryption skipped.')
            return encrypted_value

        cipher = Cipher(algorithms.AES(secret_bytes), modes.CBC(iv_bytes), backend=default_backend())
        decryptor = cipher.decryptor()
        decrypted_bytes = decryptor.update(base64.standard_b64decode(str.encode(encrypted_text))) + decryptor.finalize()
        decrypted_string = decrypted_bytes.decode('utf-8')
        # Drop control characters (e.g. trailing padding/control bytes).
        return ''.join([char for char in decrypted_string if ord(char) > 31])

    @staticmethod
    def __decrypt_py2(secret, encrypted_value, logger):
        """Python 2 twin of __decrypt_py3 (bytes/str handling differs)."""
        # If the string length is 0 or no secret was passed in, return the empty string.
        if not encrypted_value or not secret:
            return encrypted_value

        parts = encrypted_value.split("!")
        if len(parts) != 2:
            logger.warning('Encrypted value "%s" not in standard encrypted format, decryption skipped.',
                           encrypted_value)
            return encrypted_value

        iv_text = parts[0]
        encrypted_text = parts[1]
        iv_bytes = base64.standard_b64decode(iv_text)
        secret_bytes = base64.standard_b64decode(secret)

        if len(iv_bytes) != 16:
            logger.warning('Initialization Vector for "%s" not valid, decryption skipped.', encrypted_value)
            return encrypted_value
        if len(secret_bytes) != 32:
            logger.warning('Passed in secret length is invalid, decryption skipped.')
            return encrypted_value

        cipher = Cipher(algorithms.AES(secret_bytes), modes.CBC(iv_bytes), backend=default_backend())
        decryptor = cipher.decryptor()
        decrypted_bytes = decryptor.update(base64.standard_b64decode(encrypted_text)) + decryptor.finalize()
        # NOTE(review): py2 str.encode('utf-8') round-trips via ASCII decode;
        # non-ascii plaintext may raise here -- confirm before changing.
        decrypted_string = decrypted_bytes.encode('utf-8')
        return ''.join([char for char in decrypted_string if ord(char) > 31])
| {
"content_hash": "490fe37a0569e5a084ec986eee47c4df",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 247,
"avg_line_length": 51.123893805309734,
"alnum_prop": 0.6181409035831746,
"repo_name": "yugangw-msft/azure-cli",
"id": "f89e4cf338e46724974f407ea49b296cc1640c74",
"size": "11900",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "src/azure-cli/azure/cli/command_modules/botservice/bot_json_formatter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "5355"
},
{
"name": "Batchfile",
"bytes": "14110"
},
{
"name": "Bicep",
"bytes": "1679"
},
{
"name": "C#",
"bytes": "1971"
},
{
"name": "C++",
"bytes": "275"
},
{
"name": "Dockerfile",
"bytes": "8427"
},
{
"name": "HTML",
"bytes": "794"
},
{
"name": "JavaScript",
"bytes": "1404"
},
{
"name": "Jupyter Notebook",
"bytes": "389"
},
{
"name": "PowerShell",
"bytes": "1781"
},
{
"name": "Python",
"bytes": "24270340"
},
{
"name": "Rich Text Format",
"bytes": "12032"
},
{
"name": "Roff",
"bytes": "1036959"
},
{
"name": "Shell",
"bytes": "56023"
},
{
"name": "TSQL",
"bytes": "1145"
}
],
"symlink_target": ""
} |
from datetime import date, datetime
from inspect import isclass
import six
import sqlalchemy as sa
__version__ = '0.4.4'
class Column(sa.Column):
    """sa.Column subclass accepting extra keyword arguments.

    The extension keywords (choices, label, description, validators, min,
    max, auto_now) are moved into the column's ``info`` dict.  When the
    column type is a string or boolean (positionally or via ``type=``),
    ``nullable`` defaults to False.
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('info', {})
        info = kwargs['info']
        # Relocate each extension keyword into the info dict, keeping any
        # value already present in a user-supplied info mapping.
        for key, absent in (
            ('choices', None),
            ('label', ''),
            ('description', ''),
            ('validators', []),
            ('min', None),
            ('max', None),
            ('auto_now', False),
        ):
            info.setdefault(key, kwargs.pop(key, absent))

        # Make strings and booleans not nullable by default
        if args:
            positional_type = any(bool_or_str(arg) for arg in args[0:2])
            keyword_type = 'type' in kwargs and bool_or_str(kwargs['type'])
            if positional_type or keyword_type:
                kwargs.setdefault('nullable', False)
        sa.Column.__init__(self, *args, **kwargs)

    @property
    def choices(self):
        return self.info.get('choices', [])

    @property
    def validators(self):
        return self.info.get('validators', [])

    @property
    def description(self):
        return self.info.get('description', '')
class ConfigurationManager(object):
    """Mapper-event callback applying lazy column defaults.

    Only models declaring ``__lazy_options__`` are configured; per-model
    options fall back to DEFAULT_OPTIONS.
    """

    # Fallback values used when a model's __lazy_options__ omits a key.
    DEFAULT_OPTIONS = {
        'auto_now': True,
        'numeric_defaults': True,
        'string_defaults': True,
        'boolean_defaults': True,
        'min_max_check_constraints': True,
        'enum_names': True,
        'index_foreign_keys': True
    }

    def __call__(self, mapper, class_):
        # Models opt in by defining __lazy_options__.
        if not hasattr(class_, '__lazy_options__'):
            return
        ModelConfigurator(self, class_)()
class ModelConfigurator(object):
    """Applies the lazy-default rules to a single mapped model's table."""

    def __init__(self, manager, model):
        self.manager = manager
        self.model = model
        self.table = self.model.__table__

    def get_option(self, name):
        """Model-level __lazy_options__ wins; fall back to manager defaults."""
        try:
            return self.model.__lazy_options__[name]
        except (AttributeError, KeyError):
            return self.manager.DEFAULT_OPTIONS[name]

    def literal_value(self, value):
        # Dates/datetimes are rendered as ISO strings for DDL.
        if isinstance(value, (date, datetime)):
            return value.isoformat()
        return value

    def append_check_constraints(self, column):
        """
        Generate check constraints based on min and max column info arguments
        """
        min_value = column.info.get('min')
        if min_value is not None:
            self.table.append_constraint(
                sa.schema.CheckConstraint(column >= self.literal_value(min_value))
            )
        max_value = column.info.get('max')
        if max_value is not None:
            self.table.append_constraint(
                sa.schema.CheckConstraint(column <= self.literal_value(max_value))
            )

    def assign_foreign_key_indexes(self, column):
        """Index every column that participates in a foreign key."""
        if column.foreign_keys:
            column.index = True

    def assign_datetime_auto_now(self, column):
        """Give auto_now date/datetime columns utcnow defaults."""
        if not column.info.get('auto_now'):
            return
        column.default = sa.schema.ColumnDefault(datetime.utcnow)
        if not column.server_default:
            # Does not support MySQL < 5.6.5
            column.server_default = sa.schema.DefaultClause(sa.func.now())

    def assign_numeric_defaults(self, column):
        """Mirror a python-side numeric default into a server default."""
        if column.default is None or not hasattr(column.default, 'arg'):
            return
        if not column.server_default:
            column.server_default = sa.schema.DefaultClause(
                six.text_type(column.default.arg)
            )

    def assign_string_defaults(self, column):
        """Mirror a python-side text default into a server default."""
        if (column.default is not None and column.server_default is None
                and isinstance(column.default.arg, six.text_type)):
            column.server_default = sa.schema.DefaultClause(
                column.default.arg
            )

    def assign_boolean_defaults(self, column):
        """Booleans default to False and get a matching server default."""
        if column.default is None:
            column.default = sa.schema.ColumnDefault(False)
        if column.default.arg is False:
            column.server_default = sa.schema.DefaultClause(
                sa.sql.expression.false()
            )
        else:
            column.server_default = sa.schema.DefaultClause(
                sa.sql.expression.true()
            )

    def assign_type_defaults(self, column):
        """Dispatch on the column type to the matching default-assigner."""
        if isinstance(column.type, sa.Boolean) and self.get_option('boolean_defaults'):
            self.assign_boolean_defaults(column)
        elif is_string(column.type) and self.get_option('string_defaults'):
            self.assign_string_defaults(column)
        elif is_numeric(column.type) and self.get_option('numeric_defaults'):
            self.assign_numeric_defaults(column)
        elif (isinstance(column.type, (sa.Date, sa.DateTime)) and
              self.get_option('auto_now')):
            self.assign_datetime_auto_now(column)
        elif isinstance(column.type, sa.Enum) and self.get_option('enum_names'):
            # Unnamed enums get a '<column>_enum' type name.
            if not getattr(column.type, 'name', None):
                column.type.name = '%s_enum' % column.name

    def __call__(self):
        for column in self.table.columns:
            if self.get_option('min_max_check_constraints'):
                self.append_check_constraints(column)
            if self.get_option('index_foreign_keys'):
                self.assign_foreign_key_indexes(column)
            self.assign_type_defaults(column)
def bool_or_str(type_):
    """Return True when *type_* is a SQLAlchemy string or boolean type
    (either an instance or the type class itself)."""
    return is_boolean(type_) or is_string(type_)
def is_string(type_):
    """True for sa.String instances and for sa.String subclasses."""
    if isinstance(type_, sa.String):
        return True
    return isclass(type_) and issubclass(type_, sa.String)
def is_boolean(type_):
    """True for sa.Boolean instances and for sa.Boolean subclasses."""
    if isinstance(type_, sa.Boolean):
        return True
    return isclass(type_) and issubclass(type_, sa.Boolean)
def is_numeric(type_):
    """True for instances of sa.Integer, sa.Float or sa.Numeric."""
    return isinstance(type_, (sa.Integer, sa.Float, sa.Numeric))
def make_lazy_configured(mapper):
    """Hook a ConfigurationManager into *mapper*'s 'mapper_configured' event."""
    sa.event.listen(mapper, 'mapper_configured', ConfigurationManager())
| {
"content_hash": "448bd26fe2360d3df13ba5c1b86564d6",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 79,
"avg_line_length": 32.42201834862385,
"alnum_prop": 0.5766836445953594,
"repo_name": "kvesteri/sqlalchemy-defaults",
"id": "4d8dc9b731f463c1c2f8b1280d164d3b3216c58e",
"size": "7068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqlalchemy_defaults/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5616"
},
{
"name": "Python",
"bytes": "29310"
},
{
"name": "Shell",
"bytes": "5122"
}
],
"symlink_target": ""
} |
import os
import os.path as osp
# waf imports ---
import waflib.Utils
import waflib.Logs as msg
import waflib.Configure
import waflib.Build
import waflib.Task
import waflib.Tools.ccroot
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method, after_method, extension, after
#
# Directory containing this tool; used as waf's 'tooldir' when loading the
# sibling find_* helper tools.
_heptooldir = osp.dirname(osp.abspath(__file__))
def options(ctx):
    """Register find_root's command-line options and load the helper tools."""
    for tool in ('hwaf-base', 'find_libxml2', 'find_python', 'find_gccxml'):
        ctx.load(tool, tooldir=_heptooldir)
    ctx.add_option(
        '--with-root',
        default=None,
        help="Look for CERN ROOT System at the given path")
    return
def configure(ctx):
    """Configuration stub: only loads hwaf-base; the actual ROOT detection
    is performed by the find_root() @conf method below."""
    ctx.load('hwaf-base', tooldir=_heptooldir)
    return
@conf
def find_root(ctx, **kwargs):
    """Detect a CERN ROOT installation via root-config.

    Populates ctx.env with the 'ROOT*' uselib variables, the ROOT helper
    programs (genreflex, rootcint, rlibmap/rootcling, ...) and the $ROOTSYS
    runtime environment, then smoke-tests the installation by compiling and
    running small C++ fragments.  Fails the configuration step if ROOT (or a
    prerequisite compiler/interpreter) cannot be found.
    """
    ctx.load('hwaf-base', tooldir=_heptooldir)
    ctx.load('find_python', tooldir=_heptooldir)
    ctx.load('find_libxml2', tooldir=_heptooldir)
    ctx.load('find_gccxml', tooldir=_heptooldir)

    # ROOT needs working C/C++ compilers and a python interpreter.
    if not ctx.env.HWAF_FOUND_C_COMPILER:
        ctx.fatal('load a C compiler first')
        pass
    if not ctx.env.HWAF_FOUND_CXX_COMPILER:
        ctx.fatal('load a C++ compiler first')
        pass
    if not ctx.env.HWAF_FOUND_PYTHON:
        ctx.find_python(version=kwargs.get('python_version', (2,6)))
    if not ctx.env.HWAF_FOUND_LIBXML2:
        ctx.find_libxml2(mandatory=False)
    if not ctx.env.PYTHON:
        ctx.fatal('load a python interpreter first')
        pass

    # find root
    root_cfg = "root-config"
    path_list = waflib.Utils.to_list(kwargs.get('path_list', []))
    # Honour --with-root and $ROOTSYS when hunting for root-config.
    for topdir in [getattr(ctx.options, 'with_root', None), os.getenv('ROOTSYS', None)]:
        if topdir:
            topdir = ctx.hwaf_subst_vars(topdir)
            path_list.append(
                osp.join(topdir, "bin")
                )
            pass
        pass
    kwargs['path_list']=path_list

    ctx.find_program(
        root_cfg,
        var='ROOT-CONFIG',
        **kwargs)
    root_cfg = ctx.env['ROOT-CONFIG']

    # Use root-config (pkg-config style) to fill the 'ROOT' uselib variables.
    ctx.check_with(
        ctx.check_cfg,
        "root",
        path=root_cfg,
        package="",
        uselib_store="ROOT",
        args='--libs --cflags --ldflags',
        **kwargs)

    # -- check everything is kosher...
    version = ctx.check_cxx(
        msg="Checking ROOT version",
        okmsg="ok",
        fragment='''\
#include "RVersion.h"
#include <iostream>
int main(int argc, char* argv[]) {
std::cout << ROOT_RELEASE;
return 0;
}
''',
        use="ROOT",
        define_name = "HWAF_ROOT_VERSION",
        define_ret = True,
        execute = True,
        mandatory=True,
        )
    ctx.msg("ROOT version", version)

    ## FIXME: this is "a bit" fugly...
    use_root6 = version.startswith("6.")
    use_root5 = not version.startswith("5.99") and not use_root6

    # Reflex-era helpers only exist with ROOT 5.
    if use_root5:
        ctx.find_program('genmap', var='GENMAP', **kwargs)
        ctx.find_program('genreflex', var='GENREFLEX', **kwargs)
    # NOTE(review): rlibmap ships only with ROOT 5 -- confirm whether the
    # three look-ups below should also be guarded by use_root5.
    ctx.find_program('root', var='ROOT-EXE', **kwargs)
    ctx.find_program('rootcint', var='ROOTCINT', **kwargs)
    ctx.find_program('rlibmap', var='RLIBMAP', **kwargs)
    if use_root6:
        ctx.find_program('rootcling', var='ROOTCLING', **kwargs)
        pass

    if use_root5:
        # reflex...
        ctx.copy_uselib_defs(dst='Reflex', src='ROOT')
        ctx.env['LIB_Reflex'] = ['Reflex']
        # cintex...
        ctx.copy_uselib_defs(dst='Cintex', src='ROOT')
        ctx.env['LIB_Cintex'] = ['Reflex', 'Cintex']
        pass

    # pyroot...
    ctx.copy_uselib_defs(dst='PyROOT', src='ROOT')
    ctx.env['LIB_PyROOT'] = ['PyROOT'] + ctx.env['LIB_python']

    # XMLIO...
    ctx.copy_uselib_defs(dst='ROOT-XMLIO', src='ROOT')
    ctx.env['LIB_ROOT-XMLIO'] = ['XMLIO']

    # XMLParser
    ctx.copy_uselib_defs(dst='ROOT-XMLParser', src='ROOT')
    ctx.env['LIB_ROOT-XMLParser'] = ['XMLParser']

    # TreePlayer
    ctx.copy_uselib_defs(dst='ROOT-TreePlayer', src='ROOT')
    ctx.env['LIB_ROOT-TreePlayer'] = ['TreePlayer']

    # GenVector
    ctx.copy_uselib_defs(dst='ROOT-GenVector', src='ROOT')
    ctx.env['LIB_ROOT-GenVector'] = ['GenVector']

    # check for gccxml
    if not ctx.env.HWAF_FOUND_GCCXML:
        ctx.find_gccxml()
        pass

    # Compile-and-run smoke tests against the detected installation.
    ctx.check_cxx(
        msg="Checking for ROOT::TH1",
        fragment='''\
#include "TH1F.h"
void test_th1f() { new TH1F("th1f", "th1f", 100, 0., 100.); }
int main(int argc, char* argv[]) {
test_th1f();
return 0;
}
''',
        use="ROOT",
        execute = True,
        mandatory= True,
        )

    ctx.check_cxx(
        msg="Checking for ROOT::TTree",
        fragment='''\
#include "TTree.h"
void test_ttree() { new TTree("tree", "tree"); }
int main(int argc, char* argv[]) {
test_ttree();
return 0;
}
''',
        use="ROOT",
        execute = True,
        mandatory=True,
        )

    # Compile-only (shared-lib) check that the PyROOT headers are usable.
    ctx.check_cxx(
        msg="Checking for pyroot-cxx",
        features='cxx cxxshlib',
        fragment='''\
#include "Python.h"
#include "TPython.h"
#include "TPyException.h"
void throw_py_exception ()
{
throw PyROOT::TPyException();
}
''',
        use="ROOT PyROOT python",
        mandatory=True,
        )

    if use_root5:
        ctx.check_cxx(
            msg="Checking for reflex",
            features='cxx cxxshlib',
            fragment='''\
#include "Reflex/Type.h"
#include <iostream>
void check_reflex ()
{
std::cout << "typeof(int): ["
<< Reflex::Type::ByName("int").Name()
<< std::endl;
}
''',
            use="ROOT Reflex",
            mandatory=True,
            )

        ctx.check_cxx(
            msg="Checking for cintex",
            fragment='''\
#include "Cintex/Cintex.h"
int main()
{
ROOT::Cintex::Cintex::Enable();
return 0;
}
''',
            use="ROOT Cintex",
            execute = True,
            mandatory = True,
            )
        pass

    # check for ROOTSYS env. variable.
    ctx.start_msg('Checking for $ROOTSYS')
    rootsys = ctx.hwaf_subst_vars("${ROOTSYS}")
    if not rootsys:
        rootsys = os.getenv('ROOTSYS', None)
        pass
    if not rootsys:
        # make up one.
        rootsys = ctx.env.ROOT_HOME
        pass
    if not rootsys:
        # make up one.
        rootsys = getattr(ctx.options, 'with_root', None)
        pass
    ctx.end_msg(rootsys)
    if not rootsys:
        ctx.fatal("No $ROOTSYS environment variable")
        pass
    ctx.env.ROOTSYS = ctx.hwaf_subst_vars(rootsys)
    ctx.hwaf_declare_runtime_env('ROOTSYS')

    # Make the ROOT binaries, libraries and python bindings reachable at runtime.
    ctx.env.prepend_value('PATH', osp.join(ctx.env.ROOTSYS, 'bin'))
    ctx.env.prepend_value('LD_LIBRARY_PATH', osp.join(ctx.env.ROOTSYS, 'lib'))

    pyroot_path = osp.join(ctx.env.ROOTSYS, 'lib')
    ctx.env.prepend_value('PYTHONPATH', pyroot_path)
    ctx.env.prepend_value('LD_LIBRARY_PATH', pyroot_path)

    # check also python environment
    ctx.find_python_module('ROOT')
    if use_root5:
        ctx.find_python_module('PyCintex')
        pass

    ctx.env.ROOT_VERSION = version

    # register the find_root module
    import sys
    fname = __file__
    # Export the .py file, not a stale .pyc.
    if fname.endswith('.pyc'): fname = fname[:-1]
    ctx.hwaf_export_module(ctx.root.find_node(fname).abspath())

    ctx.env.HWAF_FOUND_ROOT = 1
    return
### ---------------------------------------------------------------------------
g_dsomap_merger = None
@feature('merge_dsomap')
def schedule_merge_dsomap(self):
    # Intentionally a no-op: .dsomap files are routed to the shared merge
    # task by merge_dsomap_hook via the '.dsomap' extension, not by this
    # feature method.
    #bld_area = self.env['BUILD_INSTALL_AREA']
    pass
@extension('.dsomap')
def merge_dsomap_hook(self, node):
    """Feed *node* (a .dsomap fragment) into the single shared merge task.

    The merge_dsomap task (and its installed output) is created on first
    use; later calls simply append their node to the task's inputs.
    """
    global g_dsomap_merger
    if g_dsomap_merger is not None:
        g_dsomap_merger.inputs.append(node)
        return g_dsomap_merger
    import os
    area_name = os.path.basename(self.env['BUILD_INSTALL_AREA'])
    area_node = self.bld.bldnode.find_dir(area_name) or self.bld.bldnode.make_node(area_name)
    project = self.bld.hwaf_project_name().replace('-', '_')
    out_node = area_node.make_node('lib').make_node('project_%s_merged.rootmap' % project)
    g_dsomap_merger = self.create_task('merge_dsomap', node, out_node)
    self.bld.install_files('${INSTALL_AREA}/lib', out_node, relative_trick=False)
    return g_dsomap_merger
class merge_dsomap(waflib.Task.Task):
    """Concatenate all component .dsomap files into one project rootmap."""
    color = 'PINK'
    ext_in = ['.dsomap']
    ext_out = ['.rootmap']
    after = ['gen_map', 'gen_reflex', 'symlink_tsk']
    run_str = 'cat ${SRC} > ${TGT}'
    shell = True

    def runnable_status(self):
        """Defer the merge until every input .dsomap exists on disk."""
        status = waflib.Task.Task.runnable_status(self)
        if status == waflib.Task.ASK_LATER:
            return status
        for in_node in self.inputs:
            try:
                os.stat(in_node.abspath())
            except OSError:  # was a bare except: don't swallow e.g. KeyboardInterrupt
                msg.debug("::missing input [%s]" % in_node.abspath())
                return waflib.Task.ASK_LATER
        return waflib.Task.RUN_ME
### ---------------------------------------------------------------------------
# Declare which env variables 'gen_reflex' tasks consume (for signatures).
waflib.Tools.ccroot.USELIB_VARS['gen_reflex'] = set(['GCCXML_FLAGS', 'DEFINES', 'INCLUDES', 'CPPFLAGS', 'LIB'])
from waflib.Tools import c_preproc
@feature('gen_reflex')
@after_method('apply_incpaths')
def gen_reflex_dummy(self):
    # Placeholder so the 'gen_reflex' feature is valid; actual tasks are
    # created per-header by gen_reflex_hook.
    pass
#@extension('.h')
def gen_reflex_hook(self, node):
    """Create a gen_reflex task turning header *node* into a Reflex
    dictionary .cxx plus its .dsomap, and chain the .dsomap into the
    project-wide rootmap merge."""
    env = self.env
    if not env['GENREFLEX_DSOMAP']:
        # project with *no* Reflex target...
        return
    if not env['GENREFLEX']:
        # configuration never located the genreflex binary
        self.bld.fatal(
            'package [%s] requested a gen_reflex task but binary "genreflex" not found (re-check configuration options)'
            % self.bld.hwaf_pkg_name(self.path))
        return
    if not env['HWAF_FOUND_GCCXML']:
        # configuration never located the gccxml binary
        self.bld.fatal(
            'package [%s] requested a gen_reflex task but binary "gccxml" not found (re-check configuration options)'
            % self.bld.hwaf_pkg_name(self.path))
        return

    hdr_name = node.name
    dict_dir = self.path.get_bld().make_node(
        "_reflex_dicts").make_node(env['GENREFLEX_DICTNAME'])
    dict_dir.mkdir()
    cxx_node = dict_dir.make_node("%s.cxx" % hdr_name)
    dsomap_name = env['GENREFLEX_DSOMAP'].replace('--rootmap=', '')
    dsomap_node = dict_dir.make_node(dsomap_name)
    tsk = self.create_task('gen_reflex', node, [cxx_node, dsomap_node])
    self.source += tsk.outputs
    merge_dsomap_hook(self, dsomap_node).set_run_after(tsk)
# classes ---
class gen_reflex(waflib.Task.Task):
    """Run genreflex on a header, producing the dictionary .cxx and .dsomap."""
    vars = ['GENREFLEX', 'GENREFLEX_SELECTION', 'DEFINES', 'GCCXML_FLAGS', 'CPPFLAGS', 'INCLUDES']
    color = 'BLUE'
    run_str = '${GENREFLEX} ${SRC} -s ${GENREFLEX_SELECTION} -o ${TGT[0].abspath()} ${GCCXML_FLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${GENREFLEX_DSOMAP} ${GENREFLEX_DSOMAPLIB}'
    ext_in = ['.h']
    ext_out = ['.cxx', '.dsomap']
    reentrant = True
    shell = False

    def scan(self):
        # C-preprocessor dependencies plus the selection XML file itself.
        selfile = self.env['GENREFLEX_SELECTION']
        node = self.generator.bld.root.find_resource(selfile)
        c_nodes, c_names = c_preproc.scan(self)
        c_nodes.append(node)
        c_names.append(waflib.Utils.h_file(node.abspath()))
        return (c_nodes, c_names)

    def exec_command(self, cmd, **kw):
        # Capture stdout/stderr into a per-dictionary log file.  Close the
        # handle before reading the log back: the original leaked the file
        # object and could read a partially-flushed log on failure.
        cwd_node = self.outputs[0].parent
        out = self.outputs[0].change_ext('.genreflex.log')
        fout_node = cwd_node.find_or_declare(out.name)
        with open(fout_node.abspath(), 'w') as fout:
            kw['stdout'] = fout
            kw['stderr'] = fout
            rc = waflib.Task.Task.exec_command(self, cmd, **kw)
        if rc != 0:
            msg.error("** error running [%s]" % ' '.join(cmd))
            msg.error(fout_node.read())
        return rc

    def runnable_status(self):
        # Wait for predecessors, then for the input headers; force a run
        # whenever an expected output is missing.
        for tsk in self.run_after:
            if not getattr(tsk, 'hasrun', False):
                return waflib.Task.ASK_LATER
        for in_node in self.inputs:
            try:
                os.stat(in_node.abspath())
            except OSError:  # narrowed from bare except
                return waflib.Task.ASK_LATER
        for out_node in self.outputs:
            try:
                os.stat(out_node.abspath())
            except OSError:
                return waflib.Task.RUN_ME
        return waflib.Task.Task.runnable_status(self)
### ---------------------------------------------------------------------------
@feature('gen_map')
@after('symlink_tsk')
def schedule_gen_map(self):
    """Schedule a gen_map task for every output of this generator's link task."""
    link = getattr(self, 'link_task', None)
    if not link:
        return
    for out in link.outputs:
        gen_map_hook(self, out)
@after('symlink_tsk')
def gen_map_hook(self, node):
    """Create a gen_map task producing a .dsomap for the shared library *node*
    and (unless do_merge_rootmap is False) feed it into the rootmap merge."""
    parent = node.get_bld().parent
    map_name = node.name.replace(self.bld.dso_ext(), ".dsomap")
    out_node = parent.make_node(map_name)
    tsk = self.create_task('gen_map', node, out_node)
    self.source += tsk.outputs
    if getattr(self, 'do_merge_rootmap', True):
        merge_dsomap_hook(self, out_node).set_run_after(tsk)
class gen_map(waflib.Task.Task):
    """Run genmap on a freshly linked shared library to produce its .dsomap."""
    vars = ['GENMAP', 'DEFINES', 'CPPFLAGS', 'INCLUDES']
    color = 'BLUE'
    run_str = '${GENMAP} -input-library ${SRC[0].name} -o ${TGT[0].name}'
    ext_in = ['.so', '.dylib', '.dll', '.bin']
    ext_out = ['.dsomap']
    shell = False
    reentrant = True
    after = ['cxxshlib', 'cxxprogram', 'symlink_tsk']

    def exec_command(self, cmd, **kw):
        # Run genmap from the library's build directory with the project's
        # subprocess environment, logging output next to the target.  Close
        # the log before reading it back (the original leaked the handle and
        # could read an unflushed log on failure).
        cwd_node = self.outputs[0].parent
        out = self.outputs[0].change_ext('.genmap.log')
        fout_node = cwd_node.find_or_declare(out.name)
        kw['env'] = self.generator.bld._get_env_for_subproc()
        kw['cwd'] = self.inputs[0].get_bld().parent.abspath()
        with open(fout_node.abspath(), 'w') as fout:
            kw['stdout'] = fout
            kw['stderr'] = fout
            rc = waflib.Task.Task.exec_command(self, cmd, **kw)
        if rc != 0:
            msg.error("** error running [%s]" % ' '.join(cmd))
            msg.error(fout_node.read())
        return rc

    def runnable_status(self):
        status = waflib.Task.Task.runnable_status(self)
        if status == waflib.Task.ASK_LATER:
            return status
        for out_node in self.outputs:
            try:
                os.stat(out_node.abspath())
            except OSError:  # narrowed from bare except: missing output -> rerun
                return waflib.Task.RUN_ME
        return status
### ---------------------------------------------------------------------------
def build_reflex_dict(self, name, source, selection_file, **kw):
    """Create the task generator building a Reflex dictionary library.

    *source* headers are run through genreflex using *selection_file*
    (selection XML); include paths are augmented with those of every
    (transitively) used task generator.  Returns the task generator,
    named 'genreflex-<name>'.
    """
    # extract package name
    # NOTE(review): PACKAGE_NAME appears unused below -- confirm before removing.
    PACKAGE_NAME = self._get_pkg_name()

    source = self._cmt_get_srcs_lst(source)
    src_node = source[0]

    # Work on a copy so the caller's kw dict is not mutated.
    kw = dict(kw)
    kw['name'] = name

    linkflags = [] # kw.get('linkflags', [])
    linkflags = self.env.SHLINKFLAGS + linkflags
    kw['linkflags'] = linkflags

    kw['includes'] = waflib.Utils.to_list(kw.get('includes',[]))
    bld_node = self.root.find_dir(self.env['BUILD_INSTALL_AREA'])
    if not bld_node:
        bld_node = self.root.make_node(self.env['BUILD_INSTALL_AREA'])
    kw['includes'].append(bld_node.parent.abspath())

    defines = waflib.Utils.to_list(kw.get('defines', []))
    kw['defines'] = defines + self._get_pkg_version_defines() + ['__REFLEX__',]
    if self.is_dbg():
        #print(":"*80)
        # only add NDEBUG in dbg mode as it should already be added
        # by somebody else for -opt mode.
        kw['defines'].append('NDEBUG')
        pass

    uses = kw.get('use', [])
    kw['use'] = uses + ['Reflex']

    # Return the first task generator matching one of *names* (or None).
    def _maybe_tgen(*names):
        for name in names:
            try:
                return self.get_tgen_by_name(name), name
            except:
                # NOTE(review): bare except also hides unexpected errors.
                pass
        return None, None

    # Collect the include directories of all (transitive) dependencies.
    dep_inc_dirs = []
    def _get_deps(obj):
        uses = waflib.Utils.to_list(getattr(obj, 'use', []))
        ld = obj.path.get_bld().abspath()
        includes = waflib.Utils.to_list(getattr(obj,'includes',[]))
        for inc in includes:
            if isinstance(inc, type("")):
                inc_node = obj.path.find_dir(inc)
            else:
                inc_node = inc
            if inc_node:
                dep_inc_dirs.append(inc_node.abspath())
        for u in uses:
            tgt,n = _maybe_tgen(u, 'complib-%s' % u, 'genreflex-%s' % u)
            if tgt:
                _get_deps(tgt)
    for u in kw['use']:
        tgt,n = _maybe_tgen(u)
        if tgt:
            _get_deps(tgt)
    kw['includes'] = dep_inc_dirs + kw['includes']

    # Default target name: '<name>Dict' unless already suffixed.
    target = kw.get('target', None)
    if not target:
        if not name.endswith('Dict'):
            target = name + 'Dict'
        else:
            target = name
    # NOTE(review): 'target' is set then immediately deleted from kw; the
    # value is passed explicitly to self(...) below -- confirm intent.
    kw['target'] = target
    del kw['target']

    features = waflib.Utils.to_list(kw.get('features', [])) + [
        'gen_reflex', 'cxx', 'cxxshlib', 'symlink_tsk',
        ]
    kw['features'] = features
    defines= kw['defines']
    del kw['defines']

    # Instantiate the task generator for the dictionary shared library.
    o = self(
        source=source,
        target=target,
        defines=defines,
        **kw
        )
    o.name = 'genreflex-%s' % name
    o.libpath = self.env.LD_LIBRARY_PATH + [self.path.get_bld().abspath()]
    o.install_path ='${INSTALL_AREA}/lib'
    o.reentrant = False
    o.depends_on = [self.path.find_resource(selection_file)]
    # Route '.h' sources to gen_reflex_hook (the @extension is disabled globally).
    o.mappings['.h'] = gen_reflex_hook

    o.env.GENREFLEX = self.env['GENREFLEX']
    o.env.GCCXML_USER_FLAGS = ['-D__GNUC_MINOR__=2',]
    o.env.GCCXML_FLAGS = [
        #'--quiet',
        '--debug',
        '--gccxmlopt=--gccxml-cxxflags', '--fail_on_warnings',
        #'--gccxmlopt=--gccxml-cxxflags', '-D__STRICT_ANSI__',
        self.hwaf_subst_vars('--gccxmlpath=${GCCXML_BINDIR}', o.env),
        #'--gccxmlpath=',
        ]
    if 'clang' in o.env.CFG_COMPILER:
        if self.is_darwin():
            # latest macosx XCode-5 needs to use llvm-gcc as a compiler b/c of
            # system headers gcc can't grok
            o.env.append_unique('GCCXML_FLAGS', '--gccxmlopt=--gccxml-compiler llvm-gcc')
        else:
            # FIXME: for the moment, always using gcc is fine
            # even in the context of a clang-based toolchain.
            # This should be revisited w/ VisualStudio...
            o.env.append_unique('GCCXML_FLAGS', '--gccxmlopt=--gccxml-compiler gcc')
        pass
    lib_name = "lib%s" % (o.target,) # FIXME !!
    o.env.GENREFLEX_DSOMAP = '--rootmap=%s.dsomap' % lib_name
    o.env.GENREFLEX_DSOMAPLIB = '--rootmap-lib=%s.so' % lib_name

    # Match gccxml's target bitness to the build.
    if self.is_32b():
        o.env.GCCXML_FLAGS.append('--gccxmlopt=-m32')
        #o.env.GCCXML_FLAGS.append('--gccxmlopt=--gccxml-cxxflags -m32')
    else:
        o.env.GCCXML_FLAGS.append('--gccxmlopt=-m64')
        #o.env.GCCXML_FLAGS.append('--gccxmlopt=--gccxml-cxxflags -m64')
        pass
    o.env.GENREFLEX_SELECTION = self.path.find_resource(selection_file).abspath()
    o.env.GENREFLEX_DICTNAME = name
    return o
### ---------------------------------------------------------------------------
# Declare which use-variables the 'gen_rootcint' feature consumes when waf
# propagates uselib flags (see waflib.Tools.ccroot.USELIB_VARS).
waflib.Tools.ccroot.USELIB_VARS['gen_rootcint'] = set(['DEFINES', 'INCLUDES', 'CPPFLAGS', 'LIB'])
@feature('gen_rootcint')
@after_method('apply_incpaths')
def gen_rootcint_dummy(self):
    # Placeholder: declaring the 'gen_rootcint' feature (ordered after
    # apply_incpaths) is enough for waf's machinery; no per-generator work
    # is needed here.
    pass
#@extension('.h')
def gen_rootcint_hook(self, node):
    """Bind the .h file extension to the creation of a gen_rootcint instance"""
    dict_name = self.env['GENROOTCINT_DICTNAME']
    if not dict_name:
        # This project declares no rootcint dictionary target: nothing to do.
        return
    # Generated .cxx files live under <bld>/_rootcint_dicts/<dict-name>/.
    out_dir = self.path.get_bld().make_node("_rootcint_dicts").make_node(dict_name)
    cxx_node = out_dir.make_node(node.name.replace(".h", ".cxx"))
    task = self.create_task('gen_rootcint', node, [cxx_node])
    # Feed the generated sources back into this generator's build graph.
    self.source += task.outputs
class gen_rootcint(waflib.Task.Task):
    """Run ROOT's rootcint on a header to generate the dictionary .cxx file."""
    vars = ['ROOTCINT', 'ROOTCINT_LINKDEF', 'DEFINES', 'CPPFLAGS', 'INCLUDES']
    color= 'BLUE'
    run_str = '${ROOTCINT} -f ${TGT} -c -p ${ROOTCINTINCPATHS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} ${ROOTCINT_LINKDEF}'
    ext_in = ['.h']
    ext_out= ['.cxx']
    reentrant = True
    shell = False

    def scan(self):
        """Extend the default C-preprocessor dependency scan with the LinkDef
        file and the input header itself, so edits to either retrigger the
        task."""
        c_nodes, c_names = c_preproc.scan(self)
        linkdef = self.env['ROOTCINT_LINKDEF']
        linkdef_node = self.generator.bld.root.find_resource(linkdef)
        c_nodes.append(linkdef_node)
        c_names.append(waflib.Utils.h_file(linkdef_node.abspath()))
        src_node = self.inputs[0]
        c_nodes.append(src_node)
        c_names.append(waflib.Utils.h_file(src_node.abspath()))
        return (c_nodes, c_names)

    def exec_command(self, cmd, **kw):
        """Run rootcint with stdout/stderr captured into a .genrootcint.log
        file next to the output; on failure, replay the captured log."""
        cwd_node = self.outputs[0].parent
        out = self.outputs[0].change_ext('.genrootcint.log')
        fout_node = cwd_node.find_or_declare(out.name)
        # Close the log file deterministically (the original leaked the handle).
        with open(fout_node.abspath(), 'w') as fout:
            kw['stdout'] = fout
            kw['stderr'] = fout
            rc = waflib.Task.Task.exec_command(self, cmd, **kw)
        if rc != 0:
            msg.error("** error running [%s]" % ' '.join(cmd))
            msg.error(fout_node.read())
        return rc

    def runnable_status(self):
        """Wait while any input is missing on disk; force a run when any
        declared output is missing; otherwise defer to the default logic."""
        for tsk in self.run_after:
            if not getattr(tsk, 'hasrun', False):
                return waflib.Task.ASK_LATER
        for in_node in self.inputs:
            try:
                os.stat(in_node.abspath())
            except OSError:
                # Input not produced yet: check again later.
                return waflib.Task.ASK_LATER
        for out_node in self.outputs:
            try:
                os.stat(out_node.abspath())
            except OSError:
                # Output missing: the task must run.
                return waflib.Task.RUN_ME
        return waflib.Task.Task.runnable_status(self)
### ---------------------------------------------------------------------------
@feature('gen_rootcint_map')
@after('symlink_tsk')
def schedule_gen_rootcint_map(self):
    """Attach a rootmap-generation hook to every output of the link task."""
    link_task = getattr(self, 'link_task', None)
    if not link_task:
        # Nothing was linked for this task generator: no rootmap to build.
        return
    for out_node in link_task.outputs:
        gen_rootcint_map_hook(self, out_node)
@extension('.bin')
def gen_rootcint_map_hook(self, node):
    """Create a rootmap file for a rootcint dict"""
    ext = self.bld.dso_ext()
    # The .dsomap sits next to the shared library it describes.
    out_dir = node.get_bld().parent
    map_node = out_dir.make_node(node.name.replace(ext, ".dsomap"))
    task = self.create_task('gen_rootcint_map', node, map_node)
    self.source += task.outputs
    # The merged dsomap must wait until this per-library map exists.
    merge_dsomap_hook(self, map_node).set_run_after(task)
class gen_rootcint_map(waflib.Task.Task):
    """Run ROOT's rlibmap to produce a .dsomap (rootmap) for a dictionary."""
    vars = ['RLIBMAP', 'DEFINES', 'CPPFLAGS', 'INCLUDES', 'RLIBMAP_LINKDEF']
    color= 'BLUE'
    run_str = '${RLIBMAP} -o ${TGT[0].name} -l ${SRC} -c ${RLIBMAP_LINKDEF}'
    ext_in = ['.so', '.dylib', '.dll', '.bin']
    ext_out = ['.dsomap']
    shell = False
    reentrant = True
    after = ['cxxshlib', 'cxxprogram', 'symlink_tsk']

    def exec_command(self, cmd, **kw):
        """Run rlibmap in the directory of the input library, with output
        captured into a log file; on failure, replay the captured log."""
        cwd_node = self.outputs[0].parent
        out = self.outputs[0].change_ext('.gen_rootcint_map.log')
        fout_node = cwd_node.find_or_declare(out.name)
        # Close the log file deterministically (the original leaked the handle).
        with open(fout_node.abspath(), 'w') as fout:
            kw['stdout'] = fout
            kw['stderr'] = fout
            kw['env'] = self.generator.bld._get_env_for_subproc()
            kw['cwd'] = self.inputs[0].get_bld().parent.abspath()
            rc = waflib.Task.Task.exec_command(self, cmd, **kw)
        if rc != 0:
            msg.error("** error running [%s]" % ' '.join(cmd))
            msg.error(fout_node.read())
        return rc

    def runnable_status(self):
        """In addition to the default status, force a run when any declared
        output file is missing on disk."""
        status = waflib.Task.Task.runnable_status(self)
        if status == waflib.Task.ASK_LATER:
            return status
        for out_node in self.outputs:
            try:
                os.stat(out_node.abspath())
            except OSError:
                return waflib.Task.RUN_ME
        return status
### ------------------------------------------------------------------------
def build_rootcint_dict(self, name, source, **kw):
    """Declare a task generator that builds a ROOT (rootcint) dictionary.

    `name` is the dictionary target name and `source` its input sources;
    remaining keyword arguments are forwarded to the task generator.  The
    LinkDef header may be overridden via `rootcint_linkdef` (defaults to
    'src/LinkDef.h').  Returns the created task generator.
    """
    kw = dict(kw)
    # extract package name
    PACKAGE_NAME = self._get_pkg_name()
    srcs = self._cmt_get_srcs_lst(source)
    includes = waflib.Utils.to_list(kw.get('includes', []))
    tgtdir = self.bldnode.find_or_declare(name).parent.abspath()
    # Search the project and build directories before any user-given includes.
    kw['includes'] = [
        self.path.abspath(),
        self.bldnode.abspath(),
        tgtdir,
        ] + includes
    defines = waflib.Utils.to_list(kw.get('defines', []))
    # Required by ROOT dictionaries to reach otherwise-private symbols.
    defines.insert(0, 'R__ACCESS_IN_SYMBOL=1')
    kw['defines'] = defines
    kw['rootcint_linkdef'] = kw.get('rootcint_linkdef', 'src/LinkDef.h')
    linkdef_node = self.path.find_node(kw['rootcint_linkdef'])
    kw['features'] = waflib.Utils.to_list(kw.get('features', [])) + [
        'gen_rootcint', 'gen_rootcint_map', 'cxx', 'symlink_tsk',
        ]
    o = self(
        name = name,
        source=source,
        **kw
        )
    o.mappings['.h'] = gen_rootcint_hook
    if 'cxxshlib' not in o.features:
        o.name = 'rootcint-dict-%s' % name
    o.reentrant = True
    o.depends_on = [linkdef_node.abspath()]
    o.env['ROOTCINT_LINKDEF'] = linkdef_node.abspath()
    o.env['RLIBMAP_LINKDEF'] = linkdef_node.abspath()
    o.env['GENROOTCINT_DICTNAME'] = name
    return o
# Expose the dictionary builders as methods on waf's BuildContext so wscripts
# can call bld.build_reflex_dict(...) / bld.build_rootcint_dict(...).
waflib.Build.BuildContext.build_reflex_dict = build_reflex_dict
waflib.Build.BuildContext.build_rootcint_dict = build_rootcint_dict
## EOF ##
| {
"content_hash": "248ab7fe44d88250d67ecd4989dfab3e",
"timestamp": "",
"source": "github",
"line_count": 814,
"max_line_length": 188,
"avg_line_length": 32.25675675675676,
"alnum_prop": 0.5529573066229958,
"repo_name": "hwaf/hwaf",
"id": "f239aa65341195e76ab15fa5d1a585732a8dcc5c",
"size": "26296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py-hwaftools/find_root.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Go",
"bytes": "255531"
},
{
"name": "Python",
"bytes": "345723"
}
],
"symlink_target": ""
} |
import mysql.connector
from model.group import Group
from model.contact import Contact
class DbFixture:
    """Read-only fixture over the addressbook MySQL database.

    Exposes the group and contact tables as lists of model objects and keeps
    a single autocommitting connection for the fixture's lifetime.
    """

    def __init__(self, host, name, user, password):
        """Connect to database `name` on `host`; autocommit is enabled so
        reads always observe fresh data."""
        self.host = host
        self.name = name
        self.user = user
        self.password = password
        self.connection = mysql.connector.connect(host=host, database=name, user=user, password=password)
        self.connection.autocommit = True

    def get_group_list(self):
        """Return all groups as Group models (ids converted to strings)."""
        groups = []
        cursor = self.connection.cursor()
        try:
            cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
            for (group_id, group_name, header, footer) in cursor:
                groups.append(Group(id=str(group_id), name=group_name, header=header, footer=footer))
        finally:
            cursor.close()
        return groups

    def get_contact_list(self):
        """Return all non-deleted contacts as Contact models."""
        contacts = []
        cursor = self.connection.cursor()
        try:
            # Rows with a non-zero `deprecated` timestamp are soft-deleted.
            cursor.execute("select id, firstname, lastname, address, home, mobile, work, phone2, email, email2, email3 "
                           "from addressbook where deprecated='0000-00-00 00:00:00'")
            for (contact_id, firstname, lastname, address, home, mobile,
                 work, phone2, email, email2, email3) in cursor:
                contacts.append(Contact(id=str(contact_id), firstname=firstname, lastname=lastname, address=address,
                                        homephone=home, mobilephone=mobile, workphone=work, sec_home=phone2,
                                        email1=email, email2=email2, email3=email3))
        finally:
            cursor.close()
        return contacts

    def destroy(self):
        """Close the underlying database connection."""
        self.connection.close()
| {
"content_hash": "3d372437c6ddca9f03bd37702bd8b72d",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 120,
"avg_line_length": 39,
"alnum_prop": 0.5856643356643356,
"repo_name": "mycolam/python_training",
"id": "d0369b10d5a65345fe63af4e7baa29239c7142b3",
"size": "1716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fixture/db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "35801"
}
],
"symlink_target": ""
} |
"""Command for listing selection of a platform."""
import clif.conf as conf
import clif.logger as logger
from pprint import pformat
def main(args):
    """Entry point: initialize the configuration and log what was loaded."""
    conf.init(args)
    formatted_args = pformat(vars(args))
    logger.info('command-line arguments:\n%s' % formatted_args)
    formatted_columns = pformat(conf.COLUMNS)
    logger.info('columns definition from configuration file (conf/platforms/selections/list.yml):\n%s'
                % (formatted_columns))
| {
"content_hash": "1f62537ea4cacb188ce228cdcc0fc0ed",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 102,
"avg_line_length": 35.09090909090909,
"alnum_prop": 0.7046632124352331,
"repo_name": "fmenabe/python-clif",
"id": "213478f1d8703defb0e8bde5915ef5f490b88df8",
"size": "403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/backup/commands/platforms/selections/list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16830"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    def forwards(self, orm):
        # Data migration: stations previously stored with type
        # 'similar_artists' now use the shorter type name 'similar'.
        orm.Station.objects.filter(type='similar_artists').update(type='similar')
    def backwards(self, orm):
        """No-op: the 'similar_artists' -> 'similar' rename is not reversed."""
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'radio.favoritedstation': {
'Meta': {'ordering': "('-date_added',)", 'object_name': 'FavoritedStation'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['radio.Station']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'radio.recentstation': {
'Meta': {'ordering': "('-date_added',)", 'object_name': 'RecentStation'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['radio.Station']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'radio.station': {
'Meta': {'ordering': "('type', 'name')", 'object_name': 'Station'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'plays_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'})
},
'radio.topartist': {
'Meta': {'ordering': "('-popularity',)", 'object_name': 'TopArtist'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'popularity': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'})
},
'radio.toptag': {
'Meta': {'ordering': "('name',)", 'object_name': 'TopTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'popularity': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['radio']
| {
"content_hash": "9faa24b0901c3e8a40bc4b211b2ef25c",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 182,
"avg_line_length": 67.42696629213484,
"alnum_prop": 0.5449091818030328,
"repo_name": "kirov/tvoeradio",
"id": "ad90d978c61fa7e196582f9d77831034bdc5f596",
"size": "6019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tvoeradio/radio/migrations/0002_rename_station_similar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "72768"
},
{
"name": "JavaScript",
"bytes": "314420"
},
{
"name": "Python",
"bytes": "92347"
}
],
"symlink_target": ""
} |
"""Setup for tests exercising visualization displays."""
__author__ = 'Mike Gainer (mgainer@google.com)'
from common import schema_fields
from controllers import utils
from models import analytics
from models import custom_modules
from models import data_sources
from modules.dashboard import tabs
class FakeDataSource(data_sources.AbstractRestDataSource):
    """Test double for a REST data source.

    Class-level flags let tests force the next fetch_values() call to raise,
    to log a critical message, or to clamp the returned page number; each
    flag is consumed (reset) by that call.
    """

    _exception = None
    _log_critical = None
    _page_number = None

    @classmethod
    def get_context_class(cls):
        return data_sources.DbTableContext

    @classmethod
    def get_schema(cls, *args, **kwargs):
        reg = schema_fields.FieldRegistry(
            'Bogus', description='bogus')
        reg.add_property(schema_fields.SchemaField(
            'bogus', 'Bogus', 'integer', description='Fake schema'))
        return reg.get_json_schema_dict()['properties']

    @classmethod
    def set_fetch_values_page_number(cls, page_number=None):
        """For testing. Force fetch_values to return a specific page number."""
        cls._page_number = page_number

    @classmethod
    def set_fetch_values_exception(cls):
        """For testing. Force fetch_values to raise an exception."""
        cls._exception = True

    @classmethod
    def set_fetch_values_log_critical(cls):
        """For testing. Force fetch_values to log an error message."""
        cls._log_critical = True

    @classmethod
    def fetch_values(cls, _app_context, _source_context, _schema, log,
                     page_number):
        """Return four fixed rows; honor any pending forced-failure flags."""
        if cls._exception:
            cls._exception = None
            raise ValueError('Error for testing')
        if cls._log_critical:
            cls._log_critical = None
            log.critical('Error for testing')
        forced_page = cls._page_number
        if forced_page is not None:
            cls._page_number = None
            if forced_page != page_number:
                log.warning('Stopping at last page %d' % forced_page)
                page_number = forced_page
        rows = [
            {'name': 'Snoopy', 'score': 10, 'page_number': page_number},
            {'name': 'Linus', 'score': 8},
            {'name': 'Lucy', 'score': 3},
            {'name': 'Schroeder', 'score': 5},
        ]
        return rows, page_number
class ExamsDataSource(FakeDataSource):
    """Fake 'exams' source; unpaginated (chunk size of zero)."""

    @classmethod
    def get_title(cls):
        return 'Exams'

    @classmethod
    def get_name(cls):
        return 'exams'

    @classmethod
    def get_default_chunk_size(cls):
        # Not paginated
        return 0
class PupilsDataSource(FakeDataSource):
    """Fake 'pupils' source using the inherited default pagination."""

    @classmethod
    def get_title(cls):
        return 'Pupils'

    @classmethod
    def get_name(cls):
        return 'pupils'
class AnswersDataSource(FakeDataSource):
    """Fake 'fake_answers' source using the inherited default pagination."""

    @classmethod
    def get_title(cls):
        return 'Fake Answers'

    @classmethod
    def get_name(cls):
        return 'fake_answers'
class ForceResponseHandler(utils.ApplicationHandler):
    """REST service to allow tests to affect the behavior of FakeDataSource."""

    URL = '/fake_data_source_response'
    PARAM_DATA_SOURCE = 'data_source'
    PARAM_ACTION = 'action'
    PARAM_PAGE_NUMBER = 'page_number'
    ACTION_PAGE_NUMBER = 'page_number'
    ACTION_LOG_CRITICAL = 'log_critical'
    ACTION_EXCEPTION = 'exception'

    def post(self):
        """Apply the requested forced behavior to the named data source."""
        sources_by_name = {
            'exams': ExamsDataSource,
            'pupils': PupilsDataSource,
            'fake_answers': AnswersDataSource,
        }
        source = sources_by_name[
            self.request.get(ForceResponseHandler.PARAM_DATA_SOURCE)]
        action = self.request.get(ForceResponseHandler.PARAM_ACTION)
        if action == ForceResponseHandler.ACTION_EXCEPTION:
            source.set_fetch_values_exception()
        elif action == ForceResponseHandler.ACTION_LOG_CRITICAL:
            source.set_fetch_values_log_critical()
        elif action == ForceResponseHandler.ACTION_PAGE_NUMBER:
            source.set_fetch_values_page_number(
                int(self.request.get(ForceResponseHandler.PARAM_PAGE_NUMBER)))
        else:
            # Unknown action: reject the request.
            self.response.set_status(400)
            self.response.write('Malformed Request')
            return
def register_on_enable():
    """Register the fake data sources, visualizations and dashboard tabs."""
    for source in (ExamsDataSource, PupilsDataSource, AnswersDataSource):
        data_sources.Registry.register(source)
    exams = analytics.Visualization(
        'exams', 'Exams', 'fake_visualizations.html',
        [ExamsDataSource])
    pupils = analytics.Visualization(
        'pupils', 'Pupils', 'fake_visualizations.html',
        [PupilsDataSource])
    scoring = analytics.Visualization(
        'scoring', 'Scoring', 'fake_visualizations.html',
        [ExamsDataSource, PupilsDataSource, AnswersDataSource])
    tab_specs = (
        ('exams', 'Exams', exams),
        ('pupils', 'Pupils', pupils),
        ('scoring', 'Scoring', scoring),
    )
    for tab_name, tab_title, visualization in tab_specs:
        tabs.Registry.register('analytics', tab_name, tab_title, [visualization])
def register_module():
    """Dynamically registered module providing fake analytics for testing."""
    handlers = [(ForceResponseHandler.URL, ForceResponseHandler)]
    description = ('Provide visualizations requiring simple, '
                   'paginated, and multiple data streams for testing.')
    return custom_modules.Module(
        'FakeVisualizations', description,
        [], handlers, register_on_enable, None)
| {
"content_hash": "e2642352907c0ef915540f79133934e0",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 80,
"avg_line_length": 31.403508771929825,
"alnum_prop": 0.6417132216014898,
"repo_name": "wavemind/gcb17ml",
"id": "bbeab1c28d720330c7f3487819683fe66b9de133",
"size": "5968",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/integration/fake_visualizations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "62209"
},
{
"name": "JavaScript",
"bytes": "425162"
},
{
"name": "Python",
"bytes": "3344249"
},
{
"name": "Shell",
"bytes": "23773"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Add a UNIQUE constraint on ClassRoom.slug."""
        # Adding unique constraint on 'ClassRoom', fields ['slug']
        db.create_unique('classroom_classroom', ['slug'])
    def backwards(self, orm):
        """Drop the UNIQUE constraint on ClassRoom.slug."""
        # Removing unique constraint on 'ClassRoom', fields ['slug']
        db.delete_unique('classroom_classroom', ['slug'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'classroom.classroom': {
'Meta': {'object_name': 'ClassRoom'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'status': ('model_utils.fields.StatusField', [], {'default': "'draft'", 'max_length': '100', 'no_check_for_status': 'True'}),
'tutor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'classroom.classroomstudentinterest': {
'Meta': {'object_name': 'ClassRoomStudentInterest'},
'classroom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classroom.ClassRoom']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['classroom'] | {
"content_hash": "a3c4ae90142b4e9c60a234274b31946e",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 182,
"avg_line_length": 67.84210526315789,
"alnum_prop": 0.5591543832428238,
"repo_name": "reinbach/tutorus",
"id": "4daab91ecc512f64f0b717985d0ae5252807ec0a",
"size": "5180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tutorus/classroom/migrations/0004_auto__add_unique_classroom_slug.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "160397"
},
{
"name": "Python",
"bytes": "168905"
}
],
"symlink_target": ""
} |
def find_getch():
    """Return a getch() function that reads one raw character from stdin.

    On Windows (no termios) this is msvcrt.getch; on POSIX it is a helper
    that temporarily puts the tty into raw mode for a single read.
    """
    try:
        import termios
    except ImportError:
        # Non-POSIX. Return msvcrt's (Windows') getch.
        import msvcrt
        return msvcrt.getch

    # POSIX system. Create and return a getch that manipulates the tty.
    import sys, tty

    def _getch():
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            # Always restore the terminal settings, even if the read fails.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch

    # Bug fix: return the function itself, not the result of calling it --
    # the original `return _getch()` blocked on a read and returned a char,
    # inconsistent with the Windows branch (which returns an uncalled
    # msvcrt.getch) and with the intended usage `getch = find_getch()`.
    return _getch
#getch = find_getch()
| {
"content_hash": "699b23c4e3c0ba35de1cc1611003f990",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 71,
"avg_line_length": 25.565217391304348,
"alnum_prop": 0.576530612244898,
"repo_name": "UASLab/ImageAnalysis",
"id": "e6f63f3ccdf0ce7579ae2d891fc15741d6331d99",
"size": "589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/lib/getchar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GLSL",
"bytes": "2077"
},
{
"name": "Python",
"bytes": "1805747"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Copyright (c) 2012 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import sys
import string
import optparse
import re
try:
import json
except ImportError:
import simplejson as json
import CodeGeneratorInspectorStrings
# Manually-filled map of type name replacements.
TYPE_NAME_FIX_MAP = {
    "RGBA": "Rgba", # RGBA is reported to be conflicting with a define name in Windows CE.
    "": "Empty", # Anonymous/empty type names are emitted as "Empty".
}
# Protocol types that get a runtime cast helper generated for them.
TYPES_WITH_RUNTIME_CAST_SET = frozenset(["Runtime.RemoteObject", "Runtime.PropertyDescriptor", "Runtime.InternalPropertyDescriptor",
    "Debugger.FunctionDetails", "Debugger.GeneratorObjectDetails", "Debugger.CollectionEntry", "Debugger.CallFrame", "Debugger.Location"])
# Protocol types whose generated field list stays open (mutable) because
# agents need to modify instances after construction.
TYPES_WITH_OPEN_FIELD_LIST_SET = frozenset([
    # InspectorStyleSheet not only creates this property but wants to read it and modify it.
    "CSS.CSSProperty",
    # InspectorResourceAgent needs to update mime-type.
    "Network.Response"])
# Command line: <script> --output_dir <dir> protocol.json
cmdline_parser = optparse.OptionParser()
cmdline_parser.add_option("--output_dir")
try:
    arg_options, arg_values = cmdline_parser.parse_args()
    if (len(arg_values) != 1):
        raise Exception("Exactly one plain argument expected (found %s)" % len(arg_values))
    input_json_filename = arg_values[0]
    output_dirname = arg_options.output_dir
    if not output_dirname:
        raise Exception("Output directory must be specified")
except Exception:
    # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
    exc = sys.exc_info()[1]
    sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
    sys.stderr.write("Usage: <script> --output_dir <output_dir> protocol.json\n")
    exit(1)
# FIXME: move this methods under Capitalizer class below and remove duplications.
def dash_to_camelcase(word):
    """Convert a dash-separated word to CamelCase ('' segments become '-')."""
    parts = []
    for segment in word.split('-'):
        parts.append(segment.capitalize() or '-')
    return ''.join(parts)
def fix_camel_case(name):
    """Normalize a protocol name: dashes to camelCase, a leading capital,
    and well-known acronyms (HTML/XML/WML/API) forced to upper case."""
    camel = re.sub(r'-(\w)', lambda pat: pat.group(1).upper(), name)
    camel = to_title_case(camel)
    return re.sub(r'(?i)HTML|XML|WML|API', lambda pat: pat.group(0).upper(), camel)
def to_title_case(name):
    """Upper-case the first character, leaving the rest untouched."""
    if not name:
        return name
    return name[0].upper() + name[1:]
class Capitalizer:
    """Helpers converting between lowerCamelCase and UpperCamelCase names,
    aware of a fixed set of all-caps abbreviations (XHR, DOM, CSS, IO)."""

    @staticmethod
    def lower_camel_case_to_upper(str):
        """fooBar -> FooBar (no-op when already capitalized or empty)."""
        if len(str) > 0 and str[0].islower():
            return str[0].upper() + str[1:]
        return str

    @staticmethod
    def upper_camel_case_to_lower(str):
        """FooBar -> fooBar; DOMWindow -> domWindow (known abbreviations only).

        Raises when a multi-letter leading capital run is not a known
        abbreviation.
        """
        upper_run = 0
        while upper_run < len(str) and str[upper_run].isupper():
            upper_run += 1
        if upper_run == 0:
            return str
        if upper_run == 1:
            return str[0].lower() + str[1:]
        # Several leading capitals: all but the last belong to the
        # abbreviation (the last one starts the next word), unless the
        # whole string is upper case.
        if upper_run < len(str):
            upper_run -= 1
        abbreviation = str[0:upper_run]
        if abbreviation not in Capitalizer.ABBREVIATION:
            raise Exception("Unknown abbreviation %s" % abbreviation)
        return abbreviation.lower() + str[upper_run:]

    ABBREVIATION = frozenset(["XHR", "DOM", "CSS", "IO"])
# Preprocessor guard emitted around generated validator code.
VALIDATOR_IFDEF_NAME = "ENABLE(ASSERT)"
class DomainNameFixes:
    """Naming helpers for protocol domains."""

    @staticmethod
    def get_fixed_data(domain_name):
        """Return the agent field name for a domain (lowered name + 'Agent')."""
        agent_field_name = Capitalizer.upper_camel_case_to_lower(domain_name) + "Agent"
        return agent_field_name
class RawTypes(object):
    """Descriptors for the raw JSON wire types.

    Each nested class describes how one JSON type maps onto the C++
    JSONValue getter/setter/constructor machinery used by the generated
    TypeBuilder code.
    """

    @staticmethod
    def get(json_type):
        """Return the descriptor class for a JSON type name.

        Raises Exception for a name outside the protocol's type alphabet.
        """
        descriptors = {
            "boolean": RawTypes.Bool,
            "string": RawTypes.String,
            "array": RawTypes.Array,
            "object": RawTypes.Object,
            "integer": RawTypes.Int,
            "number": RawTypes.Number,
            "any": RawTypes.Any,
        }
        if json_type not in descriptors:
            raise Exception("Unknown type: %s" % json_type)
        return descriptors[json_type]

    class BaseType(object):
        """Common interface for raw type descriptors."""

        @classmethod
        def get_raw_validator_call_text(cls):
            # Default validator asserts the JSONValue carries the tag named
            # after the getter (subclasses override when that doesn't hold).
            return "RuntimeCastHelper::assertType<JSONValue::Type%s>" % cls.get_getter_name()

        @staticmethod
        def get_getter_name():
            raise Exception("Unsupported")

    class String(BaseType):
        @staticmethod
        def get_getter_name():
            return "String"

        # Setter shares the getter's name for strings.
        @staticmethod
        def get_setter_name():
            return "String"

        @staticmethod
        def get_constructor_pattern():
            return "InspectorString::create(%s)"

        @staticmethod
        def is_heavy_value():
            # Strings are passed by const reference.
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "String"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.String

    class Int(BaseType):
        @staticmethod
        def get_getter_name():
            return "Int"

        @staticmethod
        def get_setter_name():
            # Ints are stored through the generic numeric setter.
            return "Number"

        @staticmethod
        def get_constructor_pattern():
            return "InspectorBasicValue::create(%s)"

        @classmethod
        def get_raw_validator_call_text(cls):
            # Integers need a dedicated check (a JSON number may be fractional).
            return "RuntimeCastHelper::assertInt"

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "int"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Int

    class Number(BaseType):
        @staticmethod
        def get_getter_name():
            return "Double"

        @staticmethod
        def get_setter_name():
            return "Number"

        @staticmethod
        def get_constructor_pattern():
            return "InspectorBasicValue::create(%s)"

        @staticmethod
        def get_raw_validator_call_text():
            # The getter is named "Double" but the JSON tag is "Number",
            # so the inherited default would produce the wrong tag.
            return "RuntimeCastHelper::assertType<JSONValue::TypeNumber>"

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "double"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Number

    class Bool(BaseType):
        @staticmethod
        def get_getter_name():
            return "Boolean"

        # Setter shares the getter's name for booleans.
        @staticmethod
        def get_setter_name():
            return "Boolean"

        @staticmethod
        def get_constructor_pattern():
            return "InspectorBasicValue::create(%s)"

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "bool"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Bool

    class Object(BaseType):
        @staticmethod
        def get_getter_name():
            return "Object"

        @staticmethod
        def get_setter_name():
            return "Value"

        @staticmethod
        def get_constructor_pattern():
            # Objects are passed through verbatim.
            return "%s"

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "JSONObject"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Object

    class Any(BaseType):
        @staticmethod
        def get_getter_name():
            return "Value"

        # Setter shares the getter's name for untyped values.
        @staticmethod
        def get_setter_name():
            return "Value"

        @staticmethod
        def get_constructor_pattern():
            raise Exception("Unsupported")

        @staticmethod
        def get_raw_validator_call_text():
            # Anything goes: the validator only checks presence.
            return "RuntimeCastHelper::assertAny"

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "JSONValue"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Any

    class Array(BaseType):
        @staticmethod
        def get_getter_name():
            return "Array"

        @staticmethod
        def get_setter_name():
            return "Value"

        @staticmethod
        def get_constructor_pattern():
            # Arrays are passed through verbatim.
            return "%s"

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "JSONArray"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Array
def replace_right_shift(input_str):
    """Insert a space between consecutive '>' characters.

    Pre-C++11 compilers parse "RefPtr<Array<X>>" as a right-shift; the
    generated template text must therefore read "RefPtr<Array<X> >".
    """
    return "> >".join(input_str.split(">>"))
class CommandReturnPassModel:
    """Describes how a command's return value crosses the generated
    backend-dispatcher boundary: by reference, by pointer, or via the
    OptOutput wrapper for optional primitives."""

    class ByReference:
        """Output passed as 'T&'; optionally set only when a condition holds."""

        def __init__(self, var_type, set_condition):
            self.var_type = var_type
            self.set_condition = set_condition

        def get_return_var_type(self):
            return self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def get_output_to_raw_expression():
            return "%s"

        def get_output_parameter_type(self):
            return "%s&" % self.var_type

        def get_set_return_condition(self):
            return self.set_condition

    class ByPointer:
        """Output passed as 'T*', always assigned."""

        def __init__(self, var_type):
            self.var_type = var_type

        def get_return_var_type(self):
            return self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return "&"

        @staticmethod
        def get_output_to_raw_expression():
            return "%s"

        def get_output_parameter_type(self):
            return "%s*" % self.var_type

        @staticmethod
        def get_set_return_condition():
            # No condition: the value is always returned.
            return None

    class OptOutput:
        """Optional primitive output passed via TypeBuilder::OptOutput<T>."""

        def __init__(self, var_type):
            self.var_type = var_type

        def get_return_var_type(self):
            return "TypeBuilder::OptOutput<%s>" % self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return "&"

        @staticmethod
        def get_output_to_raw_expression():
            # The wrapper exposes the payload through getValue().
            return "%s.getValue()"

        def get_output_parameter_type(self):
            return "TypeBuilder::OptOutput<%s>*" % self.var_type

        @staticmethod
        def get_set_return_condition():
            # Only serialized when the agent actually assigned the value.
            return "%s.isAssigned()"
class TypeModel:
    """Models the C++ types used to pass protocol values through generated
    agent/frontend methods (input parameter types and command-return
    pass models). Raw-type singletons (Bool, Int, ...) are installed by
    init_class() below."""

    class RefPtrBased(object):
        # Model for RefPtr-managed types (JSONObject/JSONArray/JSONValue and
        # generated TypeBuilder classes).
        def __init__(self, class_name):
            self.class_name = class_name
            self.optional = False

        def get_optional(self):
            # Returns a fresh copy marked optional; the original stays required.
            result = TypeModel.RefPtrBased(self.class_name)
            result.optional = True
            return result

        def get_command_return_pass_model(self):
            # Optional results are serialized conditionally ("%s" stands for
            # the output variable); required results unconditionally.
            if self.optional:
                set_condition = "%s"
            else:
                set_condition = None
            return CommandReturnPassModel.ByReference(replace_right_shift("RefPtr<%s>" % self.class_name), set_condition)

        def get_input_param_type_text(self):
            return replace_right_shift("PassRefPtr<%s>" % self.class_name)

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

    class Enum(object):
        # Model for generated enum types; values travel as <Type>::Enum.
        def __init__(self, base_type_name):
            self.type_name = base_type_name + "::Enum"

        # NOTE: the parameter is deliberately named 'base_self' — the optional
        # variant is a closure-capturing class over this Enum instance.
        def get_optional(base_self):
            class EnumOptional:
                @classmethod
                def get_optional(cls):
                    # Already optional; idempotent.
                    return cls

                @staticmethod
                def get_command_return_pass_model():
                    return CommandReturnPassModel.OptOutput(base_self.type_name)

                @staticmethod
                def get_input_param_type_text():
                    # Optional enum inputs arrive as nullable pointers.
                    return base_self.type_name + "*"

                @staticmethod
                def get_event_setter_expression_pattern():
                    raise Exception("TODO")
            return EnumOptional

        def get_command_return_pass_model(self):
            return CommandReturnPassModel.ByPointer(self.type_name)

        def get_input_param_type_text(self):
            return self.type_name

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

    class ValueType(object):
        # Model for plain value types (bool, int, double, String).
        def __init__(self, type_name, is_heavy):
            self.type_name = type_name
            self.is_heavy = is_heavy  # heavy values are passed by const reference

        def get_optional(self):
            return self.ValueOptional(self)

        def get_command_return_pass_model(self):
            return CommandReturnPassModel.ByPointer(self.type_name)

        def get_input_param_type_text(self):
            if self.is_heavy:
                return "const %s&" % self.type_name
            else:
                return self.type_name

        def get_opt_output_type_(self):
            return self.type_name

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

        class ValueOptional:
            # Wrapper produced by ValueType.get_optional().
            def __init__(self, base):
                self.base = base

            def get_optional(self):
                # Already optional; idempotent.
                return self

            def get_command_return_pass_model(self):
                return CommandReturnPassModel.OptOutput(self.base.get_opt_output_type_())

            def get_input_param_type_text(self):
                # Optional value inputs arrive as nullable const pointers.
                return "const %s* const" % self.base.type_name

            @staticmethod
            def get_event_setter_expression_pattern():
                # Dereference the optional pointer when emitting events.
                return "*%s"

    @classmethod
    def init_class(cls):
        # Installs the raw-type singletons; invoked once right after the
        # class body (see the module-level TypeModel.init_class() call).
        cls.Bool = cls.ValueType("bool", False)
        cls.Int = cls.ValueType("int", False)
        cls.Number = cls.ValueType("double", False)
        cls.String = cls.ValueType("String", True,)
        cls.Object = cls.RefPtrBased("JSONObject")
        cls.Array = cls.RefPtrBased("JSONArray")
        cls.Any = cls.RefPtrBased("JSONValue")
# Populate the TypeModel.Bool/Int/Number/... singletons once at import time.
TypeModel.init_class()
# Collection of JSONObject class methods that are likely to be overloaded in generated class.
# We must explicitly import all overloaded methods or they won't be available to user.
# BUG FIX: the original literal listed "setValue" twice; duplicates are
# meaningless in a frozenset, so the redundant entry has been dropped.
INSPECTOR_OBJECT_SETTER_NAMES = frozenset(["setValue", "setBoolean", "setNumber", "setString", "setObject", "setArray"])
def fix_type_name(json_name):
    """Return a descriptor for the C++ name of a JSON type id.

    The descriptor exposes 'class_name' (possibly renamed via
    TYPE_NAME_FIX_MAP) and 'output_comment(writer)', which notes the
    original name when a rename happened and is a no-op otherwise.
    """
    if json_name in TYPE_NAME_FIX_MAP:
        fixed = TYPE_NAME_FIX_MAP[json_name]
    else:
        fixed = None

    class Result(object):
        class_name = json_name if fixed is None else fixed

        @staticmethod
        def output_comment(writer):
            if fixed is not None:
                writer.newline("// Type originally was named '%s'.\n" % json_name)
    return Result
class Writer:
    """Accumulates generated source text into a list of fragments.

    'output' is a shared list; insert_writer() plants a nested list in it
    so indented sections can be filled in later while preserving order.
    """

    def __init__(self, output, indent):
        self.output = output
        self.indent = indent

    def newline(self, str):
        # Start a fresh line: emit the indent (when non-empty), then the text.
        if self.indent:
            self.output.append(self.indent)
        self.output.append(str)

    def append(self, str):
        # Continue the current line.
        self.output.append(str)

    def newline_multiline(self, str):
        self._emit_multiline(str, indent_first=True)

    def append_multiline(self, str):
        self._emit_multiline(str, indent_first=False)

    def _emit_multiline(self, text, indent_first):
        # Re-indent every non-empty line after the first; the first fragment
        # either opens a new line or continues the current one.
        fragments = text.split('\n')
        if indent_first:
            self.newline(fragments[0])
        else:
            self.append(fragments[0])
        for fragment in fragments[1:]:
            self.output.append('\n')
            if fragment:
                self.newline(fragment)

    def get_indent(self):
        return self.indent

    def insert_writer(self, additional_indent):
        # Nested list placeholder keeps overall ordering while allowing
        # the returned writer to be filled in later.
        nested_output = []
        self.output.append(nested_output)
        return Writer(nested_output, self.indent + additional_indent)
class EnumConstants:
    """Interns enum string constants, assigning each a stable integer index
    used by the generated C++ enum values (class-level, process-wide)."""

    map_ = {}         # value -> index
    constants_ = []   # index -> value

    @classmethod
    def add_constant(cls, value):
        """Return the index for 'value', registering it on first sight."""
        existing = cls.map_.get(value)
        if existing is not None:
            return existing
        index = len(cls.constants_)
        cls.map_[value] = index
        cls.constants_.append(value)
        return index

    @classmethod
    def get_enum_constant_code(cls):
        """Render all registered constants as C++ string-array initializer lines."""
        lines = [" \"%s\"" % name for name in cls.constants_]
        return ",\n".join(lines) + "\n"
# Typebuilder code is generated in several passes: first typedefs, then other classes.
# Manual pass management is needed because we cannot have forward declarations for typedefs.
class TypeBuilderPass:
    """Identifiers for the sequential TypeBuilder generation passes."""
    TYPEDEF = "typedef"  # pass emitting typedef aliases
    MAIN = "main"        # pass emitting full class/struct definitions
class TypeBindings:
    """Factory of 'binding' classes.

    For each type in the protocol JSON it builds a class object that knows
    how to emit the corresponding C++ TypeBuilder declaration and (when
    requested) a runtime validator. Bindings share a duck-typed interface:
    resolve_inner, request_user_runtime_cast, request_internal_runtime_cast,
    get_code_generator, get_validator_call_text, get_array_item_c_type_text,
    get_setter_value_expression_pattern, reduce_to_raw_type, get_type_model.
    """

    @staticmethod
    def create_named_type_declaration(json_typable, context_domain_name, type_data):
        """Create a binding for a type declared in a domain's "types" section."""
        json_type = type_data.get_json_type()

        class Helper:
            # Named types live in the TypeBuilder::<Domain>:: namespace.
            is_ad_hoc = False
            full_name_prefix_for_use = "TypeBuilder::" + context_domain_name + "::"
            full_name_prefix_for_impl = "TypeBuilder::" + context_domain_name + "::"

            @staticmethod
            def write_doc(writer):
                # Emit the JSON "description" field as a C-style comment.
                if "description" in json_type:
                    writer.newline("/* ")
                    writer.append(json_type["description"])
                    writer.append(" */\n")

            @staticmethod
            def add_to_forward_listener(forward_listener):
                forward_listener.add_type_data(type_data)
        fixed_type_name = fix_type_name(json_type["id"])
        return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)

    @staticmethod
    def create_ad_hoc_type_declaration(json_typable, context_domain_name, ad_hoc_type_context):
        """Create a binding for an anonymous type declared inline (a property
        or array "items" descriptor), nested inside its container type."""

        class Helper:
            is_ad_hoc = True
            full_name_prefix_for_use = ad_hoc_type_context.container_relative_name_prefix
            full_name_prefix_for_impl = ad_hoc_type_context.container_full_name_prefix

            @staticmethod
            def write_doc(writer):
                pass

            @staticmethod
            def add_to_forward_listener(forward_listener):
                pass
        fixed_type_name = ad_hoc_type_context.get_type_name_fix()
        return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)

    @staticmethod
    def create_type_declaration_(json_typable, context_domain_name, fixed_type_name, helper):
        """Dispatch on the JSON "type" field and build the matching binding
        (enum / string / object / array, or the raw-type fallback)."""
        if json_typable["type"] == "string":
            if "enum" in json_typable:

                class EnumBinding:
                    # Runtime-cast support is generated only on demand.
                    need_user_runtime_cast_ = False
                    need_internal_runtime_cast_ = False

                    @classmethod
                    def resolve_inner(cls, resolve_context):
                        pass

                    @classmethod
                    def request_user_runtime_cast(cls, request):
                        if request:
                            cls.need_user_runtime_cast_ = True
                            request.acknowledge()

                    @classmethod
                    def request_internal_runtime_cast(cls):
                        cls.need_internal_runtime_cast_ = True

                    @classmethod
                    def get_code_generator(enum_binding_cls):

                        class CodeGenerator:
                            @staticmethod
                            def generate_type_builder(writer, generate_context):
                                # Emits 'struct <Name> { enum Enum { ... }; };'
                                # plus an optional out-of-line validator.
                                enum = json_typable["enum"]
                                helper.write_doc(writer)
                                enum_name = fixed_type_name.class_name
                                fixed_type_name.output_comment(writer)
                                writer.newline("struct ")
                                writer.append(enum_name)
                                writer.append(" {\n")
                                writer.newline(" enum Enum {\n")
                                for enum_item in enum:
                                    # Constant values are indices into the shared
                                    # interned string table (EnumConstants).
                                    enum_pos = EnumConstants.add_constant(enum_item)
                                    item_c_name = enum_item.replace('-', '_')
                                    item_c_name = Capitalizer.lower_camel_case_to_upper(item_c_name)
                                    if item_c_name in TYPE_NAME_FIX_MAP:
                                        item_c_name = TYPE_NAME_FIX_MAP[item_c_name]
                                    writer.newline(" ")
                                    writer.append(item_c_name)
                                    writer.append(" = ")
                                    writer.append("%s" % enum_pos)
                                    writer.append(",\n")
                                writer.newline(" };\n")
                                if enum_binding_cls.need_user_runtime_cast_:
                                    raise Exception("Not yet implemented")
                                if enum_binding_cls.need_internal_runtime_cast_:
                                    writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
                                    writer.newline(" static void assertCorrectValue(JSONValue* value);\n")
                                    writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
                                    # Validator body: assert the string equals one
                                    # of the declared enum values.
                                    validator_writer = generate_context.validator_writer
                                    validator_writer.newline("void %s%s::assertCorrectValue(JSONValue* value)\n" % (helper.full_name_prefix_for_impl, enum_name))
                                    validator_writer.newline("{\n")
                                    validator_writer.newline(" WTF::String s;\n")
                                    validator_writer.newline(" bool cast_res = value->asString(&s);\n")
                                    validator_writer.newline(" ASSERT(cast_res);\n")
                                    if len(enum) > 0:
                                        condition_list = []
                                        for enum_item in enum:
                                            enum_pos = EnumConstants.add_constant(enum_item)
                                            condition_list.append("s == \"%s\"" % enum_item)
                                        validator_writer.newline(" ASSERT(%s);\n" % " || ".join(condition_list))
                                    validator_writer.newline("}\n")
                                    validator_writer.newline("\n\n")
                                writer.newline("}; // struct ")
                                writer.append(enum_name)
                                writer.append("\n\n")

                            @staticmethod
                            def register_use(forward_listener):
                                pass

                            @staticmethod
                            def get_generate_pass_id():
                                return TypeBuilderPass.MAIN
                        return CodeGenerator

                    @classmethod
                    def get_validator_call_text(cls):
                        return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"

                    @classmethod
                    def get_array_item_c_type_text(cls):
                        return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::Enum"

                    @staticmethod
                    def get_setter_value_expression_pattern():
                        return "TypeBuilder::getEnumConstantValue(%s)"

                    @staticmethod
                    def reduce_to_raw_type():
                        return RawTypes.String

                    @staticmethod
                    def get_type_model():
                        return TypeModel.Enum(helper.full_name_prefix_for_use + fixed_type_name.class_name)
                return EnumBinding
            else:
                if helper.is_ad_hoc:

                    class PlainString:
                        # Inline string property: no generated class needed.
                        @classmethod
                        def resolve_inner(cls, resolve_context):
                            pass

                        @staticmethod
                        def request_user_runtime_cast(request):
                            raise Exception("Unsupported")

                        @staticmethod
                        def request_internal_runtime_cast():
                            pass

                        @staticmethod
                        def get_code_generator():
                            return None

                        @classmethod
                        def get_validator_call_text(cls):
                            return RawTypes.String.get_raw_validator_call_text()

                        @staticmethod
                        def reduce_to_raw_type():
                            return RawTypes.String

                        @staticmethod
                        def get_type_model():
                            return TypeModel.String

                        @staticmethod
                        def get_setter_value_expression_pattern():
                            return None

                        @classmethod
                        def get_array_item_c_type_text(cls):
                            return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()
                    return PlainString
                else:

                    class TypedefString:
                        # Named string type: generated as 'typedef String <Name>;'.
                        @classmethod
                        def resolve_inner(cls, resolve_context):
                            pass

                        @staticmethod
                        def request_user_runtime_cast(request):
                            raise Exception("Unsupported")

                        @staticmethod
                        def request_internal_runtime_cast():
                            pass

                        @staticmethod
                        def get_code_generator():
                            class CodeGenerator:
                                @staticmethod
                                def generate_type_builder(writer, generate_context):
                                    helper.write_doc(writer)
                                    fixed_type_name.output_comment(writer)
                                    writer.newline("typedef String ")
                                    writer.append(fixed_type_name.class_name)
                                    writer.append(";\n\n")

                                @staticmethod
                                def register_use(forward_listener):
                                    pass

                                @staticmethod
                                def get_generate_pass_id():
                                    # Typedefs must be emitted before other classes
                                    # (no forward declarations for typedefs).
                                    return TypeBuilderPass.TYPEDEF
                            return CodeGenerator

                        @classmethod
                        def get_validator_call_text(cls):
                            return RawTypes.String.get_raw_validator_call_text()

                        @staticmethod
                        def reduce_to_raw_type():
                            return RawTypes.String

                        @staticmethod
                        def get_type_model():
                            return TypeModel.ValueType("%s%s" % (helper.full_name_prefix_for_use, fixed_type_name.class_name), True)

                        @staticmethod
                        def get_setter_value_expression_pattern():
                            return None

                        @classmethod
                        def get_array_item_c_type_text(cls):
                            return "%s%s" % (helper.full_name_prefix_for_use, fixed_type_name.class_name)
                    return TypedefString
        elif json_typable["type"] == "object":
            if "properties" in json_typable:

                class ClassBinding:
                    # Binding for an object type with a declared property list;
                    # generates a TypeBuilder class with a builder-style API.
                    resolve_data_ = None
                    need_user_runtime_cast_ = False
                    need_internal_runtime_cast_ = False

                    @classmethod
                    def resolve_inner(cls, resolve_context):
                        # Partition properties into required/optional and resolve
                        # each property's (possibly ad hoc) type binding.
                        if cls.resolve_data_:
                            return
                        properties = json_typable["properties"]
                        main = []
                        optional = []
                        ad_hoc_type_list = []
                        for prop in properties:
                            prop_name = prop["name"]
                            ad_hoc_type_context = cls.AdHocTypeContextImpl(prop_name, fixed_type_name.class_name, resolve_context, ad_hoc_type_list, helper.full_name_prefix_for_impl)
                            binding = resolve_param_type(prop, context_domain_name, ad_hoc_type_context)
                            code_generator = binding.get_code_generator()
                            if code_generator:
                                code_generator.register_use(resolve_context.forward_listener)

                            class PropertyData:
                                param_type_binding = binding
                                p = prop
                            if prop.get("optional"):
                                optional.append(PropertyData)
                            else:
                                main.append(PropertyData)

                        class ResolveData:
                            main_properties = main
                            optional_properties = optional
                            ad_hoc_types = ad_hoc_type_list
                        cls.resolve_data_ = ResolveData
                        for ad_hoc in ad_hoc_type_list:
                            ad_hoc.resolve_inner(resolve_context)

                    @classmethod
                    def request_user_runtime_cast(cls, request):
                        if not request:
                            return
                        cls.need_user_runtime_cast_ = True
                        request.acknowledge()
                        # User-visible casts imply internal validation too.
                        cls.request_internal_runtime_cast()

                    @classmethod
                    def request_internal_runtime_cast(cls):
                        if cls.need_internal_runtime_cast_:
                            return
                        cls.need_internal_runtime_cast_ = True
                        # Validation is recursive over all property types.
                        for p in cls.resolve_data_.main_properties:
                            p.param_type_binding.request_internal_runtime_cast()
                        for p in cls.resolve_data_.optional_properties:
                            p.param_type_binding.request_internal_runtime_cast()

                    @classmethod
                    def get_code_generator(class_binding_cls):

                        class CodeGenerator:
                            @classmethod
                            def generate_type_builder(cls, writer, generate_context):
                                resolve_data = class_binding_cls.resolve_data_
                                helper.write_doc(writer)
                                class_name = fixed_type_name.class_name
                                # Open types allow fields beyond the declared set.
                                is_open_type = (context_domain_name + "." + class_name) in TYPES_WITH_OPEN_FIELD_LIST_SET
                                fixed_type_name.output_comment(writer)
                                writer.newline("class ")
                                writer.append(class_name)
                                writer.append(" : public ")
                                if is_open_type:
                                    writer.append("JSONObject")
                                else:
                                    writer.append("JSONObjectBase")
                                writer.append(" {\n")
                                writer.newline("public:\n")
                                # Nested ad hoc types are emitted inside the class.
                                ad_hoc_type_writer = writer.insert_writer(" ")
                                for ad_hoc_type in resolve_data.ad_hoc_types:
                                    code_generator = ad_hoc_type.get_code_generator()
                                    if code_generator:
                                        code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)
                                writer.newline_multiline(
""" enum {
NoFieldsSet = 0,
""")
                                # Bitmask tracking which required fields were set
                                # by the builder before create() completes.
                                state_enum_items = []
                                if len(resolve_data.main_properties) > 0:
                                    pos = 0
                                    for prop_data in resolve_data.main_properties:
                                        item_name = Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]) + "Set"
                                        state_enum_items.append(item_name)
                                        writer.newline(" %s = 1 << %s,\n" % (item_name, pos))
                                        pos += 1
                                    all_fields_set_value = "(" + (" | ".join(state_enum_items)) + ")"
                                else:
                                    all_fields_set_value = "0"
                                writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_1
                                                         % (all_fields_set_value, class_name, class_name))
                                pos = 0
                                for prop_data in resolve_data.main_properties:
                                    prop_name = prop_data.p["name"]
                                    param_type_binding = prop_data.param_type_binding
                                    param_raw_type = param_type_binding.reduce_to_raw_type()
                                    writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_2
                                                             % (state_enum_items[pos],
                                                                Capitalizer.lower_camel_case_to_upper(prop_name),
                                                                param_type_binding.get_type_model().get_input_param_type_text(),
                                                                state_enum_items[pos], prop_name,
                                                                param_raw_type.get_setter_name(), prop_name,
                                                                format_setter_value_expression(param_type_binding, "value"),
                                                                state_enum_items[pos]))
                                    pos += 1
                                writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_3
                                                         % (class_name, class_name, class_name, class_name, class_name, class_name))
                                # Usage example comment in the generated header.
                                writer.newline(" /*\n")
                                writer.newline(" * Synthetic constructor:\n")
                                writer.newline(" * RefPtr<%s> result = %s::create()" % (class_name, class_name))
                                for prop_data in resolve_data.main_properties:
                                    writer.append_multiline("\n * .set%s(...)" % Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]))
                                writer.append_multiline(";\n */\n")
                                writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_4)
                                writer.newline(" typedef TypeBuilder::StructItemTraits ItemTraits;\n")
                                for prop_data in resolve_data.main_properties:
                                    prop_name = prop_data.p["name"]
                                    param_type_binding = prop_data.param_type_binding
                                    # Getters are only generated for plain value types.
                                    if isinstance(param_type_binding.get_type_model(), TypeModel.ValueType):
                                        writer.append_multiline("\n void %s" % prop_name)
                                        writer.append("(%s value)\n" % param_type_binding.get_type_model().get_command_return_pass_model().get_output_parameter_type())
                                        writer.newline(" {\n")
                                        writer.newline(" JSONObjectBase::get%s(\"%s\", value);\n"
                                                       % (param_type_binding.reduce_to_raw_type().get_setter_name(), prop_data.p["name"]))
                                        writer.newline(" }\n")
                                for prop_data in resolve_data.optional_properties:
                                    prop_name = prop_data.p["name"]
                                    param_type_binding = prop_data.param_type_binding
                                    setter_name = "set%s" % Capitalizer.lower_camel_case_to_upper(prop_name)
                                    writer.append_multiline("\n void %s" % setter_name)
                                    writer.append("(%s value)\n" % param_type_binding.get_type_model().get_input_param_type_text())
                                    writer.newline(" {\n")
                                    writer.newline(" this->set%s(\"%s\", %s);\n"
                                                   % (param_type_binding.reduce_to_raw_type().get_setter_name(), prop_data.p["name"],
                                                      format_setter_value_expression(param_type_binding, "value")))
                                    writer.newline(" }\n")
                                    # Re-export base-class overloads our setter would shadow.
                                    if setter_name in INSPECTOR_OBJECT_SETTER_NAMES:
                                        writer.newline(" using JSONObjectBase::%s;\n\n" % setter_name)
                                if class_binding_cls.need_user_runtime_cast_:
                                    writer.newline(" static PassRefPtr<%s> runtimeCast(PassRefPtr<JSONValue> value)\n" % class_name)
                                    writer.newline(" {\n")
                                    writer.newline(" RefPtr<JSONObject> object;\n")
                                    writer.newline(" bool castRes = value->asObject(&object);\n")
                                    writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
                                    writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
                                    writer.newline(" assertCorrectValue(object.get());\n")
                                    writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
                                    writer.newline(" static_assert(sizeof(%s) == sizeof(JSONObjectBase), \"%s should be the same size as JSONObjectBase\");\n" % (class_name, class_name))
                                    writer.newline(" return static_cast<%s*>(static_cast<JSONObjectBase*>(object.get()));\n" % class_name)
                                    writer.newline(" }\n")
                                    writer.append("\n")
                                if class_binding_cls.need_internal_runtime_cast_:
                                    writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
                                    writer.newline(" static void assertCorrectValue(JSONValue* value);\n")
                                    writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
                                    # Closed types additionally verify no extra fields.
                                    closed_field_set = (context_domain_name + "." + class_name) not in TYPES_WITH_OPEN_FIELD_LIST_SET
                                    validator_writer = generate_context.validator_writer
                                    validator_writer.newline("void %s%s::assertCorrectValue(JSONValue* value)\n" % (helper.full_name_prefix_for_impl, class_name))
                                    validator_writer.newline("{\n")
                                    validator_writer.newline(" RefPtr<JSONObject> object;\n")
                                    validator_writer.newline(" bool castRes = value->asObject(&object);\n")
                                    validator_writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
                                    for prop_data in resolve_data.main_properties:
                                        validator_writer.newline(" {\n")
                                        it_name = "%sPos" % prop_data.p["name"]
                                        validator_writer.newline(" JSONObject::iterator %s;\n" % it_name)
                                        validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
                                        validator_writer.newline(" ASSERT(%s != object->end());\n" % it_name)
                                        validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
                                        validator_writer.newline(" }\n")
                                    if closed_field_set:
                                        validator_writer.newline(" int foundPropertiesCount = %s;\n" % len(resolve_data.main_properties))
                                    for prop_data in resolve_data.optional_properties:
                                        validator_writer.newline(" {\n")
                                        it_name = "%sPos" % prop_data.p["name"]
                                        validator_writer.newline(" JSONObject::iterator %s;\n" % it_name)
                                        validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
                                        validator_writer.newline(" if (%s != object->end()) {\n" % it_name)
                                        validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
                                        if closed_field_set:
                                            validator_writer.newline(" ++foundPropertiesCount;\n")
                                        validator_writer.newline(" }\n")
                                        validator_writer.newline(" }\n")
                                    if closed_field_set:
                                        validator_writer.newline(" if (foundPropertiesCount != object->size()) {\n")
                                        validator_writer.newline(" FATAL(\"Unexpected properties in object: %s\\n\", object->toJSONString().ascii().data());\n")
                                        validator_writer.newline(" }\n")
                                    validator_writer.newline("}\n")
                                    validator_writer.newline("\n\n")
                                if is_open_type:
                                    # Open types expose property-name constants.
                                    cpp_writer = generate_context.cpp_writer
                                    writer.append("\n")
                                    writer.newline(" // Property names for type generated as open.\n")
                                    for prop_data in resolve_data.main_properties + resolve_data.optional_properties:
                                        prop_name = prop_data.p["name"]
                                        prop_field_name = Capitalizer.lower_camel_case_to_upper(prop_name)
                                        writer.newline(" static const char %s[];\n" % (prop_field_name))
                                        cpp_writer.newline("const char %s%s::%s[] = \"%s\";\n" % (helper.full_name_prefix_for_impl, class_name, prop_field_name, prop_name))
                                writer.newline("};\n\n")

                            @staticmethod
                            def generate_forward_declaration(writer):
                                class_name = fixed_type_name.class_name
                                writer.newline("class ")
                                writer.append(class_name)
                                writer.append(";\n")

                            @staticmethod
                            def register_use(forward_listener):
                                helper.add_to_forward_listener(forward_listener)

                            @staticmethod
                            def get_generate_pass_id():
                                return TypeBuilderPass.MAIN
                        return CodeGenerator

                    @staticmethod
                    def get_validator_call_text():
                        return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"

                    @classmethod
                    def get_array_item_c_type_text(cls):
                        return helper.full_name_prefix_for_use + fixed_type_name.class_name

                    @staticmethod
                    def get_setter_value_expression_pattern():
                        return None

                    @staticmethod
                    def reduce_to_raw_type():
                        return RawTypes.Object

                    @staticmethod
                    def get_type_model():
                        return TypeModel.RefPtrBased(helper.full_name_prefix_for_use + fixed_type_name.class_name)

                    class AdHocTypeContextImpl:
                        # Context handed to resolve_param_type for anonymous
                        # property types nested inside this class.
                        def __init__(self, property_name, class_name, resolve_context, ad_hoc_type_list, parent_full_name_prefix):
                            self.property_name = property_name
                            self.class_name = class_name
                            self.resolve_context = resolve_context
                            self.ad_hoc_type_list = ad_hoc_type_list
                            self.container_full_name_prefix = parent_full_name_prefix + class_name + "::"
                            self.container_relative_name_prefix = ""

                        def get_type_name_fix(self):
                            class NameFix:
                                # Ad hoc types are named after the property.
                                class_name = Capitalizer.lower_camel_case_to_upper(self.property_name)

                                @staticmethod
                                def output_comment(writer):
                                    writer.newline("// Named after property name '%s' while generating %s.\n" % (self.property_name, self.class_name))
                            return NameFix

                        def add_type(self, binding):
                            self.ad_hoc_type_list.append(binding)
                return ClassBinding
            else:

                class PlainObjectBinding:
                    # Object with no declared properties: passed as raw JSONObject.
                    @classmethod
                    def resolve_inner(cls, resolve_context):
                        pass

                    @staticmethod
                    def request_user_runtime_cast(request):
                        pass

                    @staticmethod
                    def request_internal_runtime_cast():
                        pass

                    @staticmethod
                    def get_code_generator():
                        pass

                    @staticmethod
                    def get_validator_call_text():
                        return "RuntimeCastHelper::assertType<JSONValue::TypeObject>"

                    @classmethod
                    def get_array_item_c_type_text(cls):
                        return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()

                    @staticmethod
                    def get_setter_value_expression_pattern():
                        return None

                    @staticmethod
                    def reduce_to_raw_type():
                        return RawTypes.Object

                    @staticmethod
                    def get_type_model():
                        return TypeModel.Object
                return PlainObjectBinding
        elif json_typable["type"] == "array":
            if "items" in json_typable:
                ad_hoc_types = []

                class AdHocTypeContext:
                    # Context for the array's "items" type; the container
                    # prefix is unused for arrays (items reuse the array name).
                    container_full_name_prefix = "<not yet defined>"
                    container_relative_name_prefix = ""

                    @staticmethod
                    def get_type_name_fix():
                        return fixed_type_name

                    @staticmethod
                    def add_type(binding):
                        ad_hoc_types.append(binding)
                item_binding = resolve_param_type(json_typable["items"], context_domain_name, AdHocTypeContext)

                class ArrayBinding:
                    resolve_data_ = None
                    need_internal_runtime_cast_ = False

                    @classmethod
                    def resolve_inner(cls, resolve_context):
                        if cls.resolve_data_:
                            return

                        class ResolveData:
                            item_type_binding = item_binding
                            ad_hoc_type_list = ad_hoc_types
                        cls.resolve_data_ = ResolveData
                        for t in ad_hoc_types:
                            t.resolve_inner(resolve_context)

                    @classmethod
                    def request_user_runtime_cast(cls, request):
                        raise Exception("Not implemented yet")

                    @classmethod
                    def request_internal_runtime_cast(cls):
                        if cls.need_internal_runtime_cast_:
                            return
                        cls.need_internal_runtime_cast_ = True
                        cls.resolve_data_.item_type_binding.request_internal_runtime_cast()

                    @classmethod
                    def get_code_generator(array_binding_cls):

                        class CodeGenerator:
                            @staticmethod
                            def generate_type_builder(writer, generate_context):
                                # Arrays themselves generate no class; only
                                # their ad hoc item types are emitted.
                                ad_hoc_type_writer = writer
                                resolve_data = array_binding_cls.resolve_data_
                                for ad_hoc_type in resolve_data.ad_hoc_type_list:
                                    code_generator = ad_hoc_type.get_code_generator()
                                    if code_generator:
                                        code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)

                            @staticmethod
                            def generate_forward_declaration(writer):
                                pass

                            @staticmethod
                            def register_use(forward_listener):
                                item_code_generator = item_binding.get_code_generator()
                                if item_code_generator:
                                    item_code_generator.register_use(forward_listener)

                            @staticmethod
                            def get_generate_pass_id():
                                return TypeBuilderPass.MAIN
                        return CodeGenerator

                    @classmethod
                    def get_validator_call_text(cls):
                        return cls.get_array_item_c_type_text() + "::assertCorrectValue"

                    @classmethod
                    def get_array_item_c_type_text(cls):
                        return replace_right_shift("TypeBuilder::Array<%s>" % cls.resolve_data_.item_type_binding.get_array_item_c_type_text())

                    @staticmethod
                    def get_setter_value_expression_pattern():
                        return None

                    @staticmethod
                    def reduce_to_raw_type():
                        return RawTypes.Array

                    @classmethod
                    def get_type_model(cls):
                        return TypeModel.RefPtrBased(cls.get_array_item_c_type_text())
                return ArrayBinding
            else:
                # Fall-through to raw type.
                pass
        # No specialized binding matched: wrap the raw JSON type directly.
        raw_type = RawTypes.get(json_typable["type"])
        return RawTypeBinding(raw_type)
class RawTypeBinding:
    """Binding that forwards straight to a RawTypes descriptor.

    Used for parameters whose type needs no generated TypeBuilder class.
    """

    def __init__(self, raw_type):
        self.raw_type_ = raw_type

    def resolve_inner(self, resolve_context):
        # Nothing nested to resolve for a raw type.
        pass

    def request_user_runtime_cast(self, request):
        raise Exception("Unsupported")

    def request_internal_runtime_cast(self):
        # Raw validators are always available; nothing to generate.
        pass

    def get_code_generator(self):
        # No class to emit.
        return None

    def get_validator_call_text(self):
        return self.raw_type_.get_raw_validator_call_text()

    def get_array_item_c_type_text(self):
        return self.raw_type_.get_array_item_raw_c_type_text()

    def get_setter_value_expression_pattern(self):
        return None

    def reduce_to_raw_type(self):
        return self.raw_type_

    def get_type_model(self):
        return self.raw_type_.get_raw_type_model()
class TypeData(object):
    """One named type from a domain's "types" section, with a lazily-built
    binding. Binding resolution is guarded against re-entrancy because
    resolving one named type may require resolving another."""

    def __init__(self, json_type, json_domain, domain_data):
        self.json_type_ = json_type
        self.json_domain_ = json_domain
        self.domain_data_ = domain_data

        if "type" not in json_type:
            raise Exception("Unknown type")
        json_type_name = json_type["type"]
        raw_type = RawTypes.get(json_type_name)
        self.raw_type_ = raw_type
        self.binding_being_resolved_ = False
        self.binding_ = None

    def get_raw_type(self):
        return self.raw_type_

    def get_binding(self):
        """Return (building on first use) the TypeBindings binding for this type."""
        if not self.binding_:
            if self.binding_being_resolved_:
                # BUG FIX: the original raised the undefined name 'Error'
                # (which would surface as a NameError); raise Exception, as
                # the rest of this file does.
                raise Exception("Type %s is already being resolved" % self.json_type_["type"])
            # Resolve only lazily, because resolving one named type may require resolving some other named type.
            self.binding_being_resolved_ = True
            try:
                self.binding_ = TypeBindings.create_named_type_declaration(self.json_type_, self.json_domain_["domain"], self)
            finally:
                self.binding_being_resolved_ = False
        return self.binding_

    def get_json_type(self):
        return self.json_type_

    def get_name(self):
        return self.json_type_["id"]

    def get_domain_name(self):
        return self.json_domain_["domain"]
class DomainData:
    """Per-domain accumulator of the named types declared in it."""

    def __init__(self, json_domain):
        self.json_domain = json_domain
        self.types_ = []

    def add_type(self, type_data):
        self.types_.append(type_data)

    def name(self):
        """The domain's name from the protocol JSON."""
        return self.json_domain["domain"]

    def types(self):
        """All TypeData objects registered so far (live list)."""
        return self.types_
class TypeMap:
    """Index of every named type in the protocol description.

    Provides lookup by (domain, type id) plus the per-domain type lists.
    """

    def __init__(self, api):
        self.map_ = {}      # domain name -> {type id -> TypeData}
        self.domains_ = []
        for json_domain in api["domains"]:
            domain_map = {}
            self.map_[json_domain["domain"]] = domain_map
            domain_data = DomainData(json_domain)
            self.domains_.append(domain_data)
            # Domains without a "types" section simply contribute no entries.
            for json_type in json_domain.get("types", []):
                type_data = TypeData(json_type, json_domain, domain_data)
                domain_map[json_type["id"]] = type_data
                domain_data.add_type(type_data)

    def domains(self):
        return self.domains_

    def get(self, domain_name, type_name):
        return self.map_[domain_name][type_name]
def resolve_param_type(json_parameter, scope_domain_name, ad_hoc_type_context):
    """Return the type binding for a parameter descriptor.

    A "$ref" resolves to the named type's binding; an inline "type"
    creates an ad hoc binding registered with the context; anything
    else is a protocol error.
    """
    if "$ref" in json_parameter:
        return get_ref_data(json_parameter["$ref"], scope_domain_name).get_binding()
    if "type" in json_parameter:
        binding = TypeBindings.create_ad_hoc_type_declaration(json_parameter, scope_domain_name, ad_hoc_type_context)
        ad_hoc_type_context.add_type(binding)
        return binding
    raise Exception("Unknown type")
def resolve_param_raw_type(json_parameter, scope_domain_name):
    """Return the RawTypes descriptor for a parameter descriptor.

    Mirrors resolve_param_type but stops at the raw wire type.
    """
    if "$ref" in json_parameter:
        return get_ref_data(json_parameter["$ref"], scope_domain_name).get_raw_type()
    if "type" in json_parameter:
        return RawTypes.get(json_parameter["type"])
    raise Exception("Unknown type")
def get_ref_data(json_ref, scope_domain_name):
    """Look up the TypeData named by a "$ref" string.

    The reference is either "Type" (resolved in the current domain) or
    "Domain.Type" (split on the first dot).
    """
    domain_name, dot, type_name = json_ref.partition(".")
    if not dot:
        # No dot: the whole string is a type name in the enclosing domain.
        domain_name = scope_domain_name
        type_name = json_ref
    return type_map.get(domain_name, type_name)
# Read and parse the protocol-description JSON that drives code generation.
# Use a context manager so the input file is closed even if parsing fails
# (the original left the handle open for the life of the process).
with open(input_json_filename, "r") as input_file:
    json_string = input_file.read()
json_api = json.loads(json_string)
class Templates:
    """Pre-compiled string.Template objects for every generated file/fragment.

    The raw template text lives in CodeGeneratorInspectorStrings; each whole
    output file is prefixed with the generated-file banner built below.
    """
    def get_this_script_path_(absolute_path):
        # Class-body helper, executed once at class-definition time (hence no
        # @staticmethod): returns the last few path components of this script
        # for the "File is generated by ..." banner.
        absolute_path = os.path.abspath(absolute_path)
        components = []
        def fill_recursive(path_part, depth):
            if depth <= 0 or path_part == '/':
                return
            fill_recursive(os.path.dirname(path_part), depth - 1)
            components.append(os.path.basename(path_part))
        # Typical path is /Source/WebCore/inspector/CodeGeneratorInspector.py
        # Let's take 4 components from the real path then.
        fill_recursive(absolute_path, 4)
        return "/".join(components)
    file_header_ = ("// File is generated by %s\n\n" % get_this_script_path_(sys.argv[0]) +
"""// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
""")
    # Fragment templates (inserted into the files below).
    frontend_domain_class = string.Template(CodeGeneratorInspectorStrings.frontend_domain_class)
    backend_method = string.Template(CodeGeneratorInspectorStrings.backend_method)
    frontend_method = string.Template(CodeGeneratorInspectorStrings.frontend_method)
    callback_main_methods = string.Template(CodeGeneratorInspectorStrings.callback_main_methods)
    callback_failure_method = string.Template(CodeGeneratorInspectorStrings.callback_failure_method)
    # Whole-file templates, each prefixed with the generated-file header.
    frontend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_h)
    backend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_h)
    backend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_cpp)
    frontend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_cpp)
    typebuilder_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_h)
    typebuilder_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_cpp)
    param_container_access_code = CodeGeneratorInspectorStrings.param_container_access_code
# Global index of all protocol types; consulted by get_ref_data() and Generator.
type_map = TypeMap(json_api)
class NeedRuntimeCastRequest:
    """One-shot acknowledgement flag for a requested runtimeCast.

    is_acknowledged() yields None until acknowledge() is called, True after;
    resolve_all_types() uses this to detect requests nobody honored.
    """

    def __init__(self):
        self.ack_ = None

    def acknowledge(self):
        """Mark the request as honored."""
        self.ack_ = True

    def is_acknowledged(self):
        """Return True once acknowledged, None otherwise."""
        return self.ack_
def resolve_all_types():
    """Resolve every type binding in type_map and honor runtimeCast requests.

    Returns the ForwardListener class, which ends up holding the set of types
    that still need forward declarations in the generated headers. Raises if
    any type listed in TYPES_WITH_RUNTIME_CAST_SET is missing or does not
    acknowledge its runtimeCast request.
    """
    runtime_cast_generate_requests = {}
    for type_name in TYPES_WITH_RUNTIME_CAST_SET:
        runtime_cast_generate_requests[type_name] = NeedRuntimeCastRequest()
    class ForwardListener:
        # Types that still need a forward declaration.
        type_data_set = set()
        # Types already fully declared (never forwarded again).
        already_declared_set = set()
        @classmethod
        def add_type_data(cls, type_data):
            if type_data not in cls.already_declared_set:
                cls.type_data_set.add(type_data)
    class ResolveContext:
        forward_listener = ForwardListener
    # First pass: resolve every binding, marking each type declared as we go.
    for domain_data in type_map.domains():
        for type_data in domain_data.types():
            # Do not generate forwards for this type any longer.
            ForwardListener.already_declared_set.add(type_data)
            binding = type_data.get_binding()
            binding.resolve_inner(ResolveContext)
    # Second pass: route runtimeCast requests to their bindings.
    for domain_data in type_map.domains():
        for type_data in domain_data.types():
            full_type_name = "%s.%s" % (type_data.get_domain_name(), type_data.get_name())
            request = runtime_cast_generate_requests.pop(full_type_name, None)
            binding = type_data.get_binding()
            if request:
                binding.request_user_runtime_cast(request)
            if request and not request.is_acknowledged():
                raise Exception("Failed to generate runtimeCast in " + full_type_name)
    # Anything left over never matched an existing type.
    for full_type_name in runtime_cast_generate_requests:
        raise Exception("Failed to generate runtimeCast. Type " + full_type_name + " not found")
    return ForwardListener
# Resolve all bindings once up front; keeps the set of types that need
# forward declarations for Generator.process_types().
global_forward_listener = resolve_all_types()
def get_annotated_type_text(raw_type, annotated_type):
    """Render raw_type, prefixed with a C comment naming the richer annotated
    type whenever the two differ (e.g. "/*Page::FrameId*/ String")."""
    if annotated_type == raw_type:
        return raw_type
    return "/*%s*/ %s" % (annotated_type, raw_type)
def format_setter_value_expression(param_type_binding, value_ref):
    """Apply the binding's setter-value pattern to value_ref, when it has one;
    otherwise pass value_ref through unchanged."""
    pattern = param_type_binding.get_setter_value_expression_pattern()
    return pattern % value_ref if pattern else value_ref
class Generator:
    """Walks json_api and appends C++ source fragments to the class-level
    lists below; the tail of this script joins them and substitutes them into
    Templates to produce the generated dispatcher/frontend/type-builder files.
    """
    # Output accumulators; each holds string fragments (some nested in lists,
    # flattened later by flatten_list()).
    frontend_class_field_lines = []
    frontend_domain_class_lines = []
    method_name_enum_list = []
    backend_method_declaration_list = []
    backend_method_implementation_list = []
    backend_method_name_declaration_list = []
    backend_method_name_declaration_index_list = []
    backend_method_name_declaration_current_index = 0
    method_handler_list = []
    frontend_method_list = []
    backend_virtual_setters_list = []
    backend_agent_interface_list = []
    backend_setters_list = []
    backend_constructor_init_list = []
    backend_field_list = []
    frontend_constructor_init_list = []
    type_builder_fragments = []
    type_builder_forwards = []
    validator_impl_list = []
    type_builder_impl_list = []
    @staticmethod
    def go():
        """Top-level pass: emit type builders, then per-domain events/commands."""
        Generator.process_types(type_map)
        for json_domain in json_api["domains"]:
            domain_name = json_domain["domain"]
            domain_name_lower = domain_name.lower()
            agent_field_name = DomainNameFixes.get_fixed_data(domain_name)
            frontend_method_declaration_lines = []
            if "events" in json_domain:
                for json_event in json_domain["events"]:
                    Generator.process_event(json_event, domain_name, frontend_method_declaration_lines)
            Generator.frontend_class_field_lines.append("    %s m_%s;\n" % (domain_name, domain_name_lower))
            if Generator.frontend_constructor_init_list:
                Generator.frontend_constructor_init_list.append("    , ")
            Generator.frontend_constructor_init_list.append("m_%s(inspectorFrontendChannel)\n" % domain_name_lower)
            Generator.frontend_domain_class_lines.append(Templates.frontend_domain_class.substitute(None,
                domainClassName=domain_name,
                domainFieldName=domain_name_lower,
                frontendDomainMethodDeclarations="".join(flatten_list(frontend_method_declaration_lines))))
            agent_interface_name = Capitalizer.lower_camel_case_to_upper(domain_name) + "CommandHandler"
            Generator.backend_agent_interface_list.append("    class CORE_EXPORT %s {\n" % agent_interface_name)
            Generator.backend_agent_interface_list.append("    public:\n")
            if "commands" in json_domain:
                for json_command in json_domain["commands"]:
                    Generator.process_command(json_command, domain_name, agent_field_name, agent_interface_name)
            Generator.backend_agent_interface_list.append("\n    protected:\n")
            Generator.backend_agent_interface_list.append("        virtual ~%s() { }\n" % agent_interface_name)
            Generator.backend_agent_interface_list.append("    };\n\n")
            Generator.backend_constructor_init_list.append("        , m_%s(0)" % agent_field_name)
            Generator.backend_virtual_setters_list.append("    virtual void registerAgent(%s* %s) = 0;" % (agent_interface_name, agent_field_name))
            Generator.backend_setters_list.append("    virtual void registerAgent(%s* %s) { ASSERT(!m_%s); m_%s = %s; }" % (agent_interface_name, agent_field_name, agent_field_name, agent_field_name, agent_field_name))
            Generator.backend_field_list.append("    %s* m_%s;" % (agent_interface_name, agent_field_name))
    @staticmethod
    def process_event(json_event, domain_name, frontend_method_declaration_lines):
        """Emit the frontend method that sends one protocol event."""
        # Skip events not marked for the renderer process.
        if (("handlers" in json_event) and (not ("renderer" in json_event["handlers"]))):
            return
        event_name = json_event["name"]
        ad_hoc_type_output = []
        frontend_method_declaration_lines.append(ad_hoc_type_output)
        ad_hoc_type_writer = Writer(ad_hoc_type_output, "        ")
        decl_parameter_list = []
        json_parameters = json_event.get("parameters")
        Generator.generate_send_method(json_parameters, event_name, domain_name, ad_hoc_type_writer,
                                       decl_parameter_list,
                                       Generator.EventMethodStructTemplate,
                                       Generator.frontend_method_list, Templates.frontend_method, {"eventName": event_name})
        frontend_method_declaration_lines.append(
            "        void %s(%s);\n" % (event_name, ", ".join(decl_parameter_list)))
    class EventMethodStructTemplate:
        # Strategy object for generate_send_method: events wrap their
        # parameters in a "params" JSON object.
        @staticmethod
        def append_prolog(line_list):
            line_list.append("    RefPtr<JSONObject> paramsObject = JSONObject::create();\n")
        @staticmethod
        def append_epilog(line_list):
            line_list.append("    jsonMessage->setObject(\"params\", paramsObject);\n")
        container_name = "paramsObject"
    @staticmethod
    def process_command(json_command, domain_name, agent_field_name, agent_interface_name):
        """Emit dispatcher plumbing plus the agent-interface entry for one command."""
        # Skip commands not marked for the renderer process.
        if (("handlers" in json_command) and (not ("renderer" in json_command["handlers"]))):
            return
        json_command_name = json_command["name"]
        cmd_enum_name = "k%s_%sCmd" % (domain_name, json_command["name"])
        Generator.method_name_enum_list.append("        %s," % cmd_enum_name)
        Generator.method_handler_list.append("        &InspectorBackendDispatcherImpl::%s_%s," % (domain_name, json_command_name))
        Generator.backend_method_declaration_list.append("    void %s_%s(int callId, JSONObject* requestMessageObject, JSONArray* protocolErrors);" % (domain_name, json_command_name))
        # Redirected commands get dispatcher code but no interface entry:
        # their fragments go into a throwaway local list.
        backend_agent_interface_list = [] if "redirect" in json_command else Generator.backend_agent_interface_list
        ad_hoc_type_output = []
        backend_agent_interface_list.append(ad_hoc_type_output)
        ad_hoc_type_writer = Writer(ad_hoc_type_output, "        ")
        backend_agent_interface_list.append("        virtual void %s(ErrorString*" % json_command_name)
        method_in_code = ""
        method_out_code = ""
        agent_call_param_list = ["&error"]
        agent_call_params_declaration_list = ["    ErrorString error;"]
        send_response_call_params_list = ["error"]
        request_message_param = ""
        normal_response_cook_text = ""
        error_type_binding = None
        if "error" in json_command:
            json_error = json_command["error"]
            error_type_binding = Generator.resolve_type_and_generate_ad_hoc(json_error, json_command_name + "Error", json_command_name, domain_name, ad_hoc_type_writer, agent_interface_name + "::")
            error_type_model = error_type_binding.get_type_model().get_optional()
            error_annotated_type = error_type_model.get_command_return_pass_model().get_output_parameter_type()
            agent_call_param_list.append("%serrorData" % error_type_model.get_command_return_pass_model().get_output_argument_prefix())
            backend_agent_interface_list.append(", %s errorData" % error_annotated_type)
            method_in_code += "    %s errorData;\n" % error_type_model.get_command_return_pass_model().get_return_var_type()
            send_response_call_params_list.append("errorData")
        if "parameters" in json_command:
            json_params = json_command["parameters"]
            request_message_param = " requestMessageObject"
            if json_params:
                method_in_code += Templates.param_container_access_code
            for json_parameter in json_params:
                json_param_name = json_parameter["name"]
                param_raw_type = resolve_param_raw_type(json_parameter, domain_name)
                getter_name = param_raw_type.get_getter_name()
                optional = json_parameter.get("optional")
                non_optional_type_model = param_raw_type.get_raw_type_model()
                if optional:
                    code = ("    bool %s_valueFound = false;\n"
                            "    %s in_%s = get%s(paramsContainerPtr, \"%s\", &%s_valueFound, protocolErrors);\n" %
                            (json_param_name, non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name, json_param_name))
                    param = "%s_valueFound ? &in_%s : 0" % (json_param_name, json_param_name)
                    # FIXME: pass optional refptr-values as PassRefPtr
                    formal_param_type_pattern = "const %s*"
                else:
                    code = ("    %s in_%s = get%s(paramsContainerPtr, \"%s\", 0, protocolErrors);\n" %
                            (non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name))
                    param = "in_%s" % json_param_name
                    # FIXME: pass not-optional refptr-values as NonNullPassRefPtr
                    if param_raw_type.is_heavy_value():
                        formal_param_type_pattern = "const %s&"
                    else:
                        formal_param_type_pattern = "%s"
                method_in_code += code
                agent_call_param_list.append(param)
                backend_agent_interface_list.append(", %s in_%s" % (formal_param_type_pattern % non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name))
        if json_command.get("async") == True:
            callback_name = Capitalizer.lower_camel_case_to_upper(json_command_name) + "Callback"
            callback_output = []
            callback_writer = Writer(callback_output, ad_hoc_type_writer.get_indent())
            decl_parameter_list = []
            Generator.generate_send_method(json_command.get("returns"), json_command_name, domain_name, ad_hoc_type_writer,
                                           decl_parameter_list,
                                           Generator.CallbackMethodStructTemplate,
                                           Generator.backend_method_implementation_list, Templates.callback_main_methods,
                                           {"callbackName": callback_name, "agentName": agent_interface_name})
            callback_writer.newline("class " + callback_name + " : public CallbackBase {\n")
            callback_writer.newline("public:\n")
            callback_writer.newline("    " + callback_name + "(PassRefPtrWillBeRawPtr<InspectorBackendDispatcherImpl>, int id);\n")
            callback_writer.newline("    CORE_EXPORT void sendSuccess(" + ", ".join(decl_parameter_list) + ");\n")
            error_part_writer = callback_writer.insert_writer("")
            callback_writer.newline("};\n")
            if error_type_binding:
                annotated_type = error_type_model.get_input_param_type_text()
                error_part_writer.newline("    void sendFailure(const ErrorString&, %s);\n" % annotated_type)
                error_part_writer.newline("    using CallbackBase::sendFailure;\n")
                assigment_value = error_type_model.get_event_setter_expression_pattern() % "errorData"
                assigment_value = error_type_binding.reduce_to_raw_type().get_constructor_pattern() % assigment_value
                Generator.backend_method_implementation_list.append(Templates.callback_failure_method.substitute(None,
                    agentName=agent_interface_name,
                    callbackName=callback_name,
                    parameter=annotated_type + " errorData",
                    argument=assigment_value))
            ad_hoc_type_output.append(callback_output)
            method_out_code += "    RefPtrWillBeRawPtr<" + agent_interface_name + "::" + callback_name + "> callback = adoptRefWillBeNoop(new " + agent_interface_name + "::" + callback_name + "(this, callId));\n"
            agent_call_param_list.append("callback")
            normal_response_cook_text += "    if (!error.length()) \n"
            normal_response_cook_text += "        return;\n"
            normal_response_cook_text += "    callback->disable();\n"
            backend_agent_interface_list.append(", PassRefPtrWillBeRawPtr<%s> callback" % callback_name)
        else:
            if "returns" in json_command:
                method_out_code += "\n"
                agent_call_params_declaration_list.append("    RefPtr<JSONObject> result = JSONObject::create();")
                send_response_call_params_list.append("result")
                response_cook_list = []
                for json_return in json_command["returns"]:
                    json_return_name = json_return["name"]
                    optional = bool(json_return.get("optional"))
                    return_type_binding = Generator.resolve_param_type_and_generate_ad_hoc(json_return, json_command_name, domain_name, ad_hoc_type_writer, agent_interface_name + "::")
                    raw_type = return_type_binding.reduce_to_raw_type()
                    setter_type = raw_type.get_setter_name()
                    type_model = return_type_binding.get_type_model()
                    if optional:
                        type_model = type_model.get_optional()
                    code = "    %s out_%s;\n" % (type_model.get_command_return_pass_model().get_return_var_type(), json_return_name)
                    param = "%sout_%s" % (type_model.get_command_return_pass_model().get_output_argument_prefix(), json_return_name)
                    var_name = "out_%s" % json_return_name
                    setter_argument = type_model.get_command_return_pass_model().get_output_to_raw_expression() % var_name
                    if return_type_binding.get_setter_value_expression_pattern():
                        setter_argument = return_type_binding.get_setter_value_expression_pattern() % setter_argument
                    cook = "    result->set%s(\"%s\", %s);\n" % (setter_type, json_return_name,
                                                                 setter_argument)
                    set_condition_pattern = type_model.get_command_return_pass_model().get_set_return_condition()
                    if set_condition_pattern:
                        cook = ("    if (%s)\n    " % (set_condition_pattern % var_name)) + cook
                    annotated_type = type_model.get_command_return_pass_model().get_output_parameter_type()
                    param_name = var_name
                    if optional:
                        param_name = "opt_" + param_name
                    backend_agent_interface_list.append(", %s %s" % (annotated_type, param_name))
                    response_cook_list.append(cook)
                    method_out_code += code
                    agent_call_param_list.append(param)
                normal_response_cook_text += "".join(response_cook_list)
                if len(normal_response_cook_text) != 0:
                    normal_response_cook_text = "    if (!error.length()) {\n" + normal_response_cook_text + "    }"
        # Redirect to another agent's implementation.
        agent_field = "m_" + agent_field_name
        if "redirect" in json_command:
            agent_field = "m_" + DomainNameFixes.get_fixed_data(json_command.get("redirect"))
        Generator.backend_method_implementation_list.append(Templates.backend_method.substitute(None,
            domainName=domain_name, methodName=json_command_name,
            agentField=agent_field,
            methodCode="".join([method_in_code, method_out_code]),
            agentCallParamsDeclaration="\n".join(agent_call_params_declaration_list),
            agentCallParams=", ".join(agent_call_param_list),
            requestMessageObject=request_message_param,
            responseCook=normal_response_cook_text,
            sendResponseCallParams=", ".join(send_response_call_params_list),
            commandNameIndex=cmd_enum_name))
        declaration_command_name = "%s.%s\\0" % (domain_name, json_command_name)
        Generator.backend_method_name_declaration_list.append("    \"%s\"" % declaration_command_name)
        assert Generator.backend_method_name_declaration_current_index < 2 ** 16, "Number too large for unsigned short."
        Generator.backend_method_name_declaration_index_list.append("    %d," % Generator.backend_method_name_declaration_current_index)
        # Advance by emitted byte count: the escaped "\\0" in the Python string
        # is two characters but one byte in the generated C string, hence -1.
        Generator.backend_method_name_declaration_current_index += len(declaration_command_name) - 1
        backend_agent_interface_list.append(") = 0;\n")
    class CallbackMethodStructTemplate:
        # Strategy object for generate_send_method: async-callback responses
        # write values directly into the top-level "jsonMessage" object.
        @staticmethod
        def append_prolog(line_list):
            pass
        @staticmethod
        def append_epilog(line_list):
            pass
        container_name = "jsonMessage"
    # Generates common code for event sending and callback response data sending.
    @staticmethod
    def generate_send_method(parameters, event_name, domain_name, ad_hoc_type_writer, decl_parameter_list,
                             method_struct_template,
                             generator_method_list, method_template, template_params):
        """Build one send-method body from a parameter list and append the
        filled-in method_template to generator_method_list; parameter
        declarations accumulate in decl_parameter_list."""
        method_line_list = []
        if parameters:
            method_struct_template.append_prolog(method_line_list)
            for json_parameter in parameters:
                parameter_name = json_parameter["name"]
                param_type_binding = Generator.resolve_param_type_and_generate_ad_hoc(json_parameter, event_name, domain_name, ad_hoc_type_writer, "")
                raw_type = param_type_binding.reduce_to_raw_type()
                raw_type_binding = RawTypeBinding(raw_type)
                optional = bool(json_parameter.get("optional"))
                setter_type = raw_type.get_setter_name()
                type_model = param_type_binding.get_type_model()
                raw_type_model = raw_type_binding.get_type_model()
                if optional:
                    type_model = type_model.get_optional()
                    raw_type_model = raw_type_model.get_optional()
                annotated_type = type_model.get_input_param_type_text()
                mode_type_binding = param_type_binding
                decl_parameter_list.append("%s %s" % (annotated_type, parameter_name))
                setter_argument = raw_type_model.get_event_setter_expression_pattern() % parameter_name
                if mode_type_binding.get_setter_value_expression_pattern():
                    setter_argument = mode_type_binding.get_setter_value_expression_pattern() % setter_argument
                setter_code = "    %s->set%s(\"%s\", %s);\n" % (method_struct_template.container_name, setter_type, parameter_name, setter_argument)
                if optional:
                    setter_code = ("    if (%s)\n    " % parameter_name) + setter_code
                method_line_list.append(setter_code)
            method_struct_template.append_epilog(method_line_list)
        generator_method_list.append(method_template.substitute(None,
            domainName=domain_name,
            parameters=", ".join(decl_parameter_list),
            code="".join(method_line_list), **template_params))
    @classmethod
    def resolve_param_type_and_generate_ad_hoc(cls, json_param, method_name, domain_name, ad_hoc_type_writer, container_relative_name_prefix_param):
        """Convenience wrapper: resolve a parameter's type, naming any ad hoc
        type after the parameter itself."""
        param_name = json_param["name"]
        return cls.resolve_type_and_generate_ad_hoc(json_param, param_name, method_name, domain_name, ad_hoc_type_writer, container_relative_name_prefix_param)
    @staticmethod
    def resolve_type_and_generate_ad_hoc(typable_element, element_name, method_name, domain_name, ad_hoc_type_writer, container_relative_name_prefix_param):
        """Resolve a type reference; for inline ("ad hoc") types, also emit
        their builder classes into ad_hoc_type_writer."""
        ad_hoc_type_list = []
        class AdHocTypeContext:
            container_full_name_prefix = "<not yet defined>"
            container_relative_name_prefix = container_relative_name_prefix_param
            @staticmethod
            def get_type_name_fix():
                class NameFix:
                    class_name = Capitalizer.lower_camel_case_to_upper(element_name)
                    @staticmethod
                    def output_comment(writer):
                        writer.newline("// Named after parameter '%s' while generating command/event %s.\n" % (element_name, method_name))
                return NameFix
            @staticmethod
            def add_type(binding):
                ad_hoc_type_list.append(binding)
        type_binding = resolve_param_type(typable_element, domain_name, AdHocTypeContext)
        class InterfaceForwardListener:
            @staticmethod
            def add_type_data(type_data):
                # Ad hoc types never need forward declarations.
                pass
        class InterfaceResolveContext:
            forward_listener = InterfaceForwardListener
        # NOTE: the loop variable "type" shadows the builtin; kept as-is.
        for type in ad_hoc_type_list:
            type.resolve_inner(InterfaceResolveContext)
        class InterfaceGenerateContext:
            validator_writer = "not supported in InterfaceGenerateContext"
            cpp_writer = validator_writer
        for type in ad_hoc_type_list:
            generator = type.get_code_generator()
            if generator:
                generator.generate_type_builder(ad_hoc_type_writer, InterfaceGenerateContext)
        return type_binding
    @staticmethod
    def process_types(type_map):
        """Generate type-builder code for every named type, grouped inside a
        per-domain C++ namespace, in passes (MAIN, forwards, TYPEDEF)."""
        output = Generator.type_builder_fragments
        class GenerateContext:
            validator_writer = Writer(Generator.validator_impl_list, "")
            cpp_writer = Writer(Generator.type_builder_impl_list, "")
        def generate_all_domains_code(out, type_data_callback):
            writer = Writer(out, "")
            for domain_data in type_map.domains():
                namespace_declared = []
                def namespace_lazy_generator():
                    # Open the namespace only if some type actually emits code.
                    if not namespace_declared:
                        writer.newline("namespace ")
                        writer.append(domain_data.name())
                        writer.append(" {\n")
                        # What is a better way to change value from outer scope?
                        namespace_declared.append(True)
                    return writer
                for type_data in domain_data.types():
                    type_data_callback(type_data, namespace_lazy_generator)
                if namespace_declared:
                    writer.append("} // ")
                    writer.append(domain_data.name())
                    writer.append("\n\n")
        def create_type_builder_caller(generate_pass_id):
            def call_type_builder(type_data, writer_getter):
                code_generator = type_data.get_binding().get_code_generator()
                if code_generator and generate_pass_id == code_generator.get_generate_pass_id():
                    writer = writer_getter()
                    code_generator.generate_type_builder(writer, GenerateContext)
            return call_type_builder
        generate_all_domains_code(output, create_type_builder_caller(TypeBuilderPass.MAIN))
        Generator.type_builder_forwards.append("// Forward declarations.\n")
        def generate_forward_callback(type_data, writer_getter):
            if type_data in global_forward_listener.type_data_set:
                binding = type_data.get_binding()
                binding.get_code_generator().generate_forward_declaration(writer_getter())
        generate_all_domains_code(Generator.type_builder_forwards, generate_forward_callback)
        Generator.type_builder_forwards.append("// End of forward declarations.\n\n")
        Generator.type_builder_forwards.append("// Typedefs.\n")
        generate_all_domains_code(Generator.type_builder_forwards, create_type_builder_caller(TypeBuilderPass.TYPEDEF))
        Generator.type_builder_forwards.append("// End of typedefs.\n\n")
def flatten_list(input):
    """Return a flat list of all non-list items in *input*, expanding nested
    lists depth-first so left-to-right order is preserved.

    Implemented iteratively with an explicit stack so deeply nested input
    cannot hit Python's recursion limit (the previous version recursed once
    per nesting level). The parameter keeps its historical name ("input",
    shadowing the builtin) so keyword callers are unaffected.
    """
    res = []
    stack = [iter(input)]
    while stack:
        try:
            item = next(stack[-1])
        except StopIteration:
            stack.pop()
            continue
        if isinstance(item, list):
            # Descend into the nested list before resuming the current one.
            stack.append(iter(item))
        else:
            res.append(item)
    return res
def output_file(file_name):
    """Open *file_name* for writing; the caller is responsible for closing it."""
    return open(file_name, "w")
# Run the generator, then fill each whole-file template with the accumulated
# fragments and write the six output files.
Generator.go()
backend_h_file = output_file(output_dirname + "/InspectorBackendDispatcher.h")
backend_cpp_file = output_file(output_dirname + "/InspectorBackendDispatcher.cpp")
frontend_h_file = output_file(output_dirname + "/InspectorFrontend.h")
frontend_cpp_file = output_file(output_dirname + "/InspectorFrontend.cpp")
typebuilder_h_file = output_file(output_dirname + "/InspectorTypeBuilder.h")
typebuilder_cpp_file = output_file(output_dirname + "/InspectorTypeBuilder.cpp")
backend_h_file.write(Templates.backend_h.substitute(None,
    virtualSetters="\n".join(Generator.backend_virtual_setters_list),
    agentInterfaces="".join(flatten_list(Generator.backend_agent_interface_list)),
    methodNamesEnumContent="\n".join(Generator.method_name_enum_list)))
backend_cpp_file.write(Templates.backend_cpp.substitute(None,
    constructorInit="\n".join(Generator.backend_constructor_init_list),
    setters="\n".join(Generator.backend_setters_list),
    fieldDeclarations="\n".join(Generator.backend_field_list),
    methodNameDeclarations="\n".join(Generator.backend_method_name_declaration_list),
    methodNameDeclarationsIndex="\n".join(Generator.backend_method_name_declaration_index_list),
    methods="\n".join(Generator.backend_method_implementation_list),
    methodDeclarations="\n".join(Generator.backend_method_declaration_list),
    messageHandlers="\n".join(Generator.method_handler_list)))
frontend_h_file.write(Templates.frontend_h.substitute(None,
    fieldDeclarations="".join(Generator.frontend_class_field_lines),
    domainClassList="".join(Generator.frontend_domain_class_lines)))
frontend_cpp_file.write(Templates.frontend_cpp.substitute(None,
    constructorInit="".join(Generator.frontend_constructor_init_list),
    methods="\n".join(Generator.frontend_method_list)))
typebuilder_h_file.write(Templates.typebuilder_h.substitute(None,
    typeBuilders="".join(flatten_list(Generator.type_builder_fragments)),
    forwards="".join(Generator.type_builder_forwards),
    validatorIfdefName=VALIDATOR_IFDEF_NAME))
typebuilder_cpp_file.write(Templates.typebuilder_cpp.substitute(None,
    enumConstantValues=EnumConstants.get_enum_constant_code(),
    implCode="".join(flatten_list(Generator.type_builder_impl_list)),
    validatorCode="".join(flatten_list(Generator.validator_impl_list)),
    validatorIfdefName=VALIDATOR_IFDEF_NAME))
# Flush everything to disk.
backend_h_file.close()
backend_cpp_file.close()
frontend_h_file.close()
frontend_cpp_file.close()
typebuilder_h_file.close()
typebuilder_cpp_file.close()
| {
"content_hash": "2b2d5a607ce7d8ced04e06ddedcb4d33",
"timestamp": "",
"source": "github",
"line_count": 2127,
"max_line_length": 218,
"avg_line_length": 41.93464974141984,
"alnum_prop": 0.5368462357755479,
"repo_name": "Bysmyyr/chromium-crosswalk",
"id": "446a2ea94ccedb9295e51bbf9a1295c6e52373ee",
"size": "89195",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Source/core/inspector/CodeGeneratorInspector.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Implementation of platform-specific functionality.
For each function or class described in `tornado.platform.interface`,
the appropriate platform-specific implementation exists in this module.
Most code that needs access to this functionality should do e.g.::
from tornado.platform.auto import set_close_exec
"""
from __future__ import absolute_import, division, print_function, with_statement
import os
# Select the platform implementation: under App Engine set_close_exec is a
# no-op; Windows and POSIX each get their own Waker/set_close_exec.
if 'APPENGINE_RUNTIME' in os.environ:
    from tornado.platform.common import Waker
    def set_close_exec(fd):
        # Intentionally a no-op in the App Engine runtime.
        pass
elif os.name == 'nt':
    from tornado.platform.common import Waker
    from tornado.platform.windows import set_close_exec
else:
    from tornado.platform.posix import set_close_exec, Waker
try:
    # monotime monkey-patches the time module to have a monotonic function
    # in versions of python before 3.3.
    import monotime
    # Silence pyflakes warning about this unused import
    monotime
except ImportError:
    pass
try:
    # monotonic can provide a monotonic function in versions of python before
    # 3.3, too.
    from monotonic import monotonic as monotonic_time
except ImportError:
    try:
        from time import monotonic as monotonic_time
    except ImportError:
        # No monotonic clock is available on this interpreter.
        monotonic_time = None
__all__ = ['Waker', 'set_close_exec', 'monotonic_time']
| {
"content_hash": "27abde48429492ff1bf0410c541f0003",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 80,
"avg_line_length": 30.651162790697676,
"alnum_prop": 0.7344461305007587,
"repo_name": "suninsky/ReceiptOCR",
"id": "449b634b9bd53e030da07c97c1407c82dfcb3d91",
"size": "1915",
"binary": false,
"copies": "23",
"ref": "refs/heads/master",
"path": "Python/server/lib/python2.7/site-packages/tornado/platform/auto.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5936"
},
{
"name": "C++",
"bytes": "9350"
},
{
"name": "CSS",
"bytes": "6270"
},
{
"name": "HTML",
"bytes": "5413"
},
{
"name": "JavaScript",
"bytes": "6264"
},
{
"name": "Python",
"bytes": "6014667"
},
{
"name": "Shell",
"bytes": "3767"
}
],
"symlink_target": ""
} |
"""The xbox integration."""
import asyncio
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Dict, Optional
import voluptuous as vol
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.catalog.const import SYSTEM_PFN_ID_MAP
from xbox.webapi.api.provider.catalog.models import AlternateIdType, Product
from xbox.webapi.api.provider.people.models import (
PeopleResponse,
Person,
PresenceDetail,
)
from xbox.webapi.api.provider.smartglass.models import (
SmartglassConsoleList,
SmartglassConsoleStatus,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import (
aiohttp_client,
config_entry_oauth2_flow,
config_validation as cv,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import api, config_flow
from .const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
_LOGGER = logging.getLogger(__name__)
# YAML configuration schema: the integration takes an OAuth client id/secret
# under the "xbox:" key, used to register its OAuth2 implementation.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_CLIENT_ID): cv.string,
                vol.Required(CONF_CLIENT_SECRET): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
# Entity platforms forwarded for each config entry.
PLATFORMS = ["media_player", "remote", "binary_sensor", "sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the xbox component.

    With YAML configuration present, registers a LocalOAuth2Implementation
    built from the configured client id/secret; config entries themselves are
    set up later in async_setup_entry.
    """
    hass.data[DOMAIN] = {}
    # No YAML section: nothing to register here.
    if DOMAIN not in config:
        return True
    config_flow.OAuth2FlowHandler.async_register_implementation(
        hass,
        config_entry_oauth2_flow.LocalOAuth2Implementation(
            hass,
            DOMAIN,
            config[DOMAIN][CONF_CLIENT_ID],
            config[DOMAIN][CONF_CLIENT_SECRET],
            OAUTH2_AUTHORIZE,
            OAUTH2_TOKEN,
        ),
    )
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up xbox from a config entry.

    Builds an authenticated XboxLiveClient, discovers the user's consoles,
    starts the update coordinator, stores everything in hass.data, and
    forwards setup to the entity platforms.
    """
    implementation = (
        await config_entry_oauth2_flow.async_get_config_entry_implementation(
            hass, entry
        )
    )
    session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
    auth = api.AsyncConfigEntryAuth(
        aiohttp_client.async_get_clientsession(hass), session
    )

    client = XboxLiveClient(auth)
    consoles: SmartglassConsoleList = await client.smartglass.get_console_list()
    _LOGGER.debug(
        "Found %d consoles: %s",
        len(consoles.result),
        consoles.dict(),
    )

    coordinator = XboxUpdateCoordinator(hass, client, consoles)
    # Do a first refresh so platforms start with data.
    await coordinator.async_refresh()

    hass.data[DOMAIN][entry.entry_id] = {
        # Reuse the client created above instead of constructing a second
        # XboxLiveClient with the same auth (the original built a duplicate,
        # leaving the coordinator and platforms with different instances).
        "client": client,
        "consoles": consoles,
        "coordinator": coordinator,
    }

    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )

    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry.

    Unloads every forwarded platform; stored data is dropped only when all
    platforms unloaded cleanly.
    """
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, platform)
                for platform in PLATFORMS
            ]
        )
    )
    if unload_ok:
        # Unsub from coordinator updates
        # NOTE(review): "sensor_unsub"/"binary_sensor_unsub" are not stored by
        # async_setup_entry in this file; presumably the sensor/binary_sensor
        # platforms add them during their setup — confirm, otherwise this
        # raises KeyError.
        hass.data[DOMAIN][entry.entry_id]["sensor_unsub"]()
        hass.data[DOMAIN][entry.entry_id]["binary_sensor_unsub"]()
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
@dataclass
class ConsoleData:
    """Xbox console status data."""
    # Most recent Smartglass status snapshot for this console.
    status: SmartglassConsoleStatus
    # Catalog entry for the app currently in focus; None when not resolved.
    app_details: Optional[Product]
@dataclass
class PresenceData:
    """Xbox user presence data.

    Populated by _build_presence_data from an xbox-webapi Person record.
    """
    xuid: str
    gamertag: str
    display_pic: str
    online: bool
    status: str
    in_party: bool
    in_game: bool
    in_multiplayer: bool
    gamer_score: str
    gold_tenure: Optional[str]
    account_tier: str
@dataclass
class XboxData:
    """Xbox dataclass for update coordinator."""
    # Console status keyed by console id.
    consoles: Dict[str, ConsoleData]
    # Presence info keyed by xuid.
    presence: Dict[str, PresenceData]
class XboxUpdateCoordinator(DataUpdateCoordinator):
    """Store Xbox Console Status.

    Polls console status and user presence every 10 seconds and exposes the
    combined result as an XboxData snapshot.
    """

    def __init__(
        self,
        hass: HomeAssistantType,
        client: XboxLiveClient,
        consoles: SmartglassConsoleList,
    ) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=timedelta(seconds=10),
        )
        # Start with empty mappings. Both XboxData fields are Dict[str, ...],
        # so presence is seeded with {} (the original passed a list here,
        # contradicting the declared type).
        self.data: XboxData = XboxData({}, {})
        self.client: XboxLiveClient = client
        self.consoles: SmartglassConsoleList = consoles

    async def _async_update_data(self) -> XboxData:
        """Fetch the latest console status."""
        # Update Console Status
        new_console_data: Dict[str, ConsoleData] = {}
        for console in self.consoles.result:
            current_state: Optional[ConsoleData] = self.data.consoles.get(console.id)
            status: SmartglassConsoleStatus = (
                await self.client.smartglass.get_console_status(console.id)
            )
            _LOGGER.debug(
                "%s status: %s",
                console.name,
                status.dict(),
            )
            # Setup focus app: keep the previously-resolved product unless the
            # focused app changed, in which case look it up in the catalog.
            app_details: Optional[Product] = None
            if current_state is not None:
                app_details = current_state.app_details
            if status.focus_app_aumid:
                if (
                    not current_state
                    or status.focus_app_aumid != current_state.status.focus_app_aumid
                ):
                    app_id = status.focus_app_aumid.split("!")[0]
                    id_type = AlternateIdType.PACKAGE_FAMILY_NAME
                    # System apps are mapped to legacy product ids first.
                    if app_id in SYSTEM_PFN_ID_MAP:
                        id_type = AlternateIdType.LEGACY_XBOX_PRODUCT_ID
                        app_id = SYSTEM_PFN_ID_MAP[app_id][id_type]
                    catalog_result = (
                        await self.client.catalog.get_product_from_alternate_id(
                            app_id, id_type
                        )
                    )
                    if catalog_result and catalog_result.products:
                        app_details = catalog_result.products[0]
            else:
                # Nothing in focus: clear any stale product info.
                app_details = None
            new_console_data[console.id] = ConsoleData(
                status=status, app_details=app_details
            )
        # Update user presence: own account plus favorited friends.
        presence_data = {}
        batch: PeopleResponse = await self.client.people.get_friends_own_batch(
            [self.client.xuid]
        )
        own_presence: Person = batch.people[0]
        presence_data[own_presence.xuid] = _build_presence_data(own_presence)
        friends: PeopleResponse = await self.client.people.get_friends_own()
        for friend in friends.people:
            if not friend.is_favorite:
                continue
            presence_data[friend.xuid] = _build_presence_data(friend)
        return XboxData(new_console_data, presence_data)
def _build_presence_data(person: Person) -> PresenceData:
    """Build presence data from a person."""
    active_app: Optional[PresenceDetail] = None
    try:
        # The primary presence detail, when present, describes the current activity.
        active_app = next(
            presence for presence in person.presence_details if presence.is_primary
        )
    except StopIteration:
        pass

    return PresenceData(
        xuid=person.xuid,
        gamertag=person.gamertag,
        display_pic=person.display_pic_raw,
        online=person.presence_state == "Online",
        status=person.presence_text,
        in_party=person.multiplayer_summary.in_party > 0,
        # Fix: `active_app and active_app.is_game` evaluated to None (not
        # False) when there was no primary presence detail, violating the
        # `in_game: bool` declaration on PresenceData.
        in_game=active_app is not None and active_app.is_game,
        in_multiplayer=person.multiplayer_summary.in_multiplayer_session,
        gamer_score=person.gamer_score,
        gold_tenure=person.detail.tenure,
        account_tier=person.detail.account_tier,
    )
| {
"content_hash": "201115e9a0d7ec2579e531cd4f406ea8",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 85,
"avg_line_length": 30.21189591078067,
"alnum_prop": 0.6151101267380337,
"repo_name": "partofthething/home-assistant",
"id": "1d921f5fd182183d64c2cfee72bd45c511f2d195",
"size": "8127",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/xbox/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
# NOTE: Python 2 script (print statements; `await` used as a method name,
# which is a reserved keyword from Python 3.7 on).
import fallback, sys

# Timeouts passed to the fallback client — presumably seconds; confirm
# against the `fallback` client API.
fallback_timeout = 1.0
total_timeout = 3.0

cli = fallback.Client()
# Request the URLs given on the command line.
content = cli.get(sys.argv[1:], fallback_timeout)
# Block until the request finishes or the overall deadline expires.
content.await(total_timeout)
if content.isFinished():
    print content.body
elif content.getError():
    print >> sys.stderr, "Error:", content.getError()
else:
    print >> sys.stderr, "timed out"
| {
"content_hash": "d56f1eb7e794e1f465e3da0afc1cd496",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 53,
"avg_line_length": 21.3125,
"alnum_prop": 0.7008797653958945,
"repo_name": "bozzzzo/quark",
"id": "ebe9dd3adc9f1ddf94882a344fef162948cd155e",
"size": "341",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/fallback/get.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "496221"
},
{
"name": "JavaScript",
"bytes": "466971"
},
{
"name": "Python",
"bytes": "590150"
},
{
"name": "Shell",
"bytes": "1328"
}
],
"symlink_target": ""
} |
from django.db.models import Q
from django.contrib.auth import get_user_model
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.views import APIView
from rest_framework.filters import (
SearchFilter,
OrderingFilter,
)
from rest_framework.mixins import DestroyModelMixin, UpdateModelMixin
from rest_framework.generics import (
CreateAPIView,
DestroyAPIView,
ListAPIView,
UpdateAPIView,
RetrieveAPIView,
RetrieveUpdateAPIView
)
from rest_framework.permissions import (
AllowAny,
IsAuthenticated,
IsAdminUser,
IsAuthenticatedOrReadOnly,
)
from posts.api.permissions import IsOwnerOrReadOnly
from posts.api.pagination import PostLimitOffsetPagination, PostPageNumberPagination
User = get_user_model()
from .serializers import (
UserCreateSerializer,
UserLoginSerializer,
)
class UserCreateAPIView(CreateAPIView):
    """Open (unauthenticated) endpoint that registers a new user account."""

    permission_classes = [AllowAny]
    queryset = User.objects.all()
    serializer_class = UserCreateSerializer
class UserLoginAPIView(APIView):
    """Open endpoint that validates login credentials.

    Returns the serialized login payload with HTTP 200 on success; invalid
    input raises a DRF ``ValidationError`` which DRF renders as HTTP 400.
    """

    permission_classes = [AllowAny]
    serializer_class = UserLoginSerializer

    def post(self, request, *args, **kwargs):
        """Validate the posted credentials and return the login data."""
        serializer = UserLoginSerializer(data=request.data)
        # is_valid(raise_exception=True) raises on bad input, so the explicit
        # `return Response(serializer.errors, HTTP_400_BAD_REQUEST)` that used
        # to follow was unreachable dead code — removed.
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data, status=HTTP_200_OK)
| {
"content_hash": "ca53a0768e3b2b36282ef8181272a697",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 84,
"avg_line_length": 18.32183908045977,
"alnum_prop": 0.7227101631116688,
"repo_name": "kurokochin/blog-ricky",
"id": "2399c81ae927fc04f792dfd20badc55e7a2e9386",
"size": "1594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "accounts/api/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "52162"
},
{
"name": "HTML",
"bytes": "15634"
},
{
"name": "JavaScript",
"bytes": "321792"
},
{
"name": "Python",
"bytes": "53632"
}
],
"symlink_target": ""
} |
from core import perf_benchmark
from telemetry import benchmark
from telemetry.timeline import tracing_category_filter
from telemetry.web_perf import timeline_based_measurement
import page_sets
@benchmark.Enabled('android')
class MemoryHealthPlan(perf_benchmark.PerfBenchmark):
    """Timeline based benchmark for the Memory Health Plan."""

    page_set = page_sets.MemoryHealthStory

    def CreateTimelineBasedMeasurementOptions(self):
        """Enable only the memory-infra tracing category."""
        category_filter = tracing_category_filter.TracingCategoryFilter(
            filter_string='disabled-by-default-memory-infra')
        return timeline_based_measurement.Options(
            overhead_level=category_filter)

    @classmethod
    def Name(cls):
        return 'memory.memory_health_plan'

    @classmethod
    def ValueCanBeAddedPredicate(cls, value, is_first_result):
        """Keep only memory measurement values in the results."""
        return value.name.startswith('measurement-memory_')
| {
"content_hash": "5ffb76b977a54260a2eebb47d499857d",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 74,
"avg_line_length": 31.074074074074073,
"alnum_prop": 0.7878426698450537,
"repo_name": "chuan9/chromium-crosswalk",
"id": "590222c53be7a152730af116c2e2ca480988853f",
"size": "1002",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/perf/benchmarks/memory_health_plan.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "37073"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9417055"
},
{
"name": "C++",
"bytes": "240920124"
},
{
"name": "CSS",
"bytes": "938860"
},
{
"name": "DM",
"bytes": "60"
},
{
"name": "Groff",
"bytes": "2494"
},
{
"name": "HTML",
"bytes": "27258381"
},
{
"name": "Java",
"bytes": "14580273"
},
{
"name": "JavaScript",
"bytes": "20507007"
},
{
"name": "Makefile",
"bytes": "70992"
},
{
"name": "Objective-C",
"bytes": "1742904"
},
{
"name": "Objective-C++",
"bytes": "9967587"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "178732"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "480579"
},
{
"name": "Python",
"bytes": "8519074"
},
{
"name": "Shell",
"bytes": "482077"
},
{
"name": "Standard ML",
"bytes": "5034"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
from PyQt5.QtWidgets import (QApplication, QComboBox, QDialog,
QDialogButtonBox, QFormLayout, QGridLayout, QGroupBox, QHBoxLayout,
QLabel, QLineEdit, QMenu, QMenuBar, QPushButton, QSpinBox, QTextEdit,
QVBoxLayout)
import sys, serial, glob
class Dialog(QDialog):
    """Dialog for choosing serial-port settings and opening the port.

    Scans the system for usable serial ports, presents the pyserial options
    (port, baud rate, data bits, parity, stop bits) in combo boxes, and
    opens/closes the port via the dialog's Open/Close buttons.
    """

    def __init__(self):
        super(Dialog, self).__init__()
        self.serial = serial.Serial()
        self._find_available_ports()
        self._create_form_group_box()
        self._create_buttons()

        mainLayout = QVBoxLayout()
        mainLayout.addWidget(self.formGroupBox)
        mainLayout.addWidget(self.buttonBox)
        self.setLayout(mainLayout)

        self.setWindowTitle("ICM Serial Port")

    def _create_buttons(self):
        """Create the Open/Close button box and wire it to connect/disconnect."""
        self.buttonBox = QDialogButtonBox(QDialogButtonBox.Open | QDialogButtonBox.Close)
        self.buttonBox.accepted.connect(self._connect_to_port)
        self.buttonBox.rejected.connect(self._disconnect_from_port)

    def _create_form_group_box(self):
        """Lay out the labeled setting combo boxes inside a group box."""
        self._create_combo_boxes()
        self.formGroupBox = QGroupBox("Serial Port Setting")
        self.layout = QFormLayout()
        self.layout.addRow(QLabel("Port:"), self._port_combo)
        self.layout.addRow(QLabel("Baud:"), self._baud_combo)
        self.layout.addRow(QLabel("Data bits:"), self._databits_combo)
        self.layout.addRow(QLabel("Parity bits:"), self._parity_combo)
        self.layout.addRow(QLabel("Stop bits:"), self._stopbits_combo)
        self.formGroupBox.setLayout(self.layout)

    def _create_combo_boxes(self):
        """Populate the combo boxes from the detected ports and pyserial's
        supported values, defaulting to 9600 8N1."""
        ## Create the serial PORT combo box
        self._port_combo = QComboBox()
        for port in self._portnames:
            self._port_combo.addItem(port)

        ## Create the serial BAUDRATE combo box
        self._baud_combo = QComboBox()
        for baud in self.serial.BAUDRATES:
            self._baud_combo.addItem(str(baud))
        self._baud_combo.setCurrentIndex(self.serial.BAUDRATES.index(9600))

        ## Create the DATABITs combo box
        self._databits_combo = QComboBox()
        for bits in self.serial.BYTESIZES:
            self._databits_combo.addItem(str(bits))
        self._databits_combo.setCurrentIndex(self.serial.BYTESIZES.index(8))

        ## Create the PARITY combo box
        self._parity_combo = QComboBox()
        for parity in self.serial.PARITIES:
            self._parity_combo.addItem(parity)
        self._parity_combo.setCurrentIndex(self.serial.PARITIES.index('N'))

        ## Create the STOPBITS combo box
        self._stopbits_combo = QComboBox()
        for sbits in self.serial.STOPBITS:
            self._stopbits_combo.addItem(str(sbits))
        self._stopbits_combo.setCurrentIndex(self.serial.STOPBITS.index(1))

    def _find_available_ports(self):
        """Probe platform-specific device names; keep those that open cleanly."""
        if sys.platform.startswith('win'):
            ports = ['COM%s' % (i+1) for i in range(256)]
        elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
            ports = glob.glob('/dev/tty[A-Za-z]*')
        elif sys.platform.startswith('darwin'):
            ports = glob.glob('/dev/tty.*')
        else:
            raise EnvironmentError('Unsupported Platform')

        self._portnames = []
        for port in ports:
            try:
                # Opening and closing the port is the only portable way to
                # tell whether it is usable.
                s = serial.Serial(port)
                s.close()
                self._portnames.append(port)
            except (OSError, serial.SerialException):
                pass

    def _connect_to_port(self):
        """Apply the selected settings to the serial object and open the port."""
        self.serial.port = self._port_combo.currentText()
        # Bug fix: pyserial's setting is `baudrate`; the old code assigned to a
        # non-existent `baud` attribute, so the selected rate was never applied
        # (the port silently opened at pyserial's default).
        self.serial.baudrate = int(self._baud_combo.currentText())
        self.serial.parity = self._parity_combo.currentText()
        self.serial.bytesize = int(self._databits_combo.currentText())
        self.serial.stopbits = float(self._stopbits_combo.currentText())
        self.serial.timeout = 0.5
        ## Open the Port
        self.serial.open()
        ## Set the status flag
        self.connected = self.serial.isOpen()

    def _disconnect_from_port(self):
        """Close the port and update the status flag."""
        ## Close the port
        self.serial.close()
        ## Set the status flag
        self.connected = self.serial.isOpen()
if __name__ == '__main__':
    # Create the Qt application and show the settings dialog modally;
    # exec_() blocks until the dialog is closed.
    app = QApplication(sys.argv)
    dialog = Dialog()
sys.exit(dialog.exec_()) | {
"content_hash": "75cd462d9fbd1b363760479b5a836e73",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 89,
"avg_line_length": 37.166666666666664,
"alnum_prop": 0.6035874439461884,
"repo_name": "MCasari-PMEL/EDD-ICMGUI",
"id": "bdc0ff3447bda51cb51542bfa186528a59d36c90",
"size": "4460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/dialogtest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "80531"
}
],
"symlink_target": ""
} |
"""
Example program to demonstrate Gooey's presentation of subparsers
"""
from gooey import Gooey, GooeyParser
@Gooey(
    optional_cols=2,
    program_name="Subparser Demo",
    dump_build_config=True,
    show_success_modal=False)
def main():
    """Run the subparser GUI and verify every widget bound a non-None value."""
    expected_dests = [
        'textfield',
        'textarea',
        'password',
        'commandfield',
        'dropdown',
        'listboxie',
        'counter',
        'overwrite',
        'mutextwo',
        'filechooser',
        'filesaver',
        'dirchooser',
        'datechooser',
    ]
    args = get_parser().parse_args()

    import time
    time.sleep(.6)

    for dest in expected_dests:
        assert getattr(args, dest) is not None
    print("Success")
def _add_widget_arguments(sub_parser):
    """Attach one argument per Gooey widget type to *sub_parser*.

    Both subcommands expose the identical widget set; this helper replaces
    the former ~90 lines of copy/paste duplication between them.
    """
    sub_parser.add_argument('--textfield', default=2, widget="TextField")
    sub_parser.add_argument('--textarea', default="oneline twoline",
                            widget='Textarea')
    sub_parser.add_argument('--password', default="hunter42",
                            widget='PasswordField')
    sub_parser.add_argument('--commandfield', default="cmdr",
                            widget='CommandField')
    sub_parser.add_argument('--dropdown',
                            choices=["one", "two"], default="two",
                            widget='Dropdown')
    sub_parser.add_argument('--listboxie',
                            nargs='+',
                            default=['Option three', 'Option four'],
                            choices=['Option one', 'Option two', 'Option three',
                                     'Option four'],
                            widget='Listbox',
                            gooey_options={
                                'height': 300,
                                'validate': '',
                                'heading_color': '',
                                'text_color': '',
                                'hide_heading': True,
                                'hide_text': True,
                            })
    sub_parser.add_argument('-c', '--counter', default=3, action='count',
                            widget='Counter')
    sub_parser.add_argument("-o", "--overwrite", action="store_true",
                            default=True,
                            widget='CheckBox')

    ### Mutex Group ###
    verbosity = sub_parser.add_mutually_exclusive_group(
        required=True,
        gooey_options={
            'initial_selection': 1
        }
    )
    verbosity.add_argument(
        '--mutexone',
        default=True,
        action='store_true',
        help="Show more details")
    verbosity.add_argument(
        '--mutextwo',
        default='mut-2',
        widget='TextField')

    sub_parser.add_argument("--filechooser", default="fc-value", widget='FileChooser')
    sub_parser.add_argument("--filesaver", default="fs-value", widget='FileSaver')
    sub_parser.add_argument("--dirchooser", default="dc-value", widget='DirChooser')
    sub_parser.add_argument("--datechooser", default="2015-01-01", widget='DateChooser')


def get_parser():
    """Build the GooeyParser: two subcommands sharing the same widget set."""
    parser = GooeyParser()
    subs = parser.add_subparsers(help='commands', dest='command')
    _add_widget_arguments(subs.add_parser('parser1', prog="Parser 1"))
    _add_widget_arguments(subs.add_parser('parser2', prog="parser 2"))
    return parser
if __name__ == '__main__':
    # Launch the Gooey GUI when run as a script.
    main()
| {
"content_hash": "94ab36a2231470262cf04af67a7ebd68",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 102,
"avg_line_length": 38.48979591836735,
"alnum_prop": 0.49169317780134325,
"repo_name": "partrita/Gooey",
"id": "24f4e34106f604568627648969e401159d15bd17",
"size": "5658",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gooey/tests/integration/programs/all_widgets_subparser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "178206"
}
],
"symlink_target": ""
} |
"""
dj-stripe System Checks
"""
import django
from django.conf import settings
from django.core import checks
from django.utils.dateparse import date_re
@checks.register("djstripe")
def check_stripe_api_key(app_configs=None, **kwargs):
    """Check the user has configured API live/test keys correctly."""
    from .settings import djstripe_settings

    messages = []

    if not djstripe_settings.STRIPE_SECRET_KEY:
        messages.append(
            checks.Critical(
                "Could not find a Stripe API key.",
                hint="Add STRIPE_TEST_SECRET_KEY and STRIPE_LIVE_SECRET_KEY to your settings.",
                id="djstripe.C001",
            )
        )
    elif djstripe_settings.STRIPE_LIVE_MODE:
        # Live mode: the live key must carry a recognized live prefix.
        if not djstripe_settings.LIVE_API_KEY.startswith(("sk_live_", "rk_live_")):
            messages.append(
                checks.Critical(
                    "Bad Stripe live API key.",
                    hint='STRIPE_LIVE_SECRET_KEY should start with "sk_live_"',
                    id="djstripe.C002",
                )
            )
    elif not djstripe_settings.TEST_API_KEY.startswith(("sk_test_", "rk_test_")):
        # Test mode: same prefix rule for the test key.
        messages.append(
            checks.Critical(
                "Bad Stripe test API key.",
                hint='STRIPE_TEST_SECRET_KEY should start with "sk_test_"',
                id="djstripe.C003",
            )
        )

    return messages
def validate_stripe_api_version(version):
    """
    Check the API version is formatted correctly for Stripe.

    The expected format is an iso8601 date: `YYYY-MM-DD`

    :param version: The version to set for the Stripe API.
    :type version: ``str``
    :returns bool: Whether the version is formatted correctly.
    """
    # date_re.match() returns a Match object or None; coerce to bool so the
    # return value actually matches the documented contract. Callers only
    # test truthiness, so this is backward-compatible.
    return date_re.match(version) is not None
@checks.register("djstripe")
def check_stripe_api_version(app_configs=None, **kwargs):
    """Check the user has configured API version correctly."""
    from .settings import djstripe_settings

    messages = []
    default_version = djstripe_settings.DEFAULT_STRIPE_API_VERSION
    version = djstripe_settings.get_stripe_api_version()

    # Malformed version string is a hard error.
    if not validate_stripe_api_version(version):
        messages.append(
            checks.Critical(
                "Invalid Stripe API version: {}".format(version),
                hint="STRIPE_API_VERSION should be formatted as: YYYY-MM-DD",
                id="djstripe.C004",
            )
        )

    # A non-default version is allowed, but only with a warning.
    if version != default_version:
        messages.append(
            checks.Warning(
                "The Stripe API version has a non-default value of '{}'. "
                "Non-default versions are not explicitly supported, and may "
                "cause compatibility issues.".format(version),
                hint="Use the dj-stripe default for Stripe API version: {}".format(
                    default_version
                ),
                id="djstripe.W001",
            )
        )

    return messages
@checks.register("djstripe")
def check_native_jsonfield_postgres_engine(app_configs=None, **kwargs):
    """
    Check that the DJSTRIPE_USE_NATIVE_JSONFIELD isn't set unless Postgres is in use.

    Only used on Django < 3.1.
    """
    from .settings import djstripe_settings

    messages = []

    # On Django 3.1+ the native JSONField is portable (works on mysql and
    # sqlite), so this check is skipped entirely.
    # https://docs.djangoproject.com/en/dev/releases/3.1/#postgresql-jsonfield
    if django.VERSION >= (3, 1):
        return messages

    if not djstripe_settings.USE_NATIVE_JSONFIELD:
        return messages

    for db_name, db_config in settings.DATABASES.items():
        # If you ARE on Postgres but this check misfires (multiple databases
        # with different engines, or a custom backend), silence it via
        # SILENCED_SYSTEM_CHECKS — as long as you're certain jsonb works.
        engine = db_config.get("ENGINE", "")
        if "postgresql" in engine or "postgis" in engine:
            continue
        messages.append(
            checks.Critical(
                "DJSTRIPE_USE_NATIVE_JSONFIELD is not compatible with engine "
                "{engine} for database {name}".format(
                    name=repr(db_name), engine=repr(engine)
                ),
                hint="Switch to Postgres, or unset "
                "DJSTRIPE_USE_NATIVE_JSONFIELD",
                id="djstripe.C005",
            )
        )

    return messages
@checks.register("djstripe")
def check_native_jsonfield_set_on_recent_django_versions(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_USE_NATIVE_JSONFIELD is set on Django > 3.1.

    This is only a suggestion, as existing installations need a migration path.
    """
    messages = []

    # The native JSONField only became available outside Postgres in 3.1.
    if django.VERSION < (3, 1):
        return messages

    # Deliberately tests whether the setting exists at all rather than its
    # value: the warning should only appear when no explicit choice was made.
    if hasattr(settings, "DJSTRIPE_USE_NATIVE_JSONFIELD"):
        return messages

    # TODO: Give more details on the migration path
    messages.append(
        checks.Warning(
            "DJSTRIPE_USE_NATIVE_JSONFIELD is not set.",
            hint=(
                "On Django 3.1+, setting DJSTRIPE_USE_NATIVE_JSONFIELD = True is "
                "recommended.\nPre-existing dj-stripe installations may require a "
                "migration, in which case you may want to set it to False."
            ),
            id="djstripe.W005",
        )
    )
    return messages
@checks.register("djstripe")
def check_stripe_api_host(app_configs=None, **kwargs):
    """
    Check that STRIPE_API_HOST is not being used in production.
    """
    from django.conf import settings

    # Overriding the API host is a development/mock-server feature only.
    if settings.DEBUG or not hasattr(settings, "STRIPE_API_HOST"):
        return []

    return [
        checks.Warning(
            "STRIPE_API_HOST should not be set in production! "
            "This is most likely unintended.",
            hint="Remove STRIPE_API_HOST from your Django settings.",
            id="djstripe.W002",
        )
    ]
@checks.register("djstripe")
def check_webhook_secret(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_WEBHOOK_SECRET looks correct
    """
    from .settings import djstripe_settings

    secret = djstripe_settings.WEBHOOK_SECRET
    # An unset secret is fine here (covered by check_webhook_validation);
    # a set secret must carry Stripe's "whsec_" prefix.
    if not secret or secret.startswith("whsec_"):
        return []

    return [
        checks.Warning(
            "DJSTRIPE_WEBHOOK_SECRET does not look valid",
            hint="It should start with whsec_...",
            id="djstripe.W003",
        )
    ]
@checks.register("djstripe")
def check_webhook_validation(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_WEBHOOK_VALIDATION is valid
    """
    from .settings import djstripe_settings

    messages = []
    validation_options = ("verify_signature", "retrieve_event")
    validation = djstripe_settings.WEBHOOK_VALIDATION

    if validation is None:
        # Explicitly disabled: allowed, but warn about the security risk.
        messages.append(
            checks.Warning(
                "Webhook validation is disabled, this is a security risk if the "
                "webhook view is enabled",
                hint="Set DJSTRIPE_WEBHOOK_VALIDATION to one of {}".format(
                    ", ".join(validation_options)
                ),
                id="djstripe.W004",
            )
        )
    elif validation == "verify_signature":
        # Signature verification requires the webhook secret.
        if not djstripe_settings.WEBHOOK_SECRET:
            messages.append(
                checks.Critical(
                    "DJSTRIPE_WEBHOOK_VALIDATION='verify_signature' "
                    "but DJSTRIPE_WEBHOOK_SECRET is not set",
                    hint="Set DJSTRIPE_WEBHOOK_SECRET or set "
                    "DJSTRIPE_WEBHOOK_VALIDATION='retrieve_event'",
                    id="djstripe.C006",
                )
            )
    elif validation not in validation_options:
        messages.append(
            checks.Critical(
                "DJSTRIPE_WEBHOOK_VALIDATION is invalid",
                hint="Set DJSTRIPE_WEBHOOK_VALIDATION to one of {} or None".format(
                    ", ".join(validation_options)
                ),
                id="djstripe.C007",
            )
        )

    return messages
@checks.register("djstripe")
def check_subscriber_key_length(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_SUBSCRIBER_CUSTOMER_KEY fits in metadata.

    Docs: https://stripe.com/docs/api#metadata
    """
    from .settings import djstripe_settings

    key = djstripe_settings.SUBSCRIBER_CUSTOMER_KEY
    key_size = len(str(key))
    # Stripe metadata keys are limited to 40 characters.
    if not key or key_size <= 40:
        return []

    return [
        checks.Error(
            "DJSTRIPE_SUBSCRIBER_CUSTOMER_KEY must be no more than "
            "40 characters long",
            hint="Current value: %r (%i characters)" % (key, key_size),
            id="djstripe.E001",
        )
    ]
@checks.register("djstripe")
def check_djstripe_settings_foreign_key_to_field(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_FOREIGN_KEY_TO_FIELD is set to a valid value.
    """
    from django.conf import settings

    setting_name = "DJSTRIPE_FOREIGN_KEY_TO_FIELD"
    hint = (
        f'Set {setting_name} to "id" if this is a new installation, '
        f'otherwise set it to "djstripe_id".'
    )
    messages = []

    # Missing entirely — require an explicit choice.
    if not hasattr(settings, setting_name):
        messages.append(
            checks.Error(
                "%s is not set." % (setting_name),
                hint=hint,
                id="djstripe.E002",
            )
        )
        return messages

    # Present but not one of the two accepted values.
    value = getattr(settings, setting_name)
    if value not in ("id", "djstripe_id"):
        messages.append(
            checks.Error(
                "%r is not a valid value for %s." % (value, setting_name),
                hint=hint,
                id="djstripe.E003",
            )
        )

    return messages
| {
"content_hash": "7fa7d28b8b65a2fce286274026c4e930",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 88,
"avg_line_length": 34.07641196013289,
"alnum_prop": 0.6010529394559813,
"repo_name": "pydanny/dj-stripe",
"id": "bb37ce2d2065a02e4b12817a95dc81652682ba54",
"size": "10257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djstripe/checks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "21431"
},
{
"name": "Python",
"bytes": "322111"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
# copy-pasted from https://github.com/City-of-Helsinki/kerrokantasi/blob/2c26bf3ee9ac4fdc88aefabd7d0c4e73f4d3707d/democracy/views/utils.py#L257 # noqa
class TranslatableSerializer(serializers.Serializer):
    """
    A serializer for django-parler translated fields.

    ``translated_fields`` is derived from the model's parler metadata,
    restricted to the fields declared in the Meta class.
    By default, translation languages are obtained from settings, but can be
    overridden by defining ``translation_lang`` in the Meta class.

    Translated fields are represented as ``{"lang_code": value}`` dicts.

    NOTE(review): the methods below assign to ``self.Meta.*``, which mutates
    the class-level ``Meta`` object shared by every instance of the
    serializer class — confirm this is intentional.
    """

    def __init__(self, *args, **kwargs):
        # Split Meta.fields into translated and non-translated parts; only
        # fields known to parler (model._parler_meta) count as translated.
        self.Meta.translated_fields = [
            field for field in self.Meta.model._parler_meta._fields_to_model if field in self.Meta.fields
        ]
        non_translated_fields = [field for field in self.Meta.fields if field not in self.Meta.translated_fields]
        # Hide translated fields from the parent constructor, then restore
        # them (appended at the end) once the base serializer is built.
        self.Meta.fields = non_translated_fields
        super(TranslatableSerializer, self).__init__(*args, **kwargs)
        self.Meta.fields = non_translated_fields + self.Meta.translated_fields
        if not hasattr(self.Meta, 'translation_lang'):
            # Default: the language codes configured for the current site.
            self.Meta.translation_lang = [lang['code'] for lang in settings.PARLER_LANGUAGES[settings.SITE_ID]]

    def _update_lang(self, ret, field, value, lang_code):
        # Ensure ret[field] is a per-language dict (replacing any plain
        # string left by the base serializer), then store non-empty values
        # under lang_code.
        if not ret.get(field) or isinstance(ret[field], str):
            ret[field] = {}
        if value:
            ret[field][lang_code] = value
        return ret

    def to_representation(self, instance):
        """Serialize the instance, expanding translated fields per language."""
        ret = super(TranslatableSerializer, self).to_representation(instance)
        translations = instance.translations.filter(language_code__in=self.Meta.translation_lang)
        for translation in translations:
            for field in self.Meta.translated_fields:
                self._update_lang(ret, field, getattr(translation, field), translation.language_code)
        return ret

    def _validate_translated_field(self, field, data):
        # Accept None (field omitted) or a {"lang_code": value} dict whose
        # keys are all supported languages; anything else raises.
        assert field in self.Meta.translated_fields, '%s is not a translated field' % field
        if data is None:
            return
        if not isinstance(data, dict):
            raise ValidationError(_('Not a valid translation format. Expecting {"lang_code": %(data)s}' %
                                    {'data': data}))
        for lang in data:
            if lang not in self.Meta.translation_lang:
                raise ValidationError(_('%(lang)s is not a supported languages (%(allowed)s)' % {
                    'lang': lang,
                    'allowed': self.Meta.translation_lang,
                }))

    def validate(self, data):
        """
        Add a custom validation for translated fields.

        Collects per-field errors so all invalid fields are reported at once.
        """
        validated_data = super().validate(data)
        errors = OrderedDict()
        for field in self.Meta.translated_fields:
            try:
                self._validate_translated_field(field, data.get(field, None))
            except ValidationError as e:
                errors[field] = e.detail
        if errors:
            raise ValidationError(errors)
        return validated_data

    def to_internal_value(self, value):
        # Carry the raw per-language dicts through to validated_data; the
        # base serializer would otherwise drop/mangle them.
        ret = super(TranslatableSerializer, self).to_internal_value(value)
        for field in self.Meta.translated_fields:
            v = value.get(field)
            if v:
                ret[field] = v
        return ret

    def save(self, **kwargs):
        """
        Extract the translations and save them after main object save.
        """
        translated_data = self._pop_translated_data()
        if not self.instance:
            # forces the translation to be created, since the object cannot be saved without
            self.validated_data[self.Meta.translated_fields[0]] = ''
        instance = super(TranslatableSerializer, self).save(**kwargs)
        self.save_translations(instance, translated_data)
        instance.save()
        return instance

    def _pop_translated_data(self):
        """
        Separate data of translated fields from other data.
        """
        translated_data = {}
        for meta in self.Meta.translated_fields:
            translations = self.validated_data.pop(meta, {})
            if translations:
                translated_data[meta] = translations
        return translated_data

    def save_translations(self, instance, translated_data):
        """
        Save translation data into translation objects.

        On full (non-partial) saves, languages missing from the payload are
        reset to the empty string.
        """
        for field in self.Meta.translated_fields:
            translations = {}
            if not self.partial:
                translations = {lang_code: '' for lang_code in self.Meta.translation_lang}
            translations.update(translated_data.get(field, {}))
            for lang_code, value in translations.items():
                translation = instance._get_translated_model(lang_code, auto_create=True)
                setattr(translation, field, value)
        instance.save_translations()
def assert_objects_in_response(response, objects):
    """Assert the ids in the response's results exactly match the objects' ids."""
    response_ids = set()
    for result in response.data['results']:
        response_ids.add(result['id'])
    expected_ids = {obj.id for obj in objects}
    assert response_ids == expected_ids
| {
"content_hash": "d35ee9e4fad434f6f4854b46c97bc93d",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 151,
"avg_line_length": 41.424,
"alnum_prop": 0.6284279644650445,
"repo_name": "mikkokeskinen/tunnistamo",
"id": "e699bb2818cc2a2cd791ef8243e1712f29f5b65a",
"size": "5178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tunnistamo/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2730"
},
{
"name": "Dockerfile",
"bytes": "330"
},
{
"name": "HTML",
"bytes": "4360"
},
{
"name": "Python",
"bytes": "301704"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_equal,
assert_allclose)
from scipy import sparse
from nose.tools import assert_true, assert_raises
import copy
import warnings
from mne.datasets import testing
from mne.label import read_label, label_sign_flip
from mne.event import read_events
from mne.epochs import Epochs
from mne.source_estimate import read_source_estimate, VolSourceEstimate
from mne import (read_cov, read_forward_solution, read_evokeds, pick_types,
pick_types_forward)
from mne.io import Raw
from mne.minimum_norm.inverse import (apply_inverse, read_inverse_operator,
apply_inverse_raw, apply_inverse_epochs,
make_inverse_operator,
write_inverse_operator,
compute_rank_inverse,
prepare_inverse_operator)
from mne.utils import _TempDir, run_tests_if_main, slow_test
from mne.externals import six
# Base path of the (optionally downloaded) MNE testing dataset.
s_path = op.join(testing.data_path(download=False), 'MEG', 'sample')
# Forward solution used when constructing inverse operators in the tests.
fname_fwd = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
# Four inverses:
fname_full = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_inv = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-4-meg-inv.fif')
fname_inv_fixed_nodepth = op.join(s_path,
                                  'sample_audvis_trunc-meg-eeg-oct-4-meg'
                                  '-nodepth-fixed-inv.fif')
fname_inv_meeg_diag = op.join(s_path,
                              'sample_audvis_trunc-'
                              'meg-eeg-oct-4-meg-eeg-diagnoise-inv.fif')
# Evoked data, noise covariance, raw recording, and events for the same run.
fname_data = op.join(s_path, 'sample_audvis_trunc-ave.fif')
fname_cov = op.join(s_path, 'sample_audvis_trunc-cov.fif')
fname_raw = op.join(s_path, 'sample_audvis_trunc_raw.fif')
fname_event = op.join(s_path, 'sample_audvis_trunc_raw-eve.fif')
# Template for per-label files; filled in with a label name.
fname_label = op.join(s_path, 'labels', '%s.label')
fname_vol_inv = op.join(s_path,
                        'sample_audvis_trunc-meg-vol-7-meg-inv.fif')
# Regularization parameter: lambda2 = 1 / SNR**2 (standard MNE convention).
snr = 3.0
lambda2 = 1.0 / snr ** 2

# Fixed-length ring buffer of the most recently visited dict keys in
# _compare(); printed as a breadcrumb trail when a comparison fails.
last_keys = [None] * 10
def read_forward_solution_meg(*args, **kwargs):
    """Read a forward solution and keep only its MEG channels."""
    full_fwd = read_forward_solution(*args, **kwargs)
    return pick_types_forward(full_fwd, meg=True, eeg=False)
def read_forward_solution_eeg(*args, **kwargs):
    """Read a forward solution and keep only its EEG channels."""
    full_fwd = read_forward_solution(*args, **kwargs)
    return pick_types_forward(full_fwd, meg=False, eeg=True)
def _get_evoked():
    """Load condition 0 of the test evoked file, baseline-corrected and
    cropped to the first 200 ms."""
    ev = read_evokeds(fname_data, condition=0, baseline=(None, 0))
    ev.crop(0, 0.2)
    return ev
def _compare(a, b):
    """Recursively assert that two inverse-operator-like objects are equal.

    Handles dicts, lists, scipy CSR matrices, and ndarrays; everything else
    falls back to ``==``.  Keys listed in ``skip_types`` may differ or be
    missing between the two objects.
    """
    global last_keys
    # Keys that are allowed to differ between the two operators (e.g. after
    # a write/read round-trip), so they are excluded from the comparison.
    skip_types = ['whitener', 'proj', 'reginv', 'noisenorm', 'nchan',
                  'command_line', 'working_dir', 'mri_file', 'mri_id']
    try:
        if isinstance(a, dict):
            assert_true(isinstance(b, dict))
            for k, v in six.iteritems(a):
                if k not in b and k not in skip_types:
                    raise ValueError('First one had one second one didn\'t:\n'
                                     '%s not in %s' % (k, b.keys()))
                if k not in skip_types:
                    # Record the key being descended into, so a failure deep
                    # in the recursion can report its location (see except).
                    last_keys.pop()
                    last_keys = [k] + last_keys
                    _compare(v, b[k])
            # Symmetric check: b must not have extra (non-skipped) keys.
            for k, v in six.iteritems(b):
                if k not in a and k not in skip_types:
                    raise ValueError('Second one had one first one didn\'t:\n'
                                     '%s not in %s' % (k, a.keys()))
        elif isinstance(a, list):
            assert_true(len(a) == len(b))
            for i, j in zip(a, b):
                _compare(i, j)
        elif isinstance(a, sparse.csr.csr_matrix):
            # Sparse matrices: values approximately, structure exactly.
            assert_array_almost_equal(a.data, b.data)
            assert_equal(a.indices, b.indices)
            assert_equal(a.indptr, b.indptr)
        elif isinstance(a, np.ndarray):
            assert_array_almost_equal(a, b)
        else:
            assert_true(a == b)
    except Exception as exptn:
        # Show the most recent key path before re-raising, to aid debugging.
        print(last_keys)
        raise exptn
def _compare_inverses_approx(inv_1, inv_2, evoked, rtol, atol,
                             check_depth=True):
    """Compare two inverse operators approximately.

    Priors and source covariance are compared (almost) exactly, the eigen
    decompositions only coarsely via absolute values, and finally the dSPM
    solutions both operators produce on ``evoked`` are compared within
    ``rtol``/``atol``.
    """
    # depth prior
    if check_depth:
        if inv_1['depth_prior'] is not None:
            assert_array_almost_equal(inv_1['depth_prior']['data'],
                                      inv_2['depth_prior']['data'], 5)
        else:
            assert_true(inv_2['depth_prior'] is None)
    # orient prior
    if inv_1['orient_prior'] is not None:
        assert_array_almost_equal(inv_1['orient_prior']['data'],
                                  inv_2['orient_prior']['data'])
    else:
        assert_true(inv_2['orient_prior'] is None)
    # source cov
    assert_array_almost_equal(inv_1['source_cov']['data'],
                              inv_2['source_cov']['data'])

    # These are not as close as we'd like XXX
    # (abs() and 0 decimals -- presumably because component signs can flip)
    assert_array_almost_equal(np.abs(inv_1['eigen_fields']['data']),
                              np.abs(inv_2['eigen_fields']['data']), 0)
    assert_array_almost_equal(np.abs(inv_1['eigen_leads']['data']),
                              np.abs(inv_2['eigen_leads']['data']), 0)

    # The end-to-end check: both operators must yield matching dSPM solutions.
    stc_1 = apply_inverse(evoked, inv_1, lambda2, "dSPM")
    stc_2 = apply_inverse(evoked, inv_2, lambda2, "dSPM")

    assert_true(stc_1.subject == stc_2.subject)
    assert_equal(stc_1.times, stc_2.times)
    assert_allclose(stc_1.data, stc_2.data, rtol=rtol, atol=atol)
    assert_true(inv_1['units'] == inv_2['units'])
def _compare_io(inv_op, out_file_ext='.fif'):
    """Round-trip ``inv_op`` through disk and check nothing changed."""
    tempdir = _TempDir()  # keep the object alive so the dir isn't removed
    ext_to_fname = {'.fif': 'test-inv.fif', '.gz': 'test-inv.fif.gz'}
    if out_file_ext not in ext_to_fname:
        raise ValueError('IO test could not complete')
    out_file = op.join(tempdir, ext_to_fname[out_file_ext])

    # Snapshot before writing so in-place mutation is also detected.
    inv_init = copy.deepcopy(inv_op)
    write_inverse_operator(out_file, inv_op)
    read_inv_op = read_inverse_operator(out_file)
    # The re-read operator must match the snapshot ...
    _compare(inv_init, read_inv_op)
    # ... and writing must not have mutated the operator we were given.
    _compare(inv_init, inv_op)
@testing.requires_testing_data
def test_warn_inverse_operator():
    """Check that building an inverse without any projections warns once."""
    info_no_proj = copy.deepcopy(_get_evoked().info)
    info_no_proj['projs'] = list()  # strip the average EEG reference proj
    fwd = read_forward_solution(fname_fwd, surf_ori=True)
    cov = read_cov(fname_cov)
    with warnings.catch_warnings(record=True) as w:
        make_inverse_operator(info_no_proj, fwd, cov)
    assert_equal(len(w), 1)
@slow_test
@testing.requires_testing_data
def test_make_inverse_operator():
    """Test MNE inverse computation (precomputed and non-precomputed)
    """
    # Test old version of inverse computation starting from forward operator
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    inverse_operator = read_inverse_operator(fname_inv)
    fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True)
    my_inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov,
                                      loose=0.2, depth=0.8,
                                      limit_depth_chs=False)
    _compare_io(my_inv_op)
    assert_true(inverse_operator['units'] == 'Am')
    # depth-prior check skipped for this variant (see check_depth=False)
    _compare_inverses_approx(my_inv_op, inverse_operator, evoked, 1e-2, 1e-2,
                             check_depth=False)
    # Test MNE inverse computation starting from forward operator
    my_inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov,
                                      loose=0.2, depth=0.8)
    _compare_io(my_inv_op)
    _compare_inverses_approx(my_inv_op, inverse_operator, evoked, 1e-2, 1e-2)
    # the constructed operator must carry the coordinate transforms
    assert_true('dev_head_t' in my_inv_op['info'])
    assert_true('mri_head_t' in my_inv_op)
@slow_test
@testing.requires_testing_data
def test_apply_inverse_operator():
    """Test MNE inverse application for the MNE, sLORETA and dSPM methods.

    Also checks that a prepared operator gives the same result as an
    unprepared one, that label-restricted application matches restricting a
    full solution, and that invalid references raise.
    """
    inverse_operator = read_inverse_operator(fname_full)
    evoked = _get_evoked()

    # Inverse has 306 channels - 4 proj = 302
    # (duplicate of this check removed -- it was asserted twice verbatim)
    assert_true(compute_rank_inverse(inverse_operator) == 302)

    # MNE solution: positive amplitudes in a plausible range (units: Am)
    stc = apply_inverse(evoked, inverse_operator, lambda2, "MNE")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 10e-9)
    assert_true(stc.data.mean() > 1e-11)

    # test if using prepared and not prepared inverse operator give the same
    # result
    inv_op = prepare_inverse_operator(inverse_operator, nave=evoked.nave,
                                      lambda2=lambda2, method="MNE")
    stc2 = apply_inverse(evoked, inv_op, lambda2, "MNE")
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.times, stc2.times)

    # sLORETA solution: unitless values
    stc = apply_inverse(evoked, inverse_operator, lambda2, "sLORETA")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 10.0)
    assert_true(stc.data.mean() > 0.1)

    # dSPM solution
    stc = apply_inverse(evoked, inverse_operator, lambda2, "dSPM")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 35)
    assert_true(stc.data.mean() > 0.1)

    # applying with a label must match restricting a full solution afterwards
    label = read_label(fname_label % 'Aud-lh')
    stc = apply_inverse(evoked, inv_op, lambda2, "MNE")
    stc_label = apply_inverse(evoked, inv_op, lambda2, "MNE",
                              label=label)
    assert_equal(stc_label.subject, 'sample')
    label_stc = stc.in_label(label)
    assert_true(label_stc.subject == 'sample')
    assert_array_almost_equal(stc_label.data, label_stc.data)

    # Test we get errors when using custom ref or no average proj is present
    evoked.info['custom_ref_applied'] = True
    assert_raises(ValueError, apply_inverse, evoked, inv_op, lambda2, "MNE")
    evoked.info['custom_ref_applied'] = False
    evoked.info['projs'] = []  # remove EEG proj
    assert_raises(ValueError, apply_inverse, evoked, inv_op, lambda2, "MNE")
@testing.requires_testing_data
def test_make_inverse_operator_fixed():
    """Test MNE inverse computation (fixed orientation)
    """
    # free-orientation forward, not surface-oriented
    fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=False)
    # fixed-orientation forward
    fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)

    # can't make depth-weighted fixed inv without surf ori fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_1,
                  noise_cov, depth=0.8, loose=None, fixed=True)
    # can't make fixed inv with depth weighting without free ori fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2,
                  noise_cov, depth=0.8, loose=None, fixed=True)

    # now compare to C solution
    # note that the forward solution must not be surface-oriented
    # to get equivalency (surf_ori=True changes the normals)
    inv_op = make_inverse_operator(evoked.info, fwd_2, noise_cov, depth=None,
                                   loose=None, fixed=True)
    inverse_operator_nodepth = read_inverse_operator(fname_inv_fixed_nodepth)
    _compare_inverses_approx(inverse_operator_nodepth, inv_op, evoked, 0, 1e-2)
    # Rank should be 302 (channels minus projectors).  NOTE(review): the
    # original comment said "306 channels - 6 proj = 302", whose arithmetic
    # does not add up -- confirm the actual projector count.
    assert_true(compute_rank_inverse(inverse_operator_nodepth) == 302)
@testing.requires_testing_data
def test_make_inverse_operator_free():
    """Test MNE inverse computation (free orientation)
    """
    fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True)
    fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=False)
    fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)

    # can't make free inv with fixed fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2,
                  noise_cov, depth=None)

    # for free ori inv, loose=None and loose=1 should be equivalent
    inv_1 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=None)
    inv_2 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=1)
    _compare_inverses_approx(inv_1, inv_2, evoked, 0, 1e-2)

    # for depth=None, surf_ori of the fwd should not matter
    inv_3 = make_inverse_operator(evoked.info, fwd_op, noise_cov, depth=None,
                                  loose=None)
    inv_4 = make_inverse_operator(evoked.info, fwd_1, noise_cov, depth=None,
                                  loose=None)
    _compare_inverses_approx(inv_3, inv_4, evoked, 0, 1e-2)
@testing.requires_testing_data
def test_make_inverse_operator_diag():
    """Test MNE inverse computation with diagonal noise cov
    """
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    fwd_op = read_forward_solution(fname_fwd, surf_ori=True)
    # build with the diagonal-only version of the noise covariance
    inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov.as_diag(),
                                   loose=0.2, depth=0.8)
    _compare_io(inv_op)
    inverse_operator_diag = read_inverse_operator(fname_inv_meeg_diag)
    # This one's only good to zero decimal places, roundoff error (?)
    _compare_inverses_approx(inverse_operator_diag, inv_op, evoked, 0, 1e0)
    # Inverse has 366 channels - 6 proj = 360
    assert_true(compute_rank_inverse(inverse_operator_diag) == 360)
@testing.requires_testing_data
def test_inverse_operator_noise_cov_rank():
    """Check that an explicitly specified noise-covariance rank is respected."""
    evoked = _get_evoked()
    cov = read_cov(fname_cov)

    # MEG-only forward with a global integer rank
    fwd_meg = read_forward_solution_meg(fname_fwd, surf_ori=True)
    inv_meg = make_inverse_operator(evoked.info, fwd_meg, cov, rank=64)
    assert_true(compute_rank_inverse(inv_meg) == 64)

    # EEG-only forward with a per-channel-type rank dict
    fwd_eeg = read_forward_solution_eeg(fname_fwd, surf_ori=True)
    inv_eeg = make_inverse_operator(evoked.info, fwd_eeg, cov,
                                    rank=dict(eeg=20))
    assert_true(compute_rank_inverse(inv_eeg) == 20)
@testing.requires_testing_data
def test_inverse_operator_volume():
    """Test MNE inverse computation on volume source space
    """
    tempdir = _TempDir()
    evoked = _get_evoked()
    inverse_operator_vol = read_inverse_operator(fname_vol_inv)
    assert_true(repr(inverse_operator_vol))
    stc = apply_inverse(evoked, inverse_operator_vol, lambda2, "dSPM")
    # volume source spaces yield VolSourceEstimate, not SourceEstimate
    assert_true(isinstance(stc, VolSourceEstimate))
    # volume inverses don't have associated subject IDs
    assert_true(stc.subject is None)
    # round-trip through an .stc file and check the data survives
    stc.save(op.join(tempdir, 'tmp-vl.stc'))
    stc2 = read_source_estimate(op.join(tempdir, 'tmp-vl.stc'))
    assert_true(np.all(stc.data > 0))
    assert_true(np.all(stc.data < 35))
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.times, stc2.times)
@slow_test
@testing.requires_testing_data
def test_io_inverse_operator():
    """Test IO of inverse_operator with GZip
    """
    tempdir = _TempDir()
    inverse_operator = read_inverse_operator(fname_inv)
    x = repr(inverse_operator)
    assert_true(x)
    # just do one example for .gz, as it should generalize
    _compare_io(inverse_operator, '.gz')

    # test warnings on bad filenames
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        inv_badname = op.join(tempdir, 'test-bad-name.fif.gz')
        write_inverse_operator(inv_badname, inverse_operator)
        read_inverse_operator(inv_badname)
    # one warning for the write and one for the read of the bad name
    assert_true(len(w) == 2)
@testing.requires_testing_data
def test_apply_mne_inverse_raw():
    """Test MNE with precomputed inverse operator on Raw
    """
    start = 3
    stop = 10
    raw = Raw(fname_raw)
    label_lh = read_label(fname_label % 'Aud-lh')
    _, times = raw[0, start:stop]
    inverse_operator = read_inverse_operator(fname_full)
    inverse_operator = prepare_inverse_operator(inverse_operator, nave=1,
                                                lambda2=lambda2,
                                                method="dSPM")
    for pick_ori in [None, "normal"]:
        # full-buffer vs. chunked (buffer_size=3) application must agree
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                label=label_lh, start=start, stop=stop,
                                nave=1, pick_ori=pick_ori, buffer_size=None,
                                prepared=True)
        stc2 = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                 label=label_lh, start=start, stop=stop,
                                 nave=1, pick_ori=pick_ori,
                                 buffer_size=3, prepared=True)

        if pick_ori is None:
            # dSPM magnitudes (no signed orientation) must be positive
            assert_true(np.all(stc.data > 0))
            assert_true(np.all(stc2.data > 0))

        assert_true(stc.subject == 'sample')
        assert_true(stc2.subject == 'sample')
        assert_array_almost_equal(stc.times, times)
        assert_array_almost_equal(stc2.times, times)
        assert_array_almost_equal(stc.data, stc2.data)
@testing.requires_testing_data
def test_apply_mne_inverse_fixed_raw():
    """Test MNE with fixed-orientation inverse operator on Raw
    """
    raw = Raw(fname_raw)
    start = 3
    stop = 10
    _, times = raw[0, start:stop]
    label_lh = read_label(fname_label % 'Aud-lh')

    # create a fixed-orientation inverse operator
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=False,
                                    surf_ori=True)
    noise_cov = read_cov(fname_cov)
    inv_op = make_inverse_operator(raw.info, fwd, noise_cov,
                                   loose=None, depth=0.8, fixed=True)

    inv_op2 = prepare_inverse_operator(inv_op, nave=1,
                                       lambda2=lambda2, method="dSPM")
    # unbuffered application of the prepared operator
    stc = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                            label=label_lh, start=start, stop=stop, nave=1,
                            pick_ori=None, buffer_size=None, prepared=True)

    # chunked application (buffer_size=3) of the prepared operator
    stc2 = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=3, prepared=True)

    # the unprepared operator must give the same answer as the prepared one
    stc3 = apply_inverse_raw(raw, inv_op, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=None)

    assert_true(stc.subject == 'sample')
    assert_true(stc2.subject == 'sample')
    assert_array_almost_equal(stc.times, times)
    assert_array_almost_equal(stc2.times, times)
    assert_array_almost_equal(stc3.times, times)
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.data, stc3.data)
@testing.requires_testing_data
def test_apply_mne_inverse_epochs():
    """Test MNE with precomputed inverse operator on Epochs
    """
    inverse_operator = read_inverse_operator(fname_full)
    label_lh = read_label(fname_label % 'Aud-lh')
    label_rh = read_label(fname_label % 'Aud-rh')
    event_id, tmin, tmax = 1, -0.2, 0.5
    raw = Raw(fname_raw)

    picks = pick_types(raw.info, meg=True, eeg=False, stim=True, ecg=True,
                       eog=True, include=['STI 014'], exclude='bads')
    reject = dict(grad=4000e-13, mag=4e-12, eog=150e-6)
    flat = dict(grad=1e-15, mag=1e-15)

    events = read_events(fname_event)[:15]
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=reject, flat=flat)
    stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                label=label_lh, pick_ori="normal")
    inverse_operator = prepare_inverse_operator(inverse_operator, nave=1,
                                                lambda2=lambda2,
                                                method="dSPM")
    stcs2 = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                 label=label_lh, pick_ori="normal",
                                 prepared=True)
    # test if using prepared and not prepared inverse operator give the same
    # result
    assert_array_almost_equal(stcs[0].data, stcs2[0].data)
    assert_array_almost_equal(stcs[0].times, stcs2[0].times)

    assert_true(len(stcs) == 2)
    assert_true(3 < stcs[0].data.max() < 10)
    assert_true(stcs[0].subject == 'sample')

    # a sign-flipped label average should exceed the naive average,
    # presumably because opposite-facing source normals cancel otherwise
    data = sum(stc.data for stc in stcs) / len(stcs)
    flip = label_sign_flip(label_lh, inverse_operator['src'])
    label_mean = np.mean(data, axis=0)
    label_mean_flip = np.mean(flip[:, np.newaxis] * data, axis=0)
    assert_true(label_mean.max() < label_mean_flip.max())

    # test extracting a BiHemiLabel: the combined solution must stack the
    # left- and right-hemisphere solutions
    stcs_rh = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                   label=label_rh, pick_ori="normal",
                                   prepared=True)
    stcs_bh = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                   label=label_lh + label_rh,
                                   pick_ori="normal",
                                   prepared=True)
    n_lh = len(stcs[0].data)
    assert_array_almost_equal(stcs[0].data, stcs_bh[0].data[:n_lh])
    assert_array_almost_equal(stcs_rh[0].data, stcs_bh[0].data[n_lh:])

    # test without using a label (so delayed computation is used)
    stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                pick_ori="normal", prepared=True)
    assert_true(stcs[0].subject == 'sample')
    label_stc = stcs[0].in_label(label_rh)
    assert_true(label_stc.subject == 'sample')
    assert_array_almost_equal(stcs_rh[0].data, label_stc.data)
@testing.requires_testing_data
def test_make_inverse_operator_bads():
    """Check inverse construction when evoked info temporarily loses a bad.

    Channel names and bads stored in the resulting operator must be subsets
    of what the noise covariance and the evoked data agree on.
    """
    fwd = read_forward_solution_meg(fname_fwd, surf_ori=True)
    evoked = _get_evoked()
    cov = read_cov(fname_cov)

    # temporarily drop one bad channel from the evoked info
    removed_bad = evoked.info['bads'].pop()
    inv = make_inverse_operator(evoked.info, fwd, cov, loose=None)
    # note: '&' is set intersection -- channels/bads known to BOTH sides
    good_names = set(cov['names']) & set(evoked.ch_names)
    shared_bads = set(cov['bads']) & set(evoked.info['bads'])
    evoked.info['bads'].append(removed_bad)

    assert_true(len(set(inv['info']['ch_names']) - good_names) == 0)
    assert_true(len(set(inv['info']['bads']) - shared_bads) == 0)
# Discover and run this module's tests when executed directly.
run_tests_if_main()
| {
"content_hash": "0027da9f33f2e2207276ae1006df0e83",
"timestamp": "",
"source": "github",
"line_count": 545,
"max_line_length": 79,
"avg_line_length": 41.40183486238532,
"alnum_prop": 0.6068073036695621,
"repo_name": "dgwakeman/mne-python",
"id": "d7d1d6ef576435f7a9e0d29d0c03d5b06ecbfc94",
"size": "22564",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mne/minimum_norm/tests/test_inverse.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3435"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "4132540"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, AuthUserFactory, RegistrationFactory, WithdrawnRegistrationFactory
from framework.auth import Auth
from api.base.settings.defaults import API_BASE
class TestInstitutionRegistrationList(ApiTestCase):
    """Tests for the institution registrations list API endpoint."""

    def setUp(self):
        super(TestInstitutionRegistrationList, self).setUp()
        self.institution = InstitutionFactory()
        # Public registration affiliated with the institution.
        self.registration1 = RegistrationFactory(is_public=True, is_registration=True)
        self.registration1.affiliated_institutions.add(self.institution)
        self.registration1.save()

        self.user1 = AuthUserFactory()
        self.user2 = AuthUserFactory()

        # Private registration created by user1, with user2 as a contributor.
        self.registration2 = RegistrationFactory(creator=self.user1, is_public=False, is_registration=True)
        self.registration2.affiliated_institutions.add(self.institution)
        self.registration2.add_contributor(self.user2, auth=Auth(self.user1))
        self.registration2.save()

        # Private registration created by user2 only.
        self.registration3 = RegistrationFactory(creator=self.user2, is_public=False, is_registration=True)
        self.registration3.affiliated_institutions.add(self.institution)
        self.registration3.save()

        self.institution_node_url = '/{0}institutions/{1}/registrations/'.format(API_BASE, self.institution._id)

    def test_return_all_public_nodes(self):
        # Anonymous request: only the public registration is listed.
        res = self.app.get(self.institution_node_url)

        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]

        assert_in(self.registration1._id, ids)
        assert_not_in(self.registration2._id, ids)
        assert_not_in(self.registration3._id, ids)

    def test_does_not_return_private_nodes_with_auth(self):
        # Even an authenticated contributor does not see private
        # registrations in this listing.
        res = self.app.get(self.institution_node_url, auth=self.user1.auth)

        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]

        assert_in(self.registration1._id, ids)
        assert_not_in(self.registration2._id, ids)
        assert_not_in(self.registration3._id, ids)

    def test_doesnt_return_retractions_without_auth(self):
        # Make the registration public first, then withdraw it; it must
        # drop out of the anonymous listing.
        self.registration2.is_public = True
        self.registration2.save()
        retraction = WithdrawnRegistrationFactory(registration=self.registration2, user=self.user1)
        assert_true(self.registration2.is_retracted)

        res = self.app.get(self.institution_node_url)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]

        assert_not_in(self.registration2._id, ids)

    def test_doesnt_return_retractions_with_auth(self):
        # Withdrawn registrations are hidden from contributors, too.
        retraction = WithdrawnRegistrationFactory(registration=self.registration2, user=self.user1)
        assert_true(self.registration2.is_retracted)

        res = self.app.get(self.institution_node_url, auth=self.user1.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]

        assert_not_in(self.registration2._id, ids)
| {
"content_hash": "f5f166bc3eaf7e030e094b95f2ab254e",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 118,
"avg_line_length": 42.647887323943664,
"alnum_prop": 0.7014531043593131,
"repo_name": "monikagrabowska/osf.io",
"id": "e76bc1852d8570fdedae7a2e50be6c907eecfe6a",
"size": "3028",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "api_tests/institutions/views/test_institution_registrations_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "176566"
},
{
"name": "HTML",
"bytes": "183119"
},
{
"name": "JavaScript",
"bytes": "2017358"
},
{
"name": "Jupyter Notebook",
"bytes": "8510"
},
{
"name": "Makefile",
"bytes": "6905"
},
{
"name": "Mako",
"bytes": "755899"
},
{
"name": "PLpgSQL",
"bytes": "22144"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "9632033"
},
{
"name": "Shell",
"bytes": "436"
}
],
"symlink_target": ""
} |
"""Resource definitions for cloud platform apis."""
import enum
# Root endpoint that the collection paths below are resolved against.
BASE_URL = 'https://bio.googleapis.com/v1/'
class Collections(enum.Enum):
    """Collections for all supported apis."""

    # Each member's value is a 4-tuple:
    #   (collection name, URI path template, flat path templates, path params)
    PROJECTS_OPERATIONS = (
        'projects.operations',
        '{+name}',
        {
            '':
                'projects/{projectsId}/operations/{operationsId}',
        },
        [u'name']
    )

    def __init__(self, collection_name, path, flat_paths, params):
        # Enum.__init__ receives the member's value tuple unpacked.
        self.collection_name = collection_name
        self.path = path  # URI template, relative to BASE_URL
        self.flat_paths = flat_paths  # flattened path templates by name
        self.params = params  # ordered list of path parameter names
| {
"content_hash": "48d1001c8c34b459eb942787505ec66b",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 64,
"avg_line_length": 21.692307692307693,
"alnum_prop": 0.6099290780141844,
"repo_name": "KaranToor/MA450",
"id": "4255a33b0dadde5bc2eeed53e2e5b390d47028e8",
"size": "1159",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/.install/.backup/lib/googlecloudsdk/third_party/apis/bio/v1/resources.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
} |
import mock
import six
from mistralclient.api.v2 import workflows
from mistralclient.commands.v2 import base as cmd_base
from mistralclient.commands.v2 import workflows as workflow_cmd
from mistralclient.tests.unit import base
WORKFLOW_DICT = {
'id': '1-2-3-4',
'name': 'a',
'namespace': '',
'project_id': '12345',
'tags': ['a', 'b'],
'input': 'param',
'created_at': '1',
'updated_at': '1'
}
WF_DEF = """
version: '2.0'
flow:
tasks:
task1:
action: nova.servers_get server="1"
"""
WF_WITH_DEF_DICT = WORKFLOW_DICT.copy()
WF_WITH_DEF_DICT.update({'definition': WF_DEF})
WORKFLOW = workflows.Workflow(mock, WORKFLOW_DICT)
WORKFLOW_WITH_DEF = workflows.Workflow(mock, WF_WITH_DEF_DICT)
class TestCLIWorkflowsV2(base.BaseCommandTest):
    """Tests for the v2 workflow CLI commands.

    ``self.client`` is a mocked Mistral API client provided by
    BaseCommandTest; ``self.call`` runs a command and ``result[1]`` below
    holds the rendered data rows.
    """

    @mock.patch('argparse.open', create=True)
    def test_create(self, mock_open):
        self.client.workflows.create.return_value = [WORKFLOW]

        result = self.call(workflow_cmd.Create, app_args=['1.txt'])

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

    @mock.patch('argparse.open', create=True)
    def test_create_public(self, mock_open):
        self.client.workflows.create.return_value = [WORKFLOW]

        result = self.call(
            workflow_cmd.Create,
            app_args=['1.txt', '--public']
        )

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

        # --public must be forwarded to the client as scope='public'
        self.assertEqual(
            'public',
            self.client.workflows.create.call_args[1]['scope']
        )

    @mock.patch('argparse.open', create=True)
    def test_create_long_input(self, mock_open):
        wf_long_input_dict = WORKFLOW_DICT.copy()
        long_input = ', '.join(
            ['var%s' % i for i in six.moves.xrange(10)]
        )
        wf_long_input_dict['input'] = long_input
        workflow_long_input = workflows.Workflow(mock, wf_long_input_dict)
        self.client.workflows.create.return_value = [workflow_long_input]

        result = self.call(workflow_cmd.Create, app_args=['1.txt'])

        # a long input column is truncated for display via cmd_base.cut
        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', cmd_base.cut(long_input),
              '1', '1')],
            result[1]
        )

    @mock.patch('argparse.open', create=True)
    def test_update(self, mock_open):
        self.client.workflows.update.return_value = [WORKFLOW]

        result = self.call(workflow_cmd.Update, app_args=['1.txt'])

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

    @mock.patch('argparse.open', create=True)
    def test_update_public(self, mock_open):
        self.client.workflows.update.return_value = [WORKFLOW]

        result = self.call(
            workflow_cmd.Update,
            app_args=['1.txt', '--public']
        )

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

        self.assertEqual(
            'public',
            self.client.workflows.update.call_args[1]['scope']
        )

    @mock.patch('argparse.open', create=True)
    def test_update_with_id(self, mock_open):
        # with --id the client returns a single workflow, not a list
        self.client.workflows.update.return_value = WORKFLOW

        result = self.call(
            workflow_cmd.Update,
            app_args=['1.txt', '--id', '1-2-3-4']
        )

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

    def test_list(self):
        self.client.workflows.list.return_value = [WORKFLOW]

        result = self.call(workflow_cmd.List)

        self.assertEqual(
            [('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1')],
            result[1]
        )

    def test_get(self):
        self.client.workflows.get.return_value = WORKFLOW

        result = self.call(workflow_cmd.Get, app_args=['name'])

        # Get renders a single row (tuple), not a list of rows
        self.assertEqual(
            ('1-2-3-4', 'a', '', '12345', 'a, b', 'param', '1', '1'),
            result[1]
        )

    def test_delete(self):
        self.call(workflow_cmd.Delete, app_args=['name'])

        self.client.workflows.delete.assert_called_once_with('name', None)

    def test_delete_with_multi_names(self):
        self.call(workflow_cmd.Delete, app_args=['name1', 'name2'])

        # one delete call per workflow name
        self.assertEqual(2, self.client.workflows.delete.call_count)
        self.assertEqual(
            [mock.call('name1', None), mock.call('name2', None)],
            self.client.workflows.delete.call_args_list
        )

    def test_get_definition(self):
        self.client.workflows.get.return_value = WORKFLOW_WITH_DEF

        self.call(workflow_cmd.GetDefinition, app_args=['name'])

        # the raw DSL definition is written straight to stdout
        self.app.stdout.write.assert_called_with(WF_DEF)

    @mock.patch('argparse.open', create=True)
    def test_validate(self, mock_open):
        self.client.workflows.validate.return_value = {'valid': True}

        result = self.call(workflow_cmd.Validate, app_args=['wf.yaml'])

        self.assertEqual((True, None), result[1])

    @mock.patch('argparse.open', create=True)
    def test_validate_failed(self, mock_open):
        self.client.workflows.validate.return_value = {
            'valid': False,
            'error': 'Invalid DSL...'
        }

        result = self.call(workflow_cmd.Validate, app_args=['wf.yaml'])

        self.assertEqual((False, 'Invalid DSL...'), result[1])
| {
"content_hash": "a2f246425226ab10d76bf1f36fc2cd07",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 76,
"avg_line_length": 29.513368983957218,
"alnum_prop": 0.5591592679833303,
"repo_name": "StackStorm/python-mistralclient",
"id": "61bec75263e26fe0d52e0959f2b3433937c0b704",
"size": "6163",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistralclient/tests/unit/v2/test_cli_workflows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "427266"
},
{
"name": "Shell",
"bytes": "6899"
}
],
"symlink_target": ""
} |
# Package version string.
__version__ = '0.1.0'
# AppConfig Django loads when this package is listed bare in INSTALLED_APPS.
default_app_config = 'fieldobjectpermissions.apps.FieldObjectPermissionsConfig'
| {
"content_hash": "ae152b3fc591e209bab157b5322d56ba",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 79,
"avg_line_length": 51,
"alnum_prop": 0.7843137254901961,
"repo_name": "aarcro/django-field-object-permissions",
"id": "ee8b51719aa983edbae860e56aad1ae301e94551",
"size": "102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fieldobjectpermissions/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10188"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import django
from django.conf import settings
from django.test.client import Client as DjangoClient
from django.test.client import ClientHandler
from django.test import testcases
from django.utils.http import urlencode
from rest_framework.settings import api_settings
from rest_framework_extensions.compat import RequestFactory as DjangoRequestFactory # changed here
from rest_framework_extensions.compat import force_bytes_or_smart_bytes, six # changed here
def force_authenticate(request, user=None, token=None):
    """Attach forced-authentication credentials to ``request``.

    Sets the private ``_force_auth_user`` / ``_force_auth_token``
    attributes that the framework checks to bypass normal authentication.
    """
    request._force_auth_user, request._force_auth_token = user, token
class APIRequestFactory(DjangoRequestFactory):
    """Request factory that renders request bodies with DRF renderers.

    Data passed to ``post``/``put``/``patch``/... is encoded with the
    renderer registered for the requested ``format`` (or the test default).
    """

    # Renderers/format configured via REST framework test settings.
    renderer_classes_list = api_settings.TEST_REQUEST_RENDERER_CLASSES
    default_format = api_settings.TEST_REQUEST_DEFAULT_FORMAT

    def __init__(self, enforce_csrf_checks=False, **defaults):
        self.enforce_csrf_checks = enforce_csrf_checks
        # Map each renderer's format string to its class for quick lookup.
        self.renderer_classes = {}
        for cls in self.renderer_classes_list:
            self.renderer_classes[cls.format] = cls
        super(APIRequestFactory, self).__init__(**defaults)

    def _encode_data(self, data, format=None, content_type=None):
        """
        Encode the data returning a two tuple of (bytes, content_type)
        """

        if not data:
            return ('', None)

        assert format is None or content_type is None, (
            'You may not set both `format` and `content_type`.'
        )

        if content_type:
            # Content type specified explicitly, treat data as a raw bytestring
            ret = force_bytes_or_smart_bytes(data, settings.DEFAULT_CHARSET)

        else:
            format = format or self.default_format

            assert format in self.renderer_classes, ("Invalid format '{0}'. "
                "Available formats are {1}. Set TEST_REQUEST_RENDERER_CLASSES "
                "to enable extra request formats.".format(
                    format,
                    ', '.join(["'" + fmt + "'" for fmt in self.renderer_classes.keys()])
                )
            )

            # Use format and render the data into a bytestring
            renderer = self.renderer_classes[format]()
            ret = renderer.render(data)

            # Determine the content-type header from the renderer
            content_type = "{0}; charset={1}".format(
                renderer.media_type, renderer.charset
            )

            # Coerce text to bytes if required.
            if isinstance(ret, six.text_type):
                ret = bytes(ret.encode(renderer.charset))

        return ret, content_type

    def get(self, path, data=None, **extra):
        r = {
            'QUERY_STRING': urlencode(data or {}, doseq=True),
        }
        # Fix to support old behavior where you have the arguments in the url
        # See #1461
        if not data and '?' in path:
            r['QUERY_STRING'] = path.split('?')[1]
        r.update(extra)
        return self.generic('GET', path, **r)

    def post(self, path, data=None, format=None, content_type=None, **extra):
        data, content_type = self._encode_data(data, format, content_type)
        return self.generic('POST', path, data, content_type, **extra)

    def put(self, path, data=None, format=None, content_type=None, **extra):
        data, content_type = self._encode_data(data, format, content_type)
        return self.generic('PUT', path, data, content_type, **extra)

    def patch(self, path, data=None, format=None, content_type=None, **extra):
        data, content_type = self._encode_data(data, format, content_type)
        return self.generic('PATCH', path, data, content_type, **extra)

    def delete(self, path, data=None, format=None, content_type=None, **extra):
        data, content_type = self._encode_data(data, format, content_type)
        return self.generic('DELETE', path, data, content_type, **extra)

    def options(self, path, data=None, format=None, content_type=None, **extra):
        data, content_type = self._encode_data(data, format, content_type)
        return self.generic('OPTIONS', path, data, content_type, **extra)

    def request(self, **kwargs):
        # Flag the request so CSRF enforcement matches this factory's setting.
        request = super(APIRequestFactory, self).request(**kwargs)
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
        return request
class ForceAuthClientHandler(ClientHandler):
    """
    A ClientHandler subclass that can stamp forced authentication
    (user and/or token) onto every outgoing request.
    """

    def __init__(self, *args, **kwargs):
        # No forced credentials until force_authenticate() sets them.
        self._force_user = None
        self._force_token = None
        super(ForceAuthClientHandler, self).__init__(*args, **kwargs)

    def get_response(self, request):
        # Earliest hook where the request object can be patched before the
        # view runs.
        force_authenticate(request, self._force_user, self._force_token)
        return super(ForceAuthClientHandler, self).get_response(request)
class APIClient(APIRequestFactory, DjangoClient):
    """Test client combining DRF's request factory with Django's client."""

    def __init__(self, enforce_csrf_checks=False, **defaults):
        super(APIClient, self).__init__(**defaults)
        self.handler = ForceAuthClientHandler(enforce_csrf_checks)
        self._credentials = {}

    def credentials(self, **kwargs):
        """
        Sets headers that will be used on every outgoing request.
        """
        self._credentials = kwargs

    def force_authenticate(self, user=None, token=None):
        """
        Forcibly authenticates outgoing requests with the given
        user and/or token.
        """
        self.handler._force_user = user
        self.handler._force_token = token
        # Clearing the user also clears any session state that may remain.
        if user is None:
            self.logout()

    def request(self, **kwargs):
        # Merge the stored credential headers into every request.
        kwargs.update(self._credentials)
        return super(APIClient, self).request(**kwargs)
class APITransactionTestCase(testcases.TransactionTestCase):
    # Django TransactionTestCase whose self.client is a DRF APIClient.
    client_class = APIClient
class APITestCase(testcases.TestCase):
    # Django TestCase whose self.client is a DRF APIClient.
    client_class = APIClient
# SimpleTestCase and LiveServerTestCase were only added in Django 1.4, so
# these API flavours exist solely on 1.4+.
if django.VERSION >= (1, 4):
    class APISimpleTestCase(testcases.SimpleTestCase):
        client_class = APIClient

    class APILiveServerTestCase(testcases.LiveServerTestCase):
        client_class = APIClient
"content_hash": "fed3f5a7eed42db27c21ef2d287d097e",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 99,
"avg_line_length": 37.91017964071856,
"alnum_prop": 0.6430263781393145,
"repo_name": "pratyushmittal/drf-extensions",
"id": "94b8da7ad076412b990049e529caff1f509ff45d",
"size": "6508",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "rest_framework_extensions/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "262"
},
{
"name": "Python",
"bytes": "234730"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from setuptools import setup
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import io
import codecs
import os
import sys
# import opencvgui
here = os.path.abspath(os.path.dirname(__file__))
def read(*filenames, **kwargs):
    """Read the given files and join their contents into a single string.

    Keyword args:
        encoding: text encoding used to open each file (default 'utf-8').
        sep: separator placed between file contents (default newline).
    """
    encoding = kwargs.get('encoding', 'utf-8')
    sep = kwargs.get('sep', '\n')
    contents = []
    for filename in filenames:
        with io.open(filename, encoding=encoding) as handle:
            contents.append(handle.read())
    return sep.join(contents)
long_description = read('README.txt', 'CHANGES.txt')
class PyTest(TestCommand):
    """``setup.py test`` command that delegates the run to pytest."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        # Run the whole suite with no extra pytest arguments.
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported lazily: pytest is only required when tests actually run.
        import pytest
        sys.exit(pytest.main(self.test_args))
setup(
    name='opencvgui',
    version='0.0.1',
    # NOTE(review): the URL and author_email below still contain unexpanded
    # Configatron template placeholders -- fill these in before publishing.
    url='http://github.com/#<Configatron::Store:0x007ff18c42aad8>/opencvgui/',
    license='MIT',
    author='Sean Mackesey',
    tests_require=['pytest'],
    install_requires=[ ],
    # Route `setup.py test` through the PyTest command class defined above.
    cmdclass={'test': PyTest},
    author_email='#<Configatron::Store:0x007ff18c42a718>',
    description='DESCRIPTION HERE',
    long_description=long_description,
    # NOTE(review): packages=[''] packages the repository root; find_packages()
    # (imported above) is probably what was intended -- confirm.
    packages=[''],
    include_package_data=True,
    platforms='any',
    test_suite='.test.test_opencvgui',
    extras_require={
        'testing': ['pytest'],
    }
)
| {
"content_hash": "351eb028070f4c3ba04cd11aaeaaea4b",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 76,
"avg_line_length": 25.785714285714285,
"alnum_prop": 0.6592797783933518,
"repo_name": "smackesey/opencvgui",
"id": "e7ae330c0989532635bbc5612a16b99ba71be65f",
"size": "1444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15837"
}
],
"symlink_target": ""
} |
def get_text_if_exists(node):
    """Return the stripped text content of `node`, or `node` itself.

    Falls back to returning the node unchanged when it has no usable
    ``text`` attribute -- either missing entirely, or set to ``None`` as
    ElementTree does for empty elements.  The original
    ``'text' in dir(node)`` check was unidiomatic and raised
    AttributeError on a ``None`` text value.
    """
    text = getattr(node, 'text', None)
    if text is not None:
        return text.strip()
    return node
| {
"content_hash": "d251f18f1c6362d2c0438996e233ba54",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 32,
"avg_line_length": 26.75,
"alnum_prop": 0.616822429906542,
"repo_name": "wkentaro/utaskweb",
"id": "5fe4e544ca8458934a8ad88319207a8df4f3c71f",
"size": "156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/utaskweb/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7343"
}
],
"symlink_target": ""
} |
import random
import itertools
import math
class coor:
	"""A 2-D board coordinate with value-based (x, y) equality."""

	def __init__(self, x, y):
		self.x = x
		self.y = y

	def __eq__(self, other):
		# Only coordinates of the same class compare equal.
		if not isinstance(other, self.__class__):
			return False
		return self.__dict__ == other.__dict__

	def __ne__(self, other):
		return not self.__eq__(other)
# 3x3 magic square: every row, column and diagonal sums to 15, so
# "three in a row" is equivalent to "three claimed numbers summing to 15".
magicSquare = [
	[4,9,2],
	[3,5,7],
	[8,1,6]
	]
# Free squares grouped by strategic value; squares are removed when claimed.
center = [coor(1,1)]
corners = [coor(0,0),coor(0,2),coor(2,0),coor(2,2)]
edges = [coor(0,1),coor(1,2),coor(2,1),coor(1,0)]
# Squares claimed so far by each side.
compCoor = []
playerCoor = []
def _claim_square(coordinate):
	"""Claim `coordinate` for the computer if it is still free.

	Removes it from whichever free-square pool contains it and records it
	in compCoor.  Returns True when the square was taken, False otherwise.
	"""
	for pool in (center, corners, edges):
		if coordinate in pool:
			pool.remove(coordinate)
			compCoor.append(coordinate)
			return True
	return False

def _complete_line(coords):
	"""Try to finish a magic-square line of 15 through any pair in `coords`.

	Used both to win (coords == compCoor) and to block the player
	(coords == playerCoor).  Returns True when a square was claimed.
	"""
	for pair in itertools.combinations(coords, 2):
		needed = 15 - (magicSquare[pair[0].x][pair[0].y] + magicSquare[pair[1].x][pair[1].y])
		for row in range(len(magicSquare)):
			if needed in magicSquare[row]:
				if _claim_square(coor(row, magicSquare[row].index(needed))):
					return True
	return False

def compPlay():
	"""Make the computer's move.

	Priority: complete a winning line, block the player's winning line,
	take the center, then a random free corner, then a random free edge.
	(The two near-identical pair-scanning loops of the original are now
	shared in _complete_line.)
	"""
	if len(compCoor) >= 2 and _complete_line(compCoor):
		return
	if len(playerCoor) >= 2 and _complete_line(playerCoor):
		return
	if len(center) > 0:
		compCoor.append(center.pop(0))
		return
	if len(corners) > 0:
		square = random.choice(corners)
		corners.remove(square)
		compCoor.append(square)
		return
	if len(edges) > 0:
		square = random.choice(edges)
		edges.remove(square)
		compCoor.append(square)
		return
def playerPlay():
	"""Prompt until the player picks a free square (1-9) and claim it."""
	while True:
		# NOTE(review): Python 2 input() eval()s what is typed; the integer
		# arithmetic below relies on that. raw_input() + int() would be safer.
		number = input("Which square would you like to pick? ")
		number -= 1
		# Map the 0-8 square index to (row, col). Under Python 2 integer
		# division number/3 already floors, so the ceil() is a no-op here --
		# effectively x = number // 3.
		x = int(math.ceil(int(number)/3))
		y = (number % 3)
		coordinate = coor(x,y)
		if(coordinate in center):
			center.remove(coordinate)
			playerCoor.append(coordinate)
			return
		elif(coordinate in corners):
			corners.remove(coordinate)
			playerCoor.append(coordinate)
			return
		elif(coordinate in edges):
			edges.remove(coordinate)
			playerCoor.append(coordinate)
			return
		print "Pick another square!"
def check():
	"""Detect and announce game end.

	Returns True when either side holds three squares whose magic-square
	values sum to 15 (a line), or when no free squares remain (a tie).
	"""
	if(len(playerCoor)>=3):
		for combos in list(itertools.combinations(playerCoor, 3)):
			sum = 0
			for combo in combos:
				sum+=magicSquare[combo.x][combo.y]
			if(sum == 15):
				print "WON!!!!!!"
				return True
	if(len(compCoor)>=3):
		for combos in list(itertools.combinations(compCoor, 3)):
			sum = 0
			for combo in combos:
				sum+=magicSquare[combo.x][combo.y]
			if(sum == 15):
				print "LOST!!!!!!"
				return True
	if(len(corners) == 0 and len(center) == 0 and len(edges) == 0):
		print "Tie"
		return True
	return False
def render():
	"""Print the current board: X = computer, O = player."""
	cells = [" "] * 9
	for square in compCoor:
		cells[3 * square.x + square.y] = "X"
	for square in playerCoor:
		cells[3 * square.x + square.y] = "O"
	for row in range(3):
		print("{0} | {1} | {2}".format(cells[3 * row], cells[3 * row + 1], cells[3 * row + 2]))
		if row < 2:
			print("---------")
def intro():
	# Print the welcome banner and the 1-9 square numbering reference grid
	# the player uses when entering moves.
	print "Welcome to the game of Computer Tic Tac Toe!!!!"
	print "Use this numbering system when inputing your move. \n"
	spaces = ["1","2","3","4","5","6","7","8","9"]
	print("{0} | {1} | {2}".format(spaces[0],spaces[1],spaces[2]))
	print("---------")
	print("{0} | {1} | {2}".format(spaces[3],spaces[4],spaces[5]))
	print("---------")
	print("{0} | {1} | {2}".format(spaces[6],spaces[7],spaces[8]))
	print "\n\n"
def main():
	# Game loop: the computer always moves first; after each move the board
	# is rendered and the loop ends as soon as check() reports win/loss/tie.
	intro()
	while True:
		print "Computer move"
		compPlay()
		render()
		if(check()):
			break
		playerPlay()
		print "\nPlayer move"
		render()
		if(check()):
			break
# Start the game immediately when the script is run.
main()
| {
"content_hash": "76cbe818cee582904fcdbe22c15bfe2f",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 97,
"avg_line_length": 24.233333333333334,
"alnum_prop": 0.6210453920220083,
"repo_name": "RoThePro/TicTacToeAI",
"id": "490bb1e59f5d14d5a252390ef4df69a9207a9dd2",
"size": "4362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tictactoe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4362"
}
],
"symlink_target": ""
} |
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = "9.2.27.dev0"
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
| {
"content_hash": "ee355a92eba91c80463517cc25a8fb53",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 118,
"avg_line_length": 29.96,
"alnum_prop": 0.7516688918558078,
"repo_name": "angr/cle",
"id": "bfd18cd7836aecccb8ffd086e28eb3dee1e28eae",
"size": "749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cle/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "609361"
}
],
"symlink_target": ""
} |
from pysal.esda.getisord import G_Local
from collections import Counter
from .utils import *
from .patch_analysis import *
from .landscape_stats import *
def diversity_map_spatial_weights(world, weights):
    """Entropy-based diversity map of `world` using a spatial-weights matrix."""
    return make_diversity_map(world, weights=weights)
def diversity_map(world, neighbor_func=get_moore_neighbors_toroidal):
    """Entropy-based diversity map of `world` using a neighborhood function."""
    return make_diversity_map(world, neighbor_func=neighbor_func)
def make_diversity_map(world, neighbor_func=None, weights=None):
    """Compute a per-cell Shannon-entropy map of local value diversity.

    Exactly one of ``neighbor_func`` or ``weights`` must be supplied:
    ``weights`` is a spatial-weights object (PySAL-style) whose cell ids are
    row-major indices into the flattened grid; ``neighbor_func`` maps a
    [x, y] cell to a list of neighbor coordinates.

    Returns a grid of the same shape as ``world`` holding the entropy of
    each cell's neighborhood (the focal cell is always included).
    """
    world_x = len(world[0])
    world_y = len(world)
    data = initialize_grid((world_x, world_y), -1)
    for y in range(world_y):
        for x in range(world_x):
            # Fixed: the original reassigned a fresh Counter twice inside
            # the weights branch; a single initialization suffices.
            local_vals = Counter()
            if weights:
                local_vals[world[y][x]] += 1
                neighbors = weights.neighbors[world_x*y + x]
                for i, n in enumerate(neighbors):
                    local_vals[world[n // world_x][n % world_x]] \
                        += weights.weights[world_x*y + x][i]
            elif neighbor_func:
                neighbors = neighbor_func([x, y], (world_x, world_y))
                neighbors.append([x, y])  # include the focal cell itself
                for n in neighbors:
                    local_vals[world[n[1]][n[0]]] += 1
            else:
                raise RuntimeError("""Diversity map needs a neighbor_func or
                    weights matrix.""")
            data[y][x] = entropy(dict(local_vals))
    return data
| {
"content_hash": "cfc3ff225af31c4b3a5132c6c2678817",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 31.340425531914892,
"alnum_prop": 0.5627970128988459,
"repo_name": "emilydolson/avida-spatial-tools",
"id": "1174d813b94f2d5a65825c4c659c4f2f86e218b7",
"size": "1473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "avidaspatial/spatial_analysis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "186137"
},
{
"name": "Shell",
"bytes": "67"
}
],
"symlink_target": ""
} |
"""Module for Shutdown event."""
from typing import Text
from ..event import Event
class Shutdown(Event):
    """Event that is emitted on shutdown of a launched system."""

    name = 'launch.events.Shutdown'

    def __init__(self, *, reason: Text = 'reason not given', due_to_sigint: bool = False) -> None:
        """Create a Shutdown event."""
        # Name-mangled attributes keep the values effectively read-only;
        # access goes through the properties below.
        self.__reason = reason
        self.__due_to_sigint = due_to_sigint

    @property
    def reason(self):
        """Reason given for the shutdown."""
        return self.__reason

    @property
    def due_to_sigint(self):
        """True when the shutdown was triggered by SIGINT."""
        return self.__due_to_sigint
| {
"content_hash": "83abfb58279912e0100c3767ace55eb1",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 98,
"avg_line_length": 24.884615384615383,
"alnum_prop": 0.6012364760432767,
"repo_name": "ros2/launch",
"id": "c5fcdcdaee70f321b7c67b56038b807b341d028b",
"size": "1249",
"binary": false,
"copies": "1",
"ref": "refs/heads/rolling",
"path": "launch/launch/events/shutdown.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "857"
},
{
"name": "C++",
"bytes": "1468"
},
{
"name": "CMake",
"bytes": "8807"
},
{
"name": "Makefile",
"bytes": "607"
},
{
"name": "Python",
"bytes": "1063971"
},
{
"name": "Shell",
"bytes": "85"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from zerver.middleware import is_slow_query
class SlowQueryTest(TestCase):
    # Pins the per-endpoint thresholds of zerver.middleware.is_slow_query:
    # the default cutoff lies between 1.1s and 2s, some endpoints get a
    # larger allowance, and others are exempted entirely.
    def test_is_slow_query(self):
        # type: () -> None
        self.assertFalse(is_slow_query(1.1, '/some/random/url'))
        self.assertTrue(is_slow_query(2, '/some/random/url'))
        self.assertTrue(is_slow_query(5.1, '/activity'))
        self.assertFalse(is_slow_query(2, '/activity'))
        self.assertFalse(is_slow_query(2, '/json/report/error'))
        self.assertFalse(is_slow_query(2, '/api/v1/deployments/report_error'))
        self.assertFalse(is_slow_query(2, '/realm_activity/whatever'))
        self.assertFalse(is_slow_query(2, '/user_activity/whatever'))
        self.assertFalse(is_slow_query(9, '/accounts/webathena_kerberos_login/'))
        self.assertTrue(is_slow_query(11, '/accounts/webathena_kerberos_login/'))
| {
"content_hash": "1359908dcb514118c5885aa0d442ccc5",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 81,
"avg_line_length": 50.588235294117645,
"alnum_prop": 0.6697674418604651,
"repo_name": "brockwhittaker/zulip",
"id": "a052090a6752e40ee1aab645332ab6d1109643bf",
"size": "861",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/tests/test_middleware.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "442662"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "515931"
},
{
"name": "JavaScript",
"bytes": "2195008"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "393671"
},
{
"name": "Puppet",
"bytes": "87413"
},
{
"name": "Python",
"bytes": "3948219"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "65702"
}
],
"symlink_target": ""
} |
# Import a whole load of stuff
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from DAQ.Environment import *
import time
def SMGo():
	# NOTE(review): both values are computed and then discarded -- this looks
	# like leftover scaffolding; confirm whether the data path is still needed.
	fileSystem = Environs.FileSystem
	dataPath = fileSystem.GetDataDirectory(fileSystem.Paths["edmDataPath"])
def SelectProfile(profileName):
	# Switch the pattern generator (global `sm`, provided by the host app)
	# to the named output profile.
	sm.SelectProfile(profileName)
def StartPattern():
	# Begin outputting the currently selected pattern on `sm`.
	sm.OutputPattern()
def StopPattern():
	# Halt pattern output on `sm`.
	sm.StopPatternOutput()
def run_script():
	# Two-step scan: output the "Scan B" pattern for 5 s, step the probe
	# AOM voltage via the hardware controller `hc`, then repeat for 5 s.
	SMGo()
	SelectProfile("Scan B")
	StartPattern()
	time.sleep(5)
	StopPattern()
	hc.UpdateProbeAOMV(8.5)
	StartPattern()
	time.sleep(5)
	StopPattern()
"content_hash": "e21e3e46cf85a40c31d6778ae77e398b",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 72,
"avg_line_length": 20.82857142857143,
"alnum_prop": 0.7681755829903978,
"repo_name": "ColdMatter/EDMSuite",
"id": "a4b3ae2cf1e4619b3f9a070ba2bf7e526d137489",
"size": "731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SMScripts/twoParameterScan.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2489"
},
{
"name": "C#",
"bytes": "6547131"
},
{
"name": "F#",
"bytes": "1565"
},
{
"name": "Forth",
"bytes": "767"
},
{
"name": "HTML",
"bytes": "241926"
},
{
"name": "Mathematica",
"bytes": "452861"
},
{
"name": "Python",
"bytes": "798129"
},
{
"name": "Shell",
"bytes": "33"
},
{
"name": "TSQL",
"bytes": "1768"
},
{
"name": "TeX",
"bytes": "8393"
}
],
"symlink_target": ""
} |
from flask.ext.restful import Api
from .health import Health
from publictitles import app, db
from publictitles.resources import TitleResource
# Expose titles as a single REST resource keyed by title number.
api = Api(app)
api.add_resource(TitleResource, '/titles/<string:title_number>')
# Register the database connectivity check with the health endpoint.
Health(app, checks=[db.health])
| {
"content_hash": "3dbccf86a526a63f28e09b8b11de9a57",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 64,
"avg_line_length": 25.8,
"alnum_prop": 0.7868217054263565,
"repo_name": "LandRegistry/public-titles",
"id": "885521bc00bc18a6cd83da70d55f8d65a8c1bf8c",
"size": "258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "publictitles/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "38"
},
{
"name": "Python",
"bytes": "13129"
},
{
"name": "Shell",
"bytes": "442"
}
],
"symlink_target": ""
} |
from django import http
from django.core.urlresolvers import reverse
from mox import IgnoreArg, IsA
from horizon import api
from horizon import test
INDEX_URL = reverse('horizon:syspanel:projects:index')
class TenantsViewTests(test.BaseAdminViewTests):
    def test_index(self):
        # Stub the keystone call and verify the index view lists all tenants.
        self.mox.StubOutWithMock(api.keystone, 'tenant_list')
        api.keystone.tenant_list(IsA(http.HttpRequest), admin=True) \
            .AndReturn(self.tenants.list())
        self.mox.ReplayAll()

        res = self.client.get(INDEX_URL)

        self.assertTemplateUsed(res, 'syspanel/projects/index.html')
        self.assertItemsEqual(res.context['table'].data, self.tenants.list())

    def test_modify_quota(self):
        tenant = self.tenants.first()
        quota = self.quotas.first()
        # Values posted through the quota form; all 1s to be distinguishable
        # from the fixture defaults.
        quota_data = {"metadata_items": 1,
                      "injected_files": 1,
                      "injected_file_content_bytes": 1,
                      "cores": 1,
                      "instances": 1,
                      "volumes": 1,
                      "gigabytes": 1,
                      "ram": 1,
                      "floating_ips": 1}
        self.mox.StubOutWithMock(api.keystone, 'tenant_get')
        self.mox.StubOutWithMock(api.nova, 'tenant_quota_get')
        self.mox.StubOutWithMock(api.nova, 'tenant_quota_update')
        api.keystone.tenant_get(IgnoreArg(), tenant.id, admin=True) \
            .AndReturn(tenant)
        api.nova.tenant_quota_get(IgnoreArg(), tenant.id).AndReturn(quota)
        # The view must push exactly the posted values back to nova.
        api.nova.tenant_quota_update(IgnoreArg(), tenant.id, **quota_data)
        self.mox.ReplayAll()

        url = reverse('horizon:syspanel:projects:quotas',
                      args=[self.tenant.id])
        quota_data.update({"method": "UpdateQuotas",
                           "tenant_id": self.tenant.id})
        res = self.client.post(url, quota_data)
        self.assertRedirectsNoFollow(res, INDEX_URL)

    def test_modify_users(self):
        self.mox.StubOutWithMock(api.keystone, 'tenant_get')
        self.mox.StubOutWithMock(api.keystone, 'user_list')
        api.keystone.tenant_get(IgnoreArg(), self.tenant.id, admin=True) \
            .AndReturn(self.tenant)
        # The view calls user_list twice: once for all users, once for the
        # users already on the tenant.
        api.keystone.user_list(IsA(http.HttpRequest)) \
            .AndReturn(self.users.list())
        api.keystone.user_list(IsA(http.HttpRequest), self.tenant.id) \
            .AndReturn([self.user])
        self.mox.ReplayAll()

        url = reverse('horizon:syspanel:projects:users',
                      args=(self.tenant.id,))
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'syspanel/projects/users.html')
| {
"content_hash": "0a567cd6a4641c797ca5e06f650b859c",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 77,
"avg_line_length": 41.95384615384615,
"alnum_prop": 0.5925925925925926,
"repo_name": "asomya/test",
"id": "30d45579fdb000b93c814486ba8a3957b0db6de9",
"size": "3377",
"binary": false,
"copies": "3",
"ref": "refs/heads/quantum-integration",
"path": "horizon/dashboards/syspanel/projects/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "33751"
},
{
"name": "JavaScript",
"bytes": "140887"
},
{
"name": "Python",
"bytes": "839260"
},
{
"name": "Shell",
"bytes": "11581"
}
],
"symlink_target": ""
} |
import frappe
from frappe.email.utils import get_port
def execute():
	"""
	1. Set default incoming email port in email domain
	2. Set default incoming email port in all email account (for those account where domain is missing)
	"""
	# force=True because the schema gained the new incoming_port field.
	frappe.reload_doc("email", "doctype", "email_domain", force=True)
	frappe.reload_doc("email", "doctype", "email_account", force=True)
	setup_incoming_email_port_in_email_domains()
	setup_incoming_email_port_in_email_accounts()
def setup_incoming_email_port_in_email_domains():
	"""Backfill ``incoming_port`` on every Email Domain, then copy each
	domain's port onto all Email Accounts that belong to it."""
	email_domains = frappe.get_all("Email Domain", ["incoming_port", "use_imap", "use_ssl", "name"])
	for domain in email_domains:
		if not domain.incoming_port:
			incoming_port = get_port(domain)
			frappe.db.set_value(
				"Email Domain", domain.name, "incoming_port", incoming_port, update_modified=False
			)
			# Bug fix: keep the in-memory record in sync so the propagation
			# below writes the freshly computed port rather than the stale
			# empty value.
			domain.incoming_port = incoming_port

		# update incoming email port in all accounts of this domain
		frappe.db.sql(
			"""update `tabEmail Account` set incoming_port=%s where domain = %s""",
			(domain.incoming_port, domain.name),
		)
def setup_incoming_email_port_in_email_accounts():
	"""Backfill ``incoming_port`` on Email Accounts that receive mail but
	have no port set (typically accounts without an Email Domain)."""
	accounts = frappe.get_all(
		"Email Account", ["incoming_port", "use_imap", "use_ssl", "name", "enable_incoming"]
	)
	for account in accounts:
		# Skip outgoing-only accounts and accounts that already have a port.
		if not account.enable_incoming or account.incoming_port:
			continue
		frappe.db.set_value(
			"Email Account", account.name, "incoming_port", get_port(account), update_modified=False
		)
| {
"content_hash": "c64e14642c66803554a7a68e2c467642",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 100,
"avg_line_length": 33.27906976744186,
"alnum_prop": 0.7092941998602376,
"repo_name": "frappe/frappe",
"id": "766e31fe67a9bef20b8e308cf6f905f5062d97ec",
"size": "1431",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/patches/v12_0/set_default_incoming_email_port.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "65093"
},
{
"name": "HTML",
"bytes": "250850"
},
{
"name": "JavaScript",
"bytes": "2523337"
},
{
"name": "Less",
"bytes": "10921"
},
{
"name": "Python",
"bytes": "3618097"
},
{
"name": "SCSS",
"bytes": "261690"
},
{
"name": "Vue",
"bytes": "98456"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
HEADRequest,
unescapeHTML,
url_basename,
RegexNotFoundError,
)
def _media_xml_tag(tag):
return '{http://search.yahoo.com/mrss/}%s' % tag
class MTVServicesInfoExtractor(InfoExtractor):
    # Shared machinery for MTV Networks sites: resolves an mgid URI through
    # the network's RSS feed service and extracts the available formats.
    # Subclasses set _FEED_URL and, optionally, _MOBILE_TEMPLATE.
    _MOBILE_TEMPLATE = None

    @staticmethod
    def _id_from_uri(uri):
        # "mgid:uma:video:site.com:1234" -> "1234"
        return uri.split(':')[-1]

    # This was originally implemented for ComedyCentral, but it also works here
    @staticmethod
    def _transform_rtmp_url(rtmp_video_url):
        # Map an rtmp(e) stream URL onto the equivalent HTTP URL on the CDN.
        m = re.match(r'^rtmpe?://.*?/(?P<finalid>gsp\..+?/.*)$', rtmp_video_url)
        if not m:
            return rtmp_video_url
        base = 'http://viacommtvstrmfs.fplive.net/'
        return base + m.group('finalid')

    def _get_feed_url(self, uri):
        # Subclasses may override this to derive the feed URL from the uri.
        return self._FEED_URL

    def _get_thumbnail_url(self, uri, itemdoc):
        # media:group/media:thumbnail carries the thumbnail, when present.
        search_path = '%s/%s' % (_media_xml_tag('group'), _media_xml_tag('thumbnail'))
        thumb_node = itemdoc.find(search_path)
        if thumb_node is None:
            return None
        else:
            return thumb_node.attrib['url']

    def _extract_mobile_video_formats(self, mtvn_id):
        # Fallback for geo-blocked desktop streams: scrape the mobile page,
        # resolve its metrics redirect, then rewrite the URL for best quality.
        webpage_url = self._MOBILE_TEMPLATE % mtvn_id
        req = compat_urllib_request.Request(webpage_url)
        # Otherwise we get a webpage that would execute some javascript
        req.add_header('Youtubedl-user-agent', 'curl/7')
        webpage = self._download_webpage(req, mtvn_id,
            'Downloading mobile page')
        metrics_url = unescapeHTML(self._search_regex(r'<a href="(http://metrics.+?)"', webpage, 'url'))
        req = HEADRequest(metrics_url)
        response = self._request_webpage(req, mtvn_id, 'Resolving url')
        url = response.geturl()
        # Transform the url to get the best quality:
        url = re.sub(r'.+pxE=mp4', 'http://mtvnmobile.vo.llnwd.net/kip0/_pxn=0+_pxK=18639+_pxE=mp4', url, 1)
        return [{'url': url, 'ext': 'mp4'}]

    def _extract_video_formats(self, mdoc, mtvn_id):
        # A source pointing at the geo-block placeholder means the video is
        # unavailable in this country; try the mobile fallback if configured.
        if re.match(r'.*/(error_country_block\.swf|geoblock\.mp4)$', mdoc.find('.//src').text) is not None:
            if mtvn_id is not None and self._MOBILE_TEMPLATE is not None:
                self.to_screen('The normal version is not available from your '
                    'country, trying with the mobile version')
                return self._extract_mobile_video_formats(mtvn_id)
            raise ExtractorError('This video is not available from your country.',
                expected=True)
        formats = []
        for rendition in mdoc.findall('.//rendition'):
            try:
                _, _, ext = rendition.attrib['type'].partition('/')
                rtmp_video_url = rendition.find('./src').text
                formats.append({'ext': ext,
                    'url': self._transform_rtmp_url(rtmp_video_url),
                    'format_id': rendition.get('bitrate'),
                    'width': int(rendition.get('width')),
                    'height': int(rendition.get('height')),
                })
            except (KeyError, TypeError):
                raise ExtractorError('Invalid rendition field.')
        self._sort_formats(formats)
        return formats

    def _get_video_info(self, itemdoc):
        # Turn one RSS <item> into an info dict: download the mediagen
        # document for the stream formats, pull title/description from item.
        uri = itemdoc.find('guid').text
        video_id = self._id_from_uri(uri)
        self.report_extraction(video_id)
        mediagen_url = itemdoc.find('%s/%s' % (_media_xml_tag('group'), _media_xml_tag('content'))).attrib['url']
        # Remove the templates, like &device={device}
        mediagen_url = re.sub(r'&[^=]*?={.*?}(?=(&|$))', '', mediagen_url)
        if 'acceptMethods' not in mediagen_url:
            mediagen_url += '&acceptMethods=fms'
        mediagen_doc = self._download_xml(mediagen_url, video_id,
            'Downloading video urls')
        description_node = itemdoc.find('description')
        if description_node is not None:
            description = description_node.text.strip()
        else:
            description = None
        # NOTE(review): the assignment just below makes the first `is None`
        # check always true -- looks like leftover scaffolding.
        title_el = None
        if title_el is None:
            title_el = find_xpath_attr(
                itemdoc, './/{http://search.yahoo.com/mrss/}category',
                'scheme', 'urn:mtvn:video_title')
        if title_el is None:
            title_el = itemdoc.find('.//{http://search.yahoo.com/mrss/}title')
        if title_el is None:
            title_el = itemdoc.find('.//title')
            if title_el.text is None:
                title_el = None
        title = title_el.text
        if title is None:
            raise ExtractorError('Could not find video title')
        title = title.strip()
        # This a short id that's used in the webpage urls
        mtvn_id = None
        mtvn_id_node = find_xpath_attr(itemdoc, './/{http://search.yahoo.com/mrss/}category',
            'scheme', 'urn:mtvn:id')
        if mtvn_id_node is not None:
            mtvn_id = mtvn_id_node.text
        return {
            'title': title,
            'formats': self._extract_video_formats(mediagen_doc, mtvn_id),
            'id': video_id,
            'thumbnail': self._get_thumbnail_url(uri, itemdoc),
            'description': description,
        }

    def _get_videos_info(self, uri):
        # Download the RSS feed for this uri and build a playlist out of all
        # of its items (multi-segment videos yield several entries).
        video_id = self._id_from_uri(uri)
        feed_url = self._get_feed_url(uri)
        data = compat_urllib_parse.urlencode({'uri': uri})
        idoc = self._download_xml(
            feed_url + '?' + data, video_id,
            'Downloading info', transform_source=fix_xml_ampersands)
        return self.playlist_result(
            [self._get_video_info(item) for item in idoc.findall('.//item')])

    def _real_extract(self, url):
        title = url_basename(url)
        webpage = self._download_webpage(url, title)
        try:
            # the url can be http://media.mtvnservices.com/fb/{mgid}.swf
            # or http://media.mtvnservices.com/{mgid}
            og_url = self._og_search_video_url(webpage)
            mgid = url_basename(og_url)
            if mgid.endswith('.swf'):
                mgid = mgid[:-4]
        except RegexNotFoundError:
            mgid = None
        # Fall back to scraping the mgid out of the page markup.
        if mgid is None or ':' not in mgid:
            mgid = self._search_regex(
                [r'data-mgid="(.*?)"', r'swfobject.embedSWF\(".*?(mgid:.*?)"'],
                webpage, 'mgid')
        return self._get_videos_info(mgid)
class MTVServicesEmbeddedIE(MTVServicesInfoExtractor):
    # Handles the generic media.mtvnservices.com embed player URLs.
    IE_NAME = 'mtvservices:embedded'
    _VALID_URL = r'https?://media\.mtvnservices\.com/embed/(?P<mgid>.+?)(\?|/|$)'
    _TEST = {
        # From http://www.thewrap.com/peter-dinklage-sums-up-game-of-thrones-in-45-seconds-video/
        'url': 'http://media.mtvnservices.com/embed/mgid:uma:video:mtv.com:1043906/cp~vid%3D1043906%26uri%3Dmgid%3Auma%3Avideo%3Amtv.com%3A1043906',
        'md5': 'cb349b21a7897164cede95bd7bf3fbb9',
        'info_dict': {
            'id': '1043906',
            'ext': 'mp4',
            'title': 'Peter Dinklage Sums Up \'Game Of Thrones\' In 45 Seconds',
            'description': '"Sexy sexy sexy, stabby stabby stabby, beautiful language," says Peter Dinklage as he tries summarizing "Game of Thrones" in under a minute.',
        },
    }

    def _get_feed_url(self, uri):
        # The feed endpoint is site-specific: download the player config for
        # this site id and read the feed URL out of it.
        video_id = self._id_from_uri(uri)
        site_id = uri.replace(video_id, '')
        config_url = ('http://media.mtvnservices.com/pmt/e1/players/{0}/'
            'context4/context5/config.xml'.format(site_id))
        config_doc = self._download_xml(config_url, video_id)
        feed_node = config_doc.find('.//feed')
        feed_url = feed_node.text.strip().split('?')[0]
        return feed_url

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        mgid = mobj.group('mgid')
        return self._get_videos_info(mgid)
class MTVIE(MTVServicesInfoExtractor):
    # Extractor for www.mtv.com video pages and m.mtv.com video URLs.
    _VALID_URL = r'''(?x)^https?://
        (?:(?:www\.)?mtv\.com/videos/.+?/(?P<videoid>[0-9]+)/[^/]+$|
            m\.mtv\.com/videos/video\.rbml\?.*?id=(?P<mgid>[^&]+))'''
    _FEED_URL = 'http://www.mtv.com/player/embed/AS3/rss/'
    _TESTS = [
        {
            'url': 'http://www.mtv.com/videos/misc/853555/ours-vh1-storytellers.jhtml',
            'file': '853555.mp4',
            'md5': '850f3f143316b1e71fa56a4edfd6e0f8',
            'info_dict': {
                'title': 'Taylor Swift - "Ours (VH1 Storytellers)"',
                'description': 'Album: Taylor Swift performs "Ours" for VH1 Storytellers at Harvey Mudd College.',
            },
        },
        {
            'add_ie': ['Vevo'],
            'url': 'http://www.mtv.com/videos/taylor-swift/916187/everything-has-changed-ft-ed-sheeran.jhtml',
            'file': 'USCJY1331283.mp4',
            'md5': '73b4e7fcadd88929292fe52c3ced8caf',
            'info_dict': {
                'title': 'Everything Has Changed',
                'upload_date': '20130606',
                'uploader': 'Taylor Swift',
            },
            'skip': 'VEVO is only available in some countries',
        },
    ]

    def _get_thumbnail_url(self, uri, itemdoc):
        # mtv.com thumbnails are addressed directly by mgid uri.
        return 'http://mtv.mtvnimages.com/uri/' + uri

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('videoid')
        uri = mobj.groupdict().get('mgid')
        if uri is None:
            webpage = self._download_webpage(url, video_id)
            # Some videos come from Vevo.com
            m_vevo = re.search(r'isVevoVideo = true;.*?vevoVideoId = "(.*?)";',
                webpage, re.DOTALL)
            if m_vevo:
                vevo_id = m_vevo.group(1)
                self.to_screen('Vevo video detected: %s' % vevo_id)
                return self.url_result('vevo:%s' % vevo_id, ie='Vevo')
            # Otherwise scrape the mgid uri out of the page markup.
            uri = self._html_search_regex(r'/uri/(.*?)\?', webpage, 'uri')
        return self._get_videos_info(uri)
class MTVIggyIE(MTVServicesInfoExtractor):
    # mtviggy.com variant of the MTV extractor; only the feed URL differs.
    IE_NAME = 'mtviggy.com'
    _VALID_URL = r'https?://www\.mtviggy\.com/videos/.+'
    _TEST = {
        'url': 'http://www.mtviggy.com/videos/arcade-fire-behind-the-scenes-at-the-biggest-music-experiment-yet/',
        'info_dict': {
            'id': '984696',
            'ext': 'mp4',
            'title': 'Arcade Fire: Behind the Scenes at the Biggest Music Experiment Yet',
        }
    }
    _FEED_URL = 'http://all.mtvworldverticals.com/feed-xml/'
| {
"content_hash": "cf33e8d59d15a26c39c3c2107c2dbb05",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 170,
"avg_line_length": 39.99256505576208,
"alnum_prop": 0.5526120096672243,
"repo_name": "janusnic/youtube-dl-GUI",
"id": "5ebc78033a4abbb98310096c279fe11459b4a791",
"size": "10758",
"binary": false,
"copies": "20",
"ref": "refs/heads/master",
"path": "youtube_dl/extractor/mtv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Inno Setup",
"bytes": "7102"
},
{
"name": "Python",
"bytes": "2064276"
}
],
"symlink_target": ""
} |
"""Implements usefule CSV utilities."""
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import next
from builtins import str as newstr
from builtins import range
from builtins import object
import csv
import os
import pandas as pd
import random
import sys
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import tempfile
import datalab.storage
import datalab.utils
_MAX_CSV_BYTES = 10000000
class Csv(object):
  """Represents a CSV file in GCS or locally with same schema.

  The file may live on local disk or in Google Cloud Storage (a path
  starting with 'gs://'); both are parsed with the same delimiter.
  """

  def __init__(self, path, delimiter=b','):
    """Initializes an instance of a Csv instance.

    Args:
      path: path of the Csv file.
      delimiter: the separator used to parse a Csv line.
    """
    self._path = path
    self._delimiter = delimiter

  @property
  def path(self):
    """The local or GCS path this instance was created with."""
    return self._path

  def _delimiter_as_text(self):
    """Return the delimiter as a text (str) value.

    The historical default is the bytes literal b','. Python 3's csv
    module and recent pandas releases reject a bytes delimiter, so decode
    it here; on Python 2 str is already bytes, so nothing changes there.
    """
    if sys.version_info[0] >= 3 and isinstance(self._delimiter, bytes):
      return self._delimiter.decode('utf-8')
    return self._delimiter

  @staticmethod
  def _read_gcs_lines(path, max_lines=None):
    """Read up to max_lines lines from a GCS object."""
    return datalab.storage.Item.from_url(path).read_lines(max_lines)

  @staticmethod
  def _read_local_lines(path, max_lines=None):
    """Read up to max_lines lines (trailing newlines kept) from a local file."""
    lines = []
    for line in open(path):
      if max_lines is not None and len(lines) >= max_lines:
        break
      lines.append(line)
    return lines

  def _is_probably_categorical(self, column):
    """Heuristically decide whether a DataFrame column is categorical.

    A column qualifies when it holds strings (dtype 'object'), no value is
    longer than 100 characters, and there are at most 100 distinct values.
    """
    if newstr(column.dtype) != 'object':
      # only string types (represented in DataFrame as object) can potentially be categorical
      return False
    if len(column) == 0:
      # An empty column carries no category information, and max() below
      # would raise ValueError on an empty sequence.
      return False
    if len(max(column, key=lambda p: len(newstr(p)))) > 100:
      return False  # value too long to be a category
    if len(set(column)) > 100:
      return False  # too many unique values to be a category
    return True

  def browse(self, max_lines=None, headers=None):
    """Try reading specified number of lines from the CSV object.

    Args:
      max_lines: max number of lines to read. If None, the whole file is read
      headers: a list of strings as column names. If None, it will use "col0, col1..."
    Returns:
      A pandas DataFrame with the schema inferred from the data.
    Raises:
      Exception if the csv object cannot be read or not enough lines to read, or the
      headers size does not match columns size.
    """
    if self.path.startswith('gs://'):
      lines = Csv._read_gcs_lines(self.path, max_lines)
    else:
      lines = Csv._read_local_lines(self.path, max_lines)
    if len(lines) == 0:
      return pd.DataFrame(columns=headers)
    # Use the text form of the delimiter: csv.reader and pandas reject
    # the historical bytes default b',' on Python 3.
    delimiter = self._delimiter_as_text()
    columns_size = len(next(csv.reader([lines[0]], delimiter=delimiter)))
    if headers is None:
      headers = ['col' + newstr(e) for e in range(columns_size)]
    if len(headers) != columns_size:
      raise Exception('Number of columns in CSV do not match number of headers')
    buf = StringIO()
    for line in lines:
      buf.write(line)
      buf.write('\n')
    buf.seek(0)
    # Local lines keep their trailing newline, so the extra '\n' above can
    # produce blank lines; pandas skips those by default (skip_blank_lines).
    df = pd.read_csv(buf, names=headers, delimiter=delimiter)
    # NOTE: iteritems() was removed in pandas 2.0; items() is the
    # long-supported equivalent.
    for key, col in df.items():
      if self._is_probably_categorical(col):
        df[key] = df[key].astype('category')
    return df

  def _create_federated_table(self, skip_header_rows):
    """Create a BigQuery federated table over this GCS CSV.

    All columns are typed STRING because the table is only used for row
    sampling/counting, not typed analysis.
    """
    import datalab.bigquery as bq
    df = self.browse(1, None)
    # read each column as STRING because we only want to sample rows.
    schema_train = bq.Schema([{'name': name, 'type': 'STRING'} for name in df.keys()])
    options = bq.CSVOptions(skip_leading_rows=(1 if skip_header_rows is True else 0))
    return bq.FederatedTable.from_storage(self.path,
                                          csv_options=options,
                                          schema=schema_train,
                                          max_bad_records=0)

  def _get_gcs_csv_row_count(self, federated_table):
    """Count the rows of a GCS CSV by querying its federated table."""
    import datalab.bigquery as bq
    results = bq.Query('SELECT count(*) from data',
                       data_sources={'data': federated_table}).results()
    return results[0].values()[0]

  def sample_to(self, count, skip_header_rows, strategy, target):
    """Sample rows from GCS or local file and save results to target file.

    Args:
      count: number of rows to sample. If strategy is "BIGQUERY", it is used as approximate number.
      skip_header_rows: whether to skip first row when reading from source.
      strategy: can be "LOCAL" or "BIGQUERY". If local, the sampling happens in local memory,
          and number of resulting rows matches count. If BigQuery, sampling is done
          with BigQuery in cloud, and the number of resulting rows will be approximated to
          count.
      target: The target file path, can be GCS or local path.
    Raises:
      Exception if strategy is "BIGQUERY" but source is not a GCS path.
    """
    # TODO(qimingj) Add unit test
    # Read data from source into DataFrame.
    if sys.version_info.major > 2:
      xrange = range  # for python 3 compatibility
    if strategy == 'BIGQUERY':
      import datalab.bigquery as bq
      if not self.path.startswith('gs://'):
        raise Exception('Cannot use BIGQUERY if data is not in GCS')
      federated_table = self._create_federated_table(skip_header_rows)
      row_count = self._get_gcs_csv_row_count(federated_table)
      query = bq.Query('SELECT * from data', data_sources={'data': federated_table})
      sampling = bq.Sampling.random(count * 100 / float(row_count))
      sample = query.sample(sampling=sampling)
      df = sample.to_dataframe()
    elif strategy == 'LOCAL':
      local_file = self.path
      if self.path.startswith('gs://'):
        # NOTE(review): tempfile.mktemp() is race-prone; kept for behavior
        # compatibility since the name is immediately consumed by the copy.
        local_file = tempfile.mktemp()
        datalab.utils.gcs_copy_file(self.path, local_file)
      with open(local_file) as f:
        row_count = sum(1 for line in f)
      start_row = 1 if skip_header_rows is True else 0
      skip_count = row_count - count - 1 if skip_header_rows is True else row_count - count
      skip = sorted(random.sample(xrange(start_row, row_count), skip_count))
      header_row = 0 if skip_header_rows is True else None
      df = pd.read_csv(local_file, skiprows=skip, header=header_row,
                       delimiter=self._delimiter_as_text())
      if self.path.startswith('gs://'):
        os.remove(local_file)
    else:
      raise Exception('strategy must be BIGQUERY or LOCAL')
    # Write to target.
    if target.startswith('gs://'):
      with tempfile.NamedTemporaryFile() as f:
        df.to_csv(f, header=False, index=False)
        f.flush()
        datalab.utils.gcs_copy_file(f.name, target)
    else:
      with open(target, 'w') as f:
        # str() around the bytes delimiter previously produced "b','" on
        # Python 3; use the decoded text delimiter instead.
        df.to_csv(f, header=False, index=False, sep=self._delimiter_as_text())
| {
"content_hash": "61f249a395afe3aa63db09dbd532d612",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 99,
"avg_line_length": 37.85964912280702,
"alnum_prop": 0.6524559777571826,
"repo_name": "parthea/pydatalab",
"id": "e0c75990d41e70135134705a8da7c66d7f5322fe",
"size": "7063",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "datalab/data/_csv.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7596"
},
{
"name": "Python",
"bytes": "1919296"
},
{
"name": "Shell",
"bytes": "3512"
},
{
"name": "TypeScript",
"bytes": "105129"
}
],
"symlink_target": ""
} |
from dataset.freeze.format.fjson import JSONSerializer
class TabsonSerializer(JSONSerializer):
    """JSON serializer variant that emits a tabular (fields + rows) payload."""

    def wrap(self, result):
        """Re-shape *result* rows into a {count, fields, data} envelope."""
        field_specs, rows = [], []
        if len(result):
            # Column order is taken from the first row's keys.
            columns = result[0].keys()
            field_specs = [{'id': name} for name in columns]
            rows = [[record.get(name) for name in columns]
                    for record in result]
        envelope = {
            'count': self.query.count,
            'fields': field_specs,
            'data': rows
        }
        meta = self.export.get('meta', {})
        if meta is not None:
            # An explicit null in the export config suppresses metadata.
            envelope['meta'] = meta
        return envelope
| {
"content_hash": "7a75313156f5334f2abda4957ba82e0c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 54,
"avg_line_length": 26.5,
"alnum_prop": 0.47955974842767296,
"repo_name": "aashish24/dataset",
"id": "6b904682a4e35dedb866f1aa48b6e9825d46cfd9",
"size": "636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dataset/freeze/format/ftabson.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
import re
from mnb_makemnb import *
from mnb_misc import *
from mnb_rpc import *
import simplejson as json
def start_masternode(
        mns_to_start,
        protocolversion,
        blockcount,
        access,
        client,
        announce,
        mpath,
        whalemode):
    """Build masternode broadcast (mnb) messages and optionally relay them.

    Args:
        mns_to_start: list of masternode config dicts to start.
        protocolversion: protocol version embedded in each mnb.
        blockcount: current block count used when building each mnb.
        access: RPC access handle passed through to rpc_masternode().
        client: client identifier forwarded to make_mnb().
        announce: if True, relay the broadcasts after decode-verification;
            otherwise only build and verify them.
        mpath: wallet/key path forwarded to make_mnb().
        whalemode: if True, skip the interactive confirmation prompt
            before relaying.
    """
    if announce:
        print('\n[making mnbs and relay]')
    else:
        print('\n[making mnbs and quit]')
    print_hw_wallet_check()

    # Build one mnb hex blob per masternode.
    masternodebroadcast = []
    for m in mns_to_start:
        mnbhex = make_mnb(
            m.get('alias'),
            protocolversion,
            blockcount,
            m,
            access,
            client,
            mpath)
        masternodebroadcast.append(mnbhex)

    # Process in batches of 10 mnbs per RPC call; each batch is prefixed
    # with its element count as a varint.
    mnbsublist = [masternodebroadcast[i:i + 10]
                  for i in range(0, len(masternodebroadcast), 10)]
    for mnbs in mnbsublist:
        vc = num_to_varint(len(mnbs)).hex()
        vm = ''.join(mnbs)
        print('mnb_hex : ', vc + vm)
        verify = rpc_masternode("decode", vc + vm, access)
        match1 = re.search(
            '^Successfully decoded broadcast messages for (.*) masternodes, failed to decode (.*), total (.*)$',
            verify.get('overall'))
        decoded = {}
        decoded['success'] = match1.group(1)
        decoded['failed'] = match1.group(2)
        decoded['total'] = match1.group(3)
        print('\n---> verify(decoding mnb)')
        print('\t---> total   : ' + decoded['total'])
        print('\t---> success : ' + decoded['success'])
        print('\t---> failed  : ' + decoded['failed'])
        print()
        if decoded['success'] != decoded['total']:
            print(
                json.dumps(
                    verify,
                    sort_keys=True,
                    indent=4,
                    separators=(
                        ',',
                        ': ')))
            err_msg = 'error occurred while verifying mnb hex'
            print_err_exit(
                get_caller_name(),
                get_function_name(),
                err_msg)
        if announce:
            if not whalemode:
                user_input = input(
                    '\nRelay broadcast messages ? Yes / (any key to no) : ')
                if user_input.lower() == 'yes':
                    print('Yes, will relay')
                else:
                    print('No.')
                    return
            relay = rpc_masternode("relay", vc + vm, access)
            match2 = re.search(
                '^Successfully relayed broadcast messages for (.*) masternodes, failed to relay (.*), total (.*)$',
                relay.get('overall'))
            relayed = {}
            # BUGFIX: these three previously read from match1 (the decode
            # result), so relay statistics always mirrored the decode stats.
            relayed['success'] = match2.group(1)
            relayed['failed'] = match2.group(2)
            relayed['total'] = match2.group(3)
            print('\n---> relay(announcing mnb)')
            print('\t---> total   : ' + relayed['total'])
            print('\t---> success : ' + relayed['success'])
            print('\t---> failed  : ' + relayed['failed'])
            print()
            if relayed['success'] != relayed['total']:
                print(
                    json.dumps(
                        relay,
                        sort_keys=True,
                        indent=4,
                        separators=(
                            ',',
                            ': ')))
    # end
| {
"content_hash": "8c31f96909807b4bcdc60782acec0e49",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 115,
"avg_line_length": 28.06451612903226,
"alnum_prop": 0.4528735632183908,
"repo_name": "chaeplin/dashmnb",
"id": "37bb323d4f81894ba0619e17270a2d3745d6f230",
"size": "3480",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dashlib/mnb_start.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "159426"
},
{
"name": "Shell",
"bytes": "2516"
}
],
"symlink_target": ""
} |
"""Scraper for [Full name of court]
CourtID: [unique abbreviation to be used by software/filesystem]
Court Short Name: [standard abbreviation used in citations]
Author:
Reviewer:
History:
YYYY-MM-DD: Created by XXX
"""
from lxml import html
from juriscraper.OpinionSite import OpinionSite
from juriscraper.lib.string_utils import titlecase
from juriscraper.lib.string_utils import convert_date_string
class Site(OpinionSite):
    """Template for a new OpinionSite scraper.

    Replace the placeholder URL and XPath expressions, delete the optional
    methods you do not need, and remove the guidance comments before
    submitting.
    """

    def __init__(self, *args, **kwargs):
        super(Site, self).__init__(*args, **kwargs)
        self.court_id = self.__module__
        self.url = 'http://court-url.gov/some-path.html'
        # Populate only if you implement a backscraper.
        self.back_scrape_iterable = None
        # The next two attributes configure a POST request; delete both
        # when the site is fetched with a plain GET.
        self.parameters = {}
        self.method = 'POST'
        self.uses_selenium = False

    # ------------------------------------------------------------------
    # Required fields -- InsanityExceptions are thrown if any is omitted.
    # ------------------------------------------------------------------

    def _get_download_urls(self):
        """URLs of the opinion documents.

        A single XPath query is usually all that is needed; most methods
        follow this pattern. Relative URLs are made absolute by the
        AbstractSite cleanup routines, so there is no need to do so here.
        """
        return list(self.html.xpath('//path/to/text/text()'))

    def _get_case_names(self):
        """Case names, e.g. 'Nadim v. Jenny'.

        Extracts the full text of an element even when it contains child
        elements, e.g. <strong>Nadim v. <em>Jenny</em></strong> yields
        'Nadim v. Jenny'. Apply titlecase() when the site publishes names
        in all caps.
        """
        elements = self.html.xpath('//path/to/an/element/p')
        return [titlecase(html.tostring(element, method='text',
                                        encoding='unicode'))
                for element in elements]

    def _get_case_dates(self):
        """Dates of the opinions.

        The parsing inside convert_date_string() may need adjusting to the
        site's formats; datetime format codes are documented at
        http://docs.python.org/2/library/datetime.html
        """
        return [convert_date_string(raw_date)
                for raw_date in self.html.xpath('//path/to/text/text()')]

    def _get_precedential_statuses(self):
        """Precedential status for each opinion.

        Normalize to 'Published' or 'Unpublished' whenever possible.
        """
        statuses = []
        for element in self.html.xpath('//path/to/text/text()'):
            text = html.tostring(element, method='text', encoding='unicode')
            if 'Opinion' in text:
                statuses.append('Published')
            elif 'Nonprecedential' in text:
                statuses.append('Unpublished')
            else:
                statuses.append('Unknown')
        return statuses

    # ------------------------------------------------------------------
    # High priority fields -- remove unused methods before submission.
    # ------------------------------------------------------------------

    def _get_docket_numbers(self):
        """Docket numbers, typically of the form ##-####."""
        return None

    def _get_neutral_citations(self):
        """Neutral citations: often year, state abbreviation and sequence
        number, e.g. '2013 Neb. 12' for the 12th opinion issued in 2013."""
        return None

    def _get_judges(self):
        """Name of the judge authoring the lead opinion."""
        return None

    def _get_lower_courts(self):
        """Court appealed from, when this is an appellate court's opinion:
        e.g. 'N.D. Cal.' for a 9th Circuit case."""
        return None

    def _get_nature_of_suit(self):
        """NOS code or phrase describing the subject matter, such as
        'Contract', 'Civil Rights' or 'Immigration'."""
        return None

    def _get_summaries(self):
        """Court-provided summary of the case or the opinion's holding."""
        return None

    def _get_west_citations(self):
        """West citations; these only exist once a document is in a bound
        volume, so you are unlikely to need them outside a backscraper."""
        return None

    def _get_west_state_citations(self):
        """West state citations; like West citations, these only exist once
        a document is in a bound volume, so mostly backscraper-relevant."""
        return None

    # ------------------------------------------------------------------
    # Optional fields -- remove unused methods before submission.
    # ------------------------------------------------------------------

    def _get_adversary_numbers(self):
        """Adversary numbers: similar to docket numbers, but found only in
        bankruptcy cases."""
        return None

    def _get_causes(self):
        """Plaintiff's or appellant's cause of action, e.g. 'Employment',
        'Trademark'."""
        return None

    def _get_dispositions(self):
        """Affirmed, Reversed, Vacated; anything longer than a sentence
        probably belongs in _get_summaries instead."""
        return None

    def _get_docket_attachment_numbers(self):
        """Attachment number within a docket item: item 12 might be a
        declaration with attachments numbered 1-5, and that 1-5 goes here."""
        return None

    def _get_docket_document_numbers(self):
        """Line numbers of the documents as they appear on the docket."""
        return None

    def _get_lower_court_judges(self):
        """Judge who handled the case in the lower court -- the court
        appealed from, e.g. the N.D. Cal. judge for a 9th Cir. case."""
        return None

    def _get_lower_court_numbers(self):
        """Docket or other relevant case numbers from the lower court --
        the court appealed from."""
        return None

    # ------------------------------------------------------------------
    # Optional methods for special purposes
    # ------------------------------------------------------------------

    @staticmethod
    def cleanup_content(content):
        """Strip a downloaded page down to its bare essentials.

        Courts publish opinions wrapped in headers, footers and other
        chrome. The caller first uses the Site object for all metadata
        parsing, then invokes this method to discard everything but the
        essentials before saving.
        """
        return content

    def _download_backwards(self, date_str):
        """Repoint this Site at older content for backscraping.

        A backscraper driver (see the one in CourtListener/alert/scrapers)
        calls this once per iteration value -- usually a date or a simple
        index -- mutating attributes such as self.url so the court's entire
        site can be paginated through. This docstring is also a good home
        for notes useful to future backscraper development.
        """
        self.url = 'http://example.com/new/url/%s' % date_str
        self.html = self._download()
| {
"content_hash": "5c5a4963788358c57c0584e38afdd622",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 77,
"avg_line_length": 34.272,
"alnum_prop": 0.6188141923436041,
"repo_name": "m4h7/juriscraper",
"id": "6548ce320bb87d0184752f8d21bcedea7685becb",
"size": "8568",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "juriscraper/opinions/opinion_template.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "27160373"
},
{
"name": "Makefile",
"bytes": "88"
},
{
"name": "Python",
"bytes": "623951"
}
],
"symlink_target": ""
} |
import logging
import sys
if (sys.version_info > (3,)):
from urllib.parse import quote
else:
from urllib import quote
from io import StringIO
import os
import jsonpickle
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
from cairis.mio.ModelImport import importModelFile
import os
__author__ = 'Shamal Faily'
class UserAPITests(CairisDaemonTestCase):
  """API tests for the CAIRIS daemon's /api/user endpoint."""

  @classmethod
  def setUpClass(cls):
    # No shared fixtures are needed for these tests.
    pass

  def setUp(self):
    # Per-test logger so records carry this module's name.
    self.logger = logging.getLogger(__name__)

  def test_user(self):
    """GET /api/user returns the test account's name and email."""
    # BUGFIX: the log label previously said 'test_version' (copy/paste slip).
    method = 'test_user'
    url = '/api/user?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    self.assertIsNotNone(rv.data, 'No response')
    # rv.data is bytes under Python 3; decode before deserializing.
    if (sys.version_info > (3,)):
      responseData = rv.data.decode('utf-8')
    else:
      responseData = rv.data
    objt = jsonpickle.decode(responseData)
    self.assertIsNotNone(objt, 'No results after deserialization')
    self.assertEqual(objt['name'],'CAIRIS test user account')
    self.assertEqual(objt['email'],'cairis_test')
| {
"content_hash": "e2c8d36303e8a756f69ae7ac4a5684d5",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 66,
"avg_line_length": 27.44736842105263,
"alnum_prop": 0.7056567593480345,
"repo_name": "failys/CAIRIS",
"id": "fd3e9d6f257756c6d2034bb8a3c46af81d1a9ecc",
"size": "1841",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cairis/test/test_UserAPI.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11265"
},
{
"name": "Makefile",
"bytes": "1017"
},
{
"name": "Python",
"bytes": "2813021"
},
{
"name": "Shell",
"bytes": "6461"
},
{
"name": "XSLT",
"bytes": "35533"
}
],
"symlink_target": ""
} |
import os
import re
import shutil
import tempfile
import time
import jinja2
import jinja2.meta
import jsonschema
from rally.common.i18n import _, _LI
from rally.common import log as logging
from rally.common import objects
from rally import consts
from rally.deployment import engine as deploy_engine
from rally import exceptions
from rally import osclients
from rally.task import engine
from rally.verification.tempest import tempest
LOG = logging.getLogger(__name__)
class Deployment(object):
    """Facade over the deployment lifecycle: create, destroy, recreate,
    fetch, and service discovery."""

    @classmethod
    def create(cls, config, name):
        """Create a deployment.

        :param config: a dict with deployment configuration
        :param name: a str represents a name of the deployment
        :returns: Deployment object
        """
        try:
            dpl = objects.Deployment(name=name, config=config)
        except exceptions.DeploymentNameExists as e:
            if logging.is_debug():
                LOG.exception(e)
            raise

        engine_inst = deploy_engine.Engine.get_engine(
            dpl["config"]["type"], dpl)
        try:
            engine_inst.validate()
        except jsonschema.ValidationError:
            LOG.error(_("Deployment %s: Schema validation error.") %
                      dpl["uuid"])
            dpl.update_status(consts.DeployStatus.DEPLOY_FAILED)
            raise

        with engine_inst:
            dpl.update_endpoints(engine_inst.make_deploy())
            return dpl

    @classmethod
    def destroy(cls, deployment):
        """Destroy the deployment.

        :param deployment: UUID or name of the deployment
        """
        # TODO(akscram): We have to be sure that there are no running
        #                tasks for this deployment.
        # TODO(akscram): Check that the deployment have got a status that
        #                is equal to "*->finished" or "deploy->inconsistent".
        dpl = objects.Deployment.get(deployment)
        engine_inst = deploy_engine.Engine.get_engine(
            dpl["config"]["type"], dpl)
        tempest.Tempest(dpl["uuid"]).uninstall()
        with engine_inst:
            engine_inst.make_cleanup()
        dpl.delete()

    @classmethod
    def recreate(cls, deployment):
        """Performs a cleanup and then makes a deployment again.

        :param deployment: UUID or name of the deployment
        """
        dpl = objects.Deployment.get(deployment)
        engine_inst = deploy_engine.Engine.get_engine(
            dpl["config"]["type"], dpl)
        with engine_inst:
            engine_inst.make_cleanup()
            dpl.update_endpoints(engine_inst.make_deploy())

    @classmethod
    def get(cls, deployment):
        """Fetch an existing deployment.

        :param deployment: UUID or name of the deployment
        :returns: Deployment instance
        """
        return objects.Deployment.get(deployment)

    @classmethod
    def service_list(cls, deployment):
        """List the OpenStack services of a deployment.

        :param deployment: Deployment object
        :returns: Service list
        """
        # TODO(kun): put this work into objects.Deployment
        admin_clients = osclients.Clients(
            objects.Credential(**deployment["admin"]))
        return admin_clients.services()
class Task(object):
    """Facade over task template rendering, validation, execution and
    lifecycle management."""

    @classmethod
    def render_template(cls, task_template, template_dir="./", **kwargs):
        """Render jinja2 task template to Rally input task.

        :param task_template: String that contains template
        :param template_dir: The path of directory contain template files
        :param kwargs: Dict with template arguments
        :returns: rendered template str
        :raises TypeError: if a template variable is supplied neither in
            kwargs nor via a default/builtin/global
        """
        def is_really_missing(mis, task_template):
            # NOTE(boris-42): Removing variables that have default values from
            #                 missing. Construction that won't be properly
            #                 checked is {% set x = x or 1}
            # (raw strings so the \s escapes are taken literally by re)
            if re.search(mis.join([r"{%\s*set\s+", r"\s*=\s*", r"[^\w]+"]),
                         task_template):
                return False
            # NOTE(jlk): Also check for a default filter which can show up as
            #            a missing variable
            if re.search(mis + r"\s*\|\s*default\(", task_template):
                return False
            return True

        # NOTE(boris-42): We have to import builtins to get the full list of
        #                 builtin functions (e.g. range()). Unfortunately,
        #                 __builtins__ doesn't return them (when it is not
        #                 main module)
        from six.moves import builtins

        env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(template_dir, encoding="utf8"))
        env.globals.update(cls.create_template_functions())
        ast = env.parse(task_template)
        # NOTE(Julia Varigina):
        # Bug in jinja2.meta.find_undeclared_variables
        #
        # The method shows inconsistent behavior:
        # it does not return undeclared variables that appear
        # in included templates only (via {%- include "some_template.yaml"-%})
        # and in the same time is declared in jinja2.Environment.globals.
        #
        # Despite this bug, jinja resolves values declared in
        # jinja2.Environment.globals for both types of undeclared variables
        # and successfully renders templates in both cases.
        required_kwargs = jinja2.meta.find_undeclared_variables(ast)
        missing = (set(required_kwargs) - set(kwargs) - set(dir(builtins)) -
                   set(env.globals))
        real_missing = [mis for mis in missing
                        if is_really_missing(mis, task_template)]

        if real_missing:
            multi_msg = _("Please specify next template task arguments: %s")
            single_msg = _("Please specify template task argument: %s")
            raise TypeError((len(real_missing) > 1 and multi_msg or single_msg)
                            % ", ".join(real_missing))

        return env.from_string(task_template).render(**kwargs)

    @classmethod
    def create_template_functions(cls):
        """Return the helper callables exposed to task templates as globals.

        :returns: dict mapping template function name to implementation
        """
        def template_min(int1, int2):
            return min(int1, int2)

        def template_max(int1, int2):
            return max(int1, int2)

        def template_round(float1):
            return int(round(float1))

        def template_ceil(float1):
            import math
            return int(math.ceil(float1))

        return {"min": template_min, "max": template_max,
                "ceil": template_ceil, "round": template_round}

    @classmethod
    def create(cls, deployment, tag):
        """Create a task without starting it.

        Task is a list of benchmarks that will be called one by one, results of
        execution will be stored in DB.

        :param deployment: UUID or name of the deployment
        :param tag: tag for this task
        :returns: Task object
        """
        deployment_uuid = objects.Deployment.get(deployment)["uuid"]
        return objects.Task(deployment_uuid=deployment_uuid, tag=tag)

    @classmethod
    def validate(cls, deployment, config, task_instance=None):
        """Validate a task config against specified deployment.

        :param deployment: UUID or name of the deployment
        :param config: a dict with a task configuration
        :param task_instance: optional pre-created Task; a temporary one is
            created when omitted
        """
        deployment = objects.Deployment.get(deployment)
        task = task_instance or objects.Task(
            deployment_uuid=deployment["uuid"], temporary=True)
        benchmark_engine = engine.TaskEngine(
            config, task, admin=deployment["admin"], users=deployment["users"])
        benchmark_engine.validate()

    @classmethod
    def start(cls, deployment, config, task=None, abort_on_sla_failure=False):
        """Start a task.

        Task is a list of benchmarks that will be called one by one, results of
        execution will be stored in DB.

        :param deployment: UUID or name of the deployment
        :param config: a dict with a task configuration
        :param task: Task object. If None, it will be created
        :param abort_on_sla_failure: if True, the execution of a benchmark
                                     scenario will stop when any SLA check
                                     for it fails
        """
        deployment = objects.Deployment.get(deployment)
        task = task or objects.Task(deployment_uuid=deployment["uuid"])

        if task.is_temporary:
            raise ValueError(_(
                "Unable to run a temporary task. Please check your code."))

        LOG.info("Benchmark Task %s on Deployment %s" % (task["uuid"],
                                                         deployment["uuid"]))
        benchmark_engine = engine.TaskEngine(
            config, task, admin=deployment["admin"], users=deployment["users"],
            abort_on_sla_failure=abort_on_sla_failure)

        try:
            benchmark_engine.run()
        except Exception:
            # A failed run may leave the deployment half-modified.
            deployment.update_status(consts.DeployStatus.DEPLOY_INCONSISTENT)
            raise

    @classmethod
    def abort(cls, task_uuid, soft=False, async_flag=True):
        """Abort running task.

        NOTE: this flag was historically named ``async``; it was renamed
        because ``async`` is a reserved keyword from Python 3.7 on, which
        made this module fail to parse.

        :param task_uuid: The UUID of the task
        :type task_uuid: str
        :param soft: if set to True, task should be aborted after execution of
                     current scenario, otherwise as soon as possible before
                     all the scenario iterations finish [Default: False]
        :type soft: bool
        :param async_flag: don't wait until task became in 'running' state
                           [Default: True]
        :type async_flag: bool
        """
        if not async_flag:
            current_status = objects.Task.get_status(task_uuid)
            if current_status in objects.Task.NOT_IMPLEMENTED_STAGES_FOR_ABORT:
                LOG.info(_LI("Task status is '%s'. Should wait until it became"
                             " 'running'") % current_status)
                while (current_status in
                       objects.Task.NOT_IMPLEMENTED_STAGES_FOR_ABORT):
                    time.sleep(1)
                    current_status = objects.Task.get_status(task_uuid)

        objects.Task.get(task_uuid).abort(soft=soft)

        if not async_flag:
            LOG.info(_LI("Waiting until the task stops."))
            finished_stages = [consts.TaskStatus.ABORTED,
                               consts.TaskStatus.FINISHED,
                               consts.TaskStatus.FAILED]
            while objects.Task.get_status(task_uuid) not in finished_stages:
                time.sleep(1)

    @classmethod
    def delete(cls, task_uuid, force=False):
        """Delete the task.

        :param task_uuid: The UUID of the task.
        :param force: If set to True, then delete the task despite to the
                      status.
        :raises TaskInvalidStatus: when the status of the task is not
                                   FINISHED and the force argument
                                   is not True
        """
        status = None if force else consts.TaskStatus.FINISHED
        objects.Task.delete_by_uuid(task_uuid, status=status)
class Verification(object):
@classmethod
def verify(cls, deployment, set_name, regex, tests_file,
tempest_config, system_wide_install=False):
"""Start verifying.
:param deployment: UUID or name of a deployment.
:param set_name: Valid name of tempest test set.
:param regex: Regular expression of test
:param tests_file: Path to a file with a list of Tempest tests
:param tempest_config: User specified Tempest config file
:param system_wide_install: Use virtualenv else run tests in local
environment
:returns: Verification object
"""
deployment_uuid = objects.Deployment.get(deployment)["uuid"]
verification = objects.Verification(deployment_uuid=deployment_uuid)
verifier = cls._create_verifier(deployment_uuid, verification,
tempest_config, system_wide_install)
LOG.info("Starting verification of deployment: %s" % deployment_uuid)
verification.set_running()
verifier.verify(set_name=set_name, regex=regex,
tests_file=tests_file)
return verification
@staticmethod
def _create_verifier(deployment_uuid, verification=None,
tempest_config=None, system_wide_install=False):
"""Create a Tempest object.
:param deployment_uuid: UUID or name of a deployment
:param verification: Verification object
:param tempest_config: User specified Tempest config file
:param system_wide_install: Use virtualenv else run tests in local
environment
:returns: Tempest object
"""
verifier = tempest.Tempest(deployment_uuid, verification=verification,
tempest_config=tempest_config,
system_wide_install=system_wide_install)
if not verifier.is_installed():
LOG.info(_("Tempest is not installed "
"for the specified deployment."))
LOG.info(_("Installing Tempest "
"for deployment: %s") % deployment_uuid)
verifier.install()
return verifier
@classmethod
def import_results(cls, deployment, set_name="", log_file=None):
"""Import Tempest tests results into the Rally database.
:param deployment: UUID or name of a deployment
:param log_file: User specified Tempest log file in subunit format
:returns: Deployment and verification objects
"""
# TODO(aplanas): Create an external deployment if this is
# missing, as required in the blueprint [1].
# [1] https://blueprints.launchpad.net/rally/+spec/verification-import
deployment_uuid = objects.Deployment.get(deployment)["uuid"]
verification = objects.Verification(deployment_uuid=deployment_uuid)
verifier = tempest.Tempest(deployment_uuid, verification=verification)
LOG.info("Importing verification of deployment: %s" % deployment_uuid)
verification.set_running()
verifier.import_results(set_name=set_name, log_file=log_file)
return deployment, verification
@classmethod
def install_tempest(cls, deployment, source=None):
"""Install Tempest.
:param deployment: UUID or name of the deployment
:param source: Source to fetch Tempest from
"""
deployment_uuid = objects.Deployment.get(deployment)["uuid"]
verifier = tempest.Tempest(deployment_uuid, source=source)
verifier.install()
@classmethod
def uninstall_tempest(cls, deployment):
"""Remove deployment's local Tempest installation.
:param deployment: UUID or name of the deployment
"""
deployment_uuid = objects.Deployment.get(deployment)["uuid"]
verifier = tempest.Tempest(deployment_uuid)
verifier.uninstall()
@classmethod
def reinstall_tempest(cls, deployment, tempest_config=None, source=None):
"""Uninstall Tempest and then reinstall from new source.
:param deployment: UUID or name of the deployment
:param tempest_config: Tempest config file. Use previous file as
default
:param source: Source to fetch Tempest from. Use old source as default
"""
deployment_uuid = objects.Deployment.get(deployment)["uuid"]
verifier = tempest.Tempest(deployment_uuid)
if not tempest_config:
config_path = verifier.config_file
filename = os.path.basename(config_path)
temp_location = tempfile.gettempdir()
tmp_conf_path = os.path.join(temp_location, filename)
shutil.copy2(config_path, tmp_conf_path)
source = source or verifier.tempest_source
verifier.uninstall()
verifier = tempest.Tempest(deployment_uuid, source=source,
tempest_config=tempest_config)
verifier.install()
if not tempest_config:
shutil.move(tmp_conf_path, verifier.config_file)
@classmethod
def configure_tempest(cls, deployment, tempest_config=None,
                      override=False):
    """Generate configuration file of Tempest.

    :param deployment: UUID or name of a deployment
    :param tempest_config: User specified Tempest config file location
    :param override: Whether or not to override existing Tempest
                     config file
    """
    uuid_ = objects.Deployment.get(deployment)["uuid"]
    verifier = cls._create_verifier(uuid_, tempest_config=tempest_config)
    verifier.generate_config_file(override)
@classmethod
def show_config_info(cls, deployment):
    """Show information about configuration file of Tempest.

    :param deployment: UUID or name of a deployment
    """
    uuid_ = objects.Deployment.get(deployment)["uuid"]
    verifier = cls._create_verifier(uuid_)
    if not verifier.is_configured():
        verifier.generate_config_file()
    conf_path = verifier.config_file
    with open(conf_path, "rb") as conf:
        conf_data = conf.read()
    return {"conf_data": conf_data,
            "conf_path": conf_path}
| {
"content_hash": "6526866c10ae3262a3e56632b2d75109",
"timestamp": "",
"source": "github",
"line_count": 459,
"max_line_length": 79,
"avg_line_length": 39.148148148148145,
"alnum_prop": 0.6082141465857867,
"repo_name": "group-policy/rally",
"id": "f7ade3ccd945359020b833bc581c6b3559d20def",
"size": "18599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rally/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "35771"
},
{
"name": "Mako",
"bytes": "17389"
},
{
"name": "Python",
"bytes": "2926625"
},
{
"name": "Shell",
"bytes": "40843"
}
],
"symlink_target": ""
} |
class DJHostExtension:
    """Parse the request host name to find the DJ name, and save info
    that will be used in various places."""

    first = True  # Parse the host name before anything else is done
    needs = {'request'}
    provides = {'djhost'}

    def __init__(self, **args):
        pass

    def prepare(self, context):
        """Derive DJ name/prefix/domain from ``context.request.host`` and
        attach them (plus push-endpoint URLs) to *context*."""
        host, sep, host_domain = context.request.host.partition('.')
        context.fulldj = host
        # Handle both "prefix-name" and plain "name" host forms.  partition()
        # (instead of the previous split()) keeps any further dashes in the
        # name rather than raising ValueError.
        prefix, dash, remainder = host.partition('-')
        if dash:
            host = remainder
        else:
            prefix = ''
        context.host_domain = host_domain
        context.djname = host
        context.djprefix = prefix
        context.djhost = sep.join((host, host_domain))
        # Push endpoints: host without any port, channel id is the full
        # (lower-cased) DJ host label.
        push_host = context.request.host.split(':')[0]
        channel = context.fulldj.lower()
        context.websocket_admin = 'http://{}/pub?id={}-admin'.format(push_host, channel)
        context.websocket = 'http://{}/pub?id={}'.format(push_host, channel)
| {
"content_hash": "781dd8651c1cfdae453c80e4ffe247f1",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 128,
"avg_line_length": 40.791666666666664,
"alnum_prop": 0.5975485188968335,
"repo_name": "bmillham/djrq2",
"id": "d8dba93a6c4460867311dbac77bacdc875067dbf",
"size": "979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/ext/djhost.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22622"
},
{
"name": "JavaScript",
"bytes": "59510"
},
{
"name": "Python",
"bytes": "267514"
},
{
"name": "Shell",
"bytes": "1030"
}
],
"symlink_target": ""
} |
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
# URL routes: grappelli/admin/chaining plugin URLs plus the assessment app;
# the site root redirects into the assessment app.
# NOTE(review): "assesment" is spelled this way in the live URLs -- do not
# "fix" the path without a redirect.
urlpatterns = [
    url(r'^grappelli/', include('grappelli.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^chaining/', include('smart_selects.urls')),
    url(r'^assesment_management/', include('assessment_management.urls')),
    url(r'^$', RedirectView.as_view(url='/assesment_management/')),
]
| {
"content_hash": "ae7622d03ef0be06f687907959ae295c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 74,
"avg_line_length": 40.18181818181818,
"alnum_prop": 0.7058823529411765,
"repo_name": "PayPal-Opportunity-Hack-Chennai-2015/ANEW",
"id": "eb3d7214be4155dd59feb736a932cbde152046e3",
"size": "442",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "anew/anew/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "272130"
},
{
"name": "HTML",
"bytes": "77959"
},
{
"name": "JavaScript",
"bytes": "379152"
},
{
"name": "Python",
"bytes": "84209"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the Weapon template object for the Republic blaster quest pistol.

    Appears auto-generated (note the modification markers); keep custom
    changes between BEGIN/END MODIFICATIONS so regeneration preserves them.
    """
    result = Weapon()

    result.template = "object/weapon/ranged/pistol/shared_pistol_republic_blaster_quest.iff"
    result.attribute_template_id = 10
    result.stfName("weapon_name", "pistol_republic_blaster_quest")

    #### BEGIN MODIFICATIONS ####

    #### END MODIFICATIONS ####

    return result
"content_hash": "0bf07a2e22490a2c253e581daf9edf72",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 89,
"avg_line_length": 26.076923076923077,
"alnum_prop": 0.7168141592920354,
"repo_name": "obi-two/Rebelion",
"id": "ea8b69ae9cc59c2799d1e6f3bf58507fd36c61f9",
"size": "484",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/weapon/ranged/pistol/shared_pistol_republic_blaster_quest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
from typing import List
# @lc code=start
class Solution:
    def compress(self, chars: List[str]) -> int:
        """Run-length encode *chars* in place and return the new length.

        Runs longer than one character are followed by the digits of the
        run length, e.g. ["a", "a", "b"] -> ["a", "2", "b"].
        """
        if len(chars) < 2:
            return len(chars)

        total = len(chars)
        chars.append(None)  # sentinel: never equal to a real char, flushes last run
        write = 0       # next slot to receive compressed output
        run_start = 0   # index where the current run began
        scan = 1        # cursor walking the original characters

        while scan <= total:
            if chars[scan] == chars[run_start]:
                scan += 1
                continue
            # Run ended: emit its character, then its length digits if > 1.
            run_len = scan - run_start
            chars[write] = chars[run_start]
            if run_len > 1:
                for digit in str(run_len):
                    write += 1
                    chars[write] = digit
            write += 1
            run_start = scan
            scan += 1

        # Drop the sentinel and any stale trailing characters.
        del chars[write:]
        return len(chars)
# @lc code=end
# Ad-hoc smoke test: 1x'a', 12x'b', 2x'c' compresses to
# ['a', 'b', '1', '2', 'c', '2'], so this prints `6 [...]`.
s = Solution()
chars = ["a","b","b","b","b","b","b","b","b","b","b","b","b","c","c"]
ret = s.compress(chars)
print(ret, chars)
| {
"content_hash": "35402baaa8773ac062abb8393876976f",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 69,
"avg_line_length": 28.942857142857143,
"alnum_prop": 0.4353405725567621,
"repo_name": "heyf/cloaked-octo-adventure",
"id": "96d9c1c5f86259af75e292e07b06b6424116e6bd",
"size": "1086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/443_string-compression.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1225"
},
{
"name": "C++",
"bytes": "13338"
},
{
"name": "Python",
"bytes": "145134"
}
],
"symlink_target": ""
} |
import sys, re
from .lines_in import lines_in as generic_lines_in
# Interpreter version components, used to pick version-dependent expected
# strings in tests.
major_version = sys.version_info[0]
minor_version = sys.version_info[1]
def local_func():
    """Return extra string for representation of a test-function-local function or class.

    This allows for easy renaming of test functions with no impact on expected result string.
    """
    # Caller's frame: the test function defining the local class/function.
    frame = sys._getframe(1)
    return frame.f_code.co_name + '.<locals>.'
# Trailing comma expected in exception reprs on Python 3 before 3.7
# (presumably matching the single-arg exception repr change in 3.7 --
# TODO confirm against expected strings using this constant).
py37_no_exc_comma = ',' if (major_version == 3) and minor_version < 7 else ''
def line_num():
    """Return the source line number of the caller."""
    frame = sys._getframe(1)
    return frame.f_lineno


def next_line_num():
    """Return the source line number just after the caller's current line."""
    frame = sys._getframe(1)
    return frame.f_lineno + 1
def lazy(*args):
    """Wrap a call for deferred execution: ``lazy(f, a, b)() == f(a, b)``."""
    func, call_args = args[0], args[1:]
    return lambda: func(*call_args)
def _config_msg(error_type, file_name, line_num, *lines):
if not file_name.endswith('.py'):
# file_name may end in .pyc!
file_name = file_name[:-1]
line_msg = 'File "{file_name}", line {line_num}'.format(file_name=file_name, line_num=line_num) + '\n'
emsg = ""
for line in lines:
emsg += line_msg
emsg += error_type + ': ' + line + '\n'
return emsg
def config_error(file_name, line_num, *line):
    """Format a ConfigError message block for the given file/line."""
    return _config_msg('ConfigError', file_name, line_num, *line)


def config_warning(file_name, line_num, *line):
    """Format a ConfigWarning message block for the given file/line."""
    return _config_msg('ConfigWarning', file_name, line_num, *line)


def api_error(file_name, line_num, *line):
    """Format a MultiConfApiError message block for the given file/line."""
    return _config_msg('MultiConfApiError', file_name, line_num, *line)
def total_msg(total_num_errors):
    """Return the summary prefix used when reporting item-definition errors."""
    if total_num_errors > 1:
        ww, err = 'were', 'errors'
    else:
        ww, err = 'was', 'error'
    return "There {ww} {num_errors} {err} when defining item: ".format(
        ww=ww, num_errors=total_num_errors, err=err)
def file_line(error_file_name, error_line_num):
    """Return string with file/line info formatted like in error messages."""
    return 'File "%s", line %s' % (error_file_name, error_line_num)
def start_file_line(error_file_name, error_line_num):
    """Helper for 'assert lines_in'.

    Return string with file/line info formatted like in error messages and
    prefixed with '^' for start-of-line matching.
    """
    return '^{}'.format(file_line(error_file_name, error_line_num))
def lines_in(text, *expected_lines):
    """Delegate to the generic lines_in helper with no fixed prefix."""
    return generic_lines_in(text, None, *expected_lines)
# Handle variable ids and source file line numbers in json/repr output
_replace_multiconf_file_line_msg_regex = re.compile(r'File "[^"]+/multiconf/([^/]+).py", line [0-9]+')


def replace_multiconf_file_line_msg(string):
    """Rewrite multiconf-internal file/line references to fixed placeholders
    so expected output does not depend on install path or source line."""
    return _replace_multiconf_file_line_msg_regex.sub(
        r'File "fake_multiconf_dir/\1.py", line 999', string)
_replace_ids_regex = re.compile(r'("__id__"|, id| #id): [0-9]+("?)')
_replace_refs_regex = re.compile(r'"#ref (self, |later, |)id: [0-9]+')
_replace_named_as_regex = re.compile(r" #as: '[^,]+',")
_replace_address_regex = re.compile(r" at 0x[^>]*>")


def replace_ids(json_string, named_as=True, address=False):
    """Replace run-dependent object ids (and optionally '#as' names and
    memory addresses) in json/repr output with fixed placeholders so test
    expectations stay stable across runs."""
    result = _replace_ids_regex.sub(r'\1: 0000\2', json_string)
    if named_as:
        result = _replace_named_as_regex.sub(r" #as: 'xxxx',", result)
    if address:
        result = _replace_address_regex.sub(r" at 0x0000>", result)
    return _replace_refs_regex.sub(r'"#ref \1id: 0000', result)
_replace_builder_ids_regex = re.compile(r"""\.builder\.[0-9]+(["'])""")


def replace_ids_builder(json_string, named_as=True, address=False):
    """Like replace_ids, but first also normalizes `.builder.<n>` ids."""
    json_string = _replace_builder_ids_regex.sub(r'.builder.0000\1', json_string)
    return replace_ids(json_string, named_as, address)
_compact_ids_regex = re.compile(r'("),\n *"__id__": ([0-9]+),')
_compact_calculated_regex = re.compile(r': "?([^"$]+)"?(,\n *"[a-zA-Z0-9_]+ #value for .* provided by @property": true|),\n *"([a-zA-Z0-9_]+) #(calculated|static)": true')


def to_compact(json_string):
    """Convert full json output to the 'compact' format used in expected strings."""
    # There is no named_as in the non-compact format, just insert
    json_string = _compact_ids_regex.sub(r" #as: 'xxxx', id: \2\1,", json_string)
    # Fold separate "#calculated"/"#static" marker entries into the value itself.
    return _compact_calculated_regex.sub(r': "\1 #\4"\2', json_string)
# "item": false,
# "item #Excluded: <class 'multiconf.test.include_exclude_test.item'>": true
_compact_excluded_regex = re.compile(r""": false,\n *"([a-zA-Z0-9_]*) #Excluded: <class '([.xa-zA-Z0-9_]*)'>": true""")


def to_compact_excluded(json_string):
    """Compact conversion that also folds '#Excluded' marker entries into the value."""
    json_string = to_compact(json_string)
    return _compact_excluded_regex.sub(r""": "false #Excluded: <class '\2'>""" + '"', json_string)
| {
"content_hash": "6e4e6134e2d60a7cf49e13be51d95258",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 171,
"avg_line_length": 35.608,
"alnum_prop": 0.6459222646596271,
"repo_name": "lhupfeldt/multiconf",
"id": "2906977a9b962f597cb3329943c5a17456396364",
"size": "4582",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/utils/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "31"
},
{
"name": "Python",
"bytes": "760680"
}
],
"symlink_target": ""
} |
from star import load, save | {
"content_hash": "78e00447fa1153d52020a8a67fe4d112",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 27,
"avg_line_length": 27,
"alnum_prop": 0.8148148148148148,
"repo_name": "craigyk/pystar",
"id": "e68cf191f47244567398e16ca5b1aaaca20afacb",
"size": "28",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11030"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
# Register ModelAdmin classes from all installed apps.
admin.autodiscover()

# Main routes: checkout/templateset browsing and codeblock editing views.
urlpatterns = patterns(
    '',
    url(r'^$', 'main.views.home', name='home'),
    url(r'^changeset_added/$', 'main.views.changeset_added',
        name='changeset_added'),
    url(r'^checkout/$', 'main.views.checkout_index', name='checkout_index'),
    url(r'^templateset/$', 'main.views.templatesets_index', name='templatesets_index'),
    url(r'^checkout/(?P<slug>[\w\d_\-]+)/$', 'main.views.checkout_overview',
        name='checkout_overview'),
    url(r'^checkout/(?P<slug>[\w\d_\-]+)/checkin/$', 'main.views.templateset_checkin',
        name='templateset_checkin'),
    url(r'^codeblock/create/$', 'main.views.codeblock_create',
        name='codeblock_create'),
    url(r'^codeblock/edit/(?P<filename>[\w\d_\-\./]+)/$', 'main.views.codeblock_edit',
        name='codeblock_edit'),
    url(r'^codeblock/diff/(?P<filename>[\w\d_\-\./]+)/$',
        'main.views.codeblock_diff',
        name='codeblock_diff'),
    url(r'^templateset/(?P<id>\d+)/$', 'main.views.templateset_overview',
        name='templateset_overview'),
    url(r'^templateset/(?P<id>\d+)/checkout/$', 'main.views.templateset_checkout',
        name='templateset_checkout'),
    url(r'^templateset/(?P<id>\d+)/(?P<filename>[\w\d_\-\.]+)/$', 'main.views.templateset_template',
        name='templateset_template'),
    # Diff between two revisions identified by hex hashes.
    url(r'^templateset/(?P<id>\d+)/(?P<filename>[\w\d_\-\.]+)/(?P<hash1>[a-f0-9]+)/(?P<hash2>[a-f0-9]+)/$',
        'main.views.templateset_templatediff',
        name='templateset_templatediff'),
    url(r'^templateset/(?P<id>\d+)/(?P<filename>[\w\d_\-\.]+)/edit/$',
        'main.views.templateset_template_edit',
        name='templateset_template_edit'),
    url(r'^admin/', include(admin.site.urls)),
)

# Built-in auth login/logout views.
urlpatterns += patterns(
    'django.contrib.auth.views',
    (r'^accounts/login/$', 'login'),
    (r'^accounts/logout/$', 'logout'),
)
| {
"content_hash": "fd6979f3fae145f830583e88e2719937",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 107,
"avg_line_length": 38.568627450980394,
"alnum_prop": 0.6034570411794611,
"repo_name": "boldprogressives/akcode",
"id": "cb3d3a7fccea6be987c4bcf65aa017b84e63d4bb",
"size": "1967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "761"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "DOT",
"bytes": "5910"
},
{
"name": "Dart",
"bytes": "986"
},
{
"name": "Delphi",
"bytes": "1412"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "Java",
"bytes": "396"
},
{
"name": "JavaScript",
"bytes": "16067899"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "959"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "PHP",
"bytes": "26207"
},
{
"name": "Perl",
"bytes": "678"
},
{
"name": "PowerShell",
"bytes": "418"
},
{
"name": "Python",
"bytes": "43577"
},
{
"name": "R",
"bytes": "668"
},
{
"name": "Ruby",
"bytes": "531"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "1238"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "Visual Basic",
"bytes": "916"
},
{
"name": "XQuery",
"bytes": "114"
}
],
"symlink_target": ""
} |
from __future__ import division
from future.builtins import str, range, object
from past.utils import old_div
import re
import importlib
import time
import math
import json
import datetime
from collections import deque
from bson import ObjectId
import uuid
import shlex
#
# Utils are functions that should be independent from the rest of MRQ's codebase
#
def normalize_command(command, worker_group):
    """Strip any --processes flag from *command* and prefix the worker group.

    Returns ``(simplified_command, worker_count)`` where *worker_count* is
    the number of processes requested (1 when no --processes flag appears).
    """
    if "--processes" not in command:
        return "MRQ_WORKER_GROUP=%s %s" % (worker_group, command), 1

    kept_tokens = []
    worker_count = 0
    expecting_count = False
    for token in shlex.split(command):
        if expecting_count:
            # Previous token was a bare "--processes": this one is the count.
            worker_count = token
            expecting_count = False
        elif token.startswith("--processes="):
            worker_count = token.split("=")[1]
        elif token == "--processes":
            expecting_count = True
        else:
            kept_tokens.append(token)

    # Re-join with single spaces; the leading space separates the env
    # prefix from the first token.
    simplified = "".join(" " + token for token in kept_tokens)
    return "MRQ_WORKER_GROUP=%s%s" % (worker_group, simplified), int(worker_count)
def get_local_ip():
    """ Returns the local IP. Can be overwritten in the config with --local-ip so don't call
        this function directly, instead get the current value from the config """
    import socket
    try:
        return socket.gethostbyname(socket.gethostname())
    except Exception:
        # Hostname resolution can fail (e.g. hostname missing from
        # /etc/hosts); fall back to loopback.  Narrowed from a bare
        # `except:` which also swallowed KeyboardInterrupt/SystemExit.
        return "127.0.0.1"
def group_iter(iterator, n=2):
    """ Given an iterator, it returns sub-lists made of n items.
        (except the last that can have len < n)
    """
    # Use slices instead of an iterator when we have a flat list.
    # Stepping range() by n replaces the previous float-based
    # ceil-division index computation with exact integer arithmetic.
    if isinstance(iterator, list):
        length = len(iterator)
        for start in range(0, length, n):
            yield iterator[start:start + n]
    else:
        accumulator = []
        for item in iterator:
            accumulator.append(item)
            if len(accumulator) == n:
                yield accumulator
                accumulator = []

        # Yield what's left
        if accumulator:
            yield accumulator
# http://code.activestate.com/recipes/578231-probably-the-fastest-memoization-decorator-in-the-/
def memoize(f):
    """ Memoization decorator for a function taking one or more positional arguments. """
    class memodict(dict):
        def __getitem__(self, *key):
            # Pack all positional args into one tuple key.
            return dict.__getitem__(self, key)

        def __missing__(self, key):
            value = f(*key)
            self[key] = value
            return value

    return memodict().__getitem__
def memoize_single_argument(f):
    """ Memoization decorator for a function taking a single argument """
    class memodict(dict):
        def __missing__(self, key):
            value = f(key)
            self[key] = value
            return value

    return memodict().__getitem__
@memoize_single_argument
def load_class_by_path(taskpath):
    """ Given a taskpath, returns the main task class. """
    # Split "pkg.module.ClassName" on the last dot: import the module part,
    # then fetch the class attribute.  Memoized because imports are costly.
    return getattr(
        importlib.import_module(
            re.sub(
                r"\.[^.]+$",
                "",
                taskpath)),
        re.sub(
            r"^.*\.",
            "",
            taskpath))
def lazyproperty(fn):
    """Property that computes its value once and caches it on the instance
    under ``_lazy_<name>``."""
    attr_name = '_lazy_' + fn.__name__

    @property
    def _lazyprop(self):
        try:
            return getattr(self, attr_name)
        except AttributeError:
            value = fn(self)
            setattr(self, attr_name, value)
            return value

    return _lazyprop
# http://code.activestate.com/recipes/576655-wait-for-network-service-to-appear/
def wait_for_net_service(server, port, timeout=None, poll_interval=0.1):
    """ Wait for network service to appear
        @param timeout: in seconds, if None or 0 wait forever
        @return: True of False, if timeout is None may return only True or
        throw unhandled network exception
    """
    import socket
    import errno

    s = socket.socket()
    if timeout:
        from time import time as now
        # time module is needed to calc timeout shared between two exceptions
        end = now() + timeout

    while True:
        try:
            if timeout:
                next_timeout = end - now()
                if next_timeout < 0:
                    return False
                else:
                    s.settimeout(next_timeout)

            s.connect((server, port))

        except socket.timeout as err:
            # this exception occurs only if timeout is set
            if timeout:
                return False

        except Exception as err:
            # catch timeout exception from underlying network library
            # this one is different from socket.timeout
            # NOTE(review): non-timeout errors (e.g. connection refused) are
            # swallowed and retried after poll_interval; the original recipe
            # re-raised them here ("# raise").  The same socket object is
            # also reused after a failed connect -- confirm on all platforms.
            if not isinstance(err.args, tuple) or err.args[0] != errno.ETIMEDOUT:
                pass  # raise
        else:
            # Connected successfully.
            s.close()
            return True

        time.sleep(poll_interval)
class LazyObject(object):
    """ Lazy-connection class. Connections will only be initialized when first used. """

    def __init__(self):
        self._factories = []
        self._attributes_via_factories = []

    def add_factory(self, factory):
        """Register a callable mapping an attribute name to a value (or None
        when the factory does not handle that name)."""
        self._factories.append(factory)

    # This will be called only once, when the attribute is still missing
    def __getattr__(self, attr):
        for factory in self._factories:
            produced = factory(attr)
            if produced is None:
                continue
            self._attributes_via_factories.append(attr)
            self.__dict__[attr] = produced
            return produced

    def reset(self):
        """Drop every factory-created attribute so it is rebuilt on next use."""
        # TODO proper connection close?
        for attr in self._attributes_via_factories:
            del self.__dict__[attr]
        self._attributes_via_factories = []
class MongoJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes values common in Mongo documents:
    datetimes as ISO strings, ObjectId/UUID as strings, bytes as UTF-8."""

    def default(self, obj):  # pylint: disable=E0202
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, (ObjectId, uuid.UUID)):
            return str(obj)
        if isinstance(obj, bytes):
            return obj.decode('utf-8')
        return json.JSONEncoder.default(self, obj)
class MovingAverage(object):
    """Fixed-window moving average over a stream of numbers."""

    def __init__(self, size):
        self.__size = size   # window capacity
        self.__sum = 0       # running sum of the window
        self.__q = deque([])

    def next(self, val):
        """Push *val* into the window and return the current average."""
        if len(self.__q) == self.__size:
            self.__sum -= self.__q.popleft()
        self.__q.append(val)
        self.__sum += val
        return 1.0 * self.__sum / len(self.__q)
class MovingETA(object):
    """Estimate time-to-zero (ETA) of a decreasing value by fitting a line
    to a sliding window of (time, value) samples."""

    def __init__(self, size):
        # Window size: number of most recent samples used for the fit.
        self.__size = size
        self.__q = deque([])  # recent values
        self.__t = deque([])  # matching timestamps

    def next(self, val, t=None):
        """Record sample *val* at time *t* (default: now) and return the
        estimated seconds until the value reaches 0, or None if unknown."""
        if t is None:
            t = time.time()

        if len(self.__q) == self.__size:
            self.__q.popleft()
            self.__t.popleft()

        self.__q.append(val)
        self.__t.append(t)

        # Need at least two samples to fit a line.
        if len(self.__q) == 1:
            return None

        mean_q = sum(self.__q) / len(self.__q)
        mean_t = sum(self.__t) / len(self.__t)

        def std(lst, m):
            # Sample standard deviation (n - 1 denominator).
            return math.sqrt(sum((pow(x - m, 2) for x in lst)) / (len(lst) - 1))

        def pearson_r(list_t, list_q):
            # Pearson correlation coefficient between times and values.
            sum_xy = 0
            sum_sq_v_x = 0
            sum_sq_v_y = 0
            for (x, y) in zip(list_t, list_q):
                var_x = x - mean_t
                var_y = y - mean_q
                sum_xy += var_x * var_y
                sum_sq_v_x += pow(var_x, 2)
                sum_sq_v_y += pow(var_y, 2)
            if sum_sq_v_x * sum_sq_v_y == 0:
                # Degenerate window (no variance): correlation undefined.
                return None
            return sum_xy / math.sqrt(sum_sq_v_x * sum_sq_v_y)

        r = pearson_r(self.__t, self.__q)
        if r is None:
            return None

        # Least-squares fit of q = a*t + b.
        a = r * (std(self.__q, mean_q) / std(self.__t, mean_t))
        b = mean_q - a * mean_t

        # ETA is ax + b = 0, reported relative to the current time.
        if a == 0:
            return None
        return (-b / a) - t
| {
"content_hash": "286f09d7eaae9b225f485cea9f5f6929",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 96,
"avg_line_length": 27.858131487889274,
"alnum_prop": 0.5435349646006707,
"repo_name": "pricingassistant/mrq",
"id": "72d84a60ee4fbb1ccbecc53fa70886df56dc58c0",
"size": "8051",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mrq/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5514"
},
{
"name": "Dockerfile",
"bytes": "2722"
},
{
"name": "HTML",
"bytes": "60608"
},
{
"name": "JavaScript",
"bytes": "78540"
},
{
"name": "Makefile",
"bytes": "2765"
},
{
"name": "Perl",
"bytes": "1374"
},
{
"name": "Python",
"bytes": "931744"
}
],
"symlink_target": ""
} |
import boto
import os
import re
import urllib.parse
from boto.s3 import connection
from wal_e import log_help
from wal_e.exception import UserException
logger = log_help.WalELogger(__name__)
# Fallback region -> endpoint host map, used when boto cannot supply one.
_S3_REGIONS = {
    # See http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
    'ap-northeast-1': 's3-ap-northeast-1.amazonaws.com',
    'ap-southeast-1': 's3-ap-southeast-1.amazonaws.com',
    'ap-southeast-2': 's3-ap-southeast-2.amazonaws.com',
    'eu-central-1': 's3-eu-central-1.amazonaws.com',
    'eu-west-1': 's3-eu-west-1.amazonaws.com',
    'sa-east-1': 's3-sa-east-1.amazonaws.com',
    'us-east-1': 's3.amazonaws.com',
    'us-west-1': 's3-us-west-1.amazonaws.com',
    'us-west-2': 's3-us-west-2.amazonaws.com',
}

try:
    # Override the hard-coded region map with boto's mappings if
    # available.
    from boto.s3 import regions
    _S3_REGIONS.update(dict((r.name, str(r.endpoint)) for r in regions()))
except ImportError:
    pass
def _is_ipv4_like(s):
"""Find if a string superficially looks like an IPv4 address.
AWS documentation plays it fast and loose with this; in other
regions, it seems like even non-valid IPv4 addresses (in
particular, ones that possess decimal numbers out of range for
IPv4) are rejected.
"""
parts = s.split('.')
if len(parts) != 4:
return False
for part in parts:
try:
int(part)
except ValueError:
return False
return True
def _is_mostly_subdomain_compatible(bucket_name):
    """Returns True if SubdomainCallingFormat can be used...mostly

    This checks to make sure that putting aside certificate validation
    issues that a bucket_name is able to use the
    SubdomainCallingFormat.
    """
    if bucket_name.lower() != bucket_name:
        return False
    if not 3 <= len(bucket_name) <= 63:
        return False
    if '_' in bucket_name:
        return False
    for forbidden in ('..', '-.', '.-'):
        if forbidden in bucket_name:
            return False
    if bucket_name[0] in '-.' or bucket_name[-1] in '-.':
        return False
    return not _is_ipv4_like(bucket_name)
def _connect_secureish(*args, **kwargs):
    """Connect using the safest available options.

    This turns on encryption (works in all supported boto versions)
    and certificate validation (in the subset of supported boto
    versions that can handle certificate validation, namely, those
    after 2.6.0).

    Versions below 2.6 don't support the validate_certs option to
    S3Connection, and enable it via configuration option just seems to
    cause an error.
    """
    # Certificate validation only exists from boto 2.6.0 onwards.
    if tuple(int(x) for x in boto.__version__.split('.')) >= (2, 6, 0):
        kwargs['validate_certs'] = True

    kwargs['is_secure'] = True

    # S3Connection's constructor does not take auth_region_name directly;
    # pop it and set it on the connection afterwards (presumably used for
    # region-scoped request signing -- confirm against boto docs).
    auth_region_name = kwargs.pop('auth_region_name', None)
    conn = connection.S3Connection(*args, **kwargs)
    if auth_region_name:
        conn.auth_region_name = auth_region_name
    return conn
def _s3connection_opts_from_uri(impl):
    """Parse a WALE_S3_ENDPOINT URI into S3Connection keyword options.

    'impl' should look like:

        <protocol>+<calling_format>://[user:pass]@<host>[:port]

    A concrete example:

        https+virtualhost://user:pass@localhost:1235
    """
    o = urllib.parse.urlparse(impl, allow_fragments=False)

    if o.scheme is not None:
        proto_match = re.match(
            r'(?P<protocol>http|https)\+'
            r'(?P<format>virtualhost|path|subdomain)', o.scheme)
        if proto_match is None:
            raise UserException(
                msg='WALE_S3_ENDPOINT URI scheme is invalid',
                detail='The scheme defined is ' + repr(o.scheme),
                hint='An example of a valid scheme is https+virtualhost.')

    opts = {}

    if proto_match.group('protocol') == 'http':
        opts['is_secure'] = False
    else:
        # Constrained by prior regexp.
        # NOTE(review): the bare comparison below is a no-op left as an
        # assertion-by-convention in the original code.
        proto_match.group('protocol') == 'https'
        opts['is_secure'] = True

    f = proto_match.group('format')
    if f == 'virtualhost':
        opts['calling_format'] = connection.VHostCallingFormat()
    elif f == 'path':
        opts['calling_format'] = connection.OrdinaryCallingFormat()
    elif f == 'subdomain':
        opts['calling_format'] = connection.SubdomainCallingFormat()
    else:
        # Constrained by prior regexp.
        assert False

    if o.username is not None or o.password is not None:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support username or password')

    if o.hostname is not None:
        opts['host'] = o.hostname

    if o.port is not None:
        opts['port'] = o.port

    if o.path:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support a URI path',
            detail='Path is {0!r}'.format(o.path))

    if o.query:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support query parameters')

    return opts
class CallingInfo(object):
    """Encapsulate information used to produce a S3Connection."""

    def __init__(self, bucket_name=None, calling_format=None, region=None,
                 ordinary_endpoint=None):
        # Note: calling_format holds the *class* (not an instance);
        # connect() instantiates it.
        self.bucket_name = bucket_name
        self.calling_format = calling_format
        self.region = region
        self.ordinary_endpoint = ordinary_endpoint

    def __repr__(self):
        return ('CallingInfo({bucket_name}, {calling_format!r}, {region!r}, '
                '{ordinary_endpoint!r})'.format(**self.__dict__))

    def __str__(self):
        return repr(self)

    def connect(self, creds):
        """Return a boto S3Connection set up with great care.

        This includes TLS settings, calling format selection, and
        region detection.

        The credentials are applied by the caller because in many
        cases (instance-profile IAM) it is possible for those
        credentials to fluctuate rapidly.  By comparison, region
        fluctuations of a bucket name are not nearly so likely versus
        the gains of not looking up a bucket's region over and over.
        """
        def _conn_help(*args, **kwargs):
            # Shared secure-connection kwargs for every path below.
            return _connect_secureish(
                *args,
                provider=creds,
                calling_format=self.calling_format(),
                auth_region_name=self.region,
                **kwargs)

        # If WALE_S3_ENDPOINT is set, do not attempt to guess
        # the right calling conventions and instead honor the explicit
        # settings within WALE_S3_ENDPOINT.
        impl = os.getenv('WALE_S3_ENDPOINT')
        if impl:
            return connection.S3Connection(**_s3connection_opts_from_uri(impl))

        # Check if subdomain format compatible: if so, use the
        # BUCKETNAME.s3.amazonaws.com hostname to communicate with the
        # bucket.
        if self.calling_format is connection.SubdomainCallingFormat:
            return _conn_help(host='s3.amazonaws.com')

        # Check if OrdinaryCallingFormat compatible, but also see if
        # the endpoint has already been set, in which case only
        # setting the host= flag is necessary.
        assert self.calling_format is connection.OrdinaryCallingFormat
        assert self.ordinary_endpoint is not None
        return _conn_help(host=self.ordinary_endpoint)
def must_resolve(region):
    """Return the S3 endpoint host for *region*, raising UserException when
    the region is not in the known map."""
    try:
        return _S3_REGIONS[region]
    except KeyError:
        raise UserException(msg='Could not resolve host for AWS_REGION',
                            detail='AWS_REGION is set to "{0}".'
                            .format(region))
def from_store_name(bucket_name, region=None):
    """Construct a CallingInfo value from a bucket name.

    This is useful to encapsulate the ugliness of setting up S3
    connections, especially with regions and TLS certificates are
    involved.
    """
    # Late-bind `region` for the sake of tests that inject the
    # AWS_REGION environment variable.
    if region is None:
        region = os.getenv('AWS_REGION')

    mostly_ok = _is_mostly_subdomain_compatible(bucket_name)
    if not mostly_ok:
        # Name cannot serve as a DNS label at all: fall back to
        # path-style addressing against the region's fixed endpoint.
        return CallingInfo(
            bucket_name=bucket_name,
            region=region,
            calling_format=connection.OrdinaryCallingFormat,
            ordinary_endpoint=must_resolve(region))
    else:
        if '.' in bucket_name:
            # The bucket_name might have been DNS compatible, but once
            # dots are involved TLS certificate validations will
            # certainly fail even if that's the case.
            return CallingInfo(
                bucket_name=bucket_name,
                calling_format=connection.OrdinaryCallingFormat,
                region=region,
                ordinary_endpoint=must_resolve(region))
        else:
            # If the bucket follows naming rules and has no dots in
            # the name, SubdomainCallingFormat can be used, with TLS,
            # world-wide.
            return CallingInfo(
                bucket_name=bucket_name,
                calling_format=connection.SubdomainCallingFormat,
                region=region,
                ordinary_endpoint=None)

    # Unreachable: every branch above returns.
    assert False
| {
"content_hash": "3ee56e36a9428874fa9ae7403a3ae53a",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 79,
"avg_line_length": 33.904761904761905,
"alnum_prop": 0.6211106309420916,
"repo_name": "ajmarks/wal-e",
"id": "9f20ff2644927172126e1d2655895b59b22255d6",
"size": "9256",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wal_e/blobstore/s3/calling_format.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "986"
},
{
"name": "Python",
"bytes": "359067"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
# Route table for the blog app; no routes defined yet (example commented out).
urlpatterns = patterns('',
    # url(r'^$', 'blog.views.home', name='home'),
)
| {
"content_hash": "1074bbcacefb8c140fd07029adf2e741",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 51,
"avg_line_length": 26.4,
"alnum_prop": 0.6515151515151515,
"repo_name": "Abildin/apt_hw4",
"id": "d524e26d5535a00dae020cb65c5286bd5f5d826f",
"size": "132",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "restapi/blog/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8138"
}
],
"symlink_target": ""
} |
import os
import re
from flask import render_template_string
import requests
from bs4 import BeautifulSoup
from app import models
from app.models import Setting, UploadedFile, Page, db, Entity, MenuItem
from app.utils import get_theme_folder
session = requests.Session()
def scrape_inputs(html):
    """Function to scrape input data needed to submit.

    The values input tags wpAutoSummary, wpEditToken and wpEdittime are needed to submit the edit text.
    wpEditToken, wpWatchthis, wpIgnoreWarning and wpUpload are needed to upload a file"""
    soup = BeautifulSoup(html, 'html.parser')
    wanted = {'wpAutoSummary', 'wpEditToken', 'wpEdittime',
              'wpWatchthis', 'wpIgnoreWarning', 'wpUpload'}
    # Use .get('name'): a plain inp['name'] raises KeyError on nameless
    # <input> tags, which are common in real pages.
    return {inp['name']: inp['value']
            for inp in soup.find_all('input')
            if inp.get('name') in wanted}
def wiki_login(username, password):
    """Log in to the iGEM wiki through the normal login form.

    The iGEM wiki API is broken, so the regular website form is used
    instead. The POST is retried until the server answers HTTP 200.

    :returns: True when the response text indicates a successful login.
    """
    form = {
        'username': username,
        'password': password,
        'Login': 'Log in',
    }
    while True:
        reply = session.post('http://www.igem.org/Login', form)
        if reply.status_code == 200:
            break
    return 'successfully logged' in reply.text
def wiki_logout():
    """Log out of the iGEM wiki again, retrying until HTTP 200."""
    while True:
        reply = session.get('http://igem.org/cgi/Logout.cgi')
        if reply.status_code == 200:
            return
theme_pattern = re.compile(r'(/_theme.*?|/static.*?)(?=[")])')
def upload_template(template):
    """Render a local include template and push it to the wiki.

    The template is rendered with the site's entities, menu and theme,
    its local theme/static URLs are rewritten to external wiki paths,
    and the result is uploaded under the ``Template:`` title prefix.

    :param template: file name of the template inside the theme's
        ``templates/includes`` directory.
    """
    includes_dir = get_theme_folder('templates/includes')
    template_path = os.path.join(includes_dir, template)
    with open(template_path) as handle:
        raw = handle.read()
    context = {
        'entities': Entity.query.order_by('position').all(),
        'main_menu': MenuItem.query.filter_by(parent=None).order_by('position').all() or Page.query.order_by('position').all(),
        '_theme': Setting.query.filter_by(name=u'theme').first().value,
    }
    rendered = render_template_string(raw, **context)
    rendered = re.sub(theme_pattern, models.convert_external, rendered)
    wiki_path = os.path.splitext(os.path.basename(template_path))[0]
    return upload_wiki_text(rendered, wiki_path, prefix='Template:')
def upload_page(name):
    """Render the named Page and upload it to the wiki at its URL."""
    target = Page.query.filter_by(name=name).first()
    return upload_wiki_text(target.render_external(), target.url)
def upload_binary_file(root, path):
    """Upload a binary file through the wiki's Special:Upload page.

    The destination file name is prefixed with the configured team
    namespace; the external path the wiki assigns to the upload is
    stored on (or creates) the matching UploadedFile row.

    :param root: directory the file lives in.
    :param path: path of the file relative to ``root``.
    """
    abs_path = os.path.join(root, path)
    basename = os.path.basename(abs_path)
    base_url = Setting.query.filter_by(name='base_url').first().value
    namespace = Setting.query.filter_by(name='namespace').first().value
    # Fetch the upload form first to scrape its hidden token inputs.
    response_code = 0
    while response_code != 200:
        response = session.get('http://{}/Special:Upload'.format(base_url))
        response_code = response.status_code
    data = scrape_inputs(response.text)
    data['wpDestFile'] = '{}_{}'.format(namespace, basename)
    response_code = 0
    while response_code != 200:
        # Re-open the file on every attempt: the original opened it
        # once outside the loop, which leaked the handle and would
        # re-post an exhausted stream on retry.
        with open(abs_path, 'rb') as upload_handle:
            response = session.post(
                'http://{}/Special:Upload'.format(base_url),
                data=data,
                files={'wpUploadFile': upload_handle},
            )
        response_code = response.status_code
    # Find the external path the wiki assigned to the upload.
    # NOTE(review): raises AttributeError if no /wiki/images/ link is
    # present in the response -- TODO confirm that failing loudly here
    # is the desired behavior.
    m = re.search('"(/wiki/images/.+?)"', response.text)
    external_path = m.group(1)
    # BUG FIX: the original was missing .first(), so the always-truthy
    # Query object was mutated instead of a row, the "create new row"
    # branch was unreachable, and db.session.add() received a Query.
    uploaded_file = UploadedFile.query.filter_by(name=basename).first()
    if uploaded_file is None:
        uploaded_file = UploadedFile(name=basename,
                                     external_path=external_path)
    else:
        uploaded_file.external_path = external_path
    db.session.add(uploaded_file)
    # NOTE(review): no db.session.commit() here -- presumably the
    # caller commits; verify against call sites.
file_pattern = re.compile(r'(?<=url\()([\'"]?.*?)(?=[?#\'")])')
def convert_external(match_obj):
    """re.sub callback: rewrite a url(...) path to its external path.

    Keeps any leading quote character from the match, looks up the
    file's basename among uploaded files, and substitutes the wiki's
    external path for it. The original path is kept when no uploaded
    file is found.

    :param match_obj: match from ``file_pattern``.
    :returns: the replacement string (quote prefix preserved).
    """
    raw = match_obj.groups()[0]
    quote = ''
    if raw and raw[0] in ('"', "'"):
        quote = raw[0]
        raw = raw[1:]
    # BUG FIX: the original was missing .first() (formatting a Query,
    # not a row), and its "... or m" fallback was unreachable because
    # the formatted string is always truthy.
    uploaded = UploadedFile.query.filter_by(
        name=os.path.basename(raw)).first()
    if uploaded is None:
        return '{}{}'.format(quote, raw)
    return '{}{}'.format(quote, uploaded.external_path)
def upload_text_file(root, path):
    """Upload a local text asset (CSS/JS/...) as a wiki text page.

    ``url(...)`` references inside the file are rewritten to the
    external paths of previously uploaded binary files, and all dots
    are stripped from the file name to build the wiki page name.
    """
    source = os.path.join(root, path)
    with open(source) as handle:
        body = handle.read()
    body = re.sub(file_pattern, convert_external, body)
    page_name = os.path.basename(source).replace('.', '')
    return upload_wiki_text(body, page_name)
def upload_wiki_text(content, path, prefix=''):
    """Store wiki text at ``[prefix]Team:<namespace>[/<path>]``.

    :param content: the wiki text to store.
    :param path: sub-page path under the team page; the team root page
        is edited when this is empty.
    :param prefix: optional title prefix such as ``'Template:'``.
    """
    base_url = Setting.query.filter_by(name=u'base_url').first().value
    namespace = Setting.query.filter_by(name=u'namespace').first().value
    edit_path = '{}Team:{}'.format(prefix, namespace)
    if path:
        edit_path += '/{}'.format(path)
    # Load the edit form (retrying until HTTP 200) to scrape the hidden
    # edit tokens, then submit the new page body the same way.
    while True:
        response = session.get('http://{}/wiki/index.php?title={}&action=edit'.format(base_url, edit_path))
        if response.status_code == 200:
            break
    data = scrape_inputs(response.text)
    data['wpTextbox1'] = content
    while True:
        response = session.post("http://{}/wiki/index.php?title={}&action=submit".format(base_url, edit_path), data)
        if response.status_code == 200:
            return
| {
"content_hash": "5150b610f2e9e3df36dc41de225e79d2",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 183,
"avg_line_length": 31.90740740740741,
"alnum_prop": 0.648674792029406,
"repo_name": "dkmva/igem-wiki-wizard",
"id": "de414d1e0f829427f7963cdb63099c51d9633239",
"size": "5169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/upload.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "393372"
},
{
"name": "HTML",
"bytes": "1688762"
},
{
"name": "JavaScript",
"bytes": "2099807"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "Python",
"bytes": "42323"
}
],
"symlink_target": ""
} |
"""
Scripts that make up subcommands to the `nlp` command.
Every Python file in this directory should have
a `main()` function decorated with any necessary
`@click.option` decorators for options specific
to that command.
"""
| {
"content_hash": "e6f54007ccbb710871074aa8c821e3d5",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 54,
"avg_line_length": 28.25,
"alnum_prop": 0.7654867256637168,
"repo_name": "jamesmishra/nlp-playground",
"id": "52e23416432ea296b3ac1a09eeb13e4f61b9f32c",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nlp_playground/scripts/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "76418"
},
{
"name": "Shell",
"bytes": "2459"
}
],
"symlink_target": ""
} |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'chirp_modbus',
version = '1.0.2',
license = 'Apache License, Version 2.0',
author = 'Albertas Mickenas',
author_email = 'mic@wemakethings.net',
url = 'https://github.com/Miceuz/rs485-moist-sensor',
description = 'A wrapper library for communicatin with Modbus based Chirp soil moisture sensor',
install_requires = ['minimalmodbus>=1.0.2'],
py_modules = ['chirp_modbus'],
keywords =['chirp', 'catnip', 'soil', 'moisture', 'sensor', 'rs485'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'Intended Audience :: Manufacturing',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications',
'Topic :: Home Automation',
'Topic :: Scientific/Engineering',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Hardware :: Hardware Drivers',
'Topic :: Terminals :: Serial',
]
) | {
"content_hash": "0c9ce535ee95b2e10dab19a4df8e907a",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 97,
"avg_line_length": 38.74285714285714,
"alnum_prop": 0.6438053097345132,
"repo_name": "Miceuz/rs485-moist-sensor",
"id": "ae6ad36b23bf335f81dd8799b14d5ae87ee7f268",
"size": "1356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/lib/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "30227"
},
{
"name": "Makefile",
"bytes": "2996"
},
{
"name": "Python",
"bytes": "17325"
}
],
"symlink_target": ""
} |
from __future__ import (
    absolute_import,
    with_statement,
)

import inspect
import logging
import sys
import unittest
from contextlib import contextmanager
from copy import deepcopy
from random import getrandbits
from types import FunctionType

# BUG FIX: "from collections import Mapping" stopped working in Python
# 3.10, when the ABC aliases were removed from the collections package.
# Prefer collections.abc (Python 3.3+) and fall back for Python 2.
try:
    from collections.abc import Mapping
except ImportError:  # pragma: no cover - Python 2 fallback
    from collections import Mapping
class CascadingFailureError(AssertionError):
    """Failure raised for tests skipped by a cascading setUp failure.

    Raised in tests during a cascading failure.
    """
LOGGER = logging.getLogger(__name__)
class Helper(unittest.TestCase):
    """A singleton used to keep objects persistent during test execution.

    Objects that hold the Helper instance as an attribute typically
    route attribute access to it through custom __getattr__,
    __setattr__ and __delattr__ methods, which gives them access to the
    :class:`unittest.TestCase` assert methods and to anything stored on
    the helper.
    """

    def __init__(self, *args, **kwargs):
        # Groups whose setUps have run but whose tearDowns have not.
        self._level_stack = []
        # FIFO queue of test cases waiting to be executed.
        self._cases = []
        super(Helper, self).__init__(*args, **kwargs)

    def __del__(self):
        self._clear_stack()

    def _clear_stack(self):
        """Run the tearDowns of every group still on the level stack."""
        # Unwind in reverse order so inner groups tear down first; a
        # group leaves the stack only after its tearDowns have run.
        for group in reversed(self._level_stack[:]):
            if group._teardowns:
                LOGGER.debug(
                    "Running tearDowns for group:\n{}".format(
                        group._get_full_ancestry_description(True),
                    ),
                )
                for index, teardown in enumerate(group._teardowns):
                    LOGGER.debug("Running tearDown #{}".format(index))
                    teardown()
            self._level_stack.remove(group)
        LOGGER.debug("Teardowns complete.")

    def _get_test_count(self):
        """The number of test cases created at the current moment."""
        return len(self._cases)

    def _set_teardown_level_for_last_case(self, level):
        """Set the teardown level of the most recently created case."""
        self._cases[-1]._teardown_level = level

    def _add_case(self, case):
        """Append a test case to the queue."""
        self._cases.append(case)

    def _get_next_test(self):
        """Pop and return the next queued test case."""
        if not self._cases:
            raise IndexError("more test classes than test cases")
        return self._cases.pop(0)

    def runTest(self):
        # Required so a bare Helper() instance is a valid TestCase.
        pass
helper = Helper()
def get_next_test_from_helper():
    """Peek at the next queued test case without removing it."""
    queue = helper._cases
    return queue[0]
class ContextionalTestResultProxy(object):
    """Wraps a unittest result object to hook per-test tearDown handling.

    ``result`` may itself be a wrapper; the innermost result is
    unwrapped through its ``result`` attribute when present.
    """

    def __init__(self, result):
        # The innermost result object, if ``result`` wraps one.
        self._true_result = getattr(result, "result", result)
        self._result = result
        # Mirror the output stream so callers can write to it directly.
        if hasattr(self._true_result, "stream"):
            self.stream = self._true_result.stream

    def __getattr__(self, name):
        # "showAll" lives on the innermost result; everything else is
        # forwarded to the (possibly wrapping) result object.
        if name == "showAll":
            return getattr(self._true_result, name)
        return getattr(self._result, name)

    def stopTest(self, test):
        # NOTE(review): assumes ``test`` is a contextional case exposing
        # _is_pytest, _teardown_to_level and _case._teardown_level --
        # confirm at call sites.
        if not test._is_pytest:
            test._teardown_to_level(test._case._teardown_level)
        self._result.stopTest(test)
class GcmMaker(object):
    """Factory facade used as the module-level ``GroupContextManager``.

    Calling an instance creates a new root :class:`Context`; the
    ``add_*``/``includes``/``combine`` properties forward to the
    context currently being built. Unknown attribute reads, writes and
    deletes are deferred to the shared :class:`Helper` singleton, so
    test state can be stashed directly on the GCM object.
    """

    _helper = helper

    def __init__(self):
        # NOTE(review): this assignment is routed through __setattr__
        # below, so the value actually lives on the shared helper
        # rather than this instance's __dict__; reads still resolve via
        # __getattr__. Confirm this sharing is intended before changing.
        self._current_context = None

    def __call__(self, description, cascading_failure=True):
        """Create a new root Context and make it the current context.

        :param description: human-readable name of the root group.
        :param cascading_failure: whether a failing setUp fails every
            test in the group (see :class:`Context`).
        :returns: the new current :class:`Context`.
        """
        new_context = Context(description, cascading_failure)
        new_context._parent_context = self._current_context
        new_context._gcm = self
        self._current_context = new_context
        return self._current_context

    def __getattr__(self, attr):
        """Defer attribute lookups to the shared helper."""
        try:
            return getattr(self._helper, attr)
        except AttributeError:
            raise AttributeError(
                "GCM has no attribute '{}'".format(attr),
            )

    def __setattr__(self, attr, value):
        """Defer attribute assignment to the shared helper.

        Names already stored on the instance (or the special "_group"
        name) are written to the instance itself.
        """
        if attr in self.__dict__.keys() or attr == "_group":
            # BUG FIX: this used super(Context, self), which raises
            # TypeError because a GcmMaker is not a Context subclass.
            super(GcmMaker, self).__setattr__(attr, value)
        else:
            setattr(self._helper, attr, value)

    def __delattr__(self, attr):
        """Defer attribute deletion to the shared helper."""
        if attr in self.__dict__.keys() or attr == "_group":
            # BUG FIX: this used super(Context, self); see __setattr__.
            super(GcmMaker, self).__delattr__(attr)
        else:
            delattr(self._helper, attr)

    @property
    def add_test(self):
        """Forward to :meth:`Context.add_test` of the current context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_test

    @property
    def add_group(self):
        """Forward to :meth:`Context.add_group` of the current context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_group

    @property
    def add_setup(self):
        """Forward to :meth:`Context.add_setup` of the current context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_setup

    @property
    def add_test_setup(self):
        """Forward to :meth:`Context.add_test_setup` of the current
        context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_test_setup

    @property
    def add_teardown(self):
        """Forward to :meth:`Context.add_teardown` of the current
        context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_teardown

    @property
    def add_test_teardown(self):
        """Forward to :meth:`Context.add_test_teardown` of the current
        context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.add_test_teardown

    @property
    def includes(self):
        """Forward to :meth:`Context.includes` of the current context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.includes

    @property
    def combine(self):
        """Forward to :meth:`Context.combine` of the current context.

        :raises AttributeError: if no context is currently being built.
        """
        if self._current_context is None:
            raise AttributeError("No current context.")
        return self._current_context.combine

    def utilize_asserts(self, container):
        """Install custom assert methods on the Helper class.

        :param container: a class, a list/set of functions, a dict
            mapping names to functions, or a single function.
        :raises TypeError: for any other container type.

        The container is normalized into a name -> function mapping and
        every entry whose name starts with "assert" is set as a method
        on :class:`Helper`, making it available to all test cases.
        """
        assert_methods = {}
        if inspect.isclass(container):
            c_funcs = inspect.getmembers(
                container,
                predicate=inspect.isfunction,
            )
            c_meths = inspect.getmembers(
                container,
                predicate=inspect.ismethod,
            )
            # Unwrap bound methods to their plain underlying functions.
            c_meths_funcs = list((n, m.__func__) for n, m in c_meths)
            assert_methods = {
                name: method for name, method in set(c_funcs + c_meths_funcs)
            }
        elif isinstance(container, list) or isinstance(container, set):
            assert_methods = {method.__name__: method for method in container}
        elif isinstance(container, dict):
            assert_methods = container
        elif isinstance(container, FunctionType):
            assert_methods = {container.__name__: container}
        else:
            raise TypeError(
                "Unexpected type. Must be class, list, dict, or function",
            )
        for name, method in assert_methods.items():
            if name.startswith("assert"):
                setattr(Helper, name, method)
GroupContextManager = GcmMaker()
class Context(object):
    """A context manager for groups, their fixtures, child groups, and tests.

    :param description: The description for the group of the current context.
    :type description: str.

    :param cascading_failure: Cascade the failure to all tests within the root
        group.
    :type cascading_failure: bool.

    A :class:`Context` is used to handle constructing groups, their fixtures,
    child groups, and tests through the various decorators and methods
    it provides.

    If ``cascading_failure`` is ``True``, and one of the setUps for this group
    throws an error, then the remaining setUps will be skipped and, of the
    remaining setUps and tests within this group (including those of
    descendant groups), the setUps will be skipped, and the tests will
    automatically fail.

    In the event of a cascading failure, all tearDowns of this group (but
    not the descendant groups) will still be run, so, if there are any,
    they should be able to handle a situation where one or more of the
    setUps for this group didn't run all the way through without any
    problems.

    A cascading failure can only be triggered by a setUp that exists at the
    top level of this group. If a setUp of a descendant group has an issue, it
    will not cause a cascading failure of this group.

    Example::

        with GCM("Main Group") as MG:

            @GCM.add_test("something")
            def test(case):
                case.assertTrue(True)

        MG.create_tests()
    """

    # Shared Helper singleton that attribute access falls back to.
    _helper = helper
    # The Context instance currently active as a context manager.
    _current_manager = None
def __init__(self, description, cascading_failure=True):
self.__dict__["_group"] = Group(
description,
cascading_failure=cascading_failure,
)
self.__dict__["_gcm"] = None
self.__dict__["_parent_context"] = None
self.__dict__["_old_manager"] = None
def __enter__(self):
"""Track and provided the context manager when entering the context."""
self._old_manager = self.__class__._current_manager
self.__class__._current_manager = self
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Stop tracking the context manager and handle exiting the context."""
self.__class__._current_manager = self._old_manager
self._gcm._current_context = self._parent_context
if exc_type is None:
return True
def __getattr__(self, attr):
"""Defer attribute lookups to helper."""
return getattr(self._helper, attr)
def __setattr__(self, attr, value):
"""Defer attribute lookups to helper."""
if attr in self.__dict__.keys():
super(Context, self).__setattr__(attr, value)
else:
setattr(self._helper, attr, value)
def __delattr__(self, attr):
"""Defer attribute lookups to helper."""
if attr in self.__dict__.keys():
super(Context, self).__delattr__(attr)
else:
delattr(self._helper, attr)
def add_test(self, func):
"""Add the decorated function to the current group as a test.
:param func: The test description or the test function itself
:type func: str or function
This decorator takes an optional argument for the description of the
test case. If not provided, the docstring of the function will be used
as the test case's description. If no description argument is provided,
and the function has no docstring, then the name of the function will
be used.
Example::
with GCM("Main Group") as MG:
@GCM.add_test("something")
def test(case):
case.assertTrue(True)
.. note::
To avoid any extra functions running by accident, this decorator
will NOT return any replacement function. The decorated function
will no longer exist in the global namespace of the module it was
declared in once the decorator is evaluated.
"""
if isinstance(func, FunctionType):
# use the function's __name__ if it has no docstring.
desc = func.__doc__ or func.__name__
else:
desc = func
def decorator(f):
case = Case(self._group, f, desc)
self._group._cases.append(case)
if isinstance(func, FunctionType):
decorator(func)
else:
return decorator
@contextmanager
def add_group(self, description, cascading_failure=True, params=()):
"""Use a new child group of the parent group for this context.
:param description: The description of the group for the context
:type description: str
:param cascading_failure: Cascade the failure to all tests within this
group.
:type descrition: bool.
:param params: The collection of sets of parameters
:type params: collection
If ``cascading_failure`` is ``True``, and one of the setUps for this
group throws an error, then the remaining setUps will be skipped and,
of the remaining setUps and tests within this group (including those of
descendant groups), the setUps will be skipped, and the tests will
automatically fail.
In the event of a cascading failure, all tearDowns of this group (but
not the descendant groups) will still be run, so, if there are any,
they should be able to handle a situation where one or more of the
setUps for this group didn't run all the way through without any
problems.
A cascading failure can only be triggered by a setUp that exists at the
top level of this group. If a setUp of a descendant group has an issue,
it will not cause a cascading failure of this group.
If provided with parameters, a duplicate group will be made for each
set of parameters (if any are provided) where each set of parameters is
passed to both the setups and teardowns for that group.
If the parameters are just a sequence of parameters (i.e. a set, tuple,
or list), then the group's description will show the particular set of
parameters used for that group. If it is a mapping, the key for each
set will be applied to the end of the group's description instead. Each
set of parameters can either be a set/tuple/list, or a Mapping if you
want keyword arguments to be passed to your setups/teardowns. For
example, the following code::
with GCM("Some Group") as SG:
params = (
{
"num_1": 1,
"num_2": 2,
"num_3": 3,
},
{
"num_3": 3,
"num_2": 2,
"num_1": 1,
},
)
with GCM.add_group("Child Group", params=params):
@GCM.add_setup
def setUp(num_1, num_2, num_3):
GCM.sum = num_1 + num_2 + num_3
@GCM.add_test("sum is 6")
def test(case):
case.assertEqual(GCM.sum, 6)
params = {
"set #1": (1, 2, 3),
"set #2": (3, 2, 1),
}
with GCM.add_group("Another Child Group", params=params):
@GCM.add_setup
def setUp(num_1, num_2, num_3):
GCM.sum = num_1 + num_2 + num_3
@GCM.add_test("sum is 6")
def test(case):
case.assertEqual(GCM.sum, 6)
will show the following output:
.. code-block:: none
Some Group
Child Group {'num_1': 1, 'num_2': 2, 'num_3': 3}
sum is 6 ... ok
Child Group {'num_1': 1, 'num_2': 2, 'num_3': 3}
sum is 6 ... ok
Another Child Group set #2
sum is 6 ... ok
Another Child Group set #1
sum is 6 ... ok
"""
last_group = self._group
self._group = last_group._add_child(description, cascading_failure)
yield self
no_params = params == ()
group_identifiers = []
if isinstance(params, Mapping):
group_identifiers = params.keys()
elif no_params:
group_identifiers = [0]
else:
group_identifiers = range(len(params))
for gid in group_identifiers:
args = () if no_params else params[gid]
new_group = deepcopy(self._group)
new_group._parent = last_group
new_group._args = args
if isinstance(params, Mapping):
new_group._description += " {}".format(gid)
elif no_params:
pass
else:
new_group._description += " {}".format(params[gid])
last_group._children.append(new_group)
original_new_child = self._group
self._group = last_group
self._group._children.remove(original_new_child)
def add_setup(self, func):
"""Add the decorated function to the current context as a setup.
:param func: The setup description or the setup function itself
:type func: str or function
This setup will only be run once, and it will be run by the first test
case within the group. If the current group has no test cases, then the
first test case of any of the current group's descendants will run the
setup before running any of its own. If the current group does not
have any test cases and none of its descendants have any test cases,
then the setup will not get run.
While the setup must be run by the actual test cases themselves (as
that's the only opportunity to run code), it might be easiest to
imagine that the setups for a group get run only as you step in to that
group from its parent.
If a description for the setup is provided, it will be shown when it is
run, at the same indentation as the tests of the group. If no
description is provided, the setup will not be shown unless it throws
an error. If it has no description, and it throws an error, you'll see
something along the lines of ``# setup (2/5) ERROR``.
Example::
with GCM("Main Group") as MG:
@GCM.add_setup
def setUp():
GCM.thing = 1
@GCM.add_test("thing is 1")
def test(case):
case.assertEqual(
GCM.thing,
1,
)
.. note::
To avoid any extra functions running by accident, this decorator
will NOT return any replacement function. The decorated function
will no longer exist in the global namespace of the module it was
declared in once the decorator is evaluated.
"""
if isinstance(func, FunctionType):
desc = func.__doc__
else:
desc = func
def decorator(f):
fixture = SetUpFixture(self._group, f, desc)
self._group._setups.append(fixture)
if isinstance(func, FunctionType):
decorator(func)
else:
return decorator
def add_test_setup(self, func):
"""Add the decorated function to the current context as a test setup.
:param func:
A function to be used for a setup of the tests of the group of the
current context
:type func: function
This test setup will be run once before each test case in the current
group. If the current group has no test cases, then this test setup
will never be run, even if a descendant of the current group has test
cases.
Example::
with GCM("Main Group") as MG:
@GCM.add_setup
def setUp():
GCM.thing = 0
@GCM.add_test_setup
def setUpTest():
GCM.thing += 1
@GCM.add_test("thing is 1")
def test(case):
case.assertEqual(
GCM.thing,
1,
)
.. note::
To avoid any extra functions running by accident, this decorator
will NOT return any replacement function. The decorated function
will no longer exist in the global namespace of the module it was
declared in once the decorator is evaluated.
"""
self._group._test_setups.append(func)
def add_teardown(self, func):
"""Add the decorated function to the current context as a teardown.
:param func:
A function to be used for a teardown of the group of the current
context
:type func: function
This teardown will only be run once, and it will be run by the last
test case within the group, including the test cases of the group's
descendants. If the current group has test cases, but its descendants
also have test cases, then only the single last test case of its
descendants will run the teardown after running its own teardowns. If
the current group does not have any test cases and none of its
descendants have any test cases, then the teardown will not get run.
While the teardowns must be run by the actual test cases themselves (as
that's the only opportunity to run code), it might be easiest to
imagine that the teardowns for a group get run only as you step out of
that group, back up to its parent group.
If a description for the teardown is provided, it will be shown when it
is run, at the same indentation as the tests of the group. If no
description is provided, the teardown will not be shown unless it
throws an error. If it has no description, and it throws an error,
you'll see something along the lines of ``# teardown (2/5) ERROR``.
Example::
with GCM("Main Group") as MG:
@GCM.add_setup
def setUp():
GCM.thing = 0
@GCM.add_teardown("deleting thing")
def tearDown():
del GCM.thing
with GCM.add_group("Child A"):
@GCM.add_setup("incrementing by 2")
def setUp():
GCM.thing += 2
@GCM.add_teardown("decrementing by 1")
def tearDown():
GCM.thing -= 1
@GCM.add_test("thing is 2")
def test(case):
case.assertEqual(
GCM.thing,
2,
)
with GCM.add_group("Child B"):
@GCM.add_test("thing is now 1")
def test(case):
case.assertEqual(
GCM.thing,
1,
)
Output:
.. code-block:: none
Main Group
Child A
# incrementing by 2
thing is 2 ... ok
# decrementing by 1
Child B
thing is now 1 ... ok
# deleting thing
.. note::
To avoid any extra functions running by accident, this decorator
will NOT return any replacement function. The decorated function
will no longer exist in the global namespace of the module it was
declared in once the decorator is evaluated.
"""
if isinstance(func, FunctionType):
desc = func.__doc__
else:
desc = func
def decorator(f):
fixture = TearDownFixture(self._group, f, desc)
self._group._teardowns.append(fixture)
if isinstance(func, FunctionType):
decorator(func)
else:
return decorator
def add_test_teardown(self, func):
"""Add the decorated function to the current group as a test teardown.
:param func:
A function to be used for a teardown of the tests of the group of
the current context
:type func: function
This test teardown will be run once after each test case in the current
group. If the current group has no test cases, then this test teardown
will never be run, even if a descendant of the current group has test
cases.
Example::
with GCM("Main Group") as MG:
@GCM.add_setup
def setUp():
GCM.thing = 0
@GCM.add_test_setup
def setUpTest():
GCM.thing += 1
@GCM.add_test_teardown
def setUpTest():
GCM.thing -= 1
@GCM.add_test("thing is 1")
def test(case):
case.assertEqual(
GCM.thing,
1,
)
@GCM.add_test("thing is still 1")
def test(case):
case.assertEqual(
GCM.thing,
1,
)
.. note::
To avoid any extra functions running by accident, this decorator
will NOT return any replacement function. The decorated function
will no longer exist in the global namespace of the module it was
declared in once the decorator is evaluated.
"""
self._group._test_teardowns.append(func)
@classmethod
def utilize_asserts(cls, container):
"""Allow the use of custom assert method in tests.
:param container:
A container of functions/methods to be used as assert methods
:type container: class, list of functions, or function
Accepts a class, list/set/dictionary of functions, or a function.
If a class is passed in, this takes all the methods of that class and
puts them into a dictionary, where the keys are the function names, and
the values are the functions themselves. If a list or set is passed in,
a dictionary is constructed using the names of the functions as the
keys with the functions being the values. If a function is passed in,
it is put into a dictionary, where the key is the function's name and
the value is the function itself. If a dictionary is passed in, it is
assumed that the keys are the function names, and the values are the
functions themselves.
Example::
class CustomAsserts(object):
def assertCustom(self, value):
if value != "custom":
raise AssertionError("value is not custom")
GCM.utilize_asserts(CustomAsserts)
with GCM("Main Group") as MG:
@GCM.add_test("is custom")
def test(case):
case.assertCustom("custom")
Once the functions are parsed into a dictionary, they are each set as
attributes of the :class:`.Helper`, using their dictionary keys as
their method names, but only if they're would-be names start with
"assert".
"""
assert_methods = {}
if inspect.isclass(container):
c_funcs = inspect.getmembers(
container,
predicate=inspect.isfunction,
)
c_meths = inspect.getmembers(
container,
predicate=inspect.ismethod,
)
c_meths_funcs = list((n, m.__func__) for n, m in c_meths)
assert_methods = {
name: method for name, method in set(c_funcs + c_meths_funcs)
}
elif isinstance(container, list) or isinstance(container, set):
assert_methods = {method.__name__: method for method in container}
elif isinstance(container, dict):
assert_methods = container
elif isinstance(container, FunctionType):
assert_methods = {container.__name__: container}
else:
raise TypeError(
"Unexpected type. Must be class, list, dict, or function",
)
for name, method in assert_methods.items():
if name.startswith("assert"):
setattr(Helper, name, method)
def includes(self, *contexts):
"""Graft a :class:`.Context` group structure here.
:param contexts:
The :class:`.Context` objects that contain the group
structures you want to include in the group of the current context,
in the order they are listed.
:type contexts: :class:`.Context`
For each :class:`.Context` instance that was passed, take
the root group of a it, make a deepcopy of it, and append it to this
:class:`.Context`'s current group's children so that copies
of all of its tests get run within the context of the current group in
the order and structure they were originally defined in.
Example::
with GCM("Predefined Group") as PG:
@GCM.add_test("value is 1")
def test(case):
case.assertEqual(
GCM.value,
1,
)
with GCM.add_group("Sub Group"):
@GCM.add_teardown
def tearDown():
GCM.value = 2
@GCM.add_test("value is still 1")
def test(case):
case.assertEqual(
GCM.value,
1,
)
with GCM("Another Predefined Group") as APG:
@GCM.add_test("value is now 2")
def test(case):
case.assertEqual(
GCM.value,
2,
)
with GCM("Main Group") as MG:
@GCM.add_setup
def setUp():
GCM.value = 1
GCM.includes(
PG,
APG,
)
Output:
.. code-block:: none
Main Group
Predefined Group
value is 1 ... ok
Sub Group
value is still 1 ... ok
AnotherPredefined Group
value is now 2 ... ok
"""
for context in contexts:
if not isinstance(context, Context):
raise TypeError(
"method only accepts Context objects",
)
group_copy = deepcopy(context._group)
group_copy._parent = self._group
self._group._children.append(group_copy)
def combine(self, *contexts):
    """Use the contents of a :class:`.Context`'s root group.

    :param contexts:
        The :class:`.Context` objects containing the group structures you
        want to combine with the group of the current context.
    :type contexts: :class:`.Context`

    For each passed :class:`.Context`, deep-copy its root group and merge
    that copy's tests, fixtures, and child groups into the corresponding
    collections of the current context's group (unlike :meth:`.includes`,
    which attaches the copy as a nested child group).

    :raises TypeError: if any argument is not a :class:`.Context`.
    """
    # Collections that are merged item-by-item; each merged item is
    # re-parented onto the current group. Order matters: it mirrors the
    # order fixtures/cases are consumed when the group runs.
    _MERGED_LISTS = (
        "_setups",
        "_test_setups",
        "_teardowns",
        "_test_teardowns",
        "_cases",
    )
    for ctx in contexts:
        if not isinstance(ctx, Context):
            raise TypeError(
                "method only accepts Context objects",
            )
        copied_root = deepcopy(ctx._group)
        for list_name in _MERGED_LISTS:
            destination = getattr(self._group, list_name)
            for item in getattr(copied_root, list_name):
                item._group = self._group
                destination.append(item)
        # Child groups keep their own identity but get re-parented.
        for child_group in copied_root._children:
            child_group._parent = self._group
            self._group._children.append(child_group)
def create_tests(self, mod=None):
    """Create the tests that will be discovered by the testing framework.

    :param mod: namespace to publish the tests into (usually the result
        of :func:`.globals`); defaults to the caller's frame locals.

    This walks through the tree of groups and test cases, creating the
    :class:`.Case` instances that the :attr:`._helper` holds, and a
    :class:`.TestCase` class for each :class:`.Case` instance to run that
    :class:`.Case` instance.

    The automatic namespace detection should only be trusted when
    :meth:`.create_tests` is called at module level (i.e. not inside a
    function, class, etc.); if there are any issues, pass the namespace
    explicitly (usually ``create_tests(globals())``).

    Only :class:`.Context` instances that call this method will have
    their tests run. A :class:`.Context` that never calls it exists only
    so its group structure can be included in another
    :class:`.Context`'s structure.
    """
    if mod is None:
        # Caller's frame locals; at module level these are the module's
        # globals, which is where test runners discover TestCase classes.
        mod = inspect.stack()[1][0].f_locals
    # Register the pytest plugin exactly once, whatever form the caller's
    # ``pytest_plugins`` already takes (absent, str, list, or tuple).
    plugin = "contextional.pytest_contextional"
    if "pytest_plugins" in mod:
        # ``basestring`` only exists on Python 2; the conditional keeps
        # the name from being evaluated on Python 3.
        str_type = str if sys.version_info >= (3, 0) else basestring
        if isinstance(mod["pytest_plugins"], tuple):
            mod["pytest_plugins"] = list(mod["pytest_plugins"])
        if isinstance(mod["pytest_plugins"], list):
            mod["pytest_plugins"].append(plugin)
        elif isinstance(mod["pytest_plugins"], str_type):
            # CONSISTENCY FIX: reuse ``plugin`` instead of repeating the
            # dotted-path literal (same value; behavior unchanged).
            mod["pytest_plugins"] = [
                mod["pytest_plugins"],
                plugin,
            ]
    else:
        mod["pytest_plugins"] = [
            plugin,
        ]
    start_test_count = self._helper._get_test_count()
    self._group._build_test_cases(mod)
    if self._helper._get_test_count() > start_test_count:
        # The very last test created must tear everything down.
        self._helper._set_teardown_level_for_last_case(NullGroup)
class GroupTestCase(object):
    """The base test class for a Group.

    All Groups will have to create a class that represents each test for
    that Group. This should be the class those classes inherit from in
    order to ensure they all perform the necessary steps required of them.
    """

    # Shared module-level helper singleton; most instance state is
    # deferred to it via __getattr__/__setattr__ below so that state
    # survives across the many dynamically generated TestCase classes.
    _helper = helper
    _case = None
    _dry_run = False
    _root_group_hash = None
    _description = ""
    _full_description = ""
    _currentResult = None
    _err_info = None
    _err = None
    _is_pytest = False

    def __str__(self):
        """String representation of the test case.

        While the tests are running, the description should only contain
        what is necessary based on what has already run (inline form).
        In the test *results* output, the description instead carries the
        full ancestry, so a failed/errored/skipped test has complete
        context even when two tests share a name in different groups.
        """
        if self.__class__._case is None:
            # this must be a dry run (setUpClass never ran).
            self.__class__._dry_run = True
            self.__class__._case = self._helper._get_next_test()
        if self._dry_run:
            return self._case._dry_run_description
        elif self._case._test_started:
            return self._case._full_description
        else:
            return self._case._inline_description

    def __getattr__(self, attr):
        """Defer attribute lookups to the shared helper."""
        try:
            return getattr(self._helper, attr)
        except AttributeError:
            raise AttributeError(
                "'TestCase' object has no attribute '{}'".format(
                    attr,
                ),
            )

    def __setattr__(self, attr, value):
        """Defer attribute assignments to the shared helper.

        Only names already in the instance ``__dict__`` are set locally;
        everything else is stored on the helper so state is shared across
        all generated TestCase classes.
        """
        if attr in self.__dict__.keys():
            super(GroupTestCase, self).__setattr__(attr, value)
        else:
            setattr(self._helper, attr, value)

    def __delattr__(self, attr):
        """Defer attribute deletions to the shared helper."""
        if attr in self.__dict__.keys():
            super(GroupTestCase, self).__delattr__(attr)
        else:
            delattr(self._helper, attr)

    @staticmethod
    def _find_common_ancestor(ancestry_a, ancestry_b):
        """Common ancestry between two :class:`.Group`\\ s.

        If one :class:`.Group` has an ancestry of ``[C, B, A]`` and
        another has ``[D, B, A]``, their common ancestor is ``B``. This
        is used to determine how far teardowns must run before setting up
        for a test that lives on a different branch.
        """
        stack_comp = list(
            zip(
                ancestry_a,
                ancestry_b,
            ),
        )
        branching_point = len(stack_comp)
        for i, level in enumerate(stack_comp):
            if level[0] == level[1]:
                continue
            else:
                branching_point = i
                break
        common_ancestor = NullGroup
        if branching_point > 0:
            common_ancestor = ancestry_a[branching_point - 1]
        return common_ancestor

    @classmethod
    def setUpClass(cls):
        """The preparations for the test case class that's about to run.

        Pulls the next :class:`.Case` from the helper (if not already
        present) so it can drive setups/teardowns and the actual test.
        """
        __tracebackhide__ = True
        if cls._case is None:
            cls._case = cls._helper._get_next_test()
        cls._group = cls._case._group

    def setUp(self):
        """Run the group's per-test setups before each test.

        If any ancestor group is in a cascading failure, skip the setups
        entirely; the test will be auto-failed by :meth:`.runTest`.
        """
        __tracebackhide__ = True
        # BUG FIX: removed the trailing comma after the sole generator
        # expression argument (``any(... for ... ,)`` is a SyntaxError).
        self._auto_fail = any(
            group._cascading_failure_in_progress
            for group in self._group._ancestry
        )
        self._case._test_started = True
        if self._auto_fail is True:
            # BUG FIX: the format string was missing the placeholders for
            # the indent and description arguments (compare the message
            # in the non-failing branch below).
            LOGGER.debug(
                "CASCADING FAILURE - Not setting up for test:"
                "\n{}\n{}{}".format(
                    self._group._get_full_ancestry_description(indented=True),
                    (" " * (self._group._level + 1)),
                    self._case._description,
                ),
            )
            return
        LOGGER.debug(
            "Running test setUps for test:\n{}\n{}{}".format(
                self._group._get_full_ancestry_description(indented=True),
                (" " * (self._group._level + 1)),
                self._case._description,
            ),
        )
        try:
            for i, setup in enumerate(self._group._test_setups):
                LOGGER.debug("Running test setUp #{}".format(i))
                setup()
                LOGGER.debug("test setUp #{} complete.".format(i))
        except Exception:
            LOGGER.debug(
                "Couldn't complete setups for the test due to exception.",
                exc_info=True,
            )
            if self._group._cascading_failure:
                LOGGER.debug("Preparing for cascading failure.")
                self.__class__._auto_fail = True
                self._group._cascading_failure_in_progress = True
            raise
        LOGGER.debug("Test setups complete.")

    def tearDown(self):
        """Run the group's per-test teardowns after each test."""
        __tracebackhide__ = True
        if self._auto_fail is True:
            # BUG FIX: same missing-placeholder fix as in setUp.
            LOGGER.debug(
                "CASCADING FAILURE - Not tearing down test:"
                "\n{}\n{}{}".format(
                    self._group._get_full_ancestry_description(indented=True),
                    (" " * (self._group._level + 1)),
                    self._case._description,
                ),
            )
            return
        LOGGER.debug(
            "Running test tearDowns for test:\n{}\n{}{}".format(
                self._group._get_full_ancestry_description(indented=True),
                (" " * (self._group._level + 1)),
                self._case._description,
            ),
        )
        try:
            for i, teardown in enumerate(self._group._test_teardowns):
                LOGGER.debug("Running test tearDown #{}".format(i))
                teardown()
                LOGGER.debug("test tearDown #{} complete.".format(i))
        except Exception:
            LOGGER.debug(
                "Couldn't complete teardowns for the test due to exception.",
                exc_info=True,
            )
            if self._group._cascading_failure:
                LOGGER.debug("Preparing for cascading failure.")
                self.__class__._auto_fail = True
                self._group._cascading_failure_in_progress = True
            raise
        LOGGER.debug("Test teardowns complete.")

    def _teardown_to_level(self, td_level):
        """Pop and tear down stacked groups until ``td_level`` is on top.

        ``None`` means nothing to tear down; :class:`.NullGroup` means
        tear down everything on the stack.
        """
        if td_level is None:
            return
        if td_level is NullGroup:
            stop_index = None
        else:
            try:
                stop_index = self._helper._level_stack.index(td_level)
            # BUG FIX: list.index raises ValueError, not IndexError, so
            # the friendly message below was previously unreachable. The
            # raised type now matches what callers actually saw before
            # (a ValueError), just with a useful message.
            except ValueError:
                raise ValueError(
                    "Cannot teardown to desired level from current stack.",
                )
        teardown_groups = self._helper._level_stack[:stop_index:-1]
        for group in teardown_groups:
            group._teardown_group()
        LOGGER.debug("Teardowns complete.")

    def _teardown_to_common_level(self):
        """Tear down to the deepest group shared with the next test."""
        self._teardown_to_level(
            self._find_common_ancestor(
                self._helper._level_stack,
                self._group._setup_ancestry,
            ),
        )

    def _dry_run_setup(self, result=None):
        """Set up the group ancestry without executing fixtures."""
        if self._case is None:
            self.__class__._case = self._helper._get_next_test()
            self.__class__._group = self._case._group
        self.temp_result = ContextionalTestResultProxy(result)
        for group in self._group._setup_ancestry:
            group._result = self.temp_result
            group._dry_run = True
            group._setup_group()

    def run(self, result=None):
        """Tear down to the common level, set up this test's ancestry,
        then delegate to the framework's normal run machinery."""
        __tracebackhide__ = True
        self._currentResult = result
        # nose uses a ResultProxy class, but keeps the actual result as an
        # attribute of the proxy object
        self.temp_result = ContextionalTestResultProxy(result)
        LOGGER.debug("Setting up group:\n{}".format(str(self._group)))
        self._teardown_to_common_level()
        for group in self._group._setup_ancestry:
            group._result = self.temp_result
            group._setup_group()
        LOGGER.debug("Setups complete.")
        return super(GroupTestCase, self).run(self.temp_result)

    def _dry_run_teardown(self):
        """Unwind the level stack without executing fixtures."""
        # clean up level stack
        for group in self._case._teardown_groups:
            group._dry_run = True
            group._teardown_group()

    def runTest(self):
        """Execute the actual :class:`.Case`, or auto-fail during a
        cascading failure."""
        __tracebackhide__ = True
        if self._auto_fail is True:
            LOGGER.debug(
                "CASCADING FAILURE - Not running test:\n{}".format(
                    self._case._full_description,
                ),
            )
            raise CascadingFailureError()
        LOGGER.debug("Running test:\n{}".format(self._case._full_description))
        # Execute the actual test case function.
        try:
            self._case(self)
        except Exception:
            LOGGER.debug(
                "Test completed unsuccessfully.",
                exc_info=True,
            )
            raise
        LOGGER.debug("Test completed successfully.")
# Name template for the dynamically generated TestCase classes; filled with
# a random 128-bit integer (see Group._build_test_cases) so the generated
# names cannot collide in the destination module's namespace.
TEST_CLASS_NAME_TEMPLATE = "ContextionalCase_{}"
class Group(object):
    """A group of tests, with common fixtures and description."""

    # Shared module-level helper singleton.
    _helper = helper
    _pytest_dry_run = False

    def __init__(self, description, cascading_failure=True, args=(),
                 parent=None):
        self._description = description
        self._parent = parent
        # When True, a failed group setup auto-fails everything beneath
        # this group instead of letting each test error separately.
        self._cascading_failure = cascading_failure
        self._cascading_failure_in_progress = False
        self._cascading_failure_root = False
        # Positional args (or a Mapping of kwargs) passed to each setup.
        self._args = args
        self._cases = []
        self._setups = []
        self._teardowns = []
        self._test_setups = []
        self._test_teardowns = []
        self._children = []
        self._teardown_level = self
        self._last_test_case = None
        self._last_location = self
        self._result = None
        self._pytest_writer = None
        self._dry_run = False

    def __str__(self):
        if self._pytest_dry_run:
            # pytest handles indentation in dry runs already.
            # NOTE(review): this branch pushes the group onto the level
            # stack as a side effect of str() — intentional for pytest
            # dry-run bookkeeping, but surprising; confirm before reuse.
            self._helper._level_stack.append(self)
            desc = [self._description]
            desc += [setup._inline_description for setup in self._setups]
            return "\n".join(desc)
        return self._get_full_ancestry_description(indented=True)

    @property
    def _level(self):
        """The level of the group within the tree structure.

        Defined as the number of connections between the group and the
        root group (the root itself is level 0).
        """
        level = 0
        parent = self._parent
        while parent is not None:
            level += 1
            parent = parent._parent
        return level

    @property
    def _ancestry(self):
        """The group's ancestry, ordered child to ancestor.

        For ``A > B > C``, ``C._ancestry`` is ``[C, B, A]``.
        """
        ancestry = []
        group = self
        while group is not None:
            ancestry.append(group)
            group = getattr(group, "_parent", None)
        return ancestry

    @property
    def _setup_ancestry(self):
        """The group's ancestry, ordered ancestor to child.

        For ``A > B > C``, ``C._setup_ancestry`` is ``[A, B, C]`` — the
        order in which setups must run.
        """
        return list(reversed(self._ancestry))

    def _get_full_ancestry_description(self, indented=False):
        """The formatted ancestor-to-child description block.

        For ``A > B > C`` this renders ``A``, ``B``, ``C`` on successive
        lines, each indented per its level. If ``indented`` is True, each
        line gets additional leading padding (used for log output).
        """
        padding = " " if indented else ""
        full_desc = ""
        group_ancestry = list(reversed(self._ancestry))
        for ancestor in group_ancestry[:-1]:
            full_desc += "{padding}{indent}{desc}\n".format(
                padding=padding,
                indent=(" " * ancestor._level),
                desc=ancestor._description,
            )
        # the last group does not need a new line after it
        full_desc += "{padding}{indent}{desc}".format(
            padding=padding,
            indent=(" " * group_ancestry[-1]._level),
            desc=group_ancestry[-1]._description,
        )
        return full_desc

    @property
    def _inline_description(self):
        """Description indented to this group's level."""
        return " " * self._level + self._description

    @property
    def _root_group(self):
        """The root group of the :class:`.Context` instance."""
        return self._ancestry[-1]

    def _build_test_cases(self, mod):
        """Build the test cases for this :class:`.Group`.

        The group of the main :class:`.Context` represents the root of a
        tree. Each group is a branch, capable of having leaves (test
        cases) or other branches as children. A branch with no leaves on
        itself or any descendant is considered useless and nothing will
        happen with it, even if it has setups or teardowns.
        """
        if self._cases:
            # build test cases
            bases = (
                GroupTestCase,
                unittest.TestCase,
            )
            for case in self._cases:
                self._helper._add_case(case)
                case_name = TEST_CLASS_NAME_TEMPLATE.format(getrandbits(128))
                _test = type(case_name, bases, {})
                _test.__module__ = mod['__name__']
                mod[_test.__name__] = _test
        start_test_count = self._helper._get_test_count()
        for child in self._children:
            child._build_test_cases(mod)
            end_test_count = self._helper._get_test_count()
            if end_test_count > start_test_count:
                # The last test of this child's subtree must tear back
                # down to this group's level.
                self._helper._set_teardown_level_for_last_case(self)
            start_test_count = end_test_count

    def _add_child(self, child_description, cascading_failure=False):
        """Add a child :class:`.Group` instance to the current group.

        The child is appended to this group's children and made aware
        that this group is its parent.
        """
        child = Group(
            child_description,
            cascading_failure=cascading_failure,
            parent=self,
        )
        self._children.append(child)
        return child

    def _write(self, text):
        # Route output either to the unittest result stream (verbose
        # mode) or to pytest's terminal writer, whichever is active.
        if self._write_to_result:
            self._result.stream.write(text)
        elif self._pytest_writer is not None:
            self._pytest_writer.write_ensure_prefix(text, "")

    def _writeln(self):
        # NOTE(review): unlike _write, there is no pytest branch here;
        # presumably write_ensure_prefix handles line endings — confirm.
        if self._write_to_result:
            self._result.stream.writeln()

    def _setup_group(self):
        """Setup the :class:`Group`.

        Check if any ancestor :class:`Group`\\ s caused a cascading
        failure. If so, do nothing. Otherwise, run the setups for the
        group, reporting any exception against the failing setup fixture
        when a result object is available.
        """
        __tracebackhide__ = True
        if self in self._helper._level_stack:
            # setup for this group has already been attempted
            return
        self._write_to_result = False
        if self._result is not None:
            if hasattr(self._result, "stream"):
                if self._result.showAll:
                    self._write_to_result = True
        self._helper._level_stack.append(self)
        self._write(self._inline_description)
        # BUG FIX: removed the trailing comma after the sole generator
        # expression argument (``any(... for ... ,)`` is a SyntaxError).
        self._cascading_failure_in_progress = any(
            group._cascading_failure_in_progress for group in self._ancestry
        )
        if self._cascading_failure_in_progress:
            LOGGER.debug(
                "CASCADING FAILURE - Not setting up group:\n{}".format(
                    str(self),
                ),
            )
            self._writeln()
            return
        LOGGER.debug("Running setUps for group:\n{}".format(str(self)))
        try:
            for i, setup in enumerate(self._setups):
                LOGGER.debug("Running setUp #{}".format(i))
                self._last_location = setup
                if setup._description is not None:
                    # should be preceded by a new line, because nothing
                    # else triggers a new line.
                    self._writeln()
                    self._write(setup._inline_description + " ")
                if not self._dry_run:
                    # A Mapping of args means keyword arguments.
                    if isinstance(self._args, Mapping):
                        setup(**self._args)
                    else:
                        setup(*self._args)
                LOGGER.debug("setUp #{} complete.".format(i))
        except:  # noqa: E722
            # NOTE(review): deliberately broad so any fixture failure is
            # reported; also catches KeyboardInterrupt — confirm intended.
            LOGGER.debug("Group setup failed.", exc_info=True)
            if setup._description is None:
                # make sure the setup has something displayed if it failed.
                self._writeln()
                self._write(setup._inline_description + " ")
            if self._cascading_failure:
                LOGGER.debug("Triggering cascading failure.")
                self._cascading_failure_in_progress = True
                self._cascading_failure_root = True
            if self._result is not None and self._pytest_writer is None:
                # Temporarily point the underlying result at the failing
                # setup fixture so the error is attributed correctly.
                if hasattr(self._result, "_result"):
                    if hasattr(self._result._result, "test"):
                        old_result_test = self._result._result.test
                        self._result._result.test = setup
                self._result.addError(setup, sys.exc_info())
                if hasattr(self._result, "_result"):
                    if hasattr(self._result._result, "test"):
                        self._result._result.test = old_result_test
            else:
                raise
        else:
            self._writeln()
        LOGGER.debug("Done setting up group.")

    def _teardown_group(self):
        """Tear down the :class:`Group`.

        If this group's setup was never attempted (or its teardown has
        already run), do nothing. During a cascading failure, teardowns
        are skipped unless this group is the one whose setup failure
        started the cascade. Otherwise run the group's teardowns,
        reporting any exception against the failing fixture when a result
        object is available.
        """
        __tracebackhide__ = True
        if self not in self._helper._level_stack:
            # teardowns for this group have already been attempted
            return
        if self._cascading_failure_in_progress:
            if not self._cascading_failure_root:
                LOGGER.debug(
                    "CASCADING FAILURE - Not tearing down group:\n{}".format(
                        str(self),
                    ),
                )
                self._helper._level_stack.remove(self)
                return
        if self._teardowns:
            LOGGER.debug("Running tearDowns for group:\n{}".format(str(self)))
            try:
                for i, teardown in enumerate(self._teardowns):
                    LOGGER.debug("Running tearDown #{}".format(i))
                    self._last_location = teardown
                    if teardown._description is not None:
                        self._write(teardown._inline_description + " ")
                    if not self._dry_run:
                        teardown()
                    if teardown._description is not None:
                        # new line is only needed if teardown has a
                        # description and no error was thrown.
                        self._writeln()
                    LOGGER.debug("tearDown #{} complete.".format(i))
            except:  # noqa: E722
                # NOTE(review): deliberately broad — see _setup_group.
                LOGGER.debug("Group teardown failed.", exc_info=True)
                if teardown._description is None:
                    # make sure the teardown has something displayed if it
                    # failed.
                    self._write(teardown._inline_description + " ")
                if self._result is not None and self._pytest_writer is None:
                    if hasattr(self._result, "_result"):
                        if hasattr(self._result._result, "test"):
                            old_result_test = self._result._result.test
                            self._result._result.test = teardown
                    self._result.addError(teardown, sys.exc_info())
                    if hasattr(self._result, "_result"):
                        if hasattr(self._result._result, "test"):
                            self._result._result.test = old_result_test
                else:
                    # Error can't be recorded: unwind the stack ourselves
                    # and propagate.
                    self._helper._level_stack.remove(self)
                    raise
        self._helper._level_stack.remove(self)
        LOGGER.debug("Done tearing down group.")
class NullGroup(object):
    """Marker for the ultimate teardown level.

    Used only as a :class:`.Case`'s :attr:`._teardown_level` to signal
    that the case is the very last one in the :class:`Context`, so all
    teardowns should run once it completes, leaving the
    :class:`.Helper`'s :attr:`_level_stack` empty.
    """
class Case(object):
    """Information about the test case.

    This includes the :class:`Group` that this test case belongs to, the
    test case's description, the :class:`Group` level that this test case
    will need to teardown to if needed (``None`` by default), and the
    actual function that performs the test.
    """

    # Shared module-level helper singleton.
    _helper = helper
    _exc_info = None
    _dry_run_description_cache = None
    _pytest_dry_run = False

    def __init__(self, group, func, description):
        self._group = group
        self._func = func
        self._description = description
        self._teardown_level = None
        self._test_started = False

    def __call__(self, testcase, *args):
        """Performs the actual test.

        If the wrapped function declares parameters, it receives the
        running TestCase (so it can use its assert methods) plus any
        extra args; a zero-argument function is called bare.
        """
        __tracebackhide__ = True
        self._helper = testcase
        if sys.version_info >= (3, 0):
            funcargs = inspect.getfullargspec(self._func).args
        else:
            funcargs = inspect.getargspec(self._func)[0]
        if funcargs:
            self._func(testcase, *args)
        else:
            self._func()

    def __str__(self):
        if self._pytest_dry_run:
            # pytest handles indentation in dry runs already.
            desc = [self._description]
            td_groups = self._teardown_groups
            desc += [
                td._inline_description
                for g in td_groups
                for td in g._teardowns
            ]
            # Unwind the stack bookkeeping that the dry run built up.
            for group in td_groups:
                self._helper._level_stack.remove(group)
            return "\n".join(desc)
        return self._full_description

    @property
    def _inline_description(self):
        """Description indented one level deeper than its group."""
        return " " * (self._group._level + 1) + self._description

    @property
    def _full_description(self):
        """Description preceded by the group's full ancestry block."""
        desc = "\n{}\n{}{}".format(
            str(self._group),
            (" " * (self._group._level + 2)),
            self._description,
        )
        return desc

    @property
    def _teardown_groups(self):
        """Groups that should be torn down after this test completes."""
        teardown_groups = []
        if self._teardown_level is not None:
            if self._teardown_level is NullGroup:
                stop_index = None
            else:
                try:
                    stop_index = self._helper._level_stack.index(
                        self._teardown_level,
                    )
                # BUG FIX: list.index raises ValueError, not IndexError,
                # so the friendly message below was previously
                # unreachable. The raised type now matches what callers
                # actually saw before (ValueError), with a useful message.
                except ValueError:
                    raise ValueError(
                        (
                            "Cannot teardown to desired level from "
                            "current stack."
                        ),
                    )
            teardown_groups = self._helper._level_stack[:stop_index:-1]
        return teardown_groups

    @property
    def _dry_run_description(self):
        """Ancestry + setups + test description for dry-run listings.

        Cached after the first computation; pushes not-yet-seen ancestor
        groups onto the helper's level stack as a side effect.
        """
        if self._dry_run_description_cache is None:
            desc_list = []
            for group in self._group._setup_ancestry:
                if group not in self._helper._level_stack:
                    self._helper._level_stack.append(group)
                    desc_list.append(group._inline_description)
                    for setup in group._setups:
                        if setup._description is not None:
                            desc_list.append(setup._inline_description)
            desc_list.append(self._inline_description)
            self._dry_run_description_cache = "\n".join(desc_list)
        return self._dry_run_description_cache

    def __getattr__(self, attr):
        """Defer attribute lookups to helper."""
        return getattr(self._helper, attr)
class Fixture(object):
    """Information about the fixture.

    This includes the :class:`Group` that this fixture belongs to, the
    fixture's description (if applicable), and the actual function that
    performs the steps needed for the fixture.

    If a description is provided, it will be written on its own line as the
    fixture is called. If not provided, nothing will be shown, unless the
    fixture throws an error.
    """

    # Shared module-level helper singleton (attribute lookups fall back to
    # it via __getattr__ below).
    _helper = helper
    _exc_info = None
    _dry_run_description_cache = None
    _pytest_dry_run = False

    def __init__(self, group, func, description=None):
        self._group = group
        self._func = func
        self._description = description

    def __call__(self, *args, **kwargs):
        """Run the fixture's wrapped function."""
        __tracebackhide__ = True
        self._func(*args, **kwargs)

    def __str__(self):
        if self._pytest_dry_run:
            # pytest handles indentation in dry runs already.
            return self._description
        return self._full_description

    @property
    def _parent_collection(self):
        """List of fixtures of this type belonging to the parent group.

        Subclasses define ``_fixture_type`` (e.g. "setup"), which maps to
        the group attribute name (e.g. ``_setups``).
        """
        collection_name = "_" + self._fixture_type.lower() + "s"
        return getattr(self._group, collection_name)

    @property
    def _position(self):
        """Zero-based index of this fixture in the parent collection.

        (_position_str below converts it to 1-based for display.)
        """
        return self._parent_collection.index(self)

    @property
    def _position_str(self):
        """Display form of the fixture's 1-based position, e.g. "(2/3)"."""
        length = len(self._parent_collection)
        return "(" + str(self._position + 1) + "/" + str(length) + ")"

    @property
    def description(self):
        """Description of the fixture.

        Falls back to "<type> (i/n)" when no description was given.
        """
        if self._description is None:
            return self._fixture_type + " " + self._position_str
        else:
            return self._description

    @property
    def _root_group(self):
        """The root group of the :class:`.Context` instance."""
        return self._group._root_group

    @property
    def _inline_description(self):
        """Commented description indented one level deeper than its group."""
        return " " * (self._group._level + 1) + "# " + self.description

    @property
    def _full_description(self):
        """Commented description preceded by the group's ancestry block."""
        full_desc = "\n{}\n{}# {}".format(
            str(self._group),
            (" " * (self._group._level + 2)),
            self.description,
        )
        return full_desc

    def __getattr__(self, attr):
        """Defer attribute lookups to helper."""
        return getattr(self._helper, attr)
class SetUpFixture(Fixture):
    # Fixture run before a group's tests; the type name determines both
    # the display fallback ("setup (i/n)") and the parent collection
    # looked up by Fixture._parent_collection ("_setups").
    _fixture_type = "setup"
class TearDownFixture(Fixture):
    # Fixture run after a group's tests; the type name determines both
    # the display fallback ("teardown (i/n)") and the parent collection
    # looked up by Fixture._parent_collection ("_teardowns").
    _fixture_type = "teardown"
class GroupRepr(object):
    """Test-like stand-in used when reporting a group against a result.

    Provides the minimal surface a unittest result object touches:
    ``failureException``, ``shortDescription``, and ``__str__``.
    """

    failureException = AssertionError

    def __init__(self, group):
        self._group = group

    def __str__(self):
        # Leading newline so the ancestry block starts on its own line.
        return "\n{}".format(self._group)

    def shortDescription(self):
        # Returning None stops unittest from substituting a docstring
        # line for the description; __str__ supplies everything.
        return None
class CaseRepr(object):
    """Test-like stand-in used when reporting a case against a result.

    Provides the minimal surface a unittest result object touches:
    ``failureException``, ``shortDescription``, and ``__str__``.
    """

    failureException = AssertionError

    def __init__(self, case):
        self._case = case

    def __str__(self):
        group = self._case._group
        indent = " " * (group._level + 2)
        # Ancestry block on its own lines, then the indented description.
        return "\n{}\n{}{}".format(str(group), indent, self._case._description)

    def shortDescription(self):
        # Returning None stops unittest from substituting a docstring
        # line for the description; __str__ supplies everything.
        return None
| {
"content_hash": "f515b0e3f36efef3cc88c0a12a2d4b83",
"timestamp": "",
"source": "github",
"line_count": 2012,
"max_line_length": 79,
"avg_line_length": 34.87127236580517,
"alnum_prop": 0.5463291572240988,
"repo_name": "SalmonMode/contextional",
"id": "47d61b5416bcd9321298fd80e83ab24bcd296a54",
"size": "70161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contextional/contextional.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "111019"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.