| repo_name | path | language | license | size | score | prefix | middle | suffix |
|---|---|---|---|---|---|---|---|---|
gangadhar-kadam/nassimapp | support/doctype/maintenance_schedule/maintenance_schedule.py | Python | agpl-3.0 | 10,225 | 0.034425 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import add_days, cstr, getdate
from webnotes.model.doc import addchild
from webnotes.model.bean import getlist
from webnotes import msgprint, _
from stock.utils import get_valid_serial_nos
from utilities.transaction_base import TransactionBase, delete_events
class DocType(TransactionBase):
def __init__(self, doc, doclist=[]):
self.doc = doc
self.doclist = doclist
def get_item_details(self, item_code):
item = webnotes.conn.sql("""select item_name, description from `tabItem`
where name = %s""", (item_code,), as_dict=1)
ret = {
'item_name': item and item[0]['item_name'] or '',
'description' : item and item[0]['description'] or ''
}
return ret
def generate_schedule(self):
self.doclist = self.doc.clear_table(self.doclist, 'maintenance_schedule_detail')
count = 0
webnotes.conn.sql("delete from `tabMaintenance Schedule Detail` where parent='%s'" %(self.doc.name))
for d in getlist(self.doclist, 'item_maintenance_detail'):
self.validate_maintenance_detail()
s_list = self.create_schedule_list(d.start_date, d.end_date, d.no_of_visits)
for i in range(d.no_of_visits):
child = addchild(self.doc, 'maintenance_schedule_detail',
'Maintenance Schedule Detail', self.doclist)
child.item_code = d.item_code
child.item_name = d.item_name
child.scheduled_date = s_list[i].strftime('%Y-%m-%d')
if d.serial_no:
child.serial_no = d.serial_no
child.idx = count
count = count+1
child.incharge_name = d.incharge_name
child.save(1)
self.on_update()
def on_submit(self):
if not getlist(self.doclist, 'maintenance_schedule_detail'):
msgprint("Please click on 'Generate Schedule' to get schedule")
raise Exception
self.check_serial_no_added()
self.validate_schedule()
email_map ={}
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.serial_no:
serial_nos = get_valid_serial_nos(d.serial_no)
self.validate_serial_no(serial_nos, d.start_date)
self.update_amc_date(serial_nos, d.end_date)
if d.incharge_name not in email_map:
email_map[d.incharge_name] = webnotes.bean("Sales Person",
d.incharge_name).run_method("get_email_id")
scheduled_date = webnotes.conn.sql("""select scheduled_date
from `tabMaintenance Schedule Detail`
where incharge_name=%s and item_code=%s and parent=%s""",
(d.incharge_name, d.item_code, self.doc.name), as_dict=1)
for key in scheduled_date:
if email_map[d.incharge_name]:
description = "Reference: %s, Item Code: %s and Customer: %s" % \
(self.doc.name, d.item_code, self.doc.customer)
webnotes.bean({
"doctype": "Event",
"owner": email_map[d.incharge_name] or self.doc.owner,
"subject": description,
"description": description,
"starts_on": key["scheduled_date"] + " 10:00:00",
"event_type": "Private",
"ref_type": self.doc.doctype,
"ref_name": self.doc.name
}).insert()
webnotes.conn.set(self.doc, 'status', 'Submitted')
#get schedule dates
#----------------------
def create_schedule_list(self, start_date, end_date, no_of_visit):
schedule_list = []
start_date1 = start_date
date_diff = (getdate(end_date) - getdate(start_date)).days
add_by = date_diff/no_of_visit
#schedule_list.append(start_date1)
while(getdate(start_date1) < getdate(end_date)):
start_date1 = add_days(start_date1, add_by)
if len(schedule_list) < no_of_visit:
schedule_list.append(getdate(start_date1))
return schedule_list
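# Illustrative example (hypothetical dates): with start '2013-01-01',
# end '2013-12-31' and 4 visits, add_by is 364/4 = 91 days, giving
# visits on 2013-04-02, 2013-07-02, 2013-10-01 and 2013-12-31.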
#validate date range and periodicity selected
#-------------------------------------------------
def validate_period(self, arg):
arg1 = eval(arg)
if getdate(arg1['start_date']) >= getdate(arg1['end_date']):
msgprint("Start date should be less than end date ")
raise Exception
period = (getdate(arg1['end_date'])-getdate(arg1['start_date'])).days+1
if arg1['periodicity'] in ('Yearly', 'Half Yearly', 'Quarterly') and period < 365:
msgprint(cstr(arg1['periodicity']) + " periodicity can be set for a period of at least 1 year")
raise Exception
elif arg1['periodicity']=='Monthly' and period<30:
msgprint("Monthly periodicity can be set for period of atleast 1 month or more")
raise Exception
elif arg1['periodicity']=='Weekly' and period<7:
msgprint("Weekly periodicity can be set for period of atleast 1 week or more")
raise Exception
def get_no_of_visits(self, arg):
arg1 = eval(arg)
self.validate_period(arg)
period = (getdate(arg1['end_date'])-getdate(arg1['start_date'])).days+1
count =0
if arg1['periodicity'] == 'Weekly':
count = period/7
elif arg1['periodicity'] == 'Monthly':
count = period/30
elif arg1['periodicity'] == 'Quarterly':
count = period/91
elif arg1['periodicity'] == 'Half Yearly':
count = period/182
elif arg1['periodicity'] == 'Yearly':
count = period/365
ret = {'no_of_visits':count}
return ret
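# e.g. a full-year contract (period = 365 days) with 'Quarterly'
# periodicity returns {'no_of_visits': 4} (365/91 under integer division).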
def validate_maintenance_detail(self):
if not getlist(self.doclist, 'item_maintenance_detail'):
msgprint("Please enter Maintaince Details first")
raise Exception
for d in getlist(self.doclist, 'item_maintenance_detail'):
if not d.item_code:
msgprint("Please select item code")
raise Exception
elif not d.start_date or not d.end_date:
msgprint("Please select Start Date and End Date for item "+d.item_code)
raise Exception
elif not d.no_of_visits:
msgprint("Please mention no of visits required")
raise Exception
elif not d.incharge_name:
msgprint("Please select Incharge Person's name")
raise Exception
if getdate(d.start_date) >= getdat | e(d.end_date):
msgprint("Start date should be less than end date for item "+d.item_code)
raise Exception
def validate_sales_o | rder(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.prevdoc_docname:
chk = webnotes.conn.sql("select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1", d.prevdoc_docname)
if chk:
msgprint("Maintenance Schedule against "+d.prevdoc_docname+" already exist")
raise Exception
def validate(self):
self.validate_maintenance_detail()
self.validate_sales_order()
def on_update(self):
webnotes.conn.set(self.doc, 'status', 'Draft')
def update_amc_date(self, serial_nos, amc_expiry_date=None):
for serial_no in serial_nos:
serial_no_bean = webnotes.bean("Serial No", serial_no)
serial_no_bean.doc.amc_expiry_date = amc_expiry_date
serial_no_bean.save()
def validate_serial_no(self, serial_nos, amc_start_date):
for serial_no in serial_nos:
sr_details = webnotes.conn.get_value("Serial No", serial_no,
["warranty_expiry_date", "amc_expiry_date", "status", "delivery_date"], as_dict=1)
if sr_details.warranty_expiry_date and sr_details.warranty_expiry_date>=amc_start_date:
webnotes.throw("""Serial No: %s is already under warranty upto %s.
Please check AMC Start Date.""" % (serial_no, sr_details.warranty_expiry_date))
if sr_details.amc_expiry_date and sr_details.amc_expiry_date >= amc_start_date:
webnotes.throw("""Serial No: %s is already under AMC upto %s.
Please check AMC Start Date.""" % (serial_no, sr_details.amc_expiry_date))
if sr_details.status=="Delivered" and sr_details.delivery_date and \
sr_details.delivery_date >= amc_start_date:
webnotes.throw(_("Maintenance start date can not be before \
delivery date for serial no: ") + serial_no)
def validate_schedule(self):
item_lst1 =[]
item_lst2 =[]
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.item_code not in item_lst1:
item_lst1.append(d.item_code)
for m in getlist(self.doclist, 'maintenance_schedule_detail'):
if m.item_code not in item_lst2:
item_lst2.append(m.item_code)
if len(item_lst1 |
zsjohny/jumpserver | apps/users/models/user.py | Python | gpl-2.0 | 18,955 | 0.000106 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import uuid
import base64
import string
import random
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import AbstractUser
from django.core.cache import cache
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.shortcuts import reverse
from orgs.utils import current_org
from common.utils import signer, date_expired_default, get_logger, lazyproperty
from common import fields
from ..signals import post_user_change_password
__all__ = ['User']
logger = get_logger(__file__)
class AuthMixin:
@property
def password_raw(self):
raise AttributeError('Password raw is not a readable attribute')
#: Use this attr to set user object password, example
#: user = User(username='example', password_raw='password', ...)
#: It's equal:
#: user = User(username='example', ...)
#: user.set_password('password')
@password_raw.setter
def password_raw(self, password_raw_):
self.set_password(password_raw_)
def set_password(self, raw_password):
if self.can_update_password():
self.date_password_last_updated = timezone.now()
post_user_change_password.send(self.__class__, user=self)
super().set_password(raw_password)
def can_update_password(self):
return self.is_local
def can_update_ssh_key(self):
return self.can_use_ssh_key_login()
def can_use_ssh_key_login(self):
return settings.TERMINAL_PUBLIC_KEY_AUTH
def is_public_key_valid(self):
"""
Check if the user has an ssh public key set.
This function is used in base.html.
"""
if self.public_key:
return True
return False
@property
def public_key_obj(self):
class PubKey(object):
def __getattr__(self, item):
return ''
if self.public_key:
import sshpubkeys
try:
return sshpubkeys.SSHKey(self.public_key)
except Exception:  # any parse failure falls back to the empty PubKey stub
pass
return PubKey()
def reset_password(self, new_password):
self.set_password(new_password)
self.save()
@property
def date_password_expired(self):
interval = settings.SECURITY_PASSWORD_EXPIRATION_TIME
date_expired = self.date_password_last_updated + timezone.timedelta(
days=int(interval))
return date_expired
@property
def password_expired_remain_days(self):
date_remain = self.date_password_expired - timezone.now()
return date_remain.days
@property
def password_has_expired(self):
if self.is_local and self.password_expired_remain_days < 0:
return True
return False
@property
def password_will_expired(self):
if self.is_local and 0 <= self.password_expired_remain_days < 5:
return True
return False
def get_login_confirm_setting(self):
if hasattr(self, 'login_confirm_setting'):
s = self.login_confirm_setting
if s.reviewers.all().count() and s.is_active:
return s
return False
@staticmethod
def get_public_key_body(key):
for i in key.split():
if len(i) > 256:
return i
return key
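# Illustrative: for a typical RSA key 'ssh-rsa AAAAB3Nza... user@host'
# the base64 body exceeds 256 chars and is returned alone, so two keys
# still compare equal in check_public_key() when only the comment differs.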
def check_public_key(self, key):
if not self.public_key:
return False
key = self.get_public_key_body(key)
key_saved = self.get_public_key_body(self.public_key)
if key == key_saved:
return True
else:
return False
class RoleMixin:
ROLE_ADMIN = 'Admin'
ROLE_USER = 'User'
ROLE_APP = 'App'
ROLE_AUDITOR = 'Auditor'
ROLE_CHOICES = (
(ROLE_ADMIN, _('Administrator')),
(ROLE_USER, _('User')),
(ROLE_APP, _('Application')),
(ROLE_AUDITOR, _("Auditor"))
)
role = ROLE_USER
@property
def role_display(self):
if not current_org.is_real():
return self.get_role_display()
roles = []
if self in current_org.get_org_admins():
roles.append(str(_('Org admin')))
if self in current_org.get_org_auditors():
roles.append(str(_('Org auditor')))
if self in current_org.get_org_users():
roles.append(str(_('User')))
return " | ".join(roles)
@property
def is_superuser(self):
if self.role == self.ROLE_ADMIN:
return True
else:
return False
@is_superuser.setter
def is_superuser(self, value):
if value is True:
self.role = self.ROLE_ADMIN
else:
self.role = self.ROLE_USER
@property
def is_super_auditor(self):
return self.role == self.ROLE_AUDITOR
@property
def is_common_user(self):
if self.is_org_admin:
return False
if self.is_org_auditor:
return False
if self.is_app:
return False
return True
@property
def is_app(self):
return self.role == self.ROLE_APP
@lazyproperty
def user_orgs(self):
from orgs.models import Organization
return Organization.get_user_user_orgs(self)
@lazyproperty
def admin_orgs(self):
from orgs.models import Organization
return Organization.get_user_admin_orgs(self)
@lazyproperty
def audit_orgs(self):
from orgs.models import Organization
return Organization.get_user_audit_orgs(self)
@lazyproperty
def admin_or_audit_orgs(self):
from orgs.models import Organization
return Organization.get_user_admin_or_audit_orgs(self)
@lazyproperty
def is_org_admin(self):
if self.is_superuser or self.related_admin_orgs.exists():
return True
else:
return False
@lazyproperty
def is_org_auditor(self):
if self.is_super_auditor or self.related_audit_orgs.exists():
return True
else:
| return False
@lazyproperty
def can_admin_current_org(self):
return current_org.can_admin_by(self)
@lazyproperty
| def can_audit_current_org(self):
return current_org.can_audit_by(self)
@lazyproperty
def can_user_current_org(self):
return current_org.can_user_by(self)
@lazyproperty
def can_admin_or_audit_current_org(self):
return self.can_admin_current_org or self.can_audit_current_org
@property
def is_staff(self):
if self.is_authenticated and self.is_valid:
return True
else:
return False
@is_staff.setter
def is_staff(self, value):
pass
@classmethod
def create_app_user(cls, name, comment):
app = cls.objects.create(
username=name, name=name, email='{}@local.domain'.format(name),
is_active=False, role='App', comment=comment,
is_first_login=False, created_by='System'
)
access_key = app.create_access_key()
return app, access_key
def remove(self):
if not current_org.is_real():
return
if self.can_user_current_org:
current_org.users.remove(self)
if self.can_admin_current_org:
current_org.admins.remove(self)
if self.can_audit_current_org:
current_org.auditors.remove(self)
class TokenMixin:
CACHE_KEY_USER_RESET_PASSWORD_PREFIX = "_KEY_USER_RESET_PASSWORD_{}"
email = ''
id = None
@property
def private_token(self):
return self.create_private_token()
def create_private_token(self):
from authentication.models import PrivateToken
token, created = PrivateToken.objects.get_or_create(user=self)
return token
def delete_private_token(self):
from authentication.models import PrivateToken
PrivateToken.objects.filter(user=self).delete()
def refresh_private_token(self):
self.delete_private_token()
return self |
tehmaze/ansi | ansi/colour/bg.py | Python | mit | 1,639 | 0.025625 | #pylint: disable=C0103,R0903
from ansi.colour.base import Graphic
from ansi.colour.fx import bold
# ECMA-048 standard names
black = Graphic('40')
red = Graphic('41')
green = Graphic('42')
yellow = Graphic('43')
blue = Graphic('44')
magenta = Graphic('45')
cyan = Graphic('46')
white = Graphic('47')
default = Graphic('49')
# ECMA-048 bold variants
boldblack = bold + black
boldred = bold + red
boldgreen = bold + green
boldyellow = bold + yellow
boldblue = bold + blue
boldmagenta = bold + magenta
boldcyan | = bold + cyan
boldwhite = bold + white
# High intensity variants
brightblack = Graphic('100')
brightred = Graphic('101')
brightgreen = Graphic('102')
brightyellow = Graphic('103')
brightblue = Graphic('104')
brightmagenta = Graphic('105')
brightcyan = Graphic('106')
brightwh | ite = Graphic('107')
# Convenience wrappers
brown = yellow # Not in ANSI/ECMA-048 standard
grey = white # Not in ANSI/ECMA-048 standard
gray = white # US English
darkgrey = boldblack
darkgray = boldblack # US English
brightbrown = boldyellow # Not in ANSI/ECMA-048 standard
brightgrey = boldwhite # Not in ANSI/ECMA-048 standard
brightgray = boldwhite # US English
# 8-bit and 24-bit colors
palette = lambda colour: Graphic('48;5;%s' % colour)
truecolor = lambda r,g,b: Graphic('48;2;%s;%s;%s' % (r,g,b))
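# Usage sketch (assumes Graphic instances emit their SGR escape sequence
# when rendered as str, as elsewhere in this package):
#   print('%s 256-colour background %s' % (palette(208), default))
#   print('%s truecolour background %s' % (truecolor(255, 128, 0), default))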
|
shashisp/blumix-webpy | app/gluon/tests/test_cache.py | Python | mit | 3,558 | 0.003092 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.cache
"""
import os
import unittest
from fix_path import fix_sys_path
fix_sys_path(__file__)
from storage import Storage
from cache import CacheInRam, CacheOnDisk, Cache
oldcwd = None
def setUpModule():
global oldcwd
if oldcwd is None:
oldcwd = os.getcwd()
if not os.path.isdir('gluon'):
os.chdir(os.path.realpath('../../'))
def tearDownModule():
global oldcwd
if oldcwd:
os.chdir(oldcwd)
oldcwd = None
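# Call convention exercised by the tests below: cache(key, f, time_expire)
# invokes f only on a cache miss, time_expire=0 forces recomputation, and
# cache(key, None) deletes the key.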
class TestCache(unittest.TestCase):
def testCacheInRam(self):
# defaults to mode='http'
cache = CacheInRam()
self.assertEqual(cache('a', lambda: 1, 0), 1)
self.a | ssertEqual(cache('a', lambda: 2, 100), 1)
cache.clear('b')
self.assertEqual(cache('a', lambda: 2, 100), 1)
cache.clear('a')
self.assertEqual(cache('a', lambda: 2, 100), 2)
cache.clear()
self.assertEqual(cache('a', lambda: 3, 100), 3)
self.assertEqual | (cache('a', lambda: 4, 0), 4)
#test singleton behaviour
cache = CacheInRam()
cache.clear()
self.assertEqual(cache('a', lambda: 3, 100), 3)
self.assertEqual(cache('a', lambda: 4, 0), 4)
#test key deletion
cache('a', None)
self.assertEqual(cache('a', lambda: 5, 100), 5)
#test increment
self.assertEqual(cache.increment('a'), 6)
self.assertEqual(cache('a', lambda: 1, 100), 6)
cache.increment('b')
self.assertEqual(cache('b', lambda: 'x', 100), 1)
def testCacheOnDisk(self):
# defaults to mode='http'
s = Storage({'application': 'admin',
'folder': 'applications/admin'})
cache = CacheOnDisk(s)
self.assertEqual(cache('a', lambda: 1, 0), 1)
self.assertEqual(cache('a', lambda: 2, 100), 1)
cache.clear('b')
self.assertEqual(cache('a', lambda: 2, 100), 1)
cache.clear('a')
self.assertEqual(cache('a', lambda: 2, 100), 2)
cache.clear()
self.assertEqual(cache('a', lambda: 3, 100), 3)
self.assertEqual(cache('a', lambda: 4, 0), 4)
#test singleton behaviour
cache = CacheOnDisk(s)
cache.clear()
self.assertEqual(cache('a', lambda: 3, 100), 3)
self.assertEqual(cache('a', lambda: 4, 0), 4)
#test key deletion
cache('a', None)
self.assertEqual(cache('a', lambda: 5, 100), 5)
#test increment
self.assertEqual(cache.increment('a'), 6)
self.assertEqual(cache('a', lambda: 1, 100), 6)
cache.increment('b')
self.assertEqual(cache('b', lambda: 'x', 100), 1)
def testCacheWithPrefix(self):
s = Storage({'application': 'admin',
'folder': 'applications/admin'})
cache = Cache(s)
prefix = cache.with_prefix(cache.ram,'prefix')
self.assertEqual(prefix('a', lambda: 1, 0), 1)
self.assertEqual(prefix('a', lambda: 2, 100), 1)
self.assertEqual(cache.ram('prefixa', lambda: 2, 100), 1)
def testRegex(self):
cache = CacheInRam()
self.assertEqual(cache('a1', lambda: 1, 0), 1)
self.assertEqual(cache('a2', lambda: 2, 100), 2)
cache.clear(regex=r'a*')
self.assertEqual(cache('a1', lambda: 2, 0), 2)
self.assertEqual(cache('a2', lambda: 3, 100), 3)
return
if __name__ == '__main__':
setUpModule() # pre-python-2.7
unittest.main()
tearDownModule()
|
chemelnucfin/tensorflow | tensorflow/python/feature_column/feature_column_v2.py | Python | apache-2.0 | 183,637 | 0.005015 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This API defines FeatureColumn abstraction.
FeatureColumns provide a high level abstraction for ingesting and representing
features. FeatureColumns are also the primary way of encoding features for
canned `tf.estimator.Estimator`s.
When using FeatureColumns with `Estimators`, the type of feature column you
should choose depends on (1) the feature type and (2) the model type.
1. Feature type:
* Continuous features can be represented by `numeric_column`.
* Categorical features can be represented by any `categorical_column_with_*`
column:
- `categorical_column_with_vocabulary_list`
- `categorical_column_with_vocabulary_file`
- `categorical_column_with_hash_bucket`
- `categorical_column_with_identity`
- `weighted_categorical_column`
2. Model type:
* Deep neural network models (`DNNClassifier`, `DNNRegressor`).
Continuous features can be directly fed into deep neural network models.
age_column = numeric_column("age")
To feed sparse features into DNN models, wrap the column with
`embedding_column` or `indicator_column`. `indicator_column` is recommended
for features with only a few possible values. For features with many
possible values, to reduce the size of your model, `embedding_column` is
recommended.
embedded_dept_column = embedding_column(
categorical_column_with_vocabulary_list(
"department", ["math", "philosophy", ...]), dimension=10)
* Wide (aka linear) models (`LinearClassifier`, `LinearRegressor`).
Sparse features can be fed directly into linear models. They behave like an
indicator column but with an efficient implementation.
dept_column = categorical_column_with_vocabulary_list("department",
["math", "philosophy", "english"])
It is recommended that continuous features be bucketized before being
fed into linear models.
bucketized_age_column = bucketized_column(
source_column=age_column,
boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
Sparse features can be crossed (also known as conjuncted or combined) in
order to form non-linearities, and then fed into linear models.
cross_dept_age_column = crossed_column(
columns=["department", bucketized_age_column],
hash_bucket_size=1000)
Example of building canned `Estimator`s using FeatureColum | ns:
```python
# Define features and transformations
deep_feature_columns = [age_column, embedded_dept_column]
wide_feature_columns = [dept_column, bucketized_age_column,
cross_dept_age_column]
# Build deep model
estimator = | DNNClassifier(
feature_columns=deep_feature_columns,
hidden_units=[500, 250, 50])
estimator.train(...)
# Or build a wide model
estimator = LinearClassifier(
feature_columns=wide_feature_columns)
estimator.train(...)
# Or build a wide and deep model!
estimator = DNNLinearCombinedClassifier(
linear_feature_columns=wide_feature_columns,
dnn_feature_columns=deep_feature_columns,
dnn_hidden_units=[500, 250, 50])
estimator.train(...)
```
FeatureColumns can also be transformed into a generic input layer for
custom models using `input_layer`.
Example of building model using FeatureColumns, this can be used in a
`model_fn` which is given to the {tf.estimator.Estimator}:
```python
# Building model via layers
deep_feature_columns = [age_column, embedded_dept_column]
columns_to_tensor = parse_feature_columns_from_examples(
serialized=my_data,
feature_columns=deep_feature_columns)
first_layer = input_layer(
features=columns_to_tensor,
feature_columns=deep_feature_columns)
second_layer = fully_connected(first_layer, ...)
```
NOTE: Functions prefixed with "_" indicate experimental or private parts of
the API subject to change, and should not be relied upon!
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import math
import numpy as np
import six
from tensorflow.python.eager import context
from tensorflow.python.feature_column import feature_column as fc_old
from tensorflow.python.feature_column import utils as fc_utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.framework import tensor_shape
# TODO(b/118385027): Dependency on keras can be problematic if Keras moves out
# of the main repo.
from tensorflow.python.keras import initializers
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import base as trackable
from tensorflow.python.util import deprecation
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
from tensorflow.python.util.compat import collections_abc
_FEATURE_COLUMN_DEPRECATION_DATE = None
_FEATURE_COLUMN_DEPRECATION = ('The old _FeatureColumn APIs are being '
'deprecated. Please use the new FeatureColumn '
'APIs instead.')
class StateManager(object):
"""Manages the state associated with FeatureColumns.
Some `FeatureColumn`s create variables or resources to assist their
computation. The `StateManager` is responsible for creating and storing these
objects since `FeatureColumn`s are supposed to be stateless configuration
only.
"""
def create_variable(self,
feature_column,
name,
shape,
dtype=None,
trainable=True,
use_resource=True,
initializer=None):
"""Creates a new variable.
Args:
feature_column: A `FeatureColumn` object this variable corresponds to.
name: variable name.
shape: variable shape.
dtype: The type of the variable. Defaults to `self.dtype` or `float32`.
trainable: Whether this variable is trainable or not.
use_resource: If true, we use resource variables. Otherwise we use
RefVariable.
initializer: initializer instance (callable).
Returns:
The created variable.
"""
del feature_column, name, shape, dtype, trainable, use_resource, initializer
raise NotImplementedError('StateManager.create_variable')
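# A concrete manager would back this with a real variable store; a minimal
# sketch (illustrative only, not the production implementation):
#
#   class _SimpleStateManager(StateManager):
#     def __init__(self):
#       self._vars = {}
#
#     def create_variable(self, feature_column, name, shape, dtype=None,
#                         trainable=True, use_resource=True, initializer=None):
#       key = (feature_column.name, name)
#       if key not in self._vars:
#         self._vars[key] = variables.Variable(
#             initializer(shape, dtype=dtype), name=name, trainable=trainable)
#       return self._vars[key]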
def add_variable(self, feature_column, var):
"""Adds an existing variable to the state.
Args:
feature_column: A `F |
litex-hub/lxbe-tool | lxbe_tool/providers/docker.py | Python | apache-2.0 | 323 | 0.006192 |
"""
Based on this example -> https://github.com/open-power/pdbg/blob/master/.build.sh
TEMPDIR=`mktemp -d ${HOME}/pdbgobjXXXXXX`
RUN_TMP="docker run --rm=true --user=${USER} -w ${TEMPDIR} -v ${HOME}:${HOME} -t ${CONTAINER}"
${RUN_TMP} ${SR | CDIR}/configure --host=arm-linux-gnueabi
${RUN_T | MP} make
rm -rf ${TEMPDIR}
"""
|
AnderssonPeter/pytrafikverket | pytrafikverket/__init__.py | Python | mit | 708 | 0 | """Pytrafikverket module."""
# flake8: noqa
from pytrafikverket.trafikverket import (AndFilter, FieldFilter, FieldSort,
Filter, FilterOperation, NodeHelper,
OrFilter, SortOrder, Trafikverket)
from pytrafikverket.trafikverket_train import (StationInfo, TrafikverketTrain,
| TrainStop, TrainStopStatus)
from pytrafikverket. | trafikverket_weather import (TrafikverketWeather,
WeatherStationInfo)
from pytrafikverket.trafikverket_ferry import (TrafikverketFerry,
FerryStop, FerryStopStatus)
|
Comcast/rulio | examples/stockfs.py | Python | apache-2.0 | 4,418 | 0.005885 | #!/usr/bin/python
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# End Copyright
# Little external FS (fact service) example
# Wrap Yahoo stock quotes as an FS.
# Pattern must specify a single ticker symbol "symbol".
# Pattern must specify at least one additional property from the set
legalProperties = {"bid", "ask", "change", "percentChange", "lastTradeSize"}
# curl 'http://download.finance.yahoo.com/d/quotes.csv?s=CMCSA&f=abc1p2k3&e=.csv'
# http://www.canbike.ca/information-technology/yahoo-finance-url-download-to-a-csv-file.html
# A more principled approach would allow the pattern to specify only a
# single additional property, but that decision is a separate
# discussion.
# Usage:
#
# curl -d '{"symbol":"CMCSA","bid":"?bid","ask":"?ask"}' 'http://localhost:6666/facts/search'
#
from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
import cgi # Better way now?
import json
import urllib2
import urllib
import re
PORT = 6666
def protest (response, message):
response.send_response(200)
response.send_header('Content-type','text/plain')
response.end_headers()
response.wfile.write(message) # Should probably be JSON
def getQuote (symbol):
uri = "http://download.finance.yahoo.com/d/quotes.csv?s=" + symbol + "&f=abc1p2k3&e=.csv"
print "uri ", uri
line = urllib2.urlopen(uri).read().strip()
print "got | ", line, "\n"
line = re.sub(r'[%"\n]+', "", line)
print "clean ", line, "\n"
data = line.split(",")
ns = map(float, data)
q = {}
q["bid"] = ns[0]
q["ask"] = ns[1]
q["change"] = ns[2]
q["percentChange"] = ns[3]
q["lastTradeSize"] = ns[4]
return q
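# Illustrative result shape (live values vary with the market):
#   getQuote("CMCSA") -> {"bid": 42.10, "ask": 42.15, "change": -0.30,
#                         "percentChange": -0.71, "lastTradeSize": 100.0}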
class handler(BaseHTTPRequest | Handler):
def do_GET(self):
protest(self, "You should POST with json.\n")
return
def do_POST(self):
if not self.path == '/facts/search':
protest(self, "Only can do /facts/search.\n")
return
try:
content_length = int(self.headers['Content-Length'])
js = self.rfile.read(content_length)
m = json.loads(js)
if 'symbol' not in m:
protest(self, "Need symbol.\n")
return
symbol = m["symbol"]
del m["symbol"]
for p in m:
if p not in legalProperties:
protest(self, "Illegal property " + p + ".\n")
return
v = m[p]
if not v.startswith("?"):
protest(self, "Value " + v + " must be a variable.\n")
return
if len(v) < 2:
protest(self, "Need an named variable for " + v + ".\n")
return
q = getQuote(symbol)
print q, "\n"
bindings = {}
satisfied = True
for p in m:
print p, ": ", q[p], "\n"
if p in q:
bindings[m[p]] = q[p]
else:
satisfied = False
break
if satisfied:
js = json.dumps(bindings)
response = '{"Found":[{"Bindingss":[%s]}]}' % (js)
else:
response = '{"Found":[{"Bindingss":[]}]}'
self.send_response(200)
self.send_header('Content-type','application/json')
self.end_headers()
print 'response ', response
self.wfile.write(response)
except Exception as broke:
print broke, "\n"
protest(self, str(broke))
try:
server = HTTPServer(('', PORT), handler)
print 'Started stock FS on port', PORT
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down the stock FS on ', PORT
server.socket.close()
|
tchellomello/home-assistant | homeassistant/components/firmata/switch.py | Python | apache-2.0 | 2,287 | 0 | """Support for Firmata switch output."""
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from .const import (
CONF_INITIAL_STATE,
CONF_NEGATE_STATE,
CONF_PIN,
CONF_PIN_MODE,
DOMAIN,
)
from .entity import FirmataPinEntity
from .pin import FirmataBinaryDigitalOutput, FirmataPinUsedException
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Firmata switches."""
new_entities = []
board = hass.data[DOMAIN][config_entry.entry_id]
for swit | ch in board.switches:
pin = switch[CONF_PIN]
pin_mode = switch[CONF_PIN_MODE]
initial = switch[CONF_INITIAL_STATE]
negate = switch[CONF_NEGATE_STATE]
api = FirmataBinaryDigitalOutput(board, pin, pin_mode, initial, negate)
try:
api.setup()
except FirmataPinUsedException:
_LOGGER.error(
"Could not setup switch on pin %s s | ince pin already in use.",
switch[CONF_PIN],
)
continue
name = switch[CONF_NAME]
switch_entity = FirmataSwitch(api, config_entry, name, pin)
new_entities.append(switch_entity)
if new_entities:
async_add_entities(new_entities)
class FirmataSwitch(FirmataPinEntity, SwitchEntity):
"""Representation of a switch on a Firmata board."""
async def async_added_to_hass(self) -> None:
"""Set up a switch."""
await self._api.start_pin()
self.async_write_ha_state()
@property
def is_on(self) -> bool:
"""Return true if switch is on."""
return self._api.is_on
async def async_turn_on(self, **kwargs) -> None:
"""Turn on switch."""
_LOGGER.debug("Turning switch %s on", self._name)
await self._api.turn_on()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Turn off switch."""
_LOGGER.debug("Turning switch %s off", self._name)
await self._api.turn_off()
self.async_write_ha_state()
|
klahnakoski/TestLog-ETL | vendor/jx_sqlite/expressions/number_op.py | Python | mpl-2.0 | 1,194 | 0.000838 | # encoding: utf-8
#
#
# This Source Code Form is subject to the term | s of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import, division, unicode_literals
from jx_base.expressions import NumberOp as NumberOp_
from jx_sqlite.expressions import _utils
from jx_sqlite.expressions._utils import SQLang, check
from mo_dots import | wrap
from mo_sql import sql_coalesce
class NumberOp(NumberOp_):
@check
def to_sql(self, schema, not_null=False, boolean=False):
value = SQLang[self.term].to_sql(schema, not_null=True)
acc = []
for c in value:
for t, v in c.sql.items():
if t == "s":
acc.append("CAST(" + v + " as FLOAT)")
else:
acc.append(v)
if not acc:
return wrap([])
elif len(acc) == 1:
return wrap([{"name": ".", "sql": {"n": acc[0]}}])
else:
return wrap([{"name": ".", "sql": {"n": sql_coalesce(acc)}}])
_utils.NumberOp = NumberOp |
7kbird/chrome | tools/telemetry/telemetry/core/backends/chrome/inspector_runtime.py | Python | bsd-3-clause | 2,006 | 0.008973 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import exceptions
class InspectorRuntime(object):
def __init__(self, inspector_backend):
self._inspector_backend = inspector_backend
self._inspector_backend.RegisterDomain(
'Runtime',
self._OnNotification,
self._OnClose)
self._contexts_enabled = False
self._max_context_id = None
def _OnNotification(self, msg):
if (self._contexts_enabled and
msg['method'] == 'Runtime.executionContextCreated'):
self._max_context_id = max(self._max_context_id,
msg['params']['context']['id'])
def _OnClose(self):
pass
def Execute(self, expr, context_id, timeout):
self.Evaluate(expr + '; 0;', context_id, timeout)
def Evaluate(self, expr, context_id, timeout):
request = {
'method': 'Runtime.evaluate',
'params': {
'expression': expr,
'returnByValue': True
}
}
if context_id is not None:
self.EnableAllContexts()
request['params']['contextId'] = context_id
res = self._inspector_backend.SyncRequest(request, timeout)
if 'error' in res:
raise exceptions.EvaluateException(res['error']['message'])
if 'wasThrown' in res['result'] and res['result']['wasThrown']:
# TODO(nduca): propagate stacks from javascript up to the python
# exception.
raise excep | tions.EvaluateException(res['result']['result']['description'])
if res['result']['result']['type'] == 'undefined':
return None
return res['res | ult']['result']['value']
def EnableAllContexts(self):
"""Allow access to iframes."""
if not self._contexts_enabled:
self._contexts_enabled = True
self._inspector_backend.SyncRequest({'method': 'Runtime.enable'},
timeout=30)
return self._max_context_id
|
batra-mlp-lab/DIGITS | digits/frameworks/errors.py | Python | bsd-3-clause | 825 | 0.006061 | # Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
from digits.utils import subclass
@subclass
class Error(Exception):
pass
@subclass
class BadNetw | orkError(Error):
"""
Errors that occur when validating a network
"""
def __init__(self, message):
self.message = message
def __str__(self):
return repr(self.message)
@subclass
class NetworkVisualizationError(Error):
"""
Errors that occur when visualizing a network
"""
def __init__(self, message):
| self.message = message
def __str__(self):
return repr(self.message)
@subclass
class InferenceError(Error):
"""
Errors that occur during inference
"""
def __init__(self, message):
self.message = message
def __str__(self):
return repr(self.message)
|
xamurej/py3-cli-skel | cli_app/options.py | Python | mit | 888 | 0 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
"""Module docstring.
This serves as a long usage message.
"""
import configargparse
from cli_app import log
LOG = log.Logger.get()
class Options(object):
def __init__(self):
self.parser = configargparse.ArgParser(
default_config_files=['./config.ini'])
s | elf.parser.add('-c',
'--my-config',
required=False,
is_config_file=True,
help='config file | path')
# this option can be set in a config file because it starts with '--'
self.parser.add('--text',
required=False,
help='text for output',
default='Hello world',
env_var='APP_TEXT')
def parse(self):
return self.parser.parse_args()
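# Illustrative config.ini consumed by the parser above:
#   text = Hello from config.ini
# configargparse resolves the same option in priority order:
# command line > APP_TEXT environment variable > config file > default.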
|
diegobill/django-cities | cities/management/commands/table_autocomplete.py | Python | mit | 1,726 | 0.009849 | from django.core.management.base import BaseCommand
from django.db import connections, reset_queries
from ...models import *
class Command(BaseCommand):
def handle(self, *args, **options):
self.table_autocomplete()
def table_autocomplete(self):
# cache table for the autocomplete
# collecting the possible languages
languages=['en','pt']
#for l in AlternativeName.objects.raw("SELECT id, language FROM cities_alternativename GROUP BY language"):
# languages.append(l.language.encode('utf-8'))
#cursor.execute("DELETE FROM cities_table_autocomplete WHERE 1;")
packet_size = 100
limit = packet_size
off | set = 0
places = Place.objects.all()[offset:limit]
sql_packet=''
while len(places)>0:
for place in places:
| for language in languages:
sql = "INSERT INTO cities_table_autocomplete_%s (id, name, slug, active, deleted) VALUES (%s,'%s','%s',%s,%s);" % (
language[:2],
place.id,
place.translated_name(language).replace("'",'"'),
place.get_absolute_url(),
place.active,
place.deleted
)
sql_packet += sql
cursor = connections['default'].cursor()
cursor.execute(sql_packet)
cursor.close()
sql_packet=''
offset+=packet_size
limit+=packet_size
# free some memory
# https://docs.djangoproject.com/en/dev/faq/models/
reset_queries()
places = Place.objects.all()[offset:limit]
|
gltn/stdm | stdm/composer/custom_items/label.py | Python | gpl-2.0 | 2,466 | 0.001217 | # /***************************************************************************
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU General Public License as published by *
# * the Free Software Foundation; either version 2 of the License, or *
# * (at your option) any later version. *
# * *
# ***************************************************************************/
"""
Custom label type
"""
from typing import Optional
from qgis.PyQt. | QtCore import (
QCoreApplication
)
from qgis.PyQt.QtXml import (
QDomDocument,
QDomElement
)
from qgis.core import (
QgsLayoutItemRegistry,
QgsLayoutItemAbstractMetadata,
QgsLayoutItemLabel,
QgsReadWriteContext
)
from stdm.ui.gui_utils import GuiUtils
STDM_DATA_LABEL_ITEM_TYPE = QgsLayoutItemRegistry.PluginItem + 2337 + 1
class StdmDataLabelLayoutItem(QgsLayoutItemLabel):
def __init__(self, layout):
super().__init__(layout)
self._link | ed_field = None
def type(self):
return STDM_DATA_LABEL_ITEM_TYPE
def icon(self):
return GuiUtils.get_icon('db_field.png')
def linked_field(self) -> Optional[str]:
return self._linked_field
def set_linked_field(self, field: Optional[str]):
self._linked_field = field
def writePropertiesToElement(self, element: QDomElement, document: QDomDocument,
context: QgsReadWriteContext) -> bool:
super().writePropertiesToElement(element, document, context)
if self._linked_field:
element.setAttribute('linked_field', self._linked_field)
return True
def readPropertiesFromElement(self, element: QDomElement, document: QDomDocument,
context: QgsReadWriteContext) -> bool:
super().readPropertiesFromElement(element, document, context)
self._linked_field = element.attribute('linked_field') or None
return True
class StdmDataLabelLayoutItemMetadata(QgsLayoutItemAbstractMetadata):
def __init__(self):
super().__init__(STDM_DATA_LABEL_ITEM_TYPE, QCoreApplication.translate('StdmItems', 'STDM Data Label'))
def createItem(self, layout):
return StdmDataLabelLayoutItem(layout)
|
onelab-eu/myslice | portal/resources.py | Python | gpl-3.0 | 1,955 | 0.01688 | import json
import time
import re
from django.sho | rtcuts import render
| from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.sites.models import Site
from unfold.page import Page
from manifold.core.query import Query
from manifoldapi.manifoldapi import execute_admin_query, execute_query
from portal.actions import is_pi, create_slice, create_pending_slice, clear_user_creds, authority_check_pis
#from portal.forms import SliceRequestForm
from unfold.loginrequired import LoginRequiredAutoLogoutView
from ui.topmenu import topmenu_items_live, the_user
from myslice.theme import ThemeView
from myslice.settings import logger
import activity.user
theme = ThemeView()
class ResourcesView (LoginRequiredAutoLogoutView, ThemeView):
template_name = 'resources.html'
# Because we inherit LoginRequiredAutoLogoutView, which is implemented by redefining 'dispatch',
# we cannot redefine dispatch here, or we'd lose the LoginRequired and AutoLogout behaviours.
def post (self, request, slicename):
return self.get_or_post (request, 'POST', slicename)
def get (self, request, slicename):
return self.get_or_post (request, 'GET', slicename)
def get_or_post (self, request, method, slicename):
"""
"""
# Page rendering
page = Page(request)
page.add_js_files ( [ "js/jquery-ui.js" ] )
page.add_css_files ( [ "css/jquery-ui.css" ] )
errors = []
slice_name =''
template_env = {
'theme': self.theme,
'section': "Slice request",
'slicename': slicename,
'request': request,
}
template_env.update(page.prelude_env())
return render_to_response(self.template,template_env, context_instance=RequestContext(request))
|
YongJang/PythonTelegram | examples/referenced/bs4NaverITNews.py | Python | gpl-2.0 | 1,503 | 0.019398 | import pymysql
import sys
import time
from bs4 import BeautifulSoup
from urllib.request import Request, urlopen
def getPost() :
html = Request('http://news.naver.com/main/list.nhn | ?mode=LS2D&mid=shm&sid1=105&sid2=230', headers={'User-Agent':'Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30)'})
page = urlopen(html).read()
soup = BeautifulSoup(page, from_encoding="utf-8")
page_num_list = soup.find("div", {"class": "paging"}).find_all('a') # page count |
page_num = len(page_num_list)
date_num_list = soup.find("div", {"class": "viewday"}).find_all('a') # date count
date_num = len(date_num_list)
print("page_num :" + str(page_num))
print("date_num :" + str(date_num))
for n in range(len(page_num_list)):
print(page_num_list[n])
print(date_num_list[n])
def Naver_IT_News() :
getPost()
if __name__ == '__main__':
conn = None
try:
print(sys.stdin.encoding)
conn = pymysql.connect(host='telegramdb.cctjzlx6kmlc.ap-northeast-1.rds.amazonaws.com', port=3306, user='yongjang', passwd='yongjang', db='telegramdb', charset='utf8')
print("Database connection success!!")
cur = conn.cursor()
Naver_IT_News()
except pymysql.Error as e:
print ("Error %d: %s" % (e.args[0], e.args[1]))
sys.exit(1)
finally:
if conn:
cur.close()
conn.close()
|
ray-project/ray | dashboard/modules/snapshot/tests/test_job_submission.py | Python | apache-2.0 | 5,558 | 0.001439 | import logging
import os
import sys
import time
import json
import jsonschema
import pprint
import pytest
import requests
from ray._private.test_utils import (
format_web_url,
wait_for_condition,
wait_until_server_available,
)
from ray.dashboard import dashboard
from ray.dashboard.tests.conftest import * # noqa
from ray.job_submission import JobSubmissionClient
logger = logging.getLogger(__name__)
def _get_snapshot(address: str):
response = requests.get(f"{address}/api/snapshot")
response.raise_for_status()
data = response.json()
schema_path = os.path.join(
os.path.dirname(dashboard.__file__), "modules/snapshot/snapshot_schema.json"
)
pprint.pprint(data)
jsonschema.validate(instance=data, schema=json.load(open(schema_path)))
return data
def test_successful_job_status(
ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module
):
address = ray_start_with_dashboard.address_info["webui_url"]
assert wait_until_server_available(address)
address = format_web_url(address)
job_sleep_time_s = 5
entrypoint_cmd = (
'python -c"'
"import ray;"
"ray.init();"
"import time;"
f"time.sleep({job_sleep_time_s});"
'"'
)
client = JobSubmissionClient(address)
start_time_s = int(time.time())
runtime_env = {"env_vars": {"RAY_TEST_123": "123"}}
metadata = {"ray_test_456": "456"}
job_id = client.submit_job(
entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env
)
def wait_for_job_to_succeed():
data = _get_snapshot(address)
legacy_job_succeeded = False
job_succeeded = False
# Test legacy job snapshot (one driver per job).
for job_entry in data["data"]["snapshot"]["jobs"].values():
if job_entry["status"] is not None:
assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id
assert job_entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"}
assert job_entry["statusMessage"] is not None
legacy_job_succeeded = job_entry["status"] == "SUCCEEDED"
# Test new jobs snapshot (0 to N drivers per job).
for job_submission_id, entry in data["data"]["snapshot"][
"jobSubmission"
].items():
if entry["status"] is not None:
assert entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"}
assert entry["message"] is not None
# TODO(architkulkarni): Disable automatic camelcase.
assert entry["runtimeEnv"] == {"envVars": {"RAYTest123": "123"}}
assert entry["metadata"] == {"rayTest456": "456"}
assert entry["errorType"] is None
assert abs(entry["startTime"] - start_ | time_s) <= 2
if entry["status"] == "SUCCEEDED":
job_succeeded = True
assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s
return legacy_job_succeeded and job_succeeded
wait_for_condition(wait_for_job_to_succeed, timeout=30)
def test_failed_job_status(
ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module
):
address | = ray_start_with_dashboard.address_info["webui_url"]
assert wait_until_server_available(address)
address = format_web_url(address)
job_sleep_time_s = 5
entrypoint_cmd = (
'python -c"'
"import ray;"
"ray.init();"
"import time;"
f"time.sleep({job_sleep_time_s});"
"import sys;"
"sys.exit(1);"
'"'
)
start_time_s = int(time.time())
client = JobSubmissionClient(address)
runtime_env = {"env_vars": {"RAY_TEST_456": "456"}}
metadata = {"ray_test_789": "789"}
job_id = client.submit_job(
entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env
)
def wait_for_job_to_fail():
data = _get_snapshot(address)
legacy_job_failed = False
job_failed = False
# Test legacy job snapshot (one driver per job).
for job_entry in data["data"]["snapshot"]["jobs"].values():
if job_entry["status"] is not None:
assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id
assert job_entry["status"] in {"PENDING", "RUNNING", "FAILED"}
assert job_entry["statusMessage"] is not None
legacy_job_failed = job_entry["status"] == "FAILED"
# Test new jobs snapshot (0 to N drivers per job).
for job_submission_id, entry in data["data"]["snapshot"][
"jobSubmission"
].items():
if entry["status"] is not None:
assert entry["status"] in {"PENDING", "RUNNING", "FAILED"}
assert entry["message"] is not None
# TODO(architkulkarni): Disable automatic camelcase.
assert entry["runtimeEnv"] == {"envVars": {"RAYTest456": "456"}}
assert entry["metadata"] == {"rayTest789": "789"}
assert entry["errorType"] is None
assert abs(entry["startTime"] - start_time_s) <= 2
if entry["status"] == "FAILED":
job_failed = True
assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s
return legacy_job_failed and job_failed
wait_for_condition(wait_for_job_to_fail, timeout=25)
if __name__ == "__main__":
sys.exit(pytest.main(["-v", __file__]))
|
rtts/qqq | mptt/__init__.py | Python | gpl-3.0 | 879 | 0.005688 |
VERSION = (0, 5, 'pre')
# NOTE: This method was removed in 0.4.0, but restored in 0.4.2 after use-cases were
# reported that were impossible by merely subclassing MPTTModel.
def register(*args, **kwargs):
"""
Registers a model class as an MPTTModel, adding MPTT fields and adding MPTTModel to __bases__.
This is equivalent to just subclassing MPTTModel, but works for an already-created model.
"""
from mptt.models import MPTTModelBase
return MPTTModelBase.register(*args, **kwargs)
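# Illustrative usage ('Genre' is a hypothetical already-created model):
#   import mptt
#   from myapp.models import Genre
#   mptt.register(Genre, order_insertion_by=['name'])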
# Also removed in 0.4.0 but restored in 0.4.2, otherwise this 0.3-compatibility code will break:
# if hasattr(mptt, 'register'):
# try:
# mptt.register(...)
# exce | pt mptt.AlreadyRegistered:
# pass
class AlreadyRegistered(Exception):
"Deprecated - don't use this anymore. It's never thrown, you don't nee | d to catch it"
|
rizumu/dialogos | dialogos/models.py | Python | bsd-3-clause | 880 | 0 | from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.generic import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
class Comment(models.Model):
author = models.ForeignKey(User, null=True, related_name="comments")
name = models.CharField(max_length=100)
email = models.CharF | ield(max_length=255, blank=True)
website = models.CharField(max_length=255, blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.IntegerField()
content_object = GenericForeignKey()
comment = models.TextField()
submit_date = models.DateTimeField(default=datetime.now)
ip_address = models. | IPAddressField(null=True)
public = models.BooleanField(default=True)
def __unicode__(self):
return "pk=%d" % self.pk
|
317070/kaggle-heart | configurations/j7_jeroen_ch.py | Python | mit | 10,276 | 0.005936 | """Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import image_transform
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
import utils
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 20
validate_train_set = True
save_every = 20
restart_from_save = False
dump_network_loaded_data = False
# Training (schedule) parameters
# - batch sizes
batch_size = 4
sunny_batch_size = 4
batches_per_chunk = 32
num_epochs_train = 150
# - learning rate and method
base_lr = 0.0001
learning_rate_schedule = {
0: base_lr,
8*num_epochs_train/10: base_lr/10,
19*num_epochs_train/20: base_lr/100,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
cleaning_processes_post = [
functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
augmentation_params = {
"rotation": (-180, 180),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
use_hough_roi = True
preprocess_train = functools.partial( # normscale_resize_and_augment has a bug
preprocess.preprocess_normscale,
normscale_resize_and_augment_function=functools.partial(
image_transform.normscale_resize_and_augment_2,
normalised_patch_size=(64,64)))
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Data generators
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
def filter_samples(folders):
# don't use patients who don't have more than 6 slices
import glob
return folders
# Input sizes
image_size = 64
nr_slices = 22
data_sizes = {
"sliced:data:sax": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:sax:locations": (batch_size, nr_slices),
"sliced:data:sax:is_not_padded": (batch_size, nr_slices),
"sliced:data:randomslices": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:singleslice:2ch": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:4ch": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size),
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size),
"sliced:data:singleslice": (batch_size, 30, image_si | ze, image_size),
| "sliced:data:ax": (batch_size, 30, 15, image_size, image_size),
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
test_time_augmentations = 100 # More augmentations since a we only use single slices
tta_average_method = lambda x: np.cumsum(utils.norm_geometric_average(utils.cdf_to_pdf(x)))
# nonlinearity putting a lower bound on it's output
def lb_softplus(lb):
return lambda x: nn.nonlinearities.softplus(x) + lb
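# e.g. lb_softplus(0.1)(x) is always > 0.1; handy for outputs such as
# predicted sigmas that must stay strictly positive.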
init = nn.init.Orthogonal()
rnn_layer = functools.partial(nn.layers.RecurrentLayer,
W_in_to_hid=init,
W_hid_to_hid=init,
b=nn.init.Constant(0.1),
nonlinearity=nn.nonlinearities.rectify,
hid_init=nn.init.Constant(0.),
backwards=False,
learn_init=True,
gradient_steps=-1,
grad_clipping=False,
unroll_scan=False,
precompute_input=False)
# Architecture
def build_model():
import j6_2ch_gauss, j6_4ch_gauss
meta_2ch = j6_2ch_gauss.build_model()
meta_4ch = j6_4ch_gauss.build_model()
l_meta_2ch_systole = nn.layers.DenseLayer(meta_2ch["meta_outputs"]["systole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
l_meta_2ch_diastole = nn.layers.DenseLayer(meta_2ch["meta_outputs"]["diastole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
l_meta_4ch_systole = nn.layers.DenseLayer(meta_4ch["meta_outputs"]["systole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
l_meta_4ch_diastole = nn.layers.DenseLayer(meta_4ch["meta_outputs"]["diastole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:sax"]
input_size_mask = data_sizes["sliced:data:sax:is_not_padded"]
input_size_locations = data_sizes["sliced:data:sax:locations"]
l0 = nn.layers.InputLayer(input_size)
lin_slice_mask = nn.layers.InputLayer(input_size_mask)
lin_slice_locations = nn.layers.InputLayer(input_size_locations)
# PREPROCESS SLICES SEPERATELY
# Convolutional layers and some dense layers are defined in a submodel
l0_slices = nn.layers.ReshapeLayer(l0, (-1, [2], [3], [4]))
import je_ss_jonisc64small_360_gauss_longer
submodel = je_ss_jonisc64small_360_gauss_longer.build_model(l0_slices)
# Systole Dense layers
l_sys_mu = submodel["meta_outputs"]["systole:mu"]
l_sys_sigma = submodel["meta_outputs"]["systole:sigma"]
l_sys_meta = submodel["meta_outputs"]["systole"]
# Diastole Dense layers
l_dia_mu = submodel["meta_outputs"]["diastole:mu"]
l_dia_sigma = submodel["meta_outputs"]["diastole:sigma"]
l_dia_meta = submodel["meta_outputs"]["diastole"]
# AGGREGATE SLICES PER PATIENT
l_scaled_slice_locations = layers.TrainableScaleLayer(lin_slice_locations, scale=nn.init.Constant(0.1), trainable=False)
# Systole
l_pat_sys_ss_mu = nn.layers.ReshapeLayer(l_sys_mu, (-1, nr_slices))
l_pat_sys_ss_sigma = nn.layers.ReshapeLayer(l_sys_sigma, (-1, nr_slices))
l_pat_sys_aggr_mu_sigma = layers.JeroenLayer([l_pat_sys_ss_mu, l_pat_sys_ss_sigma, lin_slice_mask, l_scaled_slice_locations], rescale_input=100.)
l_systole = layers.MuSigmaErfLayer(l_pat_sys_aggr_mu_sigma)
l_sys_meta = nn.layers.DenseLayer(nn.layers.ReshapeLayer(l_sys_meta, (-1, nr_slices, 512)), num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
l_meta_systole = nn.layers.ConcatLayer([l_meta_2ch_systole, l_meta_4ch_systole, l_sys_meta])
l_weights = nn.layers.DenseLayer(l_meta_systole, num_units=512, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
l_weights = nn.layers.DenseLayer(l_weights, num_units=3, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
systole_output = layers.WeightedMeanLayer(l_weights, [l_systole, meta_2ch["outputs"]["systole"], meta_4ch["outputs"]["systole"]])
# Diastole
l_pat_dia_ss_mu = nn.layers.ReshapeLayer(l_dia_mu, (-1, nr_slices))
l_pat_dia_ss_sigma = nn.layers.ReshapeLayer(l_dia_sigma, (-1, nr_slices))
l_pat_ |
tkaitchuck/nupic | examples/bindings/svm_how_to.py | Python | gpl-3.0 | 8,034 | 0.013069 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from random import *
import numpy
import pdb
import cPickle
import bz2
import sys
import pylab
import nupic.bindings.algorithms as algo
from nupic.bindings.math import GetNumpyDataType
type = GetNumpyDataType('NTA_Real')
type = 'float32'
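# NOTE: the assignment above overrides the NTA_Real dtype lookup, so all sample
# arrays below are built as float32 regardless of the platform dtype.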
#--------------------------------------------------------------------------------
# Simple use case
#--------------------------------------------------------------------------------
def simple():
print "Simple"
numpy.random.seed(42)
n_dims = 2
n_class = 4
size = 200
labels = numpy.random.random_integers(0, n_class-1, size)
samples = numpy.zeros((size, n_dims), dtype=type)
do_plot = False
print "Generating data"
centers = numpy.array([[0,0],[0,1],[1,0],[1,1]])
for i in range(0, size):
t = 6.28 * numpy.random.random_sample()
samples[i][0] = 2 * centers[labels[i]][0] + .5*numpy.random.random() * numpy.cos(t)
samples[i][1] = 2 * centers[labels[i]][1] + .5*numpy.random.random() * numpy.sin(t)
classifier = algo.svm_dense(0, n_dims, probability=True, seed=42)
print "Adding sample vectors"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
print "Displaying problem"
problem = classifier.get_problem()
print "Problem size:", problem.size()
print "Problem dimensionality:", problem.n_dims()
print "Problem samples:"
s = numpy.zeros((problem.size(), problem.n_dims()+1), dtype=type)
problem.get_samples(s)
print s
if do_plot:
pylab.ion()
pylab.plot(s[s[:,0]==0,1], s[s[:,0]==0,2], '.', color='r')
pylab.plot(s[s[:,0]==1,1], s[s[:,0]==1,2], '+', color='b')
pylab.plot(s[s[:,0]==2,1], s[s[:,0]==2,2], '^', color='g')
pylab.plot(s[s[:,0]==3,1], s[s[:,0]==3,2], 'v', color='g')
print "Training"
classifier.train(gamma = 1./3., C = 100, eps=1e-1)
print "Displaying model"
model = classifier.get_model()
print "Number of support vectors:", model.size()
print "Number of classes:", model.n_class()
print "Number of dimensions: ", model.n_dims()
print "Support vectors:"
sv = numpy.zeros((model.size(), model.n_dims()), dtype=type)
model.get_support_vectors(sv)
print sv
if do_plot:
pylab.plot(sv[:,0], sv[:,1], 'o', color='g')
print "Support vector coefficients:"
svc = numpy.zeros((model.n_class()-1, model.size()), dtype=type)
model.get_support_vector_coefficients(svc)
print svc
print "Hyperplanes (for linear kernel only):"
h = model.get_hyperplanes()
print h
if do_plot:
xmin = numpy.min(samples[:,0])
xmax = numpy.max(samples[:,0])
xstep = (xmax - xmin) / 10
X = numpy.arange(xmin, xmax, xstep)
ymin = numpy.min(samples[:,1])
ymax = numpy.max(samples[:,1])
ystep = (ymax - ymin) / 10
Y = numpy.arange(ymin, ymax, ystep)
points = numpy.zeros((len(X), len(Y)))
for i,x in enumerate(X):
for j,y in enumerate(Y):
proba = numpy.zeros(model.n_class(), dtype=type)
classifier.predict_probability(numpy.array([x,y]), proba)
points[i,j] = proba[0]
pylab.contour(X,Y,points)
print "Cross-validation"
print classifier.cross_validate(2, gamma = .5, C = 10, eps = 1e-3)
print "Predicting"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
proba = numpy.zeros(model.n_class(), dtype=type)
print x, ': real=', y,
print 'p1=', classifier.predict(x),
print 'p2=', classifier.predict_probability(x, proba),
print 'proba=', proba
print "Discarding problem"
classifier.discard_problem()
print "Predicting after discarding the problem"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
proba = numpy.zeros(model.n_class(), dtype=type)
print x, ': real=', y,
print 'p1=', classifier.predict(x),
print 'p2=', classifier.predict_probability(x, proba),
print 'proba=', proba
#--------------------------------------------------------------------------------
# Persistence
#--------------------------------------------------------------------------------
def persistence():
print "P | ersistence"
numpy.random.seed(42)
n_dims = 2
n_class = 12
size = 100
labels = numpy.random.random_integers(0, 256, size)
samples = numpy.zeros((size, n_dims), dtype=type)
print "Generating data"
for i in range(0, size):
t = 6.28 * numpy.random.random_sample()
samples[i][0] = 2 * labels[i] + 1.5 * numpy.cos(t)
samples[i][1] = 2 * labels[i] + 1.5 * numpy.sin(t)
print "Creating dense classifier"
classifier = algo.svm_dense(0, n_dims = n_dims, seed=42)
print "Adding sample vectors to dense classifier"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
print "Pickling dense classifier"
cPickle.dump(classifier, open('test', 'wb'))
classifier = cPickle.load(open('test', 'rb'))
print "Training dense classifier"
classifier.train(gamma = 1, C = 10, eps=1e-1)
print "Predicting with dense classifier"
print classifier.predict(samples[0])
print "Creating 0/1 classifier"
classifier01 = algo.svm_01(n_dims = n_dims, seed=42)
print "Adding sample vectors to 0/1 classifier"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier01.add_sample(float(y), x)
print "Training 0/1 classifier"
classifier01.train(gamma = 1./3., C = 100, eps=1e-1)
print "Pickling 0/1 classifier"
cPickle.dump(classifier01, open('test', 'wb'))
classifier01 = cPickle.load(open('test', 'rb'))
print "Predicting with 0/1 classifier"
print classifier01.predict(numpy.array(samples[0], dtype=type))
#--------------------------------------------------------------------------------
# Cross validation
#--------------------------------------------------------------------------------
def cross_validation():
return
print "Cross validation"
numpy.random.seed(42)
labels = [0, 1, 1, 2, 1, 2]
samples = [[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [1, 1, 0], [0, 1, 1]]
classifier = algo.svm_dense(0, n_dims = 3, seed=42)
print "Adding sample vectors"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
cPickle.dump(classifier, open('test', 'wb'))
classifier = cPickle.load(open('test', 'rb'))
print "Training"
classifier.train(gamma = 1./3., C = 100, eps=1e-1)
print "Cross validation =",
print classifier.cross_validate(3, gamma = .5, C = 10, eps = 1e-3)
#--------------------------------------------------------------------------------
simple()
persistence()
cross_validation()
|
kondra/latent_ssvm | smd.py | Python | bsd-2-clause | 3,199 | 0.006877 | import numpy as np
import sys
from trw_utils import *
from heterogenous_crf import inference_gco
from pyqpbo import binary_general_graph
from scipy.optimize import fmin_l_bfgs_b
def trw(node_weights, edges, edge_weights, y,
max_iter=100, verbose=0, tol=1e-3,
get_energy=None):
n_nodes, n_states = node_weights.shape
n_edges = edges.shape[0]
y_hat = []
lambdas = np.zeros(n_nodes)
mu = np.zeros((n_nodes, n_states))
learning_rate = 0.1
energy_history = []
primal_history = []
pairwise = []
for k in xrange(n_states):
y_hat.append(np.zeros(n_states))
_pairwise = np.zeros((n_edges, 2, 2))
for i in xrange(n_edges):
_pairwise[i,1,0] = _pairwise[i,0,1] = -0.5 * edge_weights[i,k,k]
pairwise.append(_pairwise)
for i in xrange(n_edges):
e1, e2 = edges[i]
node_weights[e1,:] += 0.5 * np.diag(edge_weights[i,:,:])
node_weights[e2,:] += 0.5 * np.diag(edge_weights[i,:,:])
for iteration in xrange(max_iter):
dmu = np.zeros((n_nodes, n_states))
unaries = node_weights + mu
x, f_val, d = fmin_l_bfgs_b(f, np.zeros(n_nodes),
args=(unaries, pairwise, edges),
maxiter=50,
pgtol=1e-5)
E = np.sum(x)
for k in xrange(n_states):
new_unaries = np.zeros((n_nodes, 2))
new_unaries[:,1] = unaries[:,k] + x
y_hat[k], energy = binary_general_graph(edges, new_unaries, pairwise[k])
E -= 0.5*energy
dmu[:,k] -= y_hat[k]
y_hat_kappa, energy = optimize_kappa(y, mu, 1, n_nodes, n_states)
E += energy
dmu[np.ogrid[:dmu.shape[0]], y_hat_kappa] += 1
mu -= learning_rate * dmu
energy_history.append(E)
lambda_sum = np.zeros((n_nodes, n_states))
for k in xrange(n_states):
lambda_sum[:,k] = y_hat[k]
lambda_sum = lambda_sum / np.sum(lambda_sum, axis=1, keepdims=True)
if get_energy is not None:
primal = get_energy(get_labelling(lambda_sum))
primal_history.append(primal)
else:
primal = 0
if iteration:
learning_rate = 1. / np.sqrt(iteration)
if verbose:
print 'Iteration {}: energy={}, primal={}'.format(iteration, E, primal)
if iteration > 0 and np.abs(E - energy_history[-2]) < tol:
if verbose:
print 'Converged'
break
info = {'primal': primal_history,
'dual': energy_history,
'iteration': iteration}
return lambda_sum, y_hat_kappa, info
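# Illustrative call (not part of the original module); shapes follow the
# signature above: node_weights (n_nodes, n_states), edges (n_edges, 2),
# edge_weights (n_edges, n_states, n_states), y (n_nodes,):
# labelling, y_kappa, info = trw(node_weights, edges, edge_weights, y, verbose=1)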
def f(x, node_weights, pairwise, edges):
n_nodes, n_states = node_weights.shape
dual = 0
dlambda = np.zeros(n_nodes)
for k in xrange(n_states):
new_unaries = np.zeros((n_nodes, 2))
new_unaries[:,1] = node_weights[:,k] + x
y_hat, energy = binary_general_graph(edges, new_unaries, pairwise[k])
dual += 0.5 * energy
dlambda += y_hat
dlambda -= 1
dual -= np.sum(x)
#print dual
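# scipy's fmin_l_bfgs_b minimizes, so the dual value and its gradient are negated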
return -dual, -dlambda
|
eduNEXT/edx-platform | openedx/features/course_duration_limits/migrations/0003_auto_20181128_1407.py | Python | agpl-3.0 | 601 | 0.001664 | # Generated by Django 1.11.16 on 2018-11-28 19:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course_duration_limits', '0002_auto_20181119_0959'),
]
operations = [
migrations.AlterField(
model_name='coursedurationlimitconfig',
name='enabled_as_of',
field=models.DateTimeField(blank=True, default=None, help_text='If the configuration is Enabled, then all enrollments created after this date (UTC) will be affected.', null=True, verbose_name='Enabled As Of'),
),
]
|
pmav99/praktoras | checks.d/cacti.py | Python | bsd-3-clause | 8,178 | 0.001712 | # (C) Fractal Industries, Inc. 2016
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from collections import namedtuple
from fnmatch import fnmatch
import os
import time
# 3rd party
try:
import rrdtool
except ImportError:
rrdtool = None
import pymysql
# project
from checks import AgentCheck
CFUNC_TO_AGGR = {
'AVERAGE': 'avg',
'MAXIMUM': 'max',
'MINIMUM': 'min'
}
CACTI_TO_DD = {
'hdd_free': 'system.disk.free',
'hdd_used': 'system.disk.used',
'swap_free': 'system.swap.free',
'load_1min': 'system.load.1',
'load_5min': 'system.load.5',
'load_15min': 'system.load.15',
'mem_buffers': 'system.mem.buffered',
'proc': 'system.proc.running',
'users': 'system.users.current',
'mem_swap': 'system.swap.free',
'ping': 'system.ping.latency'
}
class Cacti(AgentCheck):
def __init__(self, name, init_config, agentConfig):
AgentCheck.__init__(self, name, init_config, agentConfig)
self.last_ts = {}
def get_library_versions(self):
if rrdtool is not None:
return {"rrdtool": rrdtool.__version__}
return {"rrdtool": "Not Found"}
def check(self, instance):
if rrdtool is None:
raise Exception("Unable to import python rrdtool module")
# Load the instance config
config = self._get_config(instance)
connection = pymysql.connect(config.host, config.user, config.password, config.db)
self.log.debug("Connected to MySQL to fetch Cacti metadata")
# Get whitelist patterns, if available
patterns = self._get_whitelist_patterns(config.whitelist)
# Fetch the RRD metadata from MySQL
rrd_meta = self._fetch_rrd_meta(connection, config.rrd_path, patterns, config.field_names)
# Load the metrics from each RRD, tracking the count as we go
metric_count = 0
for hostname, device_name, rrd_path in rrd_meta:
m_count = self._read_rrd(rrd_path, hostname, device_name)
metric_count += m_count
self.gauge('cacti.metrics.count', metric_count)
def _get_whitelist_patterns(self, whitelist):
patterns = []
if whitelist:
if not os.path.isfile(whitelist) or not os.access(whitelist, os.R_OK):
# Don't run the check if the whitelist is unavailable
self.log.exception("Unable to read whitelist file at %s" % (whitelist))
wl = open(whitelist)
for line in wl:
patterns.append(line.strip())
wl.close()
return patterns
def _get_config(self, instance):
required = ['mysql_host', 'mysql_user', 'rrd_path']
for param in required:
if not instance.get(param):
raise Exception("Cacti instance missing %s. Skipping." % (param))
host = instance.get('mysql_host')
user = instance.get('mysql_user')
password = instance.get('mysql_password', '') or ''
db = instance.get('mysql_db', 'cacti')
rrd_path = instance.get('rrd_path')
whitelist = instance.get('rrd_whitelist')
field_names = instance.get('field_names', ['ifName', 'dskDevice'])
Config = namedtuple('Config', [
'host',
'user',
'password',
'db',
'rrd_path',
'whitelist',
'field_names']
)
return Config(host, user, password, db, rrd_path, whitelist, field_names)
def _read_rrd(self, rrd_path, hostname, device_name):
''' Main metric fetching method '''
metric_count = 0
try:
info = rrdtool.info(rrd_path)
except Exception:
# Unable to read RRD file, ignore it
self.log.exception("Unable to read RRD file at %s" % rrd_path)
return metric_count
# Find the consolidation functions for the RRD metrics
c_funcs = set([v for k,v in info.items() if k.endswith('.cf')])
for c in list(c_funcs):
last_ts_key = '%s.%s' % (rrd_path, c)
if last_ts_key not in self.last_ts:
self.last_ts[last_ts_key] = int(time.time())
continue
start = self.last_ts[last_ts_key]
last_ts = start
try:
fetched = rrdtool.fetch(rrd_path, c, '--start', str(start))
except rrdtool.error:
# Start time was out of range, skip this RRD
self.log.warn("Time %s out of range for %s" % (rrd_path, start))
return metric_count
# Extract the data
(start_ts, end_ts, interval) = fetched[0]
metric_names = fetched[1]
points = fetched[2]
for k, m_name in enumerate(metric_names):
m_name = self._format_metric_name(m_name, c)
for i, p in enumerate(points):
ts = start_ts + (i * interval)
if p[k] is None:
continue
# Save this metric as a gauge
val = self._transform_metric(m_name, p[k])
self.gauge(m_name, val, hostname=hostname,
device_name=device_name, timestamp=ts)
metric_count += 1
last_ts = (ts + interval)
# Update the last timestamp based on the last valid metric
self.last_ts[last_ts_key] = last_ts
return metric_count
def _fetch_rrd_meta(self, connection, rrd_path_root, whitelist, field_names):
''' Fetch metadata about each RRD in this Cacti DB, returning a list of
tuples of (hostname, device_name, rrd_path)
'''
def _in_whitelist(rrd):
path = rrd.replace('<path_rra>/','')
for p in whitelist:
if fnmatch(path, p):
return True
return False
c = connection.cursor()
and_parameters = " OR ".join(["hsc.field_name = '%s'" % field_name for field_name in field_names])
# Check for the existence of the `host_snmp_cache` table
rrd_query = """
SELECT
h.hostname as hostname,
hsc.field_value as device_name,
dt.data_source_path as rrd_path
FROM data_local dl
JOIN host h on dl.host_id = h.id
JOIN data_template_data dt on dt.local_data_id = dl.id
LEFT JOIN host_snmp_cache hsc on h.id = hsc.host_id
AND dl.snmp_index = hsc.snmp_index
WHERE dt.data_source_path IS NOT NULL
AND dt.data_source_path != ''
AND (%s OR hsc.field_name is NULL) """ % and_parameters
c.execute(rrd_query)
res = []
for hostname, device_name, rrd_path in c.fetchall():
if not whitelist or _in_whitelist(rrd_path):
if hostname in ('localhost', '127.0.0.1'):
hostname = self.hostname
rrd_path = rrd_path.replace('<path_rra>', rrd_path_root)
device_name = device_name or None
res.append((hostname, device_name, rrd_path))
# Collect stats
num_hosts = len(set([r[0] for r in res]))
self.gauge('cacti.rrd.count', len(res))
self.gauge('cacti.hosts.count', num_hosts)
return res
def _format_metric_name(self, m_name, cfunc):
''' Format a cacti metric name into a Conmon-friendly name '''
try:
aggr = CFUNC_TO_AGGR[cfunc]
except KeyError:
aggr = cfunc.lower()
try:
m_name = CACTI_TO_DD[m_name]
if aggr != 'avg':
m_name += '.%s' % (aggr)
return m_name
except KeyError:
return "cacti.%s.%s" % (m_name.lower(), aggr)
def _transform_metric(self, m_name, val):
''' Add any special case transformations here '''
# Report memory in MB
if m_name[0:11] in ('system.mem.', 'system.disk'):
return val / 1024
return val
|
CIRCL/AIL-framework | bin/modules/submit_paste.py | Python | agpl-3.0 | 15,703 | 0.004776 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
The Submit paste module
================
This module takes pastes from the ARDB_DB redis queue and submits them to the global queue
"""
##################################
# Import External packages
##################################
import os
import sys
import gzip
import io
import redis
import base64
import datetime
import time
# from sflock.main import unpack
# import sflock
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from modules.abstract_module import AbstractModule
from packages import Tag
from lib import ConfigLoader
class SubmitPaste(AbstractModule):
"""
SubmitPaste module for AIL framework
"""
expire_time = 120
# Text max size
TEXT_MAX_SIZE = ConfigLoader.ConfigLoader().get_config_int("SubmitPaste", "TEXT_MAX_SIZE")
# File max size
FILE_MAX_SIZE = ConfigLoader.ConfigLoader().get_config_int("SubmitPaste", "FILE_MAX_SIZE")
# Allowed file type
ALLOWED_EXTENSIONS = ConfigLoader.ConfigLoader().get_config_str("SubmitPaste", "FILE_ALLOWED_EXTENSIONS").split(',')
def __init__(self):
"""
init
"""
super(SubmitPaste, self).__init__(queue_name='submit_paste')
self.r_serv_db = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_DB")
self.r_serv_log_submit = ConfigLoader.ConfigLoader().get_redis_conn("Redis_Log_submit")
self.r_serv_tags = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Tags")
self.r_serv_metadata = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Metadata")
self.serv_statistics = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Statistics")
self.pending_seconds = 3
self.PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], ConfigLoader.ConfigLoader().get_config_str("Directories", "pastes")) + '/'
def compute(self, uuid):
"""
Main method of the Module to implement
"""
self.redis_logger.debug(f'compute UUID {uuid}')
# get temp value save on disk
ltags = self.r_serv_db.smembers(f'{uuid}:ltags')
ltagsgalaxies = self.r_serv_db.smembers(f'{uuid}:ltagsgalaxies')
paste_content = self.r_serv_db.get(f'{uuid}:paste_content')
isfile = self.r_serv_db.get(f'{uuid}:isfile')
password = self.r_serv_db.get(f'{uuid}:password')
source = self.r_serv_db.get(f'{uuid}:source')
if source in ['crawled', 'tests']:
source = 'submitted'
self.redis_logger.debug(f'isfile UUID {isfile}')
self.redis_logger.debug(f'source UUID {source}')
self.redis_logger.debug(f'paste_content UUID {paste_content}')
# needed if redis is restarted
self.r_serv_log_submit.set(f'{uuid}:end', 0)
self.r_serv_log_submit.set(f'{uuid}:processing', 0)
self.r_serv_log_submit.set(f'{uuid}:nb_total', -1)
self.r_serv_log_submit.set(f'{uuid}:nb_end', 0)
self.r_serv_log_submit.set(f'{uuid}:nb_sucess', 0)
self.r_serv_log_submit.set(f'{uuid}:processing', 1)
if isfile == 'True':
# file input
self._manage_file(uuid, paste_content, ltags, ltagsgalaxies, source)
else:
# textarea input paste
self._manage_text(uuid, paste_content, ltags, ltagsgalaxies, source)
# new paste created from file, remove uuid ref
self.remove_submit_uuid(uuid)
def run(self):
"""
Run Module endless process
"""
# Endless loop processing messages from the input queue
while self.proceed:
# Get one message (paste) from the QueueIn (copy of Redis_Global publish)
nb_submit = self.r_serv_db.scard('submitted:uuid')
if nb_submit > 0:
try:
uuid = self.r_serv_db.srandmember('submitted:uuid')
# Module processing with the message from the queue
self.redis_logger.debug(uuid)
self.compute(uuid)
except Exception as err:
self.redis_logger.error(f'Error in module {self.module_name}: {err}')
# Remove uuid ref
self.remove_submit_uuid(uuid)
else:
# Wait before next process
self.redis_logger.debug(f'{self.module_name}, waiting for new message, Idling {self.pending_seconds}s')
time.sleep(self.pending_seconds)
def _manage_text(self, uuid, paste_content, ltags, ltagsgalaxies, source):
"""
Create a paste for given text
"""
if sys.getsizeof(paste_content) < SubmitPaste.TEXT_MAX_SIZE:
self.r_serv_log_submit.set(f'{uuid}:nb_total', 1)
self.create_paste(uuid, paste_content.encode(), ltags, ltagsgalaxies, uuid, source)
time.sleep(0.5)
else:
self.abord_file_submission(uuid, f'Text size is over {SubmitPaste.TEXT_MAX_SIZE} bytes')
def _manage_file(self, uuid, file_full_path, ltags, ltagsgalaxies, source):
"""
Create a paste for given file
"""
self.redis_logger.debug('manage')
if os.path.exists(file_full_path):
self.redis_logger.debug(f'file exists {file_full_path}')
file_size = os.stat(file_full_path).st_size
self.redis_logger.debug(f'file size {file_size}')
# Verify file length
if file_size < SubmitPaste.FILE_MAX_SIZE:
# TODO sanitize filename
filename = file_full_path.split('/')[-1]
self.redis_logger.debug(f'sanitize filename {filename}')
self.redis_logger.debug('file size allowed')
if not '.' in filename:
self.redis_logger.debug('no extension for filename')
try:
# Read file
with open(file_full_path,'r') as f:
content = f.read()
self.r_serv_log_submit.set(uuid + ':nb_total', 1)
self.create_paste(uuid, content.encode(), ltags, ltagsgalaxies, uuid, source)
except:
self.abord_file_submission(uuid, "file error")
else:
file_type = filename.rsplit('.', 1)[1]
file_type = file_type.lower()
self.redis_logger.debug(f'file ext {file_type}')
if file_type in SubmitPaste.ALLOWED_EXTENSIONS:
self.redis_logger.debug('Extension allowed')
# TODO enum of possible file extension ?
# TODO verify file hash with virus total ?
if not self._is_compressed_type(file_type):
self.redis_logger.debug('Plain text file')
# plain txt file
with open(file_full_path,'r') as f:
content = f.read()
self.r_serv_log_submit.set(uuid + ':nb_total', 1)
self.create_paste(uuid, content.encode(), ltags, ltagsgalaxies, uuid, source)
else:
# Compressed file
self.abord_file_submission(uuid, "file decompression should be implemented")
# TODO add compress file management
# #decompress file
# try:
# if password == None:
# files = unpack(file_full_path.encode())
# #print(files.children)
# else:
# try:
# files = unpack(file_full_path.encode(), password=password.encode())
# #print(files.children)
# except sflock.exception.IncorrectUsageException:
|
iksaif/euscan | pym/euscan/scan.py | Python | gpl-2.0 | 5,444 | 0 | from __future__ import print_function
import os
import sys
from datetime import datetime
import portage
import gentoolkit.pprinter as pp
from gentoolkit.query import Query
from gentoolkit.package import Package
from euscan import CONFIG, BLACKLIST_PACKAGES
from euscan import handlers, output
from euscan.out import from_mirror
from euscan.helpers import version_blacklisted
from euscan.version import is_version_stable
from euscan.ebuild import package_from_ebuild
def filter_versions(cp, versions):
filtered = {}
for url, version, handler, confidence in versions:
# Try to keep the most specific urls (determined by the length)
if version in filtered and len(url) < len(filtered[version]):
continue
# Remove blacklisted versions
if version_blacklisted(cp, version):
continue
filtered[version] = {
"url": url,
"handler": handler,
"confidence": confidence
}
return [
(cp, filtered[version]["url"], version, filtered[version]["handler"],
filtered[version]["confidence"])
for version in filtered
]
def parse_src_uri(uris):
ret = {}
uris = uris.split()
uris.reverse()
while uris:
uri = uris.pop()
if '://' not in uri:
continue
if uris and uris[-1] == "->":
uris.pop() # operator
file_ = uris.pop()
else:
file_ = os.path.basename(uri)
if file_ not in ret:
ret[file_] = []
ret[file_].append(uri)
return ret
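# Illustrative result: for a SRC_URI using portage's rename operator,
# parse_src_uri('http://example.org/foo-1.0.tgz -> foo.tgz')
# returns {'foo.tgz': ['http://example.org/foo-1.0.tgz']}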
def reload_gentoolkit():
import gentoolkit
# Not used in recent versions
if not hasattr(gentoolkit.package, 'PORTDB'):
return
PORTDB = portage.db[portage.root]["porttree"].dbapi
if hasattr(gentoolkit.dbapi, 'PORTDB'):
gentoolkit.dbapi.PORTDB = PORTDB
if hasattr(gentoolkit.package, 'PORTDB'):
gentoolkit.package.PORTDB = PORTDB
if hasattr(gentoolkit.query, 'PORTDB'):
gentoolkit.query.PORTDB = PORTDB
def scan_upstream(query, on_progress=None):
"""
Scans the upstream searching new versions for the given query
"""
matches = []
if query.endswith(".ebuild"):
cpv = package_from_ebuild(query)
reload_gentoolkit()
if cpv:
matches = [Package(cpv)]
else:
matches = Query(query).find(
include_masked=True,
in_installed=False,
)
if not matches:
output.ewarn(
pp.warn("No package matching '%s'" % pp.pkgquery(query))
)
return None
matches = sorted(matches)
pkg = matches.pop()
while '9999' in pkg.version and len(matches):
pkg = matches.pop()
if not pkg:
output.ewarn(
pp.warn("Package '%s' only have a dev version (9999)"
% pp.pkgquery(pkg.cp))
)
return None
# useful data only for formatted output
start_time = datetime.now()
output.metadata("datetime", start_time.isoformat(), show=False)
output.metadata("cp", pkg.cp, show=False)
output.metadata("cpv", pkg.cpv, show=False)
if on_progress:
on_progress(increment=10)
if pkg.cp in BLACKLIST_PACKAGES:
output.ewarn(
pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))
)
return None
if not CONFIG['quiet']:
if not CONFIG['format']:
pp.uprint(
" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))
)
pp.uprint()
else:
output.metadata("overlay", pp.section(pkg.repo_name()))
ebuild_path = pkg.ebuild_path()
if ebuild_path:
output.metadata(
"ebuild", pp.path(os.path.normpath(ebuild_path))
)
uris, homepage, description = pkg.environment(
('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
)
output.metadata("repository", pkg.repo_name())
output.metadata("homepage", homepage)
output.metadata("description", description)
else:
uris = pkg.environment('SRC_URI')
cpv = pkg.cpv
uris = parse_src_uri(uris)
uris_expanded = [
from_mirror(uri) if 'mirror://' in uri else uri for uri in uris
]
pkg._uris = uris
pkg._uris_expanded = uris_expanded
versions = handlers.scan(pkg, uris, on_progress)
cp, ver, rev = portage.pkgsplit(pkg.cpv)
result = filter_versions(cp, versions)
if on_progress:
on_progress(increment=10)
# output scan time for formatted output
scan_time = (datetime.now() - start_time).total_seconds()
output.metadata("scan_time", scan_time, show=False)
is_current_version_stable = is_version_stable(ver)
if len(result) > 0:
if not (CONFIG['format'] or CONFIG['quiet']):
print("")
for cp, url, version, handler, confidence in result:
if CONFIG["ignore-pre-release"]:
if not is_version_stable(version):
continue
if CONFIG["ignore-pre-release-if-stable"]:
if is_current_version_stable and \
not is_version_stable(version):
continue
if CONFIG['progress']:
print("", file=sys.stderr)
output.result(cp, version, url, handler, confidence)
return result
|
bkosawa/admin-recommendation | admin_recommendation/settings.py | Python | apache-2.0 | 6,881 | 0.001017 | """
Django settings for recomendation project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
from __future__ import absolute_import
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from os.path import normpath, join
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'g^dpyw$mvbv5(c&k-$oz#b5a1a)t*q6va^bvkkz+9-$i6jakaq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'material',
'material.admin',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'crawler',
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAdminUser',
],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 25
}
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'admin_recommendation.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'admin_recommendation.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'admin',
'USER': os.environ.get('RECOM_DATA_USER', 'recom_server'),
'PASSWORD': os.environ.get('RECOM_DATA_PASSWORD', 'password@2016'),
'HOST': os.environ.get('RECOM_DATA_URL', '127.0.0.1'),
'PORT': '3306',
},
'app-recommendation': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'recommendation',
'USER': os.environ.get('RECOM_DATA_USER', 'recom_server'),
'PASSWORD': os.environ.get('RECOM_DATA_PASSWORD', 'password@2016'),
'HOST': os.environ.get('RECOM_DATA_URL', '127.0.0.1'),
'PORT': '3306',
}
}
DATABASE_ROUTERS = ['my_router.MyRouter']
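# Sketch of the router referenced above (hypothetical; the real my_router
# module is not shown in this file):
# class MyRouter(object):
#     def db_for_read(self, model, **hints):
#         if model._meta.app_label == 'crawler':
#             return 'app-recommendation'
#         return 'default'
#     def db_for_write(self, model, **hints):
#         return self.db_for_read(model, **hints)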
# CELERY STUFF
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'America/Sao_Paulo'
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': "[%(asctime)s] %(levelname)s "
"[%(name)s:%(lineno)s] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'file_django': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.environ.get('ADMIN_RECOM_LOG_FILE', 'log/django.log'),
'formatter': 'verbose'
},
'file_debug': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.environ.get('CRAWLER_LOG_FILE', 'log/crawler.log'),
'formatter': 'verbose'
},
'file_command': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.environ.get('COMMAND_LOG_FILE', 'log/command.log'),
'formatter': 'verbose'
},
'celery_task_logger': {
'level': 'DEBUG',
'filters': None,
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.environ.get('CELERY_LOG_FILE', 'log/celery_tasks.log'),
'maxBytes': 1024 * 1024 * 5,
'backupCount': 2,
'formatter': 'verbose'
}
},
'loggers': {
'django': {
'handlers': ['file_django'],
'propagate': True,
'level': 'INFO',
},
'crawler.tasks': {
'handlers': ['file_debug'],
'propagate': True,
'level': 'DEBUG',
},
'crawler.command': {
'handlers': ['file_command'],
'propagate': True,
'level': 'DEBUG',
},
'celery.tasks': {
'handlers': ['celery_task_logger'],
'level': 'DEBUG',
'propagate': True,
},
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
MEDIA_ROOT = ''
MEDIA_URL = '/media/'
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
|
google/active-qa | third_party/bi_att_flow/my/tensorflow/__init__.py | Python | apache-2.0 | 51 | 0.019608 | from | third_party.bi_att_flow.my.tensorflow impor | t * |
SpaceGroupUCL/qgisSpaceSyntaxToolkit | esstoolkit/external/networkx/algorithms/approximation/matching.py | Python | gpl-3.0 | 1,155 | 0 | """
**************
Graph Matching
**************
Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
edges; that is, no two edges share a common vertex.
`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
"""
import networkx as nx
__all__ = ["min_maximal_matching"]
def min_maximal_matching(G):
r"""Returns the minimum maximal matching of G. That is, out of all maximal
matchings of the graph G, the smallest is returned.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
min_maximal_matching : set
Returns a set of edges such that no two edges share a common endpoint
and every edge not in the set shares some common endpoint in the set.
Cardinality will be 2*OPT in the worst case.
Notes
-----
The algorithm computes an approximate solution for the minimum maximal
cardinality matching problem. The solution is no more than 2 * OPT in size.
Runtime is $O(|E|)$.
References
----------
.. [1] Vazirani, Vijay Approximation Algorithms (2001)
"""
return nx.maximal_matching(G)
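# Example (not part of the module): on the path graph 0-1-2-3 the optimum
# maximal matching is {(1, 2)} (size 1), and this routine may return a
# matching of size 2, respecting the 2 * OPT bound:
# >>> G = nx.path_graph(4)
# >>> len(min_maximal_matching(G)) <= 2
# True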
|
lem8r/website-themes | facebook_instant_article/models/website_blog.py | Python | lgpl-3.0 | 659 | 0 | # -*- coding: utf-8 -*-
# from openerp import api, fields, models, _
from openerp.osv import osv, fields
from openerp.addons.website.models.website import slug
from openerp import SUPERUSER_ID
import requests
class FbBlogPost(osv.Model):
_inherit = 'blog.post'
_columns = {
'fb_content': fields.html('FB Content', sanitize=False),
'fb_import_id': fields.char('FB Import ID'),
'fb_import_status_ok': fields.boolean('FB Import Status',
default=False),
'fb_article_id': fields.char('FB Article ID'),
'fb_publisher_token': fields.char('FB Publisher Token'),
}
|
Varun-Teja/Projects | Proxy/Proxy.py | Python | mit | 3,866 | 0.010347 | from socket import *
import sys, time
if len(sys.argv) <= 1:
print 'Usage: "python proxy.py server_ip"\n[server_ip : It is the IP Address of the Proxy Server]'
sys.exit(2)
# Create a server socket, bind it to a port and start listening
tcpSERVERPort = 8080
tcpSERVERSock = socket(AF_INET, SOCK_STREAM)
fp = open('log.txt','w')
# Prepare a server socket
tcpSERVERSock.bind((sys.argv[1], tcpSERVERPort))
tcpSERVERSock.listen(5)
while True:
# Start receiving data from the client
print 'Ready to serve...'
tcpCLIENTSock, addr = tcpSERVERSock.accept()
print 'Received a connection from: ', addr
t = time.time()
message = tcpCLIENTSock.recv(4096)
print "message= Hello ",message
fp.write(message)
a = len(message)
b = 0  # bytes received from the remote server; stays 0 if the request fails
print 'number of bytes sent =', a
# Extract the filename from the given message
if message == '':
print "No data"
else:
print "m2=::::",message.split()[1]
filename = message.split()[1].partition("/")[2]
print "filename = ",filename
fileExist = "false"
filetouse = "/" + filename
print "filetouse= :",filetouse
try:
# Check whether the file exists in the cache
f = open(filetouse[1:], "r")
outputdata = f.readlines()
b = len(outputdata)
print "bytes received from server = ",b
print "outputdata = ",outputdata
fileExist = "true"
print 'File Exists!'
# ProxyServer finds a cache hit and generates a response message
tcpCLIENTSock.send("HTTP/1.0 200 OK\r\n")
print "HTTP/1.0 200 OK\r\n"
tcpCLIENTSock.send("Content-Type:text/html\r\n")
# Send the content of the requested file to the client
for i in range(0, len(outputdata)):
tcpCLIENTSock.send(outputdata[i])
print 'Read from cache'
# Error handling for file not found in cache
except IOError:
print 'File Exist: ', fileExist
if fileExist == "false":
# Create a socket on the proxyserver
print 'Creating socket on proxyserver'
c = socket(AF_INET, SOCK_STREAM)
hostn = filename.replace("www.", "", 1)
print 'Host Name: ', hostn
try:
# Connect to the socket to port 80
c.connect((hostn, 80))
print 'Socket connected to port 80 of the host'
# Create a temporary file on this socket and ask port 80
# for the file requested by the client
fileobj = c.makefile('r', 0)
fileobj.write("GET " + "http://" + filename + " HTTP/1.0\n\n")
# Read the response into buffer
buffer = fileobj.readlines()
b = len(buffer)
print 'bytes received =' ,b
#resp = c.recv(4096)
#response = ""
#while resp:
#response += resp
# Create a new file in the cache for the requested file.
# Also send the response in the buffer to client socket
# and the corresponding file in the cache
tempFile = open("./" + filename, "wb")
#tempFile.write(response)
#tempFile.close()
#tcpcLIENTsock.send(response)
for i in range(0, len(buffer)):
tempFile.write(buffer[i])
tcpCLIENTSock.send(buffer[i])
tempFile.close()  # close the cached copy so it is flushed to disk
except:
print 'illegal request'
else:
# HTTP response message for file not found
print 'File Not Found...'
elap = time.time()
diff = elap - t
# Close the socket and the server sockets
tcpCLIENTSock.close()
fp.write("\n time taken =" + str(diff))
| fp.write("\n bytes sent =" + str(a))
fp.write("\n bytes received =" + str(b))
fp.write("\n")
fp.flush()  # keep the log handle open; fp is written again on the next request
print "Closing the server connection"
tcpSERVERSock.close()
|
DisposaBoy/GoSublime | gs9o.py | Python | mit | 19,161 | 0.035489 | from .gosubl import about
from .gosubl import gs
from .gosubl import gsq
from .gosubl import gsshell
from .gosubl import mg9
from .gosubl import sh
from .gosubl.margo import mg
from .gosubl.margo_state import actions
import datetime
import json
import os
import re
import shlex
import string
import sublime
import sublime_plugin
import uuid
import webbrowser
DOMAIN = "9o"
AC_OPTS = sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS
SPLIT_FN_POS_PAT = re.compile(r'(.+?)(?:[:](\d+))?(?:[:](\d+))?$')
URL_SCHEME_PAT = re.compile(r'[\w.+-]+://')
URL_PATH_PAT = re.compile(r'(?:[\w.+-]+://|(?:www|(?:\w+\.)*(?:golang|pkgdoc|gosublime)\.org))')
HIST_EXPAND_PAT = re.compile(r'^[\'"\s]*(\^+)\s*(\d+)[\'"\s]*$')
HOURGLASS = u'\u231B'
DEFAULT_COMMANDS = [
'help',
'run',
'build',
'replay',
'clear',
'go',
'go build',
'go clean',
'go doc',
'go env',
'go fix',
'go fmt',
'go get',
'go install',
'go list',
'go run',
'go test',
'go tool',
'go version',
'go vet',
'go help',
'settings',
'env',
'share',
'hist',
'hist erase',
'cd',
]
DEFAULT_CL = [(s, s+' ') for s in DEFAULT_COMMANDS]
try:
stash
except NameError:
stash = {}
try:
tid_alias
except NameError:
tid_alias = {}
def active_wd(win=None):
_, v = gs.win_view(win=win)
return gs.basedir_or_cwd(v.file_name() if v else '')
_9o_instance_default = '9o'
def _9o_instance(wd):
name = gs.setting('9o_instance') or _9o_instance_default
if name == 'auto':
name = wd or name
return name.replace('#', '~')
def _rkey(wd):
_, rkey = mg.run_tokens.next()
return rkey.replace('#', '~')
def _rcmd_wdid_rkey(*, fd):
l = fd.split('#', 1)
return (l[0], l[1]) if len(l) == 2 else (_wdid(_9o_instance_default), l[0])
def _rcmd_fd(*, wd, rkey):
return '%s#%s' % (_wdid(wd), rkey)
def _hkey(wd):
return '9o.hist.%s' % _9o_instance(wd)
def _wdid(wd):
return '9o://%s' % _9o_instance(wd)
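# e.g. with the default '9o_instance' setting this yields '9o://9o'; with
# 'auto' the working directory is used, so _wdid('/home/me/src') would be
# '9o:///home/me/src' (with any '#' replaced by '~').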
class EV(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
pos = gs.sel(view).begin()
if view.score_selector(pos, 'text.9o') == 0:
return []
cl = set()
hkey = _hkey(view.settings().get('9o.wd', ''))
cl.update((k, k+' ') for k in gs.dval(gs.aso().get(hkey), []))
cl.update((k, k+' ') for k in aliases())
cl.update((k, k+' ') for k in builtins())
cl.update(DEFAULT_CL)
return ([cl_esc(e) for e in sorted(cl)], AC_OPTS)
def cl_esc(e):
return (e[0], e[1].replace('$', '\\$'))
class Gs9oBuildCommand(sublime_plugin.WindowCommand):
def is_enabled(self):
view = gs.active_valid_go_view(self.window)
return view is not None
def run(self):
view = self.window.active_view()
args = {'run': gs.setting('build_command', ['^1'])} if gs.is_pkg_view(view) else {}
view.run_command('gs9o_open', args)
class Gs9oInsertLineCommand(sublime_plugin.TextCommand):
def run(self, edit, after=True):
insln = lambda: self.view.insert(edit, gs.sel(self.view).begin(), "\n")
if after:
self.view.run_command("move_to", {"to": "hardeol"})
insln()
else:
self.view.run_command("move_to", {"to": "hardbol"})
insln()
self.view.run_command("move", {"by": "lines", "forward": False})
class Gs9oMoveHist(sublime_plugin.TextCommand):
def run(self, edit, up):
view = self.view
pos = gs.sel(view).begin()
if view.score_selector(pos, 'prompt.9o') <= 0:
return
aso = gs.aso()
vs = view.settings()
wd = vs.get('9o.wd')
hkey = _hkey(wd)
hist = [s for s in gs.dval(aso.get(hkey), []) if s.strip()]
if not hist:
return
r = view.extract_scope(pos)
cmd = view.substr(r).strip('#').strip()
try:
idx = hist.index(cmd) + (-1 if up else 1)
found = True
except Exception:
idx = -1
found = False
if cmd and not found:
hist.append(cmd)
aso.set(hkey, hist)
gs.save_aso()
if idx >= 0 and idx < len(hist):
cmd = hist[idx]
elif up:
if not found:
cmd = hist[-1]
else:
cmd = ''
view.replace(edit, r, '# %s \n' % cmd)
n = view.line(r.begin()).end()
view.sel().clear()
view.sel().add(sublime.Region(n, n))
class Gs9oInitCommand(sublime_plugin.TextCommand):
def run(self, edit, wd=None):
v = self.view
mg.view(v.id(), view=v).is_9o = True
vs = v.settings()
if not wd:
wd = vs.get('9o.wd', active_wd(win=v.window()))
was_empty = v.size() == 0
s = '[ %s ] # \n' % gs.simple_fn(wd).replace('#', '~')
if was_empty:
v.insert(edit, 0, 'GoSublime %s 9o: type `help` for help and command documentation\n\n' % about.VERSION)
if was_empty or v.substr(v.size()-1) == '\n':
v.insert(edit, v.size(), s)
else:
v.insert(edit, v.size(), '\n'+s)
v.sel().clear()
n = v.size()-1
v.sel().add(sublime.Region(n, n))
opts = {
"rulers": [],
"fold_buttons": True,
"fade_fold_buttons": False,
"gutter": True,
"margin": 0,
# pad mostly so the completion menu shows on the first line
"line_padding_top": 1,
"line_padding_bottom": 1,
"tab_size": 2,
"word_wrap" | : True,
"indent_subsequent_lines": True,
"line_numbers": False,
"auto_complete": True,
"auto_complete_selector": "text",
"highlight_line": True,
"draw_indent_guides": True,
"scroll_past_end": True,
"indent_g | uide_options": ["draw_normal", "draw_active"],
"word_separators": "./\\()\"'-:,.;<>~!@#$%&*|+=[]{}`~?",
}
opts.update(gs.setting('9o_settings'))
for opt in opts:
vs.set(opt, opts[opt])
vs.set("9o", True)
vs.set("9o.wd", wd)
color_scheme = gs.setting("9o_color_scheme", "")
if color_scheme:
if color_scheme == "default":
vs.erase("color_scheme")
else:
vs.set("color_scheme", color_scheme)
else:
vs.set("color_scheme", "")
v.set_syntax_file(gs.tm_path('9o'))
if was_empty:
v.show(0)
else:
v.show(v.size()-1)
os.chdir(wd)
class Gs9oOpenCommand(sublime_plugin.TextCommand):
def run(self, edit, **kw):
win = self.view.window() or sublime.active_window()
win.run_command('gs9o_win_open', kw)
class Gs9oWinOpenCommand(sublime_plugin.WindowCommand):
def run(
self,
wd = None,
run = [],
save_hist = False,
focus_view = True,
show_view = True,
env = {},
push_output = [],
wdid = '',
action_data={},
):
win = self.window
wid = win.id()
if not wd:
wd = active_wd(win=win)
id = wdid or _wdid(wd)
st = stash.setdefault(wid, {})
v = st.get(id)
if v is None:
v = win.get_output_panel(id)
st[id] = v
if show_view:
win.run_command("show_panel", {"panel": ("output.%s" % id)})
if focus_view:
win.focus_view(v)
if not push_output:
v.run_command('gs9o_init', {'wd': wd})
if push_output:
v.run_command('gs9o_push_output', push_output)
if run:
v.run_command('gs9o_paste_exec', {
'cmd': ' '.join(shlex.quote(s) for s in run),
'save_hist': save_hist,
'env': env,
'action_data': action_data,
})
class Gs9oPasteExecCommand(sublime_plugin.TextCommand):
def run(self, edit, cmd, save_hist=False, env={}, action_data={}):
view = self.view
view.insert(edit, view.line(view.size()-1).end(), cmd)
view.sel().clear()
view.sel().add(view.line(view.size()-1).end())
view.run_command('gs9o_exec', {'save_hist': save_hist, 'env': env, 'action_data': action_data})
class Gs9oOpenSelectionCommand(sublime_plugin.TextCommand):
def is_enabled(self):
pos = gs.sel(self.view).begin()
return self.view.score_selector(pos, 'text.9o') > 0
def run(self, edit):
actions = []
v = self.view
sel = gs.sel(v)
if (sel.end() - sel.begin()) == 0:
pos = sel.begin()
inscope = lambda p: v.score_selector(p, 'path.9o') > 0
if inscope(pos):
actions.append(v.substr(v.extract_scope(pos)))
else:
pos -= 1
if inscope(pos):
actions.append(v.substr(v.extract_scope(pos)))
else:
line = v.line(pos)
for cr in v.find_by_selector('path.9o'):
if line.contains(cr):
actions.append(v.substr(cr))
else:
actions.append(v.substr(sel))
act_on(v, actions)
def act_on(view, actions):
for a in actions:
if act_on_path(view, a):
break
def act_on_path(view, path):
row = 0
col = 0
m = gs.VFN_ID_PAT.search(path)
if m:
path = 'gs.view://%s' % m.group(1)
m2 = gs.ROWCOL_PAT.search(m.group(2))
if m2:
row = int(m2.group(1))-1 if m2. |
toobaz/pandas | pandas/compat/_optional.py | Python | bsd-3-clause | 3,494 | 0.000859 | import distutils.version
import importlib
import types
import warnings
# Update install.rst when updating versions!
VERSIONS = {
"bs4": "4.6.0",
"bottleneck": "1.2.1",
"fastparquet": "0.2.1",
"gcsfs": "0.2.2",
"lxml.etree": "3.8.0",
"matplotlib": "2.2.2",
"numexpr": "2.6.2",
"odfpy": "1.3.0",
"openpyxl": "2.4.8",
"pandas_gbq": "0.8.0",
"pyarrow": "0.9.0",
"pytables": "3.4.2",
"s3fs": "0.0.8",
"scipy": "0.19.0",
"sqlalchemy": "1.1.4",
"tables": "3.4.2",
"xarray": "0.8.2",
"xlrd": "1.1.0",
"xlwt": "1.2.0",
"xlsxwriter": "0.9.8",
}
message = (
"Missing optional dependency '{name}'. {extra} "
"Use pip or conda to install {name}."
)
version_message = (
"Pandas requires version '{minimum_version}' or newer of '{name}' "
"(version '{actual_version}' currently installed)."
)
def _get_version(module: types.ModuleType) -> str:
version = getattr(module, "__version__", None)
if version is None:
# xlrd uses a capitalized attribute name
version = getattr(module, "__VERSION__", None)
if version is None:
raise ImportError("Can't determine version for {}".format(module.__name__))
return version
def import_optional_dependency(
name: str, extra: str = "", raise_on_missing: bool = True, on_version: str = "raise"
):
"""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version="ignore"`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
"""
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(message.format(name=name, extra=extra)) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = _get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {"warn", "raise", "ignore"}
msg = version_message.format(
minimum_version=minimum_version, name=name, actual_version=version
)
if on_version == "warn":
warnings.warn(msg, UserWarning)
return None
elif on_version == "raise":
raise ImportError(msg)
return module
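# Illustrative calls (not from this file; they mirror how pandas uses the helper):
# matplotlib = import_optional_dependency("matplotlib", extra="Plotting requires matplotlib.")
# lxml = import_optional_dependency("lxml.etree", raise_on_missing=False, on_version="ignore")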
|
cjcjameson/gpdb | gpMgmt/bin/gppylib/test/unit/test_unit_guccollection.py | Python | apache-2.0 | 12,742 | 0.003061 | from mock import *
from gp_unittest import *
from gpconfig_modules.database_segment_guc import DatabaseSegmentGuc
from gpconfig_modules.file_segment_guc import FileSegmentGuc
from gpconfig_modules.guc_collection import GucCollection
class GucCollectionTest(GpTestCase):
def setUp(self):
self.subject = GucCollection()
row = ['-1', 'guc_name', 'master_value']
self.db_seg_guc_1 = DatabaseSegmentGuc(row)
self.subject.update(self.db_seg_guc_1)
row = ['0', 'guc_name', 'value']
self.db_seg_guc_2 = DatabaseSegmentGuc(row)
self.subject.update(self.db_seg_guc_2)
def test_when_non_matching_file_value_yields_failure_format(self):
row = ['-1', 'guc_name', 'master_file_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'file_value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
self.assertIn("[context: -1] [dbid: dbid1] [name: guc_name] [value: master_value | file: master_file_value]"
"\n[context: 0] [dbid: dbid2] [name: guc_name] [value: value | file: file_value]",
self.subject.report())
def test_when_the_master_is_different_from_segment_yet_all_segments_same_yields_success_format(self):
row = ['0', 'guc_name', 'value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', 'master_value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value | file: master_value", self.subject.report())
self.assertIn("Segment value: value | file: value", self.subject.report())
def test_less_than_two_segments_raises(self):
del self.subject.gucs['0']
with self.assertRaisesRegexp(Exception,
"Collections must have at least a master and segment value"):
self.subject.report()
def test_when_invalid_gucs_with_no_master_raises(self):
del self.subject.gucs['-1']
with self.assertRaisesRegexp(Exception,
"Collections must have at least a master and segment value"):
self.subject.validate()
def test_when_three_segments_match_success_format(self):
row = ['-1', 'guc_name', 'master_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
row = ['1', 'guc_name', 'value']
self.subject.update(DatabaseSegmentGuc(row))
row = ['1', 'guc_name', 'value', 'dbid3']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value | file: master_value", self.subject.report())
self.assertIn("Segment value: value | file: value", self.subject.report())
def test_file_format_succeeds(self):
row = ['-1', 'guc_name', 'master_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value", self.subject.report())
self.assertIn("Segment value: value", self.subject.report())
def test_database_format_succeeds(self):
self.assertIn("Master value: master_value", self.subject.report())
self.assertIn("Segment value: value", self.subject.report())
def test_update_adds_to_empty(self):
self.assertEqual(len(self.subject.gucs), 2)
def test_update_when_same_database_segment_guc_type_overrides_existing(self):
row = ['-1', 'guc_name', 'new_value', 'dbid']
self.subject.update(DatabaseSegmentGuc(row))
self.assertEqual(len(self.subject.gucs), 2)
def test_update_after_having_full_comparison_for_a_given_contentid_succeeds(self):
row = ['0', 'guc_name', 'value', 'dbid']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', 'master_value', 'dbid']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', 'master_value', 'dbid']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value | file: master_value", self.subject.report())
self.assertIn("Segment value: value | file: value", self.subject.report())
def test_when_file_value_empty_file_compare_succeeds(self):
row = ['0', 'guc_name', None, 'dbid']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', None, 'dbid']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value | file: -", self.subject.report())
self.assertIn("Segment value: value | file: -", self.subject.report())
def test_when_multiple_dbids_per_contentid_reports_failure(self):
row = ['-1', 'guc_name', 'master_ | value', '1']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', 'master_value', '2']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', '3']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', '4']
self.subject.update(FileSegmentGuc(row))
row = ['1', 'guc_name', 'value']
self.subject.update(DatabaseSegmentGuc(row))
row = ['1', 'guc_name', 'different', '5']
self.subject.update(FileSegmentGuc(row))
row = ['1', 'guc_name', 'different', '6']
self.subject.update(FileSegmentGuc(row))
self.assertIn(""
"[context: -1] [dbid: 1] [name: guc_name] [value: master_value | file: master_value]"
"\n[context: -1] [dbid: 2] [name: guc_name] [value: master_value | file: master_value]",
# "\n[context: 0] [dbid: dbid] [name: guc_name] [value: value | file: file_value]",
self.subject.report())
def test_update_when_file_segment_has_same_dbid_overwrites_and_succeeds(self):
row = ['-1', 'guc_name', 'master_value', 'dbid']
self.subject.update(FileSegmentGuc(row))
primary_file_seg = FileSegmentGuc(row)
self.subject.update(primary_file_seg)
self.assertEquals(self.subject.gucs["-1"].primary_file_seg_guc, primary_file_seg)
self.assertEquals(self.subject.gucs["-1"].mirror_file_seg_guc, None)
def test_update_when_file_segments_first_succeeds(self):
row = ['-1', 'guc_name', 'master_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
primary_file_seg = FileSegmentGuc(row)
self.subject.update(primary_file_seg)
row = ['-1', 'guc_name', 'master_value']
self.subject.update(DatabaseSegmentGuc(row))
row = ['0', 'guc_name', 'value']
self.subject.update(DatabaseSegmentGuc(row))
self.assertIn("Master value: master_value | file: master_value", self.subject.report())
self.assertIn("Segment value: value | file: value", self.subject.report())
def test_update_when_only_database_segments_succeeds(self):
row = ['-1', 'guc_name', 'master_value']
self.subject.update(DatabaseSegmentGuc(row))
row = ['0', 'guc_name', 'value']
self.subject.update(DatabaseSegmentGuc(row))
self.assertIn("Master value: master_value", self.subject.report())
self.assertIn("Segment value: value", self.subject.report())
def test_update_when_only_file_segments_overwriting_succeeds(self):
self.subject = GucCollection()
row = ['-1', 'guc_name', 'master_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
row = ['-1', 'guc_name', 'master_value', 'dbid1']
self.subject.update(FileSegmentGuc(row))
row = ['0', 'guc_name', 'value', 'dbid2']
self.subject.update(FileSegmentGuc(row))
self.assertIn("Master value: master_value", self.subject.report())
self.assertIn("Segment value: value" |
aolsux/DeepThought | deepthought/factories.py | Python | gpl-3.0 | 1,527 | 0.00131 | '''
available factories:
feed_forward_factory
'''
import numpy
from mlpdata import MLPData
class zero_matrix(object):
def __init__(self, rows, cols):
self.__rows = rows
self.__cols = cols
def rows(self):
return self.__rows
def cols(self):
return self.__cols
def __repr__(self):
return str(self.rows()) + "x" + str(self.cols()) + " zero matrix"
class numpy_dense_matrix(numpy.ndarray):
def rows(self):
return self.shape[0]
def cols(self):
return self.shape[1]
def __repr__(self):
return str(self.rows()) + "x" + str(self.cols()) + " dense matrix"
def feed_forward_factory(layers, afunction):
'''
determines:
- number of layers
- number of neurons per layer
- number of total neurons
'''
mlp = MLPData()
mlp.weights = {}
for i in xrange(len(layers)):
mlp.weights[i] = {}
for j in xrange(len(layers)):
if j-i == 1:
mlp.weights[i][j] = numpy.ndarray(shape=(layers[i], layers[j]),
dtype=numpy.float).view(numpy_dense_matrix)
else:
mlp.weights[i][j] = zero_matrix(rows=layers[i],
cols=layers[j])
mlp.neuron_parameters = numpy.ndarray(shape=(sum(layers)),
dtype=afunction.parameters)
mlp.activation_function = afunction
return mlp
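# Illustrative usage sketch (layer sizes and the activation-function object
# are hypothetical; `afunction` only needs the numpy-compatible `parameters`
# dtype that feed_forward_factory assumes above):
#
#   mlp = feed_forward_factory(layers=[4, 8, 2], afunction=my_afunction)
#   print(mlp.weights[0][1])   # -> "4x8 dense matrix" (adjacent layers)
#   print(mlp.weights[1][0])   # -> "8x4 zero matrix" (no backward edge)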
|
sigmunau/nav | python/nav/portadmin/snmputils.py | Python | gpl-2.0 | 31,038 | 0.000161 | #
# Copyright (C) 2010 Norwegian University of Science and Technology
# Copyright (C) 2011-2015 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""This is a utility library made especially for PortAdmin."""
import time
import logging
from operator import attrgetter
from nav import Snmp
from nav.errors import NoNetboxTypeError
from nav.Snmp.errors import (SnmpError, UnsupportedSnmpVersionError,
NoSuchObjectError)
from nav.bitvector import BitVector
from nav.models.manage import Vlan, SwPortAllowedVlan
from nav.enterprise.ids import (VENDOR_ID_CISCOSYSTEMS,
VENDOR_ID_H3C,
VENDOR_ID_HEWLETT_PACKARD)
_logger = logging.getLogger("nav.portadmin.snmputils")
CHARS_IN_1024_BITS = 128
# TODO: Fix get_vlans as it does not return all vlans, see get_available_vlans
class FantasyVlan(object):
"""A container object for storing vlans for a netbox
This object is needed because we mix "real" vlans that NAV knows about
and "fake" vlans that NAV does not know about but exist on the switch.
They need to be compared and sorted, and this class does that.
"""
def __init__(self, vlan, netident=None, descr=None):
self.vlan = vlan
self.net_ident = netident
self.descr = descr
def __unicode__(self):
if self.net_ident:
return "%s (%s)" % (self.vlan, self.net_ident)
else:
return str(self.vlan)
def __hash__(self):
return hash(self.vlan)
def __cmp__(self, other):
return cmp(self.vlan, other.vlan)
class SNMPHandler(object):
"""A basic class for SNMP-read and -write to switches."""
from nav.smidumps.qbridge_mib import MIB as qbridgemib
QBRIDGENODES = qbridgemib['nodes']
SYSOBJECTID = '.1.3.6.1.2.1.1.2.0'
SYSLOCATION = '1.3.6.1.2.1.1.6.0'
IF_ALIAS_OID = '1.3.6.1.2.1.31.1.1.1.18' # From IF-MIB
IF_ADMIN_STATUS = '1.3.6.1.2.1.2.2.1.7'
IF_ADMIN_STATUS_UP = 1
IF_ADMIN_STATUS_DOWN = 2
IF_OPER_STATUS = '1.3.6.1.2.1.2.2.1.8'
# The VLAN ID assigned to untagged frames
VlAN_OID = QBRIDGENODES['dot1qPvid']['oid']
# List of all available vlans on this netbox as by the command "show vlans"
VLAN_ROW_STATUS = QBRIDGENODES['dot1qVlanStaticRowStatus']['oid']
# List of all ports on a vlan as a hexstring (including native vlan)
VLAN_EGRESS_PORTS = QBRIDGENODES['dot1qVlanStaticEgressPorts']['oid']
# dot1x
# dot1xPaeSystemAuthControl: The administrative enable/ disable state for
# Port Access Control in a System.
dot1xPaeSystemAuthControl = '1.0.8802.1.1.1.1.1.1.0'
netbox = None
def __init__(self, netbox, **kwargs):
self.netbox = netbox
self.read_only_handle = None
self.read_write_handle = None
self.available_vlans = None
self.timeout = kwargs.get('timeout', 3)
self.retries = kwargs.get('retries', 3)
def __unicode__(self):
return self.netbox.type.vendor.id
def _bulkwalk(self, oid):
"""Walk all branches for the given oid."""
handle = self._get_read_only_handle()
result = []
try:
result = handle.bulkwalk(oid)
except UnsupportedSnmpVersionError as unsup_ex:
_logger.info("_bulkwalk: UnsupportedSnmpVersionError = %s",
unsup_ex)
try:
result = handle.walk(oid)
except SnmpError as ex:
_logger.error("_bulkwalk: Exception = %s", ex)
return result
def _jog(self, oid):
"""Do a jog"""
handle = self._get_read_only_handle()
try:
return handle.jog(oid)
except SnmpError as _error:
return []
@staticmethod
def _get_legal_if_index(if_index):
"""Check if the given index is a legal interface-index."""
return str(int(if_index))
def _get_query(self, oid, if_index):
"""Concat given oid and interface-index."""
return oid + "." + self._get_legal_if_index(if_index)
def _get_read_only_handle(self):
"""Get a read only SNMP-handle."""
if self.read_only_handle is None:
self.read_only_handle = Snmp.Snmp(self.netbox.ip, self.netbox.read_only,
self.netbox.snmp_version,
retries=self.retries,
timeout=self.timeout)
return self.read_only_handle
def _query_netbox(self, oid, if_index):
"""Query the given interface."""
handle = self._get_read_only_handle()
result = None
try:
result = handle.get(self._get_query(oid, if_index))
except NoSuchObjectError as no_such_ex:
_logger.debug("_query_netbox: NoSuchObjectError = %s", no_such_ex)
return result
def _get_read_write_handle(self):
"""Get a read and write SNMP-handle.
:rtype: nav.Snmp.Snmp
"""
if self.read_write_handle is None:
self.read_write_handle = Snmp.Snmp(
self.netbox.ip, self.netbox.read_write,
self.netbox.snmp_version, retries=self.retries,
timeout=self.timeout)
return self.read_write_handle
def _set_netbox_value(self, oid, if_index, value_type, value):
"""Set a value for the given interface."""
handle = self._get_read_write_handle()
return handle.set(self._get_query(oid, if_index), value_type, value)
@staticmethod
def _chunkify(bitvector, chunks):
"""Divide bitvector into chunks number of chunks
:returns a new bitvector instance with the chunk
"""
hexes = bitvector.to_hex()
chunksize = len(hexes) // chunks
for i in range(0, len(hexes), chunksize):
yield BitVector.from_hex(hexes[i:i + chunksize])
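# Illustrative sketch (hypothetical sizes): for a bitvector whose hexstring
# is 32 characters long, _chunkify(bitvector, 4) yields four BitVectors,
# each rebuilt from 8 hex characters (4 bytes).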
def test_read(self):
"""Test if read works"""
handle = self._get_read_only_handle()
try:
handle.get(self.SYSOBJECTID)
return True
except SnmpError:
return False
def test_write(self):
"""Test if write works"""
handle = self._get_read_write_handle()
try:
value = handle.get(self.SYSLOCATION)
handle.set(self.SYSLOCATION, 's', value)
return True
except SnmpError:
return False
def get_if_alias(self, if_index):
""" Get alias on a specific interface """
return self._query_netbox(self.IF_ALIAS_OID, if_index)
def get_all_if_alias(self):
"""Get all aliases for all interfaces."""
return self._bulkwalk(self.IF_ALIAS_OID)
def set_if_alias(self, if_index, if_alias):
"""Set alias on a specific interface."""
if isinstance(if_alias, unicode):
if_alias = if_alias.encode('utf8')
return self._set_netbox_value(self.IF_ALIAS_OID, if_index, "s",
if_alias)
def get_vlan(self, base_port):
"""Get vlan on a specific interface."""
return self._query_netbox(self.VlAN_OID, base_port)
def get_all_vlans(self):
"""Get all vlans on the switch"""
return self._bulkwalk(self.VlAN_OID)
@staticmethod
def _compute_octet_string(hexstring, port, action='enable'):
"""
hexstring: the returnvalue of the snmpquery
port: the number of the port to add
"""
bit = BitVector(hexstring)
# |
hkust-smartcar/sc-studio | src/sc_studio/string_view.py | Python | mit | 1,489 | 0.032236 | '''
sc_studio.string_view
Author: Ming Tsang
Copyright (c) 2014-2015 HKUST SmartCar Team
Refer to LICENSE for details
'''
import binascii
import logging
import time
import tkinter
from tkinter import Tk, Text
from sc_studio import config
from sc_studio.view import View
class StringView(View):
def __init__(self, params):
super(StringView, self).__init__(params)
self._tk = Tk()
self._text = Text(self._tk, bg = config.COL_GREY_900,
fg = config.COL_GREY_100)
self._tk.title("String view")
self._text.pack(side = tkinter.LEFT, fill = tkinter.Y)
self._tk.protocol("WM_DELETE_WINDOW", self.on_press_close)
self._file = open("string_" + str(int(time.time() * 1000)) + ".txt", "w")
def run(self):
super(StringView, self).run()
self._tk.mainloop()
def on_new_input(self):
try:
hex_str = self.get_input()
line = self._get_line(hex_str)
except Exception as e:
logging.debug(str(e))
return
string = line.decode("UTF-8")
self._text.insert(tkinter.END, string)
self._text.insert(tkinter.END, '\n')
while self._text.yview()[1] != 1.0:
self._text.delete(1.0, 2.0)
self._file.write(time.strftime("[%x %X] "))
self._file.write(string)
self._file.write('\n')
def on_dismiss(self):
self._tk.after_idle(self.on_press_close)
def on_press_close(self):
self._tk.destroy()
self.join_io_thread()
def _get_line(self, hex_str):
try:
return binascii.unhexlify(hex_str)
except TypeError as e:
logging.debug(str(e))
raise
|
JadsonReis/sistema-nacional-cultura | planotrabalho/utils.py | Python | agpl-3.0 | 880 | 0.001136 | import re
from datetime import date
def get_or_none(model, **kwargs):
try:
return model.objects.get(**kwargs)
except model.DoesNotExist:
return None
def validar_cnpj(cnpj):
cnpj = ''.join(re.findall(r'\d', str(cnpj)))
if (not cnpj) or (len(cnpj) < 14):
return False
inteiros = list(map(int, cnpj))
novo = inteiros[:12]
prod = [5, 4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2]
while len(novo) < 14:
r = sum([x * y for (x, y) in zip(novo, prod)]) % 11
if r > 1:
f = 11 - r
else:
f = 0
novo.append(f)
prod.insert(0, 6)
if novo == inteiros:
return cnpj
return False
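# Sketch of the check-digit scheme above (no real company number implied):
# the first 12 digits are multiplied pairwise by the `prod` weights and
# summed mod 11; a remainder r > 1 maps to check digit 11 - r, otherwise 0.
# The weight list then gains a leading 6 so the same pass computes the 14th
# digit, and the input is valid only if the recomputed digits match.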
def add_anos(data, anos):
try:
return data.replace(year=data.year + anos)
except ValueError:
return data + (date(data.year + anos, 1, 1) - date(data.year, 1, 1))
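# Example: add_anos(date(2020, 2, 29), 1) hits the ValueError branch (2021
# has no Feb 29), so the day-offset fallback returns date(2021, 3, 1).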
|
crossroadchurch/paul | openlp/core/ui/firsttimewizard.py | Python | gpl-2.0 | 17,462 | 0.00315 | # -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2015 OpenLP Developers #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
The UI widgets for the first time wizard.
"""
from PyQt4 import QtCore, QtGui
from openlp.core.common import translate, is_macosx, clean_button_text
from openlp.core.lib import build_icon
from openlp.core.lib.ui import add_welcome_page
class FirstTimePage(object):
"""
An enumeration class with each of the pages of the wizard.
"""
Welcome = 0
Download = 1
NoInternet = 2
Plugins = 3
Songs = 4
Bibles = 5
Themes = 6
Defaults = 7
Progress = 8
class UiFirstTimeWizard(object):
"""
The UI widgets for the first time wizard.
"""
def setup_ui(self, first_time_wizard):
"""
Set up the UI.
:param first_time_wizard: The wizard form
"""
first_time_wizard.setObjectName('first_time_wizard')
first_time_wizard.setWindowIcon(build_icon(u':/icon/openlp-logo.svg'))
first_time_wizard.resize(550, 386)
first_time_wizard.setModal(True)
first_time_wizard.setOptions(QtGui.QWizard.IndependentPages | QtGui.QWizard.NoBackButtonOnStartPage |
QtGui.QWizard.NoBackButtonOnLastPage | QtGui.QWizard.HaveCustomButton1 |
QtGui.QWizard.HaveCustomButton2)
if is_macosx():
first_time_wizard.setPixmap(QtGui.QWizard.BackgroundPixmap,
QtGui.QPixmap(':/wizards/openlp-osx-wizard.png'))
first_time_wizard.resize(634, 386)
else:
first_time_wizard.setWizardStyle(QtGui.QWizard.ModernStyle)
self.finish_button = self.button(QtGui.QWizard.FinishButton)
self.no_internet_finish_button = self.button(QtGui.QWizard.CustomButton1)
self.cancel_button = self.button(QtGui.QWizard.CancelButton)
self.no_internet_cancel_button = self.button(QtGui.QWizard.CustomButton2)
self.next_button = self.button(QtGui.QWizard.NextButton)
self.back_button = self.button(QtGui.QWizard.BackButton)
add_welcome_page(first_time_wizard, ':/wizards/wizard_firsttime.bmp')
# The download page
self.download_page = QtGui.QWizardPage()
self.download_page.setObjectName('download_page')
self.download_layout = QtGui.QVBoxLayout(self.download_page)
self.download_layout.setMargin(48)
self.download_layout.setObjectName('download_layout')
self.download_label = QtGui.QLabel(self.download_page)
self.download_label.setObjectName('download_label')
self.download_layout.addWidget(self.download_label)
first_time_wizard.setPage(FirstTimePage.Download, self.download_page)
# The "you don't have an internet connection" page.
self.no_internet_page = QtGui.QWizardPage()
self.no_internet_page.setObjectName('no_internet_page')
self.no_internet_layout = QtGui.QVBoxLayout(self.no_internet_page)
self.no_internet_layout.setContentsMargins(50, 30, 50, 40)
self.no_internet_layout.setObjectName('no_internet_layout')
self.no_internet_label = QtGui.QLabel(self.no_internet_page)
self.no_internet_label.setWordWrap(True)
self.no_internet_label.setObjectName('no_internet_label')
self.no_internet_layout.addWidget(self.no_internet_label)
first_time_wizard.setPage(FirstTimePage.NoInternet, self.no_internet_page)
# The plugins page
self.plugin_page = QtGui.QWizardPage()
self.plugin_page.setObjectName('plugin_page')
self.plugin_layout = QtGui.QVBoxLayout(self.plugin_page)
self.plugin_layout.setContentsMargins(40, 15, 40, 0)
self.plugin_layout.setObjectName('plugin_layout')
self.songs_check_box = QtGui.QCheckBox(self.plugin_page)
self.songs_check_box.setChecked(True)
self.songs_check_box.setObjectName('songs_check_box')
self.plugin_layout.addWidget(self.songs_check_box)
self.custom_check_box = QtGui.QCheckBox(self.plugin_page)
self.custom_check_box.setChecked(True)
self.custom_check_box.setObjectName('custom_check_box')
self.plugin_layout.addWidget(self.custom_check_box)
self.bible_check_box = QtGui.QCheckBox(self.plugin_page)
self.bible_check_box.setChecked(True)
self.bible_check_box.setObjectName('bible_check_box')
self.plugin_layout.addWidget(self.bible_check_box)
self.image_check_box = QtGui.QCheckBox(self.plugin_page)
self.image_check_box.setChecked(True)
self.image_check_box.setObjectName('image_check_box')
self.plugin_layout.addWidget(self.image_check_box)
self.presentation_check_box = QtGui.QCheckBox(self.plugin_page)
self.presentation_check_box.setChecked(True)
self.presentation_check_box.setObjectName('presentation_check_box')
self.plugin_layout.addWidget(self.presentation_check_box)
self.media_check_box = QtGui.QCheckBox(self.plugin_page)
self.media_check_box.setChecked(True)
self.media_check_box.setObjectName('media_check_box')
self.plugin_layout.addWidget(self.media_check_box)
self.remote_check_box = QtGui.QCheckBox(self.plugin_page)
self.remote_check_box.setObjectName('remote_check_box')
self.plugin_layout.addWidget(self.remote_check_box)
self.song_usage_check_box = QtGui.QCheckBox(self.plugin_page)
self.song_usage_check_box.setChecked(True)
self.song_usage_check_box.setObjectName('song_usage_check_box')
self.plugin_layout.addWidget(self.song_usage_check_box)
self.alert_check_box = QtGui.QCheckBox(self.plugin_page)
self.alert_check_box.setChecked(True)
self.alert_check_box.setObjectName('alert_check_box')
self.plugin_layout.addWidget(self.alert_check_box)
first_time_wizard.setPage(FirstTimePage.Plugins, self.plugin_page)
# The song samples page
self.songs_page = QtGui.QWizardPage()
self.songs_page.setObjectName('songs_page')
self.songs_layout = QtGui.QVBoxLayout(self.songs_page)
self.songs_layout.setContentsMargins(50, 20, 50, 20)
self.songs_layout.setObjectName('songs_layout')
self.songs_list_widget = QtGui.QListWidget(self.songs_page)
self.songs_list_widget.setAlternatingRowColors(True)
self.songs_list_widget.setObjectName('songs_list_widget')
self.songs_layout.addWidget(self.songs_list_widget)
first_time_wizard.setPage(FirstTimePage.Songs, self.songs_page)
# The Bible samples page
self.bibles_page = QtGui.QWizardPage()
self.bibles_page.setObjectName('bibles_page')
|
minaevmike/praktica | Diplom/Samples/minimal-django-file-upload-example-master/src/for_django_1-5/myproject/myproject/myapp/urls.py | Python | gpl-2.0 | 169 | 0.005917 | # -*- coding: utf-8 -*-
from django.conf.urls.defaults import pattern | s, url
urlpatte | rns = patterns('myproject.myapp.views',
url(r'^list/$', 'list', name='list'),
)
|
eroicaleo/LearningPython | PythonTricks/ch04_05.py | Python | mit | 1,316 | 0.007599 | #!/usr/bin/env python
def print_banner(s):
print('##------------------------------------------------------------------------------')
print(f'## {s}')
print('##------------------------------------------------------------------------------')
print_banner('First implementation')
class Base:
def foo(self):
raise NotImplementedError()
def bar(self):
raise NotImplementedError()
class Concrete(Base):
def foo(self):
return 'foo() called'
b = Base()
try:
b.foo()
except NotImplementedError as err:
print(f'Got this NotImplementedError error')
c = Concrete()
print(c.foo())
try:
c.bar()
except NotImplementedError as err:
print(f'Got this NotImplementedError error')
print_banner('Implementation with abc module')
from abc import ABCMeta, abstractmethod
class Base(metaclass=ABCMeta):
@abstractmethod
def foo(self):
pass
@abstractmethod
def bar(self):
pass
class Concrete(Base):
def foo(self):
pass
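# Note: bar() is intentionally left unimplemented, so Concrete still counts
# as abstract and instantiating it below raises TypeError.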
assert issubclass(Concrete, Base)
try:
print('Instantiate b = Base()')
b = Base()
except TypeError as err:
print(f'Got this TypeError error: {err!r}')
try:
print('Instantiate c = Concrete()')
c = Concrete()
except TypeError as err:
print(f'Got this TypeError error: {err!r}')
|
ShengGuangzhi/SummerTree | algorithm/python_version/basic/max_sub_array.py | Python | mit | 2,257 | 0.001329 | import math
def max_sub_array(array, begin=None, end=None):
def max_sub_array_mid(arr, begin_m, end_m, middle):
l_sum, l_max_index, l_max_sum = 0, None, None
l_local = middle - 1
while l_local >= begin_m:
l_sum += arr[l_local]
if l_max_index is None:
l_max_index = l_local
l_max_sum = arr[l_local]
elif l_sum > l_max_sum:
l_max_index = l_local
l_max_sum = l_sum
l_local -= 1
r_sum, r_max_index, r_max_sum = 0, None, None
r_local = middle
while r_local < end_m:
r_sum += arr[r_local]
if r_max_index is None:
r_max_index = r_local + 1
r_max_sum = arr[r_local]
elif r_sum > r_max_sum:
r_max_index = r_local + 1
r_max_sum = r_sum
r_local += 1
if l_max_sum is None:
l_max_sum = 0
l_max_index = middle
if r_max_sum is None:
r_max_sum = 0
r_max_index = middle
ret = dict()
ret['sum'] = l_max_sum + r_max_sum
ret['begin'] = l_max_index
ret['end'] = r_max_index
return ret
if begin is None and end is None:
begin = 0
end = len(array)
res = dict()
if begin + 1 == end:
res['begin'] = begin
res['end'] = end
res['sum'] = array[begin]
return res
if begin == end:
res['begin'] = begin
res['end'] = end
res['sum'] = 0
return res
mid = math.floor((begin + end) / 2)
l = max_sub_array(array, begin, mid)
r = max_sub_array(array, mid, end)
m = max_sub_array_mid(array, begin, end, mid)
if l['sum'] >= r['sum'] and l['sum'] >= m['sum']:
return l
elif r['sum'] >= l['sum'] and r['sum'] >= m['sum']:
return r
else:
return m
if __name__ == '__main__':
test_list = [13, -3, -25, 20, -3, -16, -23, 18, 20, -7, 12, -5, -22, 15, -4, 7]
result = max_sub_array(test_list)
print('begin :', result['begin'], 'end:', result['end'], 'sum:', result['sum'])
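# Expected output for this classic CLRS example:
# begin : 7 end: 11 sum: 43 (the slice 18, 20, -7, 12)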
|
Axilent/Dox | dox/client.py | Python | bsd-3-clause | 2,050 | 0.019512 | """
Axilent Client functionality for Dox.
"""
from sharrock.client import HttpClient, ResourceClient, ServiceException
from dox.config import get_cfg
from dox.utils import slugify
def _get_resource(app,resource,library=True):
"""
Gets a resource client.
"""
cfg = get_cfg()
apikey_setting = 'library_key' if library else 'api_key'
return ResourceClient('%s/api/resource' % cfg.get('Connection','endpoint'),app,'beta3',resource,auth_user=cfg.get('Connection',apikey_setting))
def _get_client(app,library=True):
"""
Gets a regular API client.
"""
cfg = get_cfg()
apikey_setting = 'library_key' if library else 'api_key'
return HttpClient('%s/api' % cfg.get('Connection','endpoint'),app,'beta3',auth_user=cfg.get('Connection',apikey_setting))
def get_content_library_resource():
"""
Gets a content library resource.
"""
return _get_resource('axilent.library','content')
def get_library_client():
"""
Gets the library API client.
"""
return _get_client('axilent.library')
def ping_library():
"""
Pings the library.
" | ""
cfg = get_cfg()
lib = get_library_client()
lib.ping(project=cfg.get('Connection','project'),content_type=cfg.get('Connection','content_type'))
def get_content_api():
"""
Gets the content API.
"""
return _get_client('axilent.content',library=False)
def get_content_resource():
"""
Gets the deployed content resource.
"""
return _get_resource('axilent.content','content',library=False)
def get_content_keys():
"""
Gets the keys for the content type.
"""
cfg = get_cfg()
api = get_content_api()
keys = api.getcontentkeys(content_type_slug=slugify(cfg.get('Connection','content_type')))
return keys
def get_content_item(key):
"""
Gets deployed content for the specified key.
"""
cfg = get_cfg()
resource = get_content_resource()
return resource.get(params={'content_type_slug':slugify(cfg.get('Connection','content_type')),'content_key':key})
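# Illustrative usage sketch (assumes the Connection section of the config
# referenced above is populated; the loop is hypothetical):
#
#   for key in get_content_keys():
#       item = get_content_item(key)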
|
tartavull/tigertrace | tigertrace/util/rehuman_semantics.py | Python | mit | 1,483 | 0.011463 | from tqdm import tqdm
from collections import defaultdict
import h5py
import networkx as nx
import struct
import numpy as np
# hl = None
# ml = None
# with h5py.File('/usr/people/it2/seungmount/research/datasets/blended_piriform_157x2128x2128/all/human_semantic_labels.h5','r') as f:
# hl = f['main'][:]
# with h5py.File('/usr/people/it2/seungmount/research/datasets/blended_piriform_157x2128x2128/all/sparse_human_labels.h5','r') as f:
# ml = f['main'][:]
# soft_labels = defaultdict(lambda: defaultdict(int))
# def max_key_from_dict( d ):
# max_key = d.keys()[0]
# max_val = d[max_key]
# for k,v in d.iteritems():
# if v > max_val:
# max_val = v
# max_key = k
# return max_key
# for z in tqdm(xrange(hl.shape[0])):
# for y in xrange(hl.shape[1]):
# for x in xrange(hl.shape[2]):
# soft_labels[ml[z,y,x]][hl[z,y,x]] += 1
# mapping = dict()
# for ml_label, soft_label in soft_labels.iteritems():
# best_hl = max_key_from_dict(soft_label)
# mapping[ml_label] = best_hl
# final = np.zeros(shape=ml.shape)
# for z in tqdm(xrange(hl.shape[0])):
# for y in xrange(hl.shape[1]):
# for x in xrange(hl.shape[2]):
# final[z,y,x] = mapping[ml[z,y,x]]
with h5py.File('/usr/people/it2/seungmount/research/datasets/blended_piriform_157x2128x2128/all/sparse_semantic_labels.h5') as f:
exp = np.zeros(shape=(5,157,2128,2128))
for i in range(5):
exp[i,:,:,:] = f['main'][:,:,:] == i
f.create_dataset('expanded',data=exp) |
sjtsp2008/oompa | oompa/tracking/UpdateLogger.py | Python | apache-2.0 | 6,922 | 0.010546 | #
# UpdateLogger.py
#
"""
package oompa.tracking
TODO: stil waffling about whether to log two-columns - "datetime {json}" or "{json-with-datetime-field}"
"""
import json
import os
from datetime import datetime
class UpdateLogger:
"""
records updates for later replay
"""
def __init__(self, config):
self.config = config
# XXX really get from config
config_base = os.environ["HOME"]
oompa_base = os.path.join(config_base, "oompa")
self._updates_folder = os.path.join(oompa_base, "updates")
# cache
self._streams = {}
return
# ###
#
# file-based subclass
# TODO: factor out to a subclass
#
def _getUpdatePath(self, datetime = None):
if isinstance(datetime, str):
yyyymmdd = datetime
else:
# in file subclass, we assume datetime is not none
yyyymmdd = datetime.strftime("%Y%m%d")
return os.path.join(self._updates_folder, "%s.updates.log" % yyyymmdd)
def _getUpdateStream(self, datetime = None):
path = self._getUpdatePath(datetime)
if path not in self._streams:
# assumes that oompa_base exists
if not os.path.exists(self._updates_folder):
os.mkdir(self._updates_folder)
self._streams[path] = open(path, "a")
return self._streams[path]
def _logUpdate(self, info_d):
now = datetime.now()
info_d["datetime"] = now.strftime("%Y%m%d-%H:%M:%S")
updateStream = self._getUpdateStream(now)
updateStream.write("%s\n" % json.dumps(info_d))
# print("# wrote update: %s" % json.dumps(info_d))
# updateStream.flush()
return
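# Each logged record is a single JSON object per line, for example
# (field values are illustrative):
#   {"kind": "list3", "subject_kind": "...", "subject": "...",
#    "field": "...", "action": "...", "datetime": "20150102-10:11:12"}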
#
# ###
def logListUpdate(self, entityMetadata, fieldName, action, **extra):
# i think this may end up being the only kind of update
info_d = {
"kind": "list3",
"subject_kind": entityMetadata.kind,
"subject": entityMetadata.name,
"field": fieldName,
"action": action,
}
info_d.update(extra)
self._logUpdate(info_d)
return
def logUpdates(self, entityMetadata, fieldName, newItems):
"""
XXX i don't think this is generic across any kind of update
"""
if not newItems:
return
info_d = {
"kind": "list1", # stupid - merge list1 and list2 handling
"field": fieldName,
"subject_kind": entityMetadata.kind,
"subject": entityMetadata.name,
}
for item in newItems:
# XXX non-generic
if fieldName == "repoNames":
# note that created_at and updated_at could be fetched later. updated will certainly change
info_d["full_name"] = item.full_name
# info_d["created_at"] = item.created_at
# these assume that someone has called refresh
# info_d["parent"] = item.parent
# info_d["source"] = item.source
# note: *not* including blurb
else:
print(" logUpdates() - *not* a repoName: %s" % fieldName)
pass
self._logUpdate(info_d)
pass
return
def logListUpdates(self, entityMetadata, fieldName, action, values):
"""
XXX i don't think this is generic across any kind of update
TODO: use self.logListUpdate, to get more normalized
"""
info_d = {
"kind": "list2",
"field": fieldName,
"action": action,
"subject_kind": entityMetadata.kind,
"subject": entityMetadata.name,
}
# TODO: probably just write out full list in one record
for value in values:
info_d["full_name"] = value
# TODO: if action is added, refresh the metadata to add parent and source
# TODO:
#
# info_d["created_at"] = item.created_at
# these assume that someone has called refresh
# info_d["parent"] = repo.parent
# info_d["source"] = item.source
#
# note: *not* including the blurb - renderer can look it up
self._logUpdate(info_d)
pass
return
def getUpdates(self, start_date = None, end_date = None):
"""
generate stream of updates
TODO: support various filters
"""
# print("UpdateLogger.getUpdates(): %s - %s" % ( start_date, end_date ))
# TODO: assuming today is dumb. maybe discover most recent
# update?
if end_date is None:
end_date = datetime.now()
if start_date is None:
start_date = end_date
if start_date != end_date:
raise NotImplementedError("date ranges spanning multiple days are not supported yet")
# XXX need date_utils.date_range
date_range = [ start_date, ]
for date in date_range:
# yyyymmdd = date.strftime("%Y%m%d")
yyyymmdd = date
update_path = self._getUpdatePath(yyyymmdd)
if not os.path.exists(update_path):
print("# update_path does not exist: %s" % update_path)
continue
for line in open(update_path):
yield json.loads( | line)
return
def organizeUpdatesByEntity(self, updates):
"""
organize updates by ( subject_kind, subject ) from the update
"""
byEntity = {}
for update in updates:
entity = ( update["subject_kind"], update["subject"] )
byEntity.setdefault(entity, []).append(update)
pass
return byEntity
def organizeUpdatesByKind(self, updates):
"""
organize updates by ( kind, ) from each update
"""
by_kind = {}
for update in updates:
by_kind.setdefault(update["kind"], []).append(update)
pass
return by_kind
def organizeUpdatesByField(self, updates):
"""
organize updates by ( field, ) from each update
"""
by_field = {}
for update in updates:
by_field.setdefault(update["field"], []).append(update)
pass
return by_field
def close(self):
for stream in self._streams.values():
stream.close()
return
pass
|
vineodd/PIMSim | GEM5Simulation/gem5/src/systemc/tests/verify.py | Python | gpl-3.0 | 19,486 | 0.00195 | #!/usr/bin/env python2
#
# Copyright 2018 Google, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from __future__ import print_function
import argparse
import collections
import difflib
import functools
import inspect
import itertools
import json
import multiprocessing.pool
import os
import re
import subprocess
import sys
script_path = os.path.abspath(inspect.getfile(inspect.currentframe()))
script_dir = os.path.dirname(script_path)
config_path = os.path.join(script_dir, 'config.py')
systemc_rel_path = 'systemc'
tests_rel_path = os.path.join(systemc_rel_path, 'tests')
json_rel_path = os.path.join(tests_rel_path, 'tests.json')
def scons(*args):
args = ['scons'] + list(args)
subprocess.check_call(args)
class Test(object):
def __init__(self, target, suffix, build_dir, props):
self.target = target
self.suffix = suffix
self.build_dir = build_dir
self.props = {}
for key, val in props.iteritems():
self.set_prop(key, val)
def set_prop(self, key, val):
setattr(self, key, val)
self.props[key] = val
def dir(self):
return os.path.join(self.build_dir, tests_rel_path, self.path)
def src_dir(self):
return os.path.join(script_dir, self.path)
def expected_returncode_file(self):
return os.path.join(self.src_dir(), 'expected_returncode')
def golden_dir(self):
return os.path.join(self.src_dir(), 'golden')
def bin(self):
return '.'.join([self.name, self.suffix])
def full_path(self):
return os.path.join(self.dir(), self.bin())
def m5out_dir(self):
return os.path.join(self.dir(), 'm5out.' + self.suffix)
def returncode_file(self):
return os.path.join(self.m5out_dir(), 'returncode')
test_phase_classes = {}
class TestPhaseMeta(type):
def __init__(cls, name, bases, d):
if not d.pop('abstract', False):
test_phase_classes[d['name']] = cls
super(TestPhaseMeta, cls).__init__(name, bases, d)
class TestPhaseBase(object):
__metaclass__ = TestPhaseMeta
abstract = True
def __init__(self, main_args, *args):
self.main_args = main_args
self.args = args
def __lt__(self, other):
return self.number < other.number
class CompilePhase(TestPhaseBase):
name = 'compile'
number = 1
def run(self, tests):
targets = list([test.full_path() for test in tests])
parser = argparse.ArgumentParser()
parser.add_argument('-j', type=int, default=0)
args, leftovers = parser.parse_known_args(self.args)
if args.j == 0:
self.args = ('-j', str(self.main_args.j)) + self.args
scons_args = [ 'USE_SYSTEMC=1' ] + list(self.args) + targets
scons(*scons_args)
class RunPhase(TestPhaseBase):
name = 'execute'
number = 2
def run(self, tests):
parser = argparse.ArgumentParser()
parser.add_argument('--timeout', type=int, metavar='SECONDS',
help='Time limit for each run in seconds, '
'0 to disable.',
default=60)
parser.add_argument('-j', type=int, default=0,
help='How many tests to run in parallel.')
args = parser.parse_args(self.args)
timeout_cmd = [
'timeout',
'--kill-after', str(args.timeout * 2),
str(args.timeout)
]
def run_test(test):
cmd = []
if args.timeout:
cmd.extend(timeout_cmd)
cmd.extend([
os.path.abspath(test.full_path()),
'-rd', os.path.abspath(test.m5out_dir()),
'--listener-mode=off',
'--quiet',
os.path.abspath(config_path),
])
# Ensure the output directory exists.
if not os.path.exists(test.m5out_dir()):
os.makedirs(test.m5out_dir())
try:
subprocess.check_call(cmd, cwd=os.path.dirname(test.dir()))
except subprocess.CalledProcessError, error:
returncode = error.returncode
else:
returncode = 0
with open(test.returncode_file(), 'w') as rc:
rc.write('%d\n' % returncode)
j = self.main_args.j if args.j == 0 else args.j
runnable = filter(lambda t: not t.compile_only, tests)
if j == 1:
map(run_test, runnable)
else:
tp = multiprocessing.pool.ThreadPool(j)
map(lambda t: tp.apply_async(run_test, (t,)), runnable)
tp.close()
tp.join()
class Checker(object):
def __init__(self, ref, test, tag):
self.ref = ref
self.test = test
self.tag = tag
def check(self):
with open(self.test) as test_f, open(self.ref) as ref_f:
return test_f.read() == ref_f.read()
def tagged_filt(tag, num):
return (r'\n{}: \({}{}\) .*\n(In file: .*\n)?'
r'(In process: [\w.]* @ .*\n)?').format(tag, tag[0], num)
def error_filt(num):
return tagged_filt('Error', num)
def warning_filt(num):
return tagged_filt('Warning', num)
def info_filt(num):
return tagged_filt('Info', num)
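# For illustration: tagged_filt('Error', 4) builds a pattern matching a
# report block such as "\nError: (E4) message\n" plus optional "In file:"
# and "In process:" lines; the "(E4)" code comes from tag[0] + num.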
class DiffingChecker(Checker):
def __init__(self, ref, test, tag, out_dir):
super(DiffingChecker, self).__init__(ref, test, tag)
self.out_dir = out_dir
def diffing_check(self, ref_lines, test_lines):
test_file = os.path.basename(self.test)
ref_file = os.path.basename(self.ref)
diff_file = '.'.join([ref_file, 'diff'])
diff_path = os.path.join(self.out_dir, diff_file)
if test_lines != ref_lines:
with open(diff_path, 'w') as diff_f:
for line in difflib.unified_diff(
ref_lines, test_lines,
fromfile=ref_file,
tofile=test_file):
diff_f.write(line)
return False
else:
if os.path.exists(diff_path):
os.unlink(diff_path)
return True
class LogChecker(DiffingChecker):
def merge_filts(*filts):
filts = map(lambda f: '(' + f + ')', filts)
filts = '|'.join(filts)
return re.compile(filts, flags=re.MULTILINE)
# The reporting mechanism will print the actual filename when running in
# gem5, and the "golden" output will say "<removed by verify.py>". We want
# to strip out both versions to make comparing the output sensible.
in_file_filt = r'^In file: ((<removed by verify\.pl>)|([a-zA-Z0-9.:_/]*))$'
ref_filt = merge_filts(
r'^\nInfo: /OSCI/SystemC: S |
Empire-of-Code-Puzzles/checkio-empire-auto-painting | verification/src/referee.py | Python | gpl-2.0 | 2,173 | 0.001381 | from checkio_referee import RefereeCodeGolf
from checkio_referee import covercodes, validators, representations
import settings_env
from tests import TESTS
class AutoPaintingValidator(validators.BaseValidator):
def validate(self, outer_result):
steps, k, n = self._test["validation_data"]
if not isinstance(outer_result, str):
return validators.ValidatorResult(False, "This is not a string.")
actions = outer_result.split(",")
if len(actions) > steps:
return validators.ValidatorResult(False, "It can be shorter.")
details = [0 for _ in range(n)]
good_ch = "".join(str(r) for r in range(n))
good_ch += ","
if any(ch not in good_ch for ch in outer_result):
return validators.ValidatorResult(False, "Wrong symbol in the result.")
for act in actions:
if len(act) > k:
return validators.ValidatorResult(
False, "The system can contain {0} detail(s).".format(k))
if len(set(act)) < len(act):
return validators.ValidatorResult(
False, "You can not place one detail twice in one load")
for ch in act:
details[int(ch)] += 1
if any(d < 2 for d in details):
return validators.ValidatorResult(False, "I see no painted details.")
if any(d > 2 for d in details):
return validators.ValidatorResult(False, "I see over painted details.")
return validators.ValidatorResult(True)
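# For example (illustrative, assuming the test allows at least 3 loads):
# with n=3 details and capacity k=2, the answer "01,12,02" paints every
# detail exactly twice and passes each check above.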
class Referee(RefereeCodeGolf):
TESTS = TESTS
DEFAULT_MAX_CODE_LENGTH = 150
BASE_POINTS = 10
ENVIRONMENTS = settings_env.ENVIRONMENTS
DEFAULT_FUNCTION_NAME = "golf"
VALIDATOR = AutoPaintingValidator
CALLED_REPRESENTATIONS = {
"python_3": representations.unwrap_arg_representation,
"python_2": representations.unwrap_arg_representation,
"javascript": representations.unwrap_arg_representation
}
ENV_COVERCODE = {
"python_2": covercodes.py_unwrap_args,
"python_3": covercodes.py_unwrap_args,
"javascript": covercodes.py_unwrap_args
}
|
zhongpei/softether-client | endpoints/main.py | Python | gpl-3.0 | 6,782 | 0.010764 | #!coding: utf-8
"""
Usage:
main.py <host> <username> <password> [-r] [--port=<port>] [--hub=<hub>] [--pppoe-username=<username>] [--pppoe-password=<password>] [--output=<output>]
Options:
-h --help Show help
-r Change route to make connect to Packetix Server always use default gw
--env Get param from env
--port=<port> Packetix Server Port [default: 15555].
--hub=<hub> Packetix Server Hub [default: VPN]
--pppoe-username=<username> PPPoE username
--pppoe-password=<password> PPPoE password
--output=<output> output file
"""
import os
import sys
from docopt import docopt
import time
from command import Commander
import netifaces
import logging
def add_route(packetix_host):
import socket
from socket import AF_INET
from pyroute2 import IPRoute
gws = netifaces.gateways()
default_gw = gws['default'][netifaces.AF_INET]
logging.info('default gw %s', default_gw)
dst_ip = socket.gethostbyname(packetix_host)
logging.info('packetix server : %s', dst_ip)
ip = IPRoute()
ip.route(
'add',
dst=dst_ip,
gateway=default_gw[0],
metrics={
'mtu': 1500,
'hoplimit': 16
}
)
def writeconf(template,target,**kw):
with open(template) as f:
data = f.read()
with open(target,"w+") as f:
f.write(data.format(**kw))
return True
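# The .tp templates are assumed to contain str.format placeholders (e.g.,
# an illustrative line "user {username}" in dsl-provider.tp) that writeconf
# fills in via data.format(**kw).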
def init_pppoe(args , status_loop = 10):
if args.get("--pppoe-username") is None:
logging.error("not have --pppoe-username")
return False,"pppoe param error"
if args.get('--pppoe-password') is None:
logging.error("not have --pppoe-password")
return False,"pppoe param error"
ok = writeconf("/etc/ppp/peers/dsl-provider.tp","/etc/ppp/peers/dsl-provider",username=args["--pppoe-username"])
if not ok:
logging.error("write conf /etc/ppp/peers/dsl-provider failed")
return False,"write conf /etc/ppp/peers/dsl-provider failed"
ok = writeconf("/etc/ppp/pap-secrets.tp","/etc/ppp/pap-secrets",username=args["--pppoe-username"],password=args['--pppoe-password'])
if not ok:
logging.error("write conf /etc/ppp/pap-secrets failed")
return False,"write conf /etc/ppp/pap-secrets failed"
c = Commander()
rd, ed = c.command2("pon dsl-provider")
if len(ed) > 0:
logging.error("pon failed")
return False,"pon failed"
for i in range(status_loop):
ok,why = is_pppoe_conneced()
logging.info("pppoe {ok}({why})".format(ok=ok,why=why))
if ok:
return True,why
time.sleep(1)
return False,"pppoe error"
def is_pppoe_conneced():
c = Commander()
rd, ed = c.command2("plog")
for l in rd.split("\n"):
index = l.find("local IP address")
if index != -1:
ip = l[index+len("local IP address"):].strip()
return True,ip
return False,"error"
def output(args,**kw):
output = args.get("--output")
if output is None:
return False
with open(output,"a+") as f:
for k,v in kw.items():
f.write("{key} : {value}\n".format(key=k,value=v))
f.write("\n")
return True
def init_vpn(args, status_loop = 10):
c = Commander()
rd, ed = c.command2("service rsyslog start")
if len(ed) > 0:
logging.error("start rsyslog failed")
return False,"start rsyslog failed"
rd, ed = c.command2("/opt/vpnclient/vpnclient start")
if len(ed) > 0:
logging.error("start vpnclient failed")
return False,"start vpnclient failed"
time.sleep(1)
ok, rd, ed = c.vpn_command("NicCreate p1")
if not ok:
logging.error("create nic failed")
return False,"create nic failed"
time.sleep(1)
if 'vpn_p1' not in netifaces.interfaces():
logging.error("create nic failed")
return False,"create nic failed"
ok, rd, ed = c.vpn_command(
"AccountCreate {username} /SERVER:{host}:{port} /HUB:VPN /USERNAME:{username} /NICNAME:p1".format(
username=args["<username>"],
host=args["<host>"],
port=15555,
)
)
if not ok:
logging.error("create account failed")
return False,"create account failed"
ok, rd, ed = c.vpn_command(
"AccountPasswordSet {username} /PASSWORD:{password} /TYPE:standard".format(
username=args["<username>"],
password=args["<password>"],
)
)
if not ok:
logging.error("account set password failed")
return False,"account set password failed"
ok, rd, ed = c.vpn_command(
"AccountConnect {username} ".format(
username=args["<username>"],
)
)
if not ok:
logging.error("connect failed")
return False,"connect failed"
for i in range(status_loop):
ok,why = is_vpn_connected()
logging.info("vpn connect %s (%s)"%(ok,why))
if ok:
return True,why
time.sleep(1)
return False,"vpn error"
def is_vpn_connected():
c = Commander()
ok, rd, ed = c.vpn_command(
"AccountStatusGet {username} ".format(
username=args["<username>"],
)
)
if not ok:
return False,"command not runing"
#print "\n".join(rd)
for l in rd:
if l.find("|") != -1:
key, value = l.split("|")
if key.find("Session Status") != -1 :
if value.find("Connection Completed (Session Established)") != -1:
return True,value
else:
return False,value
return False,"error"
if __name__ == '__main__':
args = docopt(__doc__)
logging.basicConfig(
level=logging.DEBUG,
format='%(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
#print args
if args.get("--env"):
if os.getenv("HOST"):
args['<host>'] = os.getenv("HOST")
if os.getenv("USERNAME"):
args['<username>'] = os.getenv("USERNAME")
if os.getenv("PASSWORD"):
args['<password>'] = os.getenv("PASSWORD")
if os.getenv("PPPOE_USERNAME"):
args['--pppoe-username'] = os.getenv("PPPOE_USERNAME")
if os.getenv("PPPOE_PASSWORD"):
args['--pppoe-password'] = os.getenv("PPPOE_PASSWORD")
print args
ok,why = init_vpn(args)
output(args,vpn=ok,status=why)
if ok:
ok,why = init_pppoe(args)
output(args,pppoe=ok,ip=why)
if args['-r']:
add_route(args['<host>'])
sys.exit()
|
jairomoldes/PyTango | tango/exception.py | Python | lgpl-3.0 | 6,718 | 0.002233 | # ------------------------------------------------------------------------------
# This file is part of PyTango (http://pytango.rtfd.io)
#
# Copyright 2006-2012 CELLS / ALBA Synchrotron, Bellaterra, Spain
# Copyright 2013-2014 European Synchrotron Radiation Facility, Grenoble, France
#
# Distributed under the terms of the GNU Lesser General Public License,
# either version 3 of the License, or (at your option) any later version.
# See LICENSE.txt for more info.
# ------------------------------------------------------------------------------
"""
This is an internal PyTango module.
"""
__all__ = ("exception_init",)
__docformat__ = "restructuredtext"
from .utils import document_static_method as __document_static_method
from ._tango import Except, DevError, ErrSeverity
def __to_dev_failed(exc_type=None, exc_value=None, traceback=None):
"""to_dev_failed(exc_type, exc_value, traceback) -> tango.DevFailed
Generate a TANGO DevFailed exception.
The exception is created with a single :class:`~tango.DevError`
object. A default value *tango.ErrSeverity.ERR* is defined for
the :class:`~tango.DevError` severity field.
The parameters are the same as the ones generates by a call to
:func:`sys.exc_info`.
Parameters :
- type : (class) the exception type of the exception being handled
- value : (object) exception parameter (its associated value or the
second argument to raise, which is always a class instance
if the exception type is a class object)
- traceback : (traceback) traceback object
Return : (tango.DevFailed) a tango exception object
New in PyTango 7.2.1"""
try:
Except.throw_python_exception(exc_type, exc_value, traceback)
except Exception as e:
return e
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~
# DevError pickle
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~
def __DevError__getinitargs__(self):
return ()
def __DevError__getstate__(self):
return self.reason, self.desc, self.origin, int(self.severity)
def __DevError__setstate__(self, state):
self.reason = state[0]
self.desc = state[1]
self.origin = state[2]
self.severity = ErrSeverity(state[3])
def __init_DevError():
DevError.__getinitargs__ = __DevError__getinitargs__
DevError.__getstate__ = __DevError__getstate__
DevError.__setstate__ = __DevError__setstate__
def __init_Except():
Except.to_dev_failed = staticmethod(__to_dev_failed)
def __doc_Except():
def document_static_method(method_name, desc, append=True):
return __document_static_method(Except, method_name, desc, append)
Except.__doc__ = """
A container for the static methods:
- throw_exception
- re_throw_exception
- print_exception
- compare_exception"""
document_static_method("throw_exception", """
throw_exception(reason, desc, origin, sever=tango.ErrSeverity.ERR) -> None
Generate and throw a TANGO DevFailed exception.
The exception is created with a single :class:`~tango.DevError`
object. A default value *tango.ErrSeverity.ERR* is defined for
the :class:`~tango.DevError` severity field.
Parameters :
- reason : (str) The exception :class:`~tango.DevError` object reason field
- desc : (str) The exception :class:`~tango.DevError` object desc field
- origin : (str) The exception :class:`~tango.DevError` object origin field
- sever : (tango.ErrSeverity) The exception DevError object severity field
Throws : DevFailed
""")
document_static_method("re_throw_exception", """
re_throw_exception(ex, reason, desc, origin, sever=tango.ErrSeverity.ERR) -> None
Re-throw a TANGO :class:`~tango.DevFailed` exception with one more error.
The exception is re-thrown with one more :class:`~tango.DevError` object.
A default value *tango.ErrSeverity.ERR* is defined for the new
:class:`~tango.DevError` severity field.
Parameters :
- ex : (tango.DevFailed) The :class:`~tango.DevFailed` exception
- reason : (str) The exception :class:`~tango.DevError` object reason field
- desc : (str) The exception :class:`~tango.DevError` object desc field
- origin : (str) The exception :class:`~tango.DevError` object origin field
- sever : (tango.ErrSeverity) The exception DevError object severity field
Throws : DevFailed
""")
document_static_method("print_error_stack", """
print_error_stack(ex) -> None
Print all the details of a TANGO error stack.
Parameters :
- ex : (tango.DevErrorList) The error stack reference
""")
document_static_method("print_exception", """
print_exception(ex) -> None
Print all the details of a TANGO exception.
Parameters :
- ex : (tango.DevFailed) The :class:`~tango.DevFailed` exception
""")
document_static_method("throw_python_exception", """
throw_python_exception(type, value, traceback) -> None
Generate and throw a TANGO DevFailed exception.
The exception is created with a single :class:`~tango.DevError`
object. A default value *tango.ErrSeverity.ERR* is defined for
the :class:`~tango.DevError` severity field.
The parameters are the same as the ones generates by a call to
:func:`sys.exc_info`.
Parameters :
- type : (class) the exception type of the exception being handled
- value : (object) exception parameter (its associated value or the
second argument to raise, which is always a class instance
if the exception type is a class object)
- traceback : (traceback) traceback object
Throws : DevFailed
New in PyTango 7.2.1
""")
def __doc_DevError():
DevError.__doc__ = """
Structure describing any error resulting from a command execution,
or an attribute query, with following members:
- reason : (str) reason
- severity : (ErrSeverity) error severty (WARN, ERR, PANIC)
- desc : (str) error description
- origin : (str) Tango server method in which the error happened"""
def exception_init(doc=True):
__init_Except()
__init_DevError()
if doc:
__doc_Except()
__doc_DevError()
|
chenke91/ckPermission | app/api_v1/resources/tests.py | Python | mit | 403 | 0.009926 | #encoding: utf-8
from flask.ext.restful import Resource, reqparse
class Test(Resource):
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('id', type=int)
super(Test, self).__init__()
def get(self):
args = self.parser.parse_args()
return {'id': args['id']}
def post(self):
pass
def put(self):
pass
def delete(self):
pass |
PersianWikipedia/pywikibot-core | scripts/archive/__init__.py | Python | mit | 99 | 0 | # -*- coding: utf-8 -*-
"""THIS DIRECTORY IS TO HOLD B | OT SCRIPTS THAT NO LONGER ARE MAINTAINED."""
|
mwillmott/techbikers | server/urls.py | Python | mit | 482 | 0 | from django.conf.urls import include, url
from django.views.generic.base import RedirectView
from server.views import app
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
# Uncomment the next line to enable | the admin:
url(r'^admin/', include(admin.site.urls)),
# API
url(r'^api/', include('server.api.urls')),
# Catchall and routing is handled by the client app
url(r'^', app)
]
|
yunojuno/django-s3-upload | example/migrations/0001_initial.py | Python | mit | 1,676 | 0.002983 | # Generated by Django 3.1 on 2020-08-25 12:15
import django.db.models.deletion
from django.db import migrations, models
import s3upload.fields
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Cat",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"custom_filename",
s3upload.fields.S3UploadField(blank=True, dest="custom_filename"),
),
],
),
migrations.CreateModel(
name="Kitte | n",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("video", s3upload.fields.S3UploadField(blank=True, dest="vids")),
("image", s3upload.fields.S3UploadField(blank=True, dest="imgs")),
("pdf", s3upload.fields.S3UploadField(blank=True, dest="files")),
(
"mother",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="example.cat"
),
),
],
),
]
|
matthappens/taskqueue | taskqueue/venv_tq/lib/python2.7/site-packages/gevent/coros.py | Python | mit | 251 | 0.011952 | # This module definitely remains in 1.0.x, probably in versions after that too.
import warnings
warnings.warn('gevent.coros has been renamed to gevent.lock', DeprecationWarning, stacklevel=2)
from gevent.lock import *
from gevent.lock import __all__
|
brianrodri/oppia | core/domain/user_domain.py | Python | apache-2.0 | 46,090 | 0.000195 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects for user."""
from __future__ import annotations
import re
from core import feconf
from core import utils
from core.constants import constants
class UserSettings:
"""Value object representing a user's settings.
Attributes:
user_id: str. The unique ID of the user.
email: str. The user email.
roles: list(str). Roles of the user.
username: str or None. Identifiable username to display in the UI.
last_agreed_to_terms: datetime.datetime or None. When the user last
agreed to the terms of the site.
last_started_state_editor_tutorial: datetime.datetime or None. When
the user last started the state editor tutorial.
last_started_state_translation_tutorial: datetime.datetime or None. When
the user last started the state translation tutorial.
last_logged_in: datetime.datetime or None. When the user last logged in.
last_created_an_exploration: datetime.datetime or None. When the user
last created an exploration.
last_edited_an_exploration: datetime.datetime or None. When the user
last edited an exploration.
profile_picture_data_url: str or None. User uploaded profile picture as
a dataURI string.
default_dashboard: str or None. The default dashboard of the user.
user_bio: str. User-specified biography.
subject_interests: list(str) or None. Subject interests specified by
the user.
first_contribution_msec: float or None. The time in milliseconds when
the user first contributed to Oppia.
preferred_language_codes: list(str) or None. Exploration language
preferences specified by the user.
preferred_site_language_code: str or None. System language preference.
preferred_audio_language_code: str or None. Audio language preference.
pin: str or None. The PIN of the user's profile for android.
display_alias: str or None. Display name of a user who is logged
into the Android app. None when the request is coming from web
because we don't use it there.
"""
def __init__(
self, user_id, email, roles, banned, username=None,
last_agreed_to_terms=None, last_started_state_editor_tutorial=None,
last_started_state_translation_tutorial=None, last_logged_in=None,
last_created_an_exploration=None, last_edited_an_exploration=None,
profile_picture_data_url=None, default_dashboard=None,
creator_dashboard_display_pref=(
constants.ALLOWED_CREATOR_DASHBOARD_DISPLAY_PREFS['CARD']),
user_bio='', subject_interests=None, first_contribution_msec=None,
preferred_language_codes=None, preferred_site_language_code=None,
preferred_audio_language_code=None, pin=None, display_alias=None,
deleted=False, created_on=None):
"""Constructs a UserSettings domain object.
Args:
user_id: str. The unique ID of the user.
email: str. The user email.
roles: list(str). Roles of the user.
banned: bool. Whether the user is banned.
username: str or None. Identifiable username to display in the UI.
last_agreed_to_terms: datetime.datetime or None. When the user
last agreed to the terms of the site.
last_started_state_editor_tutorial: datetime.datetime or None. When
the user last started the state editor tutorial.
last_started_state_translation_tutorial: datetime.datetime or None.
When the user last started the state translation tutorial.
last_logged_in: datetime.datetime or None. When the user last
logged in.
last_created_an_exploration: datetime.datetime or None. When the
user last created an exploration.
last_edited_an_exploration: datetime.datetime or None. When the
user last edited an exploration.
profile_picture_data_url: str or None. User uploaded profile
picture as a dataURI string.
default_dashboard: str|None. The default dashboard of the user.
creator_dashboard_display_pref: str. The creator dashboard of the
user.
user_bio: str. User-specified biography.
subject_interests: list(str) or None. Subject interests specified by
the user.
first_contribution_msec: float or None. The time in milliseconds
when the user first contributed to Oppia.
preferred_language_codes: list(str) or None. Exploration language
preferences specified by the user.
preferred_site_language_code: str or None. System language
preference.
preferred_audio_language_code: str or None. Default language used
for audio translations preference.
pin: str or None. The PIN of the user's profile for android.
display_alias: str or None. Display name of a user who is logged
into the Android app. None when the request is coming from
web because we don't use it there.
deleted: bool. Whether the user has requested removal of their
account.
created_on: datetime.datetime. When the user was created.
"""
self.user_id = user_id
self.email = email
self.roles = roles
self.username = username
self.last_agreed_to_terms = last_agreed_to_terms
self.last_started_state_editor_tutorial = (
last_started_state_editor_tutorial)
self.last_started_state_translation_tutorial = (
last_started_state_translation_tutorial)
self.last_logged_in = last_logged_in
self.last_edited_an_exploration = last_edited_an_exploration
self.last_created_an_exploration = last_created_an_exploration
self.profile_picture_data_url = profile_picture_data_url
self.default_dashboard = default_dashboard
self.creator_dashboard_display_pref = creator_dashboard_display_pref
self.user_bio = user_bio
self.subject_interests = (
subject_interests if subject_interests else [])
self.first_contribution_msec = first_contribution_msec
self.preferred_language_codes = (
preferred_language_codes if preferred_language_codes else [])
self.preferred_site_language_code = preferred_site_language_code
self.preferred_audio_language_code = preferred_audio_language_code
self.pin = pin
self.display_alias = display_alias
self.banned = banned
self.deleted = deleted
self.created_on = created_on
def validate(self):
"""Checks that the user_id, email, roles, banned, pin and display_alias
fields of this UserSettings domain object are valid.
Raises:
ValidationError. The user_id is not str.
ValidationError. The email is not str.
ValidationError. The email is invalid.
ValidationError. The roles is not a list.
ValidationError. Given role does not exist.
ValidationError. The pin is not str.
ValidationError. The display alias is not str.
"""
if not isinstance(self.user_id, str):
raise utils.ValidationError(
|
airfrog/okws | test/regtest/cases/85.py | Python | gpl-2.0 | 1,198 | 0.038397 |
import copy
description = "test sort"
arr = [ 10, 3, 44, 15, 40, -10, -1000, 0, 0, 3000]
arr2 = [ -10.2, 4.33, 1.999, -399.22, -10000.1001, 10.9, 3.922, 59.01, -33.11, 0.3, 0.2, 0.1, 0.001, -0.001, -0.2, -0.4, -0.3, -0.222 ]
arr3 = [ { "key" : k } for k in arr2 ]
filedata = """{$
locals { v : %(arr)s, v2 : [], w : %(arr2)s, u : %(arr3)s, l }
def rcmp (a, b) { return b - a; }
v2 = sort (v, cmp);
v3 = sort (v, rcmp);
v4 = sort (w);
l = lambda (a,b) {
locals { diff : b.key - a.key };
return cmp_float (diff);
} ;
v5 = sort (u, l);
v6 = sort2 (u, lambda (x) { return (0 - x.key); } ) ;
print (v2, " ", v3, " ", v4, " ", v5, " ", v6);
$}""" % { "arr" : arr, "arr2" : arr2, "arr3" : arr3 }
a1 = copy.copy (arr)
a1.sort ()
a2 = copy.copy (arr)
a2.sort ()
a2.reverse ()
a3 = copy.copy (arr2)
a3.sort ()
a4 = copy.copy (arr3)
a4.sort (lambda x,y: cmp (y["key"], x["key"]) )
a3s = str([ "%.12g" % x for x in a3 ]).replace("'","")
a4s = str ([ { "key" : "%.12g" % x["key"] } for x in a4 ]).replace("'","")
a4s = a4s.replace ("key", '"key"').replace ('": ', '" : ')
outcome = " ".join ([str (s) for s in [ a1, a2, a3s, a4s, a4s ] ])
|
JeyZeta/Dangerous | Dangerous/Weevely/modules/shell/sh.py | Python | mit | 3,972 | 0.012085 | '''
Created on 22/Aug/2011
@author: norby
'''
from core.moduleexception import ModuleException, ProbeException, ExecutionException, ProbeSucceed
from core.moduleguess import ModuleGuess
from core.argparse import ArgumentParser, StoredNamespace
from core.argparse import SUPPRESS
from ast import literal_eval
import random
MSG_SH_INTERPRETER_SUCCEED = 'Shell interpreter load succeed'
WARN_SH_INTERPRETER_FAIL = 'Shell interpreters load failed'
class Sh(ModuleGuess):
'''Execute system shell command'''
def _set_vectors(self):
self.vectors.add_vector("system", 'shell.php', "@system('$cmd $no_stderr');")
self.vectors.add_vector("passthru" , 'shell.php', "@passthru('$cmd $no_stderr');")
self.vectors.add_vector("shell_exec", 'shell.php', "echo @shell_exec('$cmd $no_stderr');")
self.vectors.add_vector("exec", 'shell.php', "@exec('$cmd $no_stderr', $r);echo(join(\"\\n\",$r));")
#self.vectors.add_vector("pcntl", 'shell.php', ' $p = pcntl_fork(); if(!$p) {{ pcntl_exec( "/bin/sh", Array("-c", "$cmd")); }} else {{ pcntl_waitpid($p,$status); }}'),
self.vectors.add_vector("popen", 'shell.php', "$h = popen('$cmd','r'); while(!feof($h)) echo(fread($h,4096)); pclose($h);")
self.vectors.add_vector("python_eval", 'shell.php', "python_eval('import os; os.system('$cmd$no_stderr');")
self.vectors.add_vector("perl_system", 'shell.php', "$perl = new perl(); $r = @perl->system('$cmd$no_stderr'); echo $r;")
self.vectors.add_vector("proc_open", 'shell.php', """$p = array(array('pipe', 'r'), array('pipe', 'w'), array('pipe', 'w'));
$h = proc_open('$cmd', $p, $pipes); while(!feof($pipes[1])) echo(fread($pipes[1],4096));
while(!feof($pipes[2])) echo(fread($pipes[2],4096)); fclose($pipes[0]); fclose($pipes[1]);
fclose($pipes[2]); proc_close($h);""")
def _set_args(self):
self.argparser.add_argument('cmd', help='Shell command', nargs='+')
self.argparser.add_argument('-no-stderr', help='Suppress error output', action='store_false')
self.argparser.add_argument('-vector', choices = self.vectors.keys())
self.argparser.add_argument('-just-probe', help=SUPPRESS, action='store_true')
def _init_stored_args(self):
self.stored_args_namespace = StoredNamespace()
setattr(self.stored_args_namespace, 'vector', None )
def _execute_vector(self):
if not getattr(self.stored_args_namespace, 'vector') or self.args['just_probe']:
self.__slacky_probe()
# Execute if is current vector is saved or choosen
if self.current_vector.name in (getattr(self.stored_args_namespace, 'vector'), self.args['vector']):
self._result = self.current_vector.execute( self.formatted_args)
def _prepare_vector(self):
# Format cmd
self.formatted_args['cmd'] = ' '.join(self.args['cmd']).replace( "'", "\\'" )
# Format stderr
if any('$no_stderr' in p for p in self.current_vector.payloads):
if self.args['no_stderr']:
self.formatted_args['no_stderr'] = '2>&1'
else:
self.formatted_args['no_stderr'] = ''
def __slacky_probe(self):
rand = str(random.randint( 11111, 99999 ))
slacky_formats = self.formatted_args.copy()
slacky_formats['cmd'] = 'echo %s' % (rand)
if self.current_vector.execute( slacky_formats) == rand:
setattr(self.stored_args_namespace, 'vector', self.current_vector.name)
# Set as best interpreter
#self.modhandler.interpreter = self.name
if self.args['just_probe']:
self._result = True
raise ProbeSucceed(self.name, MSG_SH_INTERPRETER_SUCCEED)
return
raise ModuleException(self.name, WARN_SH_INTERPRETER_FAIL)
|
sozforex/furry-train | tests/test_nettoips.py | Python | mit | 1059 | 0.002833 | from click.testing import CliRunner
from furrytrain.nettoips import main
def test_main():
cases = [(' 10.234.10.0/30 ',
'10.234.10.0\n10.234.10.1\n10.234.10.2\n10.234.10.3\n'),
('1000.234.10.0/30',
'error: 1000.234.10.0/30\n'),
('1000.234.10.0/30 ',
'error: 1000.234.10.0/30 \n'),
('255.255.255.255/30',
'255.255.255.252\n'
'255.255.255.253\n'
'255.255.255.254\n'
'255.255.255.255\n'),
('192.168.0.0/31',
'192.168.0.0\n192.168.0.1\n'),
]
for input, output in cases:
isolated_main(input, output)
def isolated_main(input, output, only_cidr=True):
runner = CliRunner()
with runner.isolated_filesystem():
with open('input1.txt', 'w') as f:
f.write(input)
args = (['-c'] if only_cidr else []) + ['input1.txt']
result = runner.invoke(main, args)
assert result.exit_code == 0
assert result.output == output
|
llvm-mirror/lldb | packages/Python/lldbsuite/test/functionalities/breakpoint/breakpoint_options/TestBreakpointOptions.py | Python | apache-2.0 | 3,765 | 0.000531 | """
Test breakpoint command for different options.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
class BreakpointOptionsTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test(self):
"""Test breakpoint command for different options."""
self.build()
self.breakpoint_options_test()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.cpp', '// Set break point at this line.')
def breakpoint_options_test(self):
"""Test breakpoint command for different options."""
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# This should create a breakpoint with 1 locations.
lldbutil.run_break_set_by_file_and_line(
self,
"main.cpp",
self.line,
extra_options="-K 1",
num_expected_locations=1)
lldbutil.run_break_set_by_file_and_line(
self,
"main.cpp",
self.line,
extra_options="-K 0",
num_expected_locations=1)
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# Stopped once.
self.expect("thread backtrace", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint 2."])
# Check the list of breakpoint.
self.expect(
"breakpoint list -f",
"Breakpoint locations shown correctly",
substrs=[
"1: file = 'main.cpp', line = %d, exact_match = 0, locations = 1" %
self.line,
"2: file = 'main.cpp', line = %d, exact_match = 0, locations = 1" %
self.line])
# Continue the program, there should be another stop.
self.runCmd("process continue")
# Stopped again.
self.expect("thread backtrace", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint 1."])
# Continue the program, we should exit.
self.runCmd("process continue")
# We should exit.
self.expect("process status", "Process exited successfully",
patterns=["^Process [0-9]+ exited with status = 0"])
def breakpoint_options_language_test(self):
"""Test breakpoint command for language option."""
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# This should create a breakpoint with 1 locations.
lldbutil.run_break_set_by_symbol(
self,
'ns::func',
sym_exact=False,
extra_options="-L c++",
num_expected_locations=1)
# This should create a breakpoint with 0 locations.
lldbutil.run_break_set_by_symbol(
self,
'ns::func',
sym_exact=False,
extra_options="-L c",
num_expected_locations=0)
self.runCmd("settings set target.language c")
lldbutil.run_break_set_by_symbol(
self, 'ns::func', sym_exact=False, num_expected_locations=0)
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# Stopped once.
self.expect("thread backtrace", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint 1."])
# Continue the program, we should exit.
self.runCmd("process continue")
# We should exit.
self.expect("process status", "Process exited successfully",
patterns=["^Process [0-9]+ exited with status = 0"])
|
jbu/personis | personis/examples/aelog/httplib2/__init__.py | Python | gpl-3.0 | 68,094 | 0.005698 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.7.4"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except ImportError:
socks = None
# Build the appropriate socket wrapper for ssl
try:
import ssl # python 2.6
ssl_SSLError = ssl.SSLError
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module
# doesn't expose the necessary knobs. So we need to go with the default
# of SSLv23.
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if not disable_validation:
raise CertificateValidationUnsupported(
"SSL certificate validation is not supported without "
"the ssl module installed. To avoid this error, install "
"the ssl module, or explicity disable validation.")
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if sys.version_info >= (2,3):
from iri2uri import iri2uri
else:
def iri2uri(uri):
return uri
def has_timeout(timeout): # python 2.6
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# Python 2.3 support
if sys.version_info < (2,4):
def sorted(seq):
seq.sort()
return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise httplib.ResponseNotReady()
return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
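# For illustration (hypothetical URI, not from the original module):
#   parse_uri("http://example.com/a/b?x=1#frag")
#   -> ('http', 'example.com', '/a/b', 'x=1', 'frag')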
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
scheme = scheme.lower()
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme.match(filename):
if isinstance(filename,str):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,unicode):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest()
filename = re_url_scheme.sub("", filename)
filename = re_slash.sub(",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normal |
pedsm/deepHack | old/rating/main.py | Python | mit | 126 | 0.007937 | def rate(Likes, Comments, Tags):
a = 0.0143
b = 0.0413
c = 0.0367
return Likes * a + Comments * b + Tags * c
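# For illustration: rate(100, 10, 5)
# = 100*0.0143 + 10*0.0413 + 5*0.0367 = 1.43 + 0.413 + 0.1835 = 2.0265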
| |
KMarkert/servir-vic-training | scripts/calibrate_vic.py | Python | gpl-3.0 | 6,662 | 0.01486 | #******************************************************************************
# FILE: calibrate_vic.py
# AUTHOR: Kel Markert
# EMAIL: kel.markert@nasa.gov
# ORGANIZATION: NASA-SERVIR, UAH/ESSC
# MODIFIED BY: n/a
# CREATION DATE: 22 Feb. 2017
# LAST MOD DATE: 03 Apr. 2017
# PURPOSE: This script performs a simple calibration process by inserting random
# parameterization into the VIC model
# DEPENDENCIES: numpy, pandas, scipy, osgeo (gdal)
#******************************************************************************
# import dependencies
from __future__ import print_function
import os
import sys
import datetime
import numpy as np
import pandas as pd
import xarray as xr
from scipy import stats
from format_soil_params import *
from rout_vic import *
from flux2nc import *
def calibrate_vic(niter):
# define script file path for relative path definitions
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
os.chdir(__location__)
# output calibration table
outCal = '../data/output/calibration_table.csv'
# specify file paths to pass into system commands
globalFile = os.path.join(__location__,'../data/input/global.params')
soilFile = os.path.join(__location__,'../data/input/soil.param')
gridRas = os.path.join(__location__,'../data/input/gis/Nyando_grid.tif')
elvRas = os.path.join(__location__,'../data/input/gis/Nyando_basin_ElvAvg.tif')
slopeRas = os.path.join(__location__,'../data/input/gis/Nyando_basin_SlopeAvg.tif')
precipRas = os.path.join(__location__,'../data/input/gis/Nyando_basin_PrecipSnap.tif')
soilRas =os.path.join(__location__,'../data/input/gis/Nyando_basin_SoilAgg.tif')
#gridding paths
fluxPath = '../data/output/fluxes/'
fluxNc = '../data/output/'
# specify variables for routing
fracRas = os.path.join(__location__,'../data/input/gis/Nyando_fraction.tif')
uhFile = '../data/input/Nyando_UHFile.csv'
roFile = '../data/output/runoff_2005.nc'
bfFile = '../data/output/base_2005.nc'
routOut = '../data/output/rout_out.csv'
calStart = '20050101'
calEnd = '20091231'
# specify validation time series path
valFile = '../data/input/Nyando_discharge.xlsx'
# specify range of acceptable parameter
b_range = [0, 0.5]
Ws_range = [0.5,1]
Ds_range = [0,0.5]
c_range = [0.01,5]
s_range = [0.3,1.5]
#empty lists to add calibration results
b_vals = []
Ws_vals = []
Ds_vals = []
c_vals = []
s2_vals = []
s3_vals = []
NSEs = []
Rs = []
Biases = []
RMSEs = []
# calibration start time
t1 = datetime.datetime.now()
# run n number of iterationa
for i in range(int(niter)):
print("Iteration {0} of {1}".format(i+1,niter))
# get random variable for each parameter
#b_val = np.random.uniform(b_range[0],b_range[1],1)[0]
#Ws_val = np.random.uniform(Ws_range[0],Ws_range[1],1)[0]
#Ds_val = np.random.uniform(Ds_range[0],Ds_range[1],1)[0]
#c_val = np.random.uniform(c_range[0],c_range[1],1)[0]
#s2_val = np.random.uniform(s_range[0],s_range[1],1)[0]
#s3_val = np.random.uniform(s_range[0],s_range[1],1)[0]
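# The random draws above are commented out, so every iteration reuses the
# fixed values below (presumably a previously calibrated set); uncomment
# the draws to sample the parameter space randomly.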
b_val = 0.086734798
Ws_val = 0.626467812
Ds_val = 0.031062675
c_val = 3.225213007
s2_val = 1.311442407
s3_val = 1.272227087
# use random parameters in soil parameter file
format_soil_params(gridRas,soilRas,elvRas,precipRas,slopeRas,soilFile,
b_val, Ws_val, Ds_val, s2_val, s3_val)
# run the VIC model
os.system('../data/vicNl -g {0} 2> ../data/output/vic.log'.format(globalFile))
# grid flux
flux2nc(fluxPath,fluxNc, 5,2005,2009)
flux2nc(fluxPath,fluxNc, 6,2005,2009)
# rout the VIC model
rout_vic(uhFile,fracRas,roFile,bfFile,routOut,calStart,calEnd,daily='False')
# read simulated and observed data for time period
obsData = pd.ExcelFile(valFile)
obsSheet = obsData.parse(obsData.sheet_names[0])  # assumes the gauge observations are on the first sheet
obsdates = np.array(obsSheet.Date)
obstimes = pd.date_range('2005-02-01','2013-12-31',freq='D')
obsSeries = xr.DataArray(obsSheet.Obs,coords=[obstimes],dims=['time']).sel(time=slice('2005-03-01','2009-12-31')).data
simCsv = pd.read_csv(routOut)
simtimes = pd.date_range('2005-01-01','2009-12-31',freq='D')
simSeries = xr.DataArray(simCsv.Discharge,coords=[simtimes],dims=['time'])
simSeries = simSeries.sel(time=slice('2005-03-01','2009-12-31')).data
# calculate model performance statistics
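# r: Pearson correlation; nse: Nash-Sutcliffe efficiency (1 - SSE over the
# variance of the observations); bias: mean of simulated minus observed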
r = stats.pearsonr(obsSeries,simSeries)
nse = 1 - (sum((obsSeries-simSeries)**2)/sum((obsSeries-obsSeries.mean())**2))
bias = np.mean(simSeries-obsSeries)
rmse = np.sqrt(np.mean((obsSeries-simSeries)**2))
# append variables to lists
b_vals.append(b_val)
Ws_vals.append(Ws_val)
Ds_vals.append(Ds_val)
c_vals.append(c_val)
s2_vals.append(s2_val)
s3_vals.append(s3_val)
NSEs.append(nse)
Rs.append(r[0])
Biases.append(bias)
RMSEs.append(rmse)
deltat = datetime.datetime.now()-t1
print('Processing time for {0} iterations: {1}'.format(niter,deltat))
# create dictionary to be put in a dataframe
d = {'Infilt':b_vals,"Ws":Ws_vals,"Ds":Ds_vals,"C":c_vals,"S2Depth":s2_vals,"S3Depth":s3_vals,
"NSE":NSEs,'R':Rs,"Bias":Biases,'RMSE':RMSEs}
# convert dictionary to datafram
df = pd.DataFrame(data=d)
# save dataframe to csv
df.to_csv(outCal)
idx = np.argmax(df.NSE)
print('BEST PARAMETER SET:\nb: {0}\tWs: {1}\tDs: {2}\tc: {3}\tSd2: {4}\tSd3: {5}\n'.format(df.Infilt[idx],
df.Ws[idx],df.Ds[idx],df.C[idx],df.S2Depth[idx],df.S3Depth[idx]))
print('PARAMETER EVALUATION:\nR: {0}\tNSE: {1}\tBias: {2}\tRMSE: {3}'.format(df.R[idx],df.NSE[idx],
df.Bias[idx],df.RMSE[idx]))
return
def main():
n_args = len(sys.argv)
# Check user inputs
if n_args != 2:
print("Wrong user input")
print("Script used to perform a simple random calibration process for the VIC model")
print("usage: python calibrate_vic.py <number of iterations> ")
print("Exiting system...")
sys.exit()
else: # do the process
calibrate_vic(sys.argv[1])
return
# Execute the main level program if run as standalone
if __name__ == "__main__":
main()
|
mwilliamson/abuse | python/test/abuse/test_parse.py | Python | bsd-2-clause | 5,846 | 0.008724 | import funk
from funk import expects
from funk import allows
from funk.tools import assert_that
import funk.matchers as m
from abuse.generate import RuleSet
from abuse.generate import NonTerminal
from abuse.parse import parse
from abuse.parse import MissingArrow
from abuse.parse import MissingClosingBrace
from abuse.parse import NoProductionRule
from abuse.parse import RuleNeverUsed
def test_can_parse_source_with_only_whitespace():
parse("\n\n\n\n\n\r\t\t \n\n \r\n\n", None, [])
@funk.with_context
def test_can_read_rule_for_terminal_to_non_terminal(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "I hate you!")
parse("$SENTENCE -> I hate you!", rule_set, [])
@funk.with_context
def test_can_read_multiple_rules_separated_by_newlines(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "I hate you!")
expects(rule_set).add(NonTerminal("SENTENCE"), "You smell!")
parse("$SENTENCE -> I hate you!\n$SENTENCE -> You smell!", rule_set, [])
@funk.with_context
def test_whitespace_is_trimmed_from_right(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "I hate you!")
parse("$SENTENCE -> I hate you! \t\t", rule_set, [])
@funk.with_context
def test_final_terminal_is_only_trimmed_on_the_right(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("VERY"), NonTerminal("VERY"), " very")
parse("$VERY -> $VERY very", rule_set, [])
@funk.with_context
def test_empty_terminals_are_not_produced_by_rules(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), NonTerminal("INSULT"))
parse("$SENTENCE -> $INSULT", rule_set, [])
@funk.with_context
def test_ignores_blank_lines(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "I hate you!")
expects(rule_set).add(NonTerminal("SENTENCE"), "You smell!")
parse("\n \t\n\n$SENTENCE -> I hate you!\n \n\n$SENTENCE -> You smell!\n\n\n", rule_set, [])
@funk.with_context
def test_can_read_non_terminals_on_right(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "You're as ", NonTerminal("ADJ"), " as a ", NonTerminal("ANIMAL"))
parse("$SENTENCE -> You're as $ADJ as a $ANIMAL", rule_set, [])
@funk.with_context
def test_non_terminals_are_alphanumeric_and_underscores_only(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "You smell of ", NonTerminal("Smell2"), ".")
parse("$SENTENCE -> You smell of $Smell2.", rule_set, [])
@funk.with_context
def test_can_use_braces_to_indicate_non_terminals(context):
rule_set = context.mock(RuleSet)
expects(rule_set).add(NonTerminal("SENTENCE"), "You're ", NonTerminal("RUDE_ADJ"), "er than I thought")
parse("$SENTENCE -> You're ${RUDE_ADJ}er than I thought", rule_set, [])
@funk.with_context
def test_adds_error_with_line_number_if_arrow_is_missing(context):
rule_set = context.mock(RuleSet)
allows(rule_set).add
errors = []
parse("\n\n$SENTENCE - You're ${RUDE_ADJ}er than I thought\n" +
"$RUDE_ADJ ->\n" +
"$SENTENCE -> $RUDE_ADJ",
rule_set,
errors)
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="Missing symbol on line 3: ->", line_number=3),
m.is_a(MissingArrow)
)))
@funk.with_context
def test_adds_error_with_line_number_if_closing_brace_is_missing(context):
errors = []
parse("\n\n$SENTENCE -> You're ${RUDE_ADJer than I thought\n" +
"$SENTENCE ->\n",
RuleSet(),
errors)
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="Missing closing brace on line 3 (opening brace at character 22)",
line_number=3, opening_brace_character_number=22),
m.is_a(MissingClosingBrace)
)))
@funk.with_context
def test_adds_error_with_line_number_if_closing_brace_for_second_variable_is_missing(context):
errors = []
parse("\n\n$SENTENCE -> You're ${RUDE_ADJ}er than ${OBJ\n" +
"$SENTENCE ->\n\n",
RuleSet(),
errors)
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="Missing closing brace on line 3 (opening brace at character 41)",
line_number=3, opening_brace_character_number=41),
m.is_a(MissingClosingBrace)
)))
@funk.with_context
def test_adds_error_if_non_terminal_is_used_with_no_matching_production_rule(context):
errors = []
parse("\n\n$SENTENCE -> $INSULT\n\n", RuleSet(), errors);
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="No production rule for non-terminal $INSULT (line 3, character | 14)",
line_number=3, character_number=14, non_terminal="INSULT"),
m.is_a(NoProductionRule)
)))
@funk.with_context
def test_adds_error_if_sentence_has_no_production_rule(context):
errors = []
parse("", RuleSet(), errors);
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="No production rule for non-terminal $SENTENCE",
non_terminal="SENTENCE"),
m.is_a(NoProductionRule)
)))
@funk.with_context
def test_adds_error_if_production_rule_is_never_used(context):
errors = []
parse("$SENTENCE -> \n$RUDE_ADJ -> ugly", RuleSet(), errors);
assert_that(errors, m.contains_exactly(m.all_of(
m.has_attr(message="Production rule with start symbol $RUDE_ADJ is never used (line 2)",
line_number=2, start="RUDE_ADJ"),
m.is_a(RuleNeverUsed)
)))
|
comic/comic-django | app/grandchallenge/jqfileupload/migrations/0001_initial.py | Python | apache-2.0 | 1,357 | 0.000737 | # Generated by Django 1.11.11 on 2018-03-20 18:38
from django.db import migrations, models
import grandchallenge.jqfileupload.models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="StagedFile",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("csrf", models.CharField(max_length=128)),
("client_id", models.CharField(max_length=128, null=True)),
("client_filename", models.CharField(max_length=128)),
("file_id", models.UUIDField()),
("timeout", models.DateTimeField()),
(
"file",
models.FileField(
upload_to=grandchallenge.jqfileupload.models.generate_upload_filename
),
),
("start_byte", models.BigIntegerField()),
("end_byte", models.BigIntegerField()),
("total_size", models.BigIntegerField(null=True)),
],
)
]
|
choltha/mailinabox | management/mailconfig.py | Python | cc0-1.0 | 21,317 | 0.028193 | #!/usr/bin/python3
import subprocess, shutil, os, sqlite3, re
import utils
from email_validator import validate_email as validate_email_, EmailNotValidError
import idna
def validate_email(email, mode=None):
# Checks that an email address is syntactically valid. Returns True/False.
# Until Postfix supports SMTPUTF8, an email address may contain ASCII
# characters only; IDNs must be IDNA-encoded.
#
# When mode=="user", we're checking that this can be a user account name.
# Dovecot has tighter restrictions - letters, numbers, underscore, and
# dash only!
#
# When mode=="alias", we're allowing anything that can be in a Postfix
# alias table, i.e. omitting the local part ("@domain.tld") is OK.
# Check the syntax of the address.
try:
validate_email_(email,
allow_smtputf8=False,
check_deliverability=False,
allow_empty_local=(mode=="alias")
)
except EmailNotValidError:
return False
if mode == 'user':
# There are a lot of characters permitted in email addresses, but
# Dovecot's sqlite auth driver seems to get confused if there are any
# unusual characters in the address. Bah. Also note that since
# the mailbox path name is based on the email address, the address
# shouldn't be absurdly long and must not have a forward slash.
# Our database is case sensitive (oops), which affects mail delivery
# (Postfix always queries in lowercase?), so also only permit lowercase
# letters.
if len(email) > 255: return False
if re.search(r'[^\@\.a-z0-9_\-]+', email):
return False
# Everything looks good.
return True
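# For illustration (hypothetical addresses): mode="user" adds the stricter
# Dovecot-friendly checks, so lowercase passes while uppercase fails:
#   validate_email("admin@example.com", mode="user")  # -> True
#   validate_email("Admin@example.com", mode="user")  # -> False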
def sanitize_idn_email_address(email):
# The user may enter Unicode in an email address. Convert the domain part
# to IDNA before going into our database. Leave the local part alone ---
# although validate_email will reject non-ASCII characters.
#
# The domain name system only exists in ASCII, so it doesn't make sense
# to store domain names in Unicode. We want to store what is meaningful
# to the underlying protocols.
try:
localpart, domainpart = email.split("@")
domainpart = idna.encode(domainpart).decode('ascii')
return localpart + "@" + domainpart
except (ValueError, idna.IDNAError):
# ValueError: String does not have a single @-sign, so it is not
# a valid email address. IDNAError: Domain part is not IDNA-valid.
# Validation is not this function's job, so return value unchanged.
# If there are non-ASCII characters it will be filtered out by
# validate_email.
return email
def prettify_idn_email_address(email):
# This is the opposite of sanitize_idn_email_address. We store domain
# names in IDNA in the database, but we want to show Unicode to the user.
try:
localpart, domainpart = email.split("@")
domainpart = idna.decode(domainpart.encode("ascii"))
return localpart + "@" + domainpart
except (ValueError, UnicodeError, idna.IDNAError):
# Failed to decode IDNA, or the email address does not have a
# single @-sign. Should never happen.
return email
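# For illustration, a round trip on a hypothetical IDN address:
#   sanitize_idn_email_address(u"user@b\xfccher.example")
#       -> "user@xn--bcher-kva.example"
#   prettify_idn_email_address("user@xn--bcher-kva.example")
#       -> u"user@b\xfccher.example"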
def is_dcv_address(email):
email = email.lower()
for localpart in ("admin", "administrator", "postmaster", "hostmaster", "webmaster", "abuse"):
if email.startswith(localpart+"@") or email.startswith(localpart+"+"):
return True
return False
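# For example, both is_dcv_address("postmaster@example.com") and
# is_dcv_address("admin+tag@example.com") return True (hypothetical addresses).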
def open_database(env, with_connection=False):
conn = sqlite3.connect(env["STORAGE_ROOT"] + "/mail/users.sqlite")
if not with_connection:
return conn.cursor()
else:
return conn, conn.cursor()
def get_mail_users(env):
# Returns a flat, sorted list of all user accounts.
c = open_database(env)
c.execute('SELECT email FROM users')
users = [ row[0] for row in c.fetchall() ]
return utils.sort_email_addresses(users, env)
def get_mail_users_ex(env, with_archived=False, with_slow_info=False):
# Returns a complex data structure of all user accounts, optionally
# including archived (status="inactive") accounts.
#
# [
# {
# domain: "domain.tld",
# users: [
# {
# email: "name@domain.tld",
# privileges: [ "priv1", "priv2", ... ],
# status: "active" | "inactive",
# },
# ...
# ]
# },
# ...
# ]
# Get users and their privileges.
users = []
active_accounts = set()
c = open_database(env)
c.execute('SELECT email, privileges FROM users')
for email, privileges in c.fetchall():
active_accounts.add(email)
user = {
"email": email,
"privileges": parse_privs(privileges),
"status": "active",
}
users.append(user)
if with_slow_info:
user["mailbox_size"] = utils.du(os.path.join(env['STORAGE_ROOT'], 'mail/mailboxes', *reversed(email.split("@"))))
# Add in archived accounts.
if with_archived:
root = os.path.join(env['STORAGE_ROOT'], 'mail/mailboxes')
for domain in os.listdir(root):
for user in os.listdir(os.path.join(root, domain)):
email = user + "@" + domain
mbox = os.path.join(root, domain, user)
if email in active_accounts: continue
user = {
"email": email,
"privileges": "",
"status": "inactive",
"mailbox": mbox,
}
users.append(user)
if with_slow_info:
user["mailbox_size"] = utils.du(mbox)
# Group by domain.
domains = { }
for user in users:
domain = get_domain(user["email"])
if domain not in domains:
domains[domain] = {
"domain": domain,
"users": []
}
domains[domain]["users"].append(user)
# Sort domains.
domains = [domains[domain] for domain in utils.sort_domains(domains.keys(), env)]
# Sort users within each domain first by status then lexicographically by email address.
for domain in domains:
domain["users"].sort(key = lambda user : (user["status"] != "active", user["email"]))
return domains
def get_admins(env):
# Returns a set of users with admin privileges.
users = set()
for domain in get_mail_users_ex(env):
for user in domain["users"]:
if "admin" in user["privileges"]:
users.add(user["email"])
return users
def get_mail_aliases(env):
# Returns a sorted list of tuples of (address, forward-tos, permitted-senders).
c = open_database(env)
c.execute('SELECT source, destination, permitted_senders FROM aliases')
aliases = { row[0]: row for row in c.fetchall() } # make dict
# put in a canonical order: sort by domain, then by email address lexicographically
aliases = [ aliases[address] for address in utils.sort_email_addresses(aliases.keys(), env) ]
return aliases
def get_mail_aliases_ex(env):
# Returns a complex data structure of all mail aliases, similar
# to get_mail_users_ex.
#
# [
# {
# domain: "domain.tld",
# alias: [
# {
# address: "name@domain.tld", # IDNA-encoded
# address_display: "name@domain.tld", # full Unicode
# forwards_to: ["user1@domain.com", "receiver-only1@domain.com", ...],
# permitted_senders: ["user1@domain.com", "sender-only1@domain.com", ...] OR null,
# required: True|False
# },
# ...
# ]
# },
# ...
# ]
required_aliases = get_required_aliases(env)
domains = {}
for address, forwards_to, permitted_senders in get_mail_aliases(env):
# get alias info
domain = get_domain(address)
required = (address in required_aliases)
# add to list
if not domain in domains:
domains[domain] = {
"domain": domain,
"aliases": [],
}
domains[domain]["aliases"].append({
"address": address,
"address_display": prettify_idn_email_address(address),
"forwards_to": [prettify_idn_email_address(r.strip()) for r in forwards_to.split(",")],
"permitted_senders": [prettify_idn_email_address(s.strip()) for s in permitted_senders.split(",")] if permitted_senders is not None else None,
"required": required,
})
# Sort domains.
domains = [domains[domain] for domain in utils.sort_domains(domains.keys(), env)]
# Sort aliases within each domain first by required-ness then lexicographically by address.
for domain in domains:
domain["aliases"].sort(key = lambda alias : (alias["required"], alias["address"]))
return domains
def get_domain(emailaddr, as_unicode=True):
# Gets the domain part of an email address. Turns IDNA
# back to Unicode for display.
ret = emailaddr.split('@', 1)[1]
if as_unicode:
try:
ret = i |
Fuchai/Philosophy-Machine | vae_mining/binary_mining.py | Python | apache-2.0 | 1,418 | 0.022567 | # binary mining.
# say we have a pair of binary encoded predicates, and we have have a sample of the truth values of them
# we want to learn if some logic exist between the two predicates. A->B or B->A. Nothing more.
# given bayesian null prior, establish a confidence method to hypothesize and learn.
# Just a sanity check and decision tree revision. Nothing special here. I should do this under 1 hour.
from sklearn import tree
import numpy as np
if False:
x=[(1,1)]*100
x=x+[(0,1)]*100
x=x+[(0,0)]*100
x=np.array(x)
np.random.shuffle(x)
k=list(zip(*x))  # wrap in list() so indexing below also works on Python 3
a=k[0]
b=k[1]
a=[[x] for x in a]
clf=tree.DecisionTreeClassifier()
clf=clf.fit(a,b)
print(clf.predict_proba([0]))
print(clf.predict_proba([1]))
# Done. This example just illustrates that a decision tree can learn binary predicates pretty easily.
# Most of the time, extracting features to produce binary labels will be much harder than anything else.
# We will use VAE to achieve that.
# Second test. I want to spot the permutation with a decision tree.
#
# Try to use softmax to encode.
# Actually a good idea.
x=range(10)
print(np.random.permutation(x))
k=list(zip(x,np.random.permutation(x)))
print(k)
k=np.array(k)
m=k[np.random.choice(k.shape[0],200)]
a=m[:,1]
a=[[i] for i in a]
b=m[:,0]
clf = tree.DecisionTreeClassifier()
clf = clf.fit(a, b)
print(clf.predict_proba([0]))
print(clf.predict_proba([1])) |
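# A count-based sketch of the "confidence" idea from the header comments,
# using the same toy data as the first experiment: estimate P(b=1 | a=1)
# directly and read a value near 1 as evidence for the implication a -> b.
samples = np.array([(1, 1)] * 100 + [(0, 1)] * 100 + [(0, 0)] * 100)
a_col, b_col = samples[:, 0], samples[:, 1]
confidence = (a_col & b_col).sum() / float(a_col.sum())
print(confidence)  # 1.0 here: b holds whenever a does in this sample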
mkuron/espresso | testsuite/scripts/tutorials/test_04-lattice_boltzmann_part2.py | Python | gpl-3.0 | 1,053 | 0 | # Copyright (C) 2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import importlib_wrapper
tutorial, skipIfMissingFeatures = importlib_wrapper.configure_and_import(
"@TUTORIALS_DIR@/04-lattice_boltzmann/04-lattice_boltzmann_part2.py",
gpu=True, loops=400)
@skipIfMissingFeatures
class Tutorial(ut.TestCase):
system = tutorial.system
if __name__ == "__main__":
ut.main()
|
oculusstorystudio/kraken | Python/kraken/core/objects/operators/operator.py | Python | bsd-3-clause | 12,620 | 0.001189 | """Kraken - objects.operators.operator module.
Classes:
Operator - Base operator object.
"""
import re
from kraken.core.configs.config import Config
from kraken.core.objects.scene_item import SceneItem
class Operator(SceneItem):
"""Operator representation."""
def __init__(self, name, parent=None, metaData=None):
super(Operator, self).__init__(name, parent, metaData=metaData)
self.inputs = {}
self.outputs = {}
self._flags = {}
# =============
# Name Methods
# =============
def getBuildName(self):
"""Returns the build name for the object.
Returns:
str: Name to be used in the DCC.
"""
typeNameHierarchy = self.getTypeHierarchyNames()
config = Config.getInstance()
# If flag is set on object to use explicit name, return it.
if config.getExplicitNaming() is True or \
self.testFlag('EXPLICIT_NAME'):
return self.getName()
nameTemplate = config.getNameTemplate()
# Get the token list for this type of object
format = None
for typeName in nameTemplate['formats'].keys():
if typeName in typeNameHierarchy:
format = nameTemplate['formats'][typeName]
break
if format is None:
format = nameTemplate['formats']['default']
objectType = None
for eachType in typeNameHierarchy:
if eachType in nameTemplate['types'].keys():
objectType = eachType
break
altType = self.getMetaDataItem("altType")
if altType is not None and nameTemplate['types'].get(altType, None) is not None:
objectType = altType
if objectType is None:
objectType = 'default'
# Generate a name by concatenating the resolved tokens together.
builtName = ""
skipSep = False
for token in format:
if token is 'sep':
if not skipSep:
builtName += nameTemplate['separator']
elif token is 'location':
parent = self.getParent()
if parent is None:
raise ValueError("operator [%s] does not have a parent." % self.getName())
location = parent.getLocation()
if location not in nameTemplate['locations']:
raise ValueError("Invalid location on: " + self.getPath())
altLocation = self.getMetaDataItem("altLocation")
if altLocation is not None and altLocation in nameTemplate['locations']:
location = altLocation
builtName += location
elif token is 'type':
builtName += nameTemplate['types'][objectType]
elif token is 'name':
builtName += self.getName()
elif token is 'component':
if self.getParent() is None:
skipSep = True
continue
builtName += self.getParent().getName()
elif token is 'container':
if self.getContainer() is None:
skipSep = True
continue
builtName += self.getContainer().getName()
elif token is 'solverName':
if self.isTypeOf("KLOperator"):
builtName += self.solverTypeName
else:
builtName += self.canvasPresetPath.rpartition('.')[-1]
elif token is 'solverSource':
if self.isTypeOf("KLOperator"):
builtName += self.extension
else:
builtName += re.sub("[\W\d]", "", self.canvasPresetPath.rpartition('.')[0])
else:
raise ValueError("Unresolvabled token '" + token +
"' used on: " + self.getPath())
return builtName
# =============
# Flag Methods
# =============
def setFlag(self, name):
"""Sets the flag of the specified name.
Returns:
bool: True if successful.
"""
self._flags[name] = True
return True
def testFlag(self, name):
"""Tests if the specified flag is set.
Args:
name (str): Name of the flag to test.
Returns:
bool: True if flag is set.
"""
return name in self._flags
def clearFlag(self, name):
"""Clears the flag of the specified name.
Args:
name (str): Name of the flag to clear.
Returns:
bool: True if successful.
"""
if name in self._flags:
del self._flags[name]
return True
return False
def getFlags(self):
"""Returns all flags set on this object.
Returns:
list: Flags set on this object.
"""
return self._flags.keys()
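# For illustration, with a hypothetical operator instance `op`:
#   op.setFlag('EXPLICIT_NAME')    # getBuildName() now returns getName() as-is
#   op.testFlag('EXPLICIT_NAME')   # -> True
#   op.clearFlag('EXPLICIT_NAME')  # -> True (flag removed)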
# ===============
# Source Methods
# ===============
def getSources(self):
"""Returns the sources of the object.
Returns:
list: All sources of this object.
"""
sources = []
for name in self.getInputNames():
inputTargets = self.getInput(name)
if not isinstance(inputTargets, list):
inputTargets = [inputTargets]
for inputTarget in inputTargets:
if not isinstance(inputTarget, SceneItem):
continue
sources.append(inputTarget)
return super(Operator, self).getSources() + sources
# ==============
# Input Methods
# ==============
def resizeInput(self, name, count):
"""Resizes and array output to a given size.
Args:
name (str): Name of the output.
count (Object): Output object.
Returns:
bool: True if successful.
"""
raise DeprecationWarning("Method 'resizeInput' has been deprecated!")
# if name not in self.inputs:
# raise Exception("Input with name '" + name +
# "' was not found in operator: " + self.getName() +
# ".")
# if isinstance(self.inputs[name], list):
# while len(self.inputs[name]) < count:
# self.inputs[name].append(None)
# else:
# raise Exception("Input is not an array input: " + name + ".")
# return True
def setInput(self, name, operatorInput, index=0):
"""Sets the input by the given name.
Args:
name (str): Name of the input.
operatorInput (Object): Input object.
Returns:
bool: True if successful.
"""
if name not in self.inputs:
raise Exception("Input with name '" + name +
"' was not found in operator: " + self.getName() +
".\nValid inputs are:\n" +
"\n".join(self.inputs.keys()))
if self.inputs[name] is None and self.getInputType(name).endswith('[]'):
self.inputs[name] = []
if isinstance(self.inputs[name], list):
# Set the entire output array
if isinstance(operatorInput, list):
self.inputs[name] = operatorInput
else:
if index >= len(self.inputs[name]):
raise Exception(
"Out of range index for array output index: " +
str(index) + " size: " + str(len(self.inputs[name])) +
".")
self.inputs[name][index] = operatorInput
else:
self.inputs[name] = operatorInput
return True
def getInput(self, name):
"""Returns the input with the specified name.
Args:
name (str): Name of the input to get.
Returns:
object: Input object.
"""
if name not in self.inputs:
raise Exception("Input with name '" + name +
|
SteveMcGrath/Concord | registration/app.py | Python | gpl-2.0 | 1,312 | 0.000762 | from flask import Flask, render_template
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.script import Manager
from flask.ext.bootstrap import Bootstrap
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
Bootstrap(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
class Ticket(db.Model):
__tablename__ = 'tickets'
id = db.Column(db.Integer, primary_key=True)
ticket_type = db.Column(db.Text)
ticket_hash = db.Column | (db.Text)
redeemed = db.Column(db.Boolean, default=False)
email = db.Column(db.Text)
name = db.Column(db.Text)
classes = db.Column(db.PickleType)
@app.route('/<tickethash>')
def checkin(tickethash):
ticket = Ticket.query.filter_by(ticket_hash=tickethash).first()
if ticket is None:
message, code = ['No Ticket Found!', 'danger']
elif ticket.redeemed:
message, code = ['Already Checked In!', 'warning']
else:
ticket.redeemed = T | rue
db.session.merge(ticket)
db.session.commit()
message, code = ['Successfully Checked in!', 'success']
return render_template('page.html', message=message, code=code, ticket=ticket)
|
sven-hm/pythonocc-core | examples/core_topology_edge.py | Python | lgpl-3.0 | 3,032 | 0.004947 | ##Copyright 2009-2015 Thomas Paviot (tpaviot@gmail.com)
##
##This file is part of pythonOCC.
##
##pythonOCC is free software: you can redistribute it and/or modify
##it under the terms of the GNU Lesser General Public License as published by
##the Free Software Foundation, either version 3 of the License, or
##(at your option) any later version.
##
##pythonOCC is distributed in the hope that it will be useful,
##but WITHOUT ANY WARRANTY; without even the implied warranty of
##MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##GNU Lesser General Public License for more details.
##
##You should have received a copy of the GNU Lesser General Public License
##along with pythonOCC. If not, see <http://www.gnu.org/licenses/>.
import math
from OCC.gp import gp_Pnt, gp_Lin, gp_Ax1, gp_Dir, gp_Elips, gp_Ax2
from OCC.BRepBuilderAPI import (BRepBuilderAPI_MakeEdge,
BRepBuilderAPI_MakeVertex)
from OCC.TColgp import TColgp_Array1OfPnt
from OCC.Geom import Geom_BezierCurve
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
def edge(event=None):
# The blue edge
BlueEdge = BRepBuilderAPI_MakeEdge(gp_Pnt(-80, -50, -20),
gp_Pnt(-30, -60, -60))
V1 = BRepBuilderAPI_MakeVertex(gp_Pnt(-20, 10, -30))
V2 = BRepBuilderAPI_MakeVertex(gp_Pnt(10, 7, -25))
YellowEdge = BRepBuilderAPI_MakeEdge(V1.Vertex(), V2.Vertex())
#The white edge
line = gp_Lin(gp_Ax1(gp_Pnt(10, 10, 10), gp_Dir(1, 0, 0)))
WhiteEdge = BRepBuilderAPI_MakeEdge(line, -20, 10)
#The red edge
Elips = gp_Elips(gp_Ax2(gp_Pnt(10, 0, 0), gp_Dir(1, 1, 1)), 60, 30)
RedEdge = BRepBuilderAPI_MakeEdge(Elips, 0, math.pi/2)
# The green edge and the both extreme vertex
P1 = gp_Pnt(-15, 200, 10)
P2 = gp_Pnt(5, 204, 0)
P3 = gp_Pnt(15, 200, 0)
P4 = gp_Pnt(-15, 20, 15)
P5 = gp_Pnt(-5, 20, 0)
P6 = gp_Pnt(15, 20, 0)
P7 = gp_Pnt(24, 120, 0)
P8 = gp_Pnt(-24, 120, 12.5)
array = TColgp_Array1OfPnt(1, 8)
array.SetValue(1, P1)
array.SetValue(2, P2)
array.SetValue(3, P3)
array.SetValue(4, P4)
array.SetValue(5, P5)
array.SetValue(6, P6)
array.SetValue(7, P7)
array.SetValue(8, P8)
curve = Geom_BezierCurve(array)
ME = BRepBuilderAPI_MakeEdge(curve.GetHandle())
GreenEdge = ME
V3 = ME.Vertex1()
V4 = ME.Vertex2()
display.DisplayColoredShape(BlueEdge.Edge(), 'BLUE')
display.DisplayShape(V1.Vertex())
display.DisplayShape(V2.Vertex())
display.DisplayColoredShape(WhiteEdge.Edge(), 'WHITE')
display.DisplayColoredShape(YellowEdge.Edge(), 'YELLOW')
display.DisplayColoredShape(RedEdge.Edge(), 'RED')
display.DisplayColoredShape(GreenEdge.Edge(), 'GREEN')
display.DisplayShape(V3)
display.DisplayShape(V4, update=True)
if __name__ == '__main__':
edge()
start_display()
|
MSFTOSSMgmt/WPSDSCLinux | Providers/Scripts/2.4x-2.5x/Scripts/nxEnvironment.py | Python | mit | 9,103 | 0.002087 | #!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
import os
import sys
import imp
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
global show_mof
show_mof = False
# [Key] string Name;
# [write] string Value;
# [Write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [Write] boolean Path;
def init_vars(Name, Value, Ensure, Path):
if Name is not None:
Name = Name.encode('ascii', 'ignore')
else:
Name = ''
if Value is not None:
Value = Value.encode('ascii', 'ignore')
else:
Value = ''
if Ensure is not None and Ensure != '':
Ensure = Ensure.encode('ascii', 'ignore')
else:
Ensure = 'present'
if Path is None:
Path = False
Path = ( Path == True )
return Name, Value, Ensure.lower(), Path
def Set_Marshall(Name, Value, Ensure, Path):
(Name, Value, Ensure, Path) = init_vars(Name, Value, Ensure, Path)
retval = Set(Name, Value, Ensure, Path)
return retval
def Test_Marshall(Name, Value, Ensure, Path):
(Name, Value, Ensure, Path) = init_vars(Name, Value, Ensure, Path)
retval = Test(Name, Value, Ensure, Path)
return retval
def Get_Marshall(Name, Value, Ensure, Path):
arg_names = list(locals().keys())
(Name, Value, Ensure, Path) = init_vars(Name, Value, Ensure, Path)
retval = 0
retval, Name, Value, Ensure, Path = Get(Name, Value, Ensure, Path)
Name = protocol.MI_String(Name)
Value = protocol.MI_String(Value)
Ensure = protocol.MI_String(Ensure)
Path = protocol.MI_Boolean(Path)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, Name, Value, Ensure, Path):
if not show_mof:
return
mof = ''
mof += op + ' nxEnvironment MyEnv \n'
mof += '{\n'
mof += ' Name = "' + Name + '"\n'
mof += ' Value = "' + Value + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' Path = "' + str(Path) + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
Print(mof, file=f)
f.close()
class Params:
def __init__(self, Name, Value, Ensure, Path):
if not ("present" in Ensure or "absent" in Ensure):
Print(
'ERROR: Param Ensure must be "Present" or "Absent".', file=sys.stderr)
LG().Log(
'ERROR', 'ERROR: Param Ensure must be "Present" or "Absent".')
raise Exception('BadParameter')
self.Ensure = Ensure
if Path is not True and Path is not False:
Print('ERROR: Param Path must be True or False.', file=sys.stderr)
LG().Log('ERROR', 'ERROR: Param Path must be True or False.')
raise Exception('BadParameter')
self.Path = Path
if len(Name) < 1 and True is not Path:
Print(
'ERROR: Param Name must be set if Path <> True.', file=sys.stderr)
LG().Log('ERROR', 'ERROR: Param Name must be set if Path <> True.')
raise Exception('BadParameter')
self.Name = Name
self.Value = Value
self.file_path = '/etc/environment'
if True is self.Path:
self.file_path = '/etc/profile.d/DSCEnvironment.sh'
self.Name = 'PATH=$PATH:"'
# check the configuration and create DSCEnvironment if needed.
if not os.path.isfile('/etc/environment'):
# create the /etc/environment file and source it from
# DSCEnvironment.sh
os.system('echo > /etc/environment')
if not os.path.exists('/etc/profile.d'):
os.system('mkdir /etc/profile.d/')
if not os.path.isfile('/etc/profile.d/DSCEnvironment.sh'):
os.system(
'echo ". /etc/environment" > /etc/profile.d/DSCEnvironment.sh')
else: # file exists - add the sourceline if not already there
found = False
n = ''
F, error = opened_w_error('/etc/profile.d/DSCEnvironment.sh', 'r+')
if error:
raise Exception('BadParameter')
for l in F.readlines():
if l.startswith('. /etc/environment'):
found = True
n += l
if not found:
F.seek(0, 0)
F.write('. /etc/environment\n' + n)
F.close()
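# Net effect of the setup above: /etc/environment holds the variables, and
# /etc/profile.d/DSCEnvironment.sh starts with ". /etc/environment" so that
# login shells source them.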
def Set(Name, Value, Ensure, Path):
retval = -1
try:
p = Params(Name, Value, Ensure, Path)
except Exception, e:
Print(
'ERROR - Unable to initialize nxEnvironmentProvider. ' +
str(e), file=sys.stderr)
LG().Log(
'ERROR', 'ERROR - Unable to initialize nxEnvironmentProvider. ' + str(e))
return [retval]
ShowMof('SET', Name, Value, Ensure, Path)
if AddOrDelVar(p) is None:
retval = 0
return [retval]
def Test(Name, Value, Ensure, Path):
retval = -1
try:
p = Params(Name, Value, Ensure, Path)
except Exception, e:
Print(
'ERROR - Unable to initialize nxEnvironmentProvider. ' +
str(e), file=sys.stderr)
LG().Log(
'ERROR', 'ERROR - Unable to initialize nxEnvironmentProvider. ' + str(e))
return [retval]
ShowMof('TEST', Name, Value, Ensure, Path)
found, error = FindVar(p)
if found and p.Ensure == 'present':
retval = 0
if not found and p.Ensure == 'absent':
retval = 0
return [retval]
def Get(Name, Value, Ensure, Path):
retval = -1
try:
p = Params(Name, Value, Ensure, Path)
except Exception, e:
Print(
'ERROR - Unable to initialize nxEnvironmentProvider. ' +
str(e), file=sys.stderr)
LG().Log(
'ERROR', 'ERROR - Unable to initialize nxEnvironmentProvider. ' + str(e))
return [retval, Name, Value, Ensure, Path]
ShowMof('GET', Name, Value, Ensure, Path)
found, error = FindVar(p)
if found and p.Ensure == 'present':
retval = 0
if not found and p.Ensure == 'absent':
retval = 0
return [retval, Name, Value, Ensure, Path]
def opened_w_error(filename, mode="a"):
"""
    Open the file, returning a (file_object, error) pair instead of raising IOError.
"""
try:
f = open(filename, mode=mode)
except IOError, err:
return None, err
return f, None
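# (added comment) opened_w_error deliberately returns a (file, error) pair so
# callers can branch on the error instead of wrapping every open() in
# try/except, e.g.:
#   F, error = opened_w_error('/etc/environment', 'r')
#   if error: ...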
def Print(s, file=sys.stdout):
file.write(s + '\n')
def AddOrDelVar(p):
# preserve the ownership of this file
found = False
error = None
st = None
n = ''
if os.path.isfile(p.file_path):
st = os.stat(p.file_path)
F, error = opened_w_error(p.file_path, 'r')
if error:
Print("Exception opening file " + p.file_path + ' Error: ' + str(error), file=sys.stderr)
LG().Log('ERROR', "Exception opening file " + p.file_path + ' Error: ' + str(error))
return found, error
for l in F.readlines():
if p.Path is True:
if l.startswith('PATH=$PATH:"' + p.Value):
                    # it is already there - keep it if 'present' was
                    # requested, otherwise skip it
if p.Ensure == 'present':
found = True
n += l
else:
found = True
else: # not a match
n += l
else:
if l.startswith(p.Name + '='):
found = True
if p.Ensure == 'presen | t':
# set the variable to the new values
l = p.Name + '=' + p.Value + '\n'
n += l
el | se:
n += l
# not found - present requested so add it.
if not found and p.Ensure == 'present':
if p.Path is True: |
phantom-root/tasks | hashes/300_homemade_hash/other/botanlq/calc_script.py | Python | mit | 814 | 0.003686 | R = [x for x in range(97, 123) if chr(x) not in 'tfpeqwzgnib']
b = 0x56, 0x2B, 0x0F, 0xC5
number = 0
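# (added comment, inferred from the code) R holds the candidate character
# codes - lowercase ASCII letters minus those in 'tfpeqwzgnib' - and b the
# four starting bytes; the nested loops brute-force the seven characters
# x0..x6 against the 0xDD/0xFE/0x9A constraints below.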
for x0 in R:
for x1 in R:
for x3 in R:
_00 = b[0] ^ x0
_11 = b[1] | x1
Y0 = _00 ^ _11
Y1 = b[3] ^ _11
Y3 = b[2 | ] & (_00 & x3)
for x4 in R:
for x5 in R:
t_00 = Y0 ^ x4
t_11 = Y1 | x5
if (t_00 ^ t_11) == 0xDD and (Y3 ^ t_11) == 0xFE:
x6 = 0x9A ^ t_11
if x6 in R:
for x2 in R:
s = ''.join(map(chr, [x0, x1, x2, x3, x4, x5, x6]))
| number += 1
#print(s)
print(number)
|
subutai/nupic.research | src/nupic/research/frameworks/pytorch/regularization.py | Python | agpl-3.0 | 2,505 | 0.003593 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import torch
@torch.no_grad()
def l1_regularization_step(params, lr, weight_decay=1e-3):
"""
    Performs an L1 regularization gradient step in place. This implementation is based
    on how `torch.optim.SGD` performs L2 regularization updates:
https://pytorch.org/docs/stable/_modules/torch/optim/sgd.html#SGD.step
Example usage:
# Assume `model` is an instance of a torch.nn.Module subclass, and `optimizer`
# is used to perform SGD updates on the parameters of `model`
# The following lines perform gradient updates on a specified loss with an L1
# penalty term
# Note that the L1 updates are performed separately | from the updates on the
# regular objective function
loss.backward()
optimizer.step()
l1_regularization_step(params=model.parameters(), lr=0.1, weight_decay=1e-3)
:param params: a | list of parameters on which the L1 regularization update will be
performed, conditioned on whether attribute `requires_grad` is True
:param lr: the learning rate used during optimization, analogous to the `lr`
parameter in `torch.optim.SGD`
:param weight_decay: the L1 penalty coefficient, analogous to the `weight_decay`
parameter (used as the L2 penalty coefficient) in
`torch.optim.SGD`
"""
for p in params:
if p.requires_grad:
grad = torch.sign(p.data)
p.add_(grad, alpha=-lr * weight_decay)
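# Worked form of the in-place update above: for every parameter p,
#   p <- p - lr * weight_decay * sign(p)
# i.e. the (sub)gradient step for an L1 penalty of weight_decay * ||p||_1.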
|
CalvinHsu1223/LinuxCNC-EtherCAT-HAL-Driver | configs/sim/gladevcp/hitcounter.py | Python | gpl-2.0 | 28 | 0.035714 | ../../gladev | cp/hitcount | er.py |
idaholab/raven | tests/framework/PostProcessors/TSACharacterizer/TrainingData/generators.py | Python | apache-2.0 | 3,313 | 0.009055 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.or | g/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Generates signals using predefined algorithms
"""
import numpy as np
def fourier(amps, periods, phases, pivot, mean= | 0):
"""
Generates a signal using Fourier properties.
@ In, amps, np.array, amplitudes of waves
@ In, periods, np.array, periods to use
@ In, phases, np.array, phase offsets to use
@ In, pivot, np.array, time-like parameter
@ In, mean, float, offset value
@ Out, signal, np.array, generated signal
"""
signal = np.zeros(len(pivot)) + mean
for k, period in enumerate(periods):
signal += amps[k] * np.sin(2 * np.pi / period * pivot + phases[k])
return signal
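# Example (illustrative, not part of the original file):
#   t = np.linspace(0, 20, 201)
#   s = fourier(amps=[1.0], periods=[10.0], phases=[0.0], pivot=t, mean=2.0)
# produces 2.0 + sin(2*pi*t/10).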
def arma(slags, nlags, pivot, noise=None, intercept=0, plot=False):
"""
Generates a signal using ARMA properties.
@ In, slags, list, signal lag coefficients (aka AR coeffs, phi)
@ In, nlags, list, noise lag coefficients (aka MA coeffs, theta)
@ In, pivot, np.array, time-like array
@ In, noise, np.array, optional, instead of sampling random noise will use this if provided
@ In, intercept, float, optional, nominal level of signal
@ In, plot, bool, optional, if True then produce a plot of generated signal
@ Out, signal, np.array, generated signal
@ Out, noise, np.array, noise signal used in generation (provided or sampled)
"""
if plot:
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
signal = np.zeros(len(pivot)) + intercept
if noise is None:
noise = np.random.normal(loc=0, scale=1, size=len(pivot))
signal += noise
# moving average: random noise lag
for q, theta in enumerate(nlags):
signal[q+1:] += theta * noise[:-(q+1)]
# autoregressive: signal lag
for t, time in enumerate(pivot):
for p, phi in enumerate(slags):
if t > p:
signal[t] += phi * signal[t - p - 1]
if plot:
ax.plot(pivot, noise, 'k:')
ax.plot(pivot, signal, 'g.-')
plt.show()
return signal, noise
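# Example (illustrative): an ARMA(1,1) signal with phi=0.5 and theta=0.3,
# sampling its own noise:
#   s, n = arma(slags=[0.5], nlags=[0.3], pivot=np.arange(100))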
def toFile(signals, baseName, targets=None, pivotName=None):
"""
writes signals to RAVEN CSV files
    @ In, signals, list(np.ndarray), signals each shaped (time, targets+1) with pivot as first column
@ In, baseName, str, base filename
@ In, targets, list(str), optional, target names
@ In, pivotName, str, optional, pivot parameter (time-like) name
@ Ou, None
"""
if targets is None:
targets = [f'signal{i}' for i in range(signals[0].shape[1] - 1)]
if pivotName is None:
pivotName = 'pivot'
with open(f'{baseName}.csv', 'w') as f:
f.writelines('scaling,filename\n')
for s, signal in enumerate(signals):
subname = f'{baseName}_{s}.csv'
np.savetxt(subname, signal, delimiter=',', header=','.join([pivotName] + targets), comments='')
f.writelines(f'1,{subname}\n')
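# Example (illustrative): write one generated signal, pivot first, to
# 'train_0.csv' plus a 'train.csv' index:
#   toFile([np.column_stack([t, s])], 'train', targets=['signal0'], pivotName='time')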
|
zamattiac/SHARE | providers/edu/opensiuc/migrations/0001_initial.py | Python | apache-2.0 | 661 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 15:45
from __future__ import unicode_literals
from | django.db import migrations
import share.robot
class Migration(migrations.Migration):
dependencies = [
('share', '0001_initial'),
('djcelery', '0001_initial'),
]
operations = [
migrations.RunPython(
code=share.robot.RobotUserMigration('edu.opensiuc'),
),
migrations.RunPython(
code=share.robot.RobotOauthTokenMigration('edu.opensiuc'),
),
migrations.RunPython(
code=share.robot.RobotScheduleMigratio | n('edu.opensiuc'),
),
]
|
wb253/goapp | app/pingback.py | Python | gpl-3.0 | 5,006 | 0.005793 | # vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2003, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
A simple library that implements a pingback client. The library supports
version 1.0 of the pingback protocol, based upon the specification published
at http://www.hixie.ch/specs/pingback/pingback.
Implementing a pingback server is beyond the scope of this library simply
because of the very application-specific nature of a server. However, it is
also trivially easy to create a pingback server by using Python's
SimpleXMLRPCServer module. The following simple framework could be used
by a CGI script to implement a pingback server::
def pingback(sourceURI, targetURI):
'''Do something interesting!'''
return "arbitrary string return value."
import SimpleXMLRPCServer
handler = SimpleXMLRPCServer.CGIXMLRPCRequestHandler()
handler.register_function(pingback, "pingback.ping")
handler.handle_request()
It would still be necessary to provide an X-Pingback HTTP header which pointed
at the given CGI script.
"""
__author__ = "Mathieu Fenniak <laotzu@pobox.com>"
__date__ = "2003-01-26"
__version__ = "2003.01.26.01"
__changed__ = "2010.10.03@SkyCloud <admin@tangblog.info>"
__website__ = "www.tangblog.info"
import re
from base import util
from HTMLParser import HTMLParser
def reSTLinks(txt):
reSTLink = re.compile("\n\\.\\.\\s+[^\n:]+:\s+(http://[^\n]+)", re.I)
linkMatches = reSTLink.findall(txt)
return linkMatches
class _LinkExtractor(HTMLParser, object):
def __init__(self, links):
super(_LinkExtractor, self).__init__()
self.links = links
def handle_starttag(self, tag, attrs):
if tag == "a":
for key, value in attrs:
if key == "href" and value.startswith("http://"):
self.links.append(value)
class _HrefExtractor(HTMLParser,object):
def __init__(self, links):
super(_HrefExtractor, self).__init__()
self.links = links
self.currentLink=None
def handle_starttag(self, tag, attrs):
if tag == "a":
self.currentLink=None
self.buffer=""
for key, value in attrs:
if key == "href" and value.startswith("http://"):
self.currentLink=value
def handle_endtag(self,tag):
if tag == "a":
if self.currentLink:
self.links.append((self.currentLink,self.buffer))
self.currentLink=None
self.buffer=""
def handle_data(self,data):
if self.currentLink:
self.buffer += data
def htmlLinks(txt):
links = []
le = _LinkExtractor(links)
le.feed(txt)
le.close()
return links
def hrefExtractor(txt):
links=[]
le = _HrefExtractor(links)
le.feed | (txt)
le.close()
return links
def autoPingback(sourceURI, reST = None, HTML = None):
"""Scans the input text, which can be in either reStructuredText or HTML
format, pings every linked website for auto-discovery-capable pingback
servers, and does an appropriate pingback.
The following specification details how this code should work:
http://www.hixie.ch/specs/pingback/pingback"""
assert | reST != None or HTML != None
if reST != None:
links = reSTLinks(reST)
else:
links = htmlLinks(HTML)
for link in links:
util.do_pingback(sourceURI,link)
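# Example (illustrative): ping every external link in a freshly rendered post.
#   autoPingback('http://example.com/blog/my-post', HTML=rendered_html)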
|
jiarong/SSUsearch | scripts/plot-pcoa.py | Python | bsd-3-clause | 2,303 | 0.012158 | #! /usr/bin/env python
# by gjr; 021514
"""
Plot PCoA results from pcoa in mothur
% python plot-pcoa.py <file.pcoa.axis> <file.pcoa.loadings> <outfile>
"""
import sys, os
import matplotlib
matplotlib.use('Agg')
#matplotlib.use('Pdf')
import matplotlib.pyplot as plt
import numpy as np
import brewer2mpl
i | mport pandas
almost_black = '#262626'
def main():
if len(sys.argv) != 4:
print >> sys.stderr, \
'Usage: python %s <file.pcoa.axis> <file.pcoa.loadings> <outfile>'\
%(os.path.basename(sys.argv[0]))
sys.exit(1)
outfile = sys.argv[3]
if outfile.lower().endswith('.png'):
outfile = outfile[:-4]
| df = pandas.read_csv(sys.argv[1], sep='\t', index_col=False)
#df = pandas.read_csv(sys.argv[1], sep='\t')
df = df.set_index('group', drop=True, append=False)
df = df.dropna(how='all')
# only first two dimensions
dfx = df[[0,1]]
# % variation explained
df2 = pandas.read_csv(sys.argv[2], sep='\t', index_col='axis')
xvar = df2.loc[1]
yvar = df2.loc[2]
# plot
fig, ax = plt.subplots(1)
for idx in dfx.index:
x, y = dfx.ix[idx]
        ax.scatter(x, y, s=50, lw=1, facecolor='none')
ax.annotate(idx, (x,y), (x,y), fontsize='small')
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width, box.height*0.8])
# Remove top and right axes lines ("spines")
spines_to_remove = ['top', 'right']
for spine in spines_to_remove:
ax.spines[spine].set_visible(False)
# Get rid of ticks. The position of the numbers is informative enough of
# the position of the value.
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('none')
# For remaining spines, thin out their line and change the black to a slightly off-black dark grey
spines_to_keep = ['bottom', 'left']
for spine in spines_to_keep:
ax.spines[spine].set_linewidth(0.5)
ax.spines[spine].set_color(almost_black)
ax.set_xlabel('%s (%.f%%)' %('PC1', xvar))
ax.set_ylabel('%s (%.f%%)' %('PC2', yvar))
#plt.title(sys.argv[1])
plt.savefig('%s.png' %(outfile))
#plt.savefig('%s.pdf' %(sys.argv[1]))
#plt.savefig('%s.png' %(sys.argv[1]), dpi=300)
if __name__ == '__main__':
main()
|
waterponey/scikit-learn | sklearn/ensemble/iforest.py | Python | bsd-3-clause | 11,906 | 0.000168 | # Authors: Nicolas Goix <nicolas.goix@telecom-paristech.fr>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD 3 clause
from __future__ import division
import numpy as np
import scipy as sp
from warnings import warn
from scipy.sparse import issparse
import numbers
from ..externals import six
from ..tree import ExtraTreeRegressor
from ..utils import check_random_state, check_array
from .bagging import BaseBagging
__all__ = ["IsolationForest"]
INTEGER_TYPES = (numbers.Integral, np.integer)
class IsolationForest(BaseBagging):
"""Isolation Forest Algorithm
Return the anomaly score of each sample using the IsolationForest algorithm
The IsolationForest 'isolates' observations by randomly selecting a feature
and then randomly selecting a split value between the maximum and minimum
values of the selected feature.
Since recursive partitioning can be represented by a tree structure, the
number of splittings required to isolate a sample is equivalent to the path
length from the root node to the terminating node.
This path length, averaged over a forest of such random trees, is a
measure of normality and our decision function.
Random partitioning produces noticeably shorter paths for anomalies.
Hence, when a forest of random trees collectively produce shorter path
lengths for particular samples, they are highly likely to be anomalies.
Read more in the :ref:`User Guide <isolation_forest>`.
.. versionadded:: 0.18
Parameters
----------
n_estimators : int, optional (default=100)
The number of base estimators in the ensemble.
max_samples : int or float, optional (default="auto")
The number of samples to draw from X to train each base estimator.
- If int, then draw `max_samples` samples.
- If float, then draw `max_samples * X.shape[0]` samples.
- If "auto", then `max_samples=min(256, n_samples)`.
If max_samples is larger than the number of samples provided,
all samples will be used for all trees (no sampling).
contamination : float in (0., 0.5), optional (default=0.1)
The amount of contamination of the data set, i.e. the proportion
of outliers in the data set. Used when fitting to define the threshold
on the decision function.
max_features : int or float, optional (default=1.0)
The number of features to draw from X to train each base estimator.
- If int, then draw `max_features` features.
- If float, then draw `max_features * X.shape[1]` features.
bootstrap : boolean, optional (default=False)
If True, individual trees are fit on random subsets of the training
data sampled with replacement. If False, sampling without replacement
is performed.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
Attributes
----------
estimators_ : list of DecisionTreeClassifier
The collection of fitted sub-estimators.
estimators_samples_ : list of arrays
The subset of drawn samples (i.e., the in-bag samples) for each base
estimator.
max_samples_ : integer
The actual number of samples
References
----------
.. [1] Liu, Fei Tony, Ting, Kai Ming and Zhou, Zhi-Hua. "Isolation forest."
Data Mining, 2008. ICDM'08. Eighth IEEE International Conference on.
.. [2] Liu, Fei Tony, Ting, Kai Ming and Zhou, Zhi-Hua. "Isolation-based
anomaly detection." ACM Transactions on Knowledge Discovery from
Data (TKDD) 6.1 (2012): 3.
"""
def __init__(self,
n_estimators=100,
max_samples="auto",
contamination=0.1,
max_features=1.,
bootstrap=False,
n_jobs=1,
random_state=None,
verbose=0):
super(IsolationForest, self).__init__(
base_estimator=ExtraTreeRegressor(
max_features=1,
splitter='random',
| random_state=random_state),
# here above max_features has no links with self.max_features
bootstrap=bootstrap,
bootstrap_features=False,
n_estimators=n_estimators,
max_samples=max_samples,
max_features=max_features,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose)
self.contaminatio | n = contamination
def _set_oob_score(self, X, y):
raise NotImplementedError("OOB score not supported by iforest")
def fit(self, X, y=None, sample_weight=None):
"""Fit estimator.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
The input samples. Use ``dtype=np.float32`` for maximum
efficiency. Sparse matrices are also supported, use sparse
``csc_matrix`` for maximum efficiency.
Returns
-------
self : object
Returns self.
"""
# ensure_2d=False because there are actually unit test checking we fail
# for 1d.
X = check_array(X, accept_sparse=['csc'], ensure_2d=False)
if issparse(X):
# Pre-sort indices to avoid that each individual tree of the
# ensemble sorts the indices.
X.sort_indices()
rnd = check_random_state(self.random_state)
y = rnd.uniform(size=X.shape[0])
# ensure that max_sample is in [1, n_samples]:
n_samples = X.shape[0]
if isinstance(self.max_samples, six.string_types):
if self.max_samples == 'auto':
max_samples = min(256, n_samples)
else:
raise ValueError('max_samples (%s) is not supported.'
'Valid choices are: "auto", int or'
'float' % self.max_samples)
elif isinstance(self.max_samples, INTEGER_TYPES):
if self.max_samples > n_samples:
warn("max_samples (%s) is greater than the "
"total number of samples (%s). max_samples "
"will be set to n_samples for estimation."
% (self.max_samples, n_samples))
max_samples = n_samples
else:
max_samples = self.max_samples
else: # float
if not (0. < self.max_samples <= 1.):
raise ValueError("max_samples must be in (0, 1], got %r"
% self.max_samples)
max_samples = int(self.max_samples * X.shape[0])
self.max_samples_ = max_samples
max_depth = int(np.ceil(np.log2(max(max_samples, 2))))
super(IsolationForest, self)._fit(X, y, max_samples,
max_depth=max_depth,
sample_weight=sample_weight)
self.threshold_ = -sp.stats.scoreatpercentile(
-self.decision_function(X), 100. * (1. - self.contamination))
return self
def predict(self, X):
"""Predict if a particular sample is an outlier or not.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
is_inlier : array, shap |
GFZ-Centre-for-Early-Warning/REM_RRVS | webapp/models.py | Python | bsd-3-clause | 6,057 | 0.006604 | '''
---------------------------
models.py
---------------------------
Created on 24.04.2015
Last modified on 13.01.2016
Author: Marc Wieland, Michael Haas
Description: Defines the database model
----
'''
from webapp import db
from flask_security import RoleMixin, UserMixin
from geoalchemy2 import Geometry
from sqlalchemy.dialects import postgresql
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('users.users.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('users.roles.id')),
schema = 'users')
class Role(db.Model, RoleMixin):
"""
Role for database
"""
__tablename__="roles"
__table | _args__ = {'schema':'users'}
id = db.Column(db.Integer(), primary_key=True)
name = ''
class User(db.Model, UserMixin):
"""
User for RRVS
"""
__tablename__="users"
__table_args__ = {'schema':'users'}
id = db.Column(db.Integer(), primary_key=True)
authenticated = db.Column(db.Boolean, default=False)
password = | ''
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the taskid to satisfy Flask-Login's requirements."""
return self.id
def is_authenticated(self):
"""Return True if the user is authenticated."""
return self.authenticated
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
roles = db.relationship('Role', secondary=roles_users, backref=db.backref('users', lazy='dynamic'))
class task(db.Model):
"""
Holds the tasks from the users schema.
"""
__tablename__="tasks"
__table_args__ = {'schema':'users'}
id = db.Column(db.Integer, primary_key=True)
bdg_gids = db.Column(postgresql.ARRAY(db.Integer))
img_ids = db.Column(postgresql.ARRAY(db.Integer))
class tasks_users(db.Model):
"""
Holds the assigned tasks for all users
"""
__tablename__="tasks_users"
__table_args__ = {'schema':'users'}
user_id = db.Column(db.Integer, primary_key=True)
task_id = db.Column(db.Integer)
class dic_attribute_value(db.Model):
"""
Holds dic_attribute_value from the taxonomy schema.
"""
__tablename__ = "dic_attribute_value"
__table_args__ = {'schema':'taxonomy'}
gid = db.Column(db.Integer, primary_key=True)
attribute_type_code = db.Column(db.String(254))
attribute_value = db.Column(db.String(254), unique=True)
description = db.Column(db.String(254))
description_ar = db.Column(db.String(254))
description_es = db.Column(db.String(254))
extended_description = db.Column(db.String(1024))
class ve_object(db.Model):
"""
Holds ve_object from the asset schema.
"""
__tablename__ = "ve_object"
__table_args__ = {'schema':'asset'}
gid = db.Column(db.Integer, primary_key=True)
mat_type = db.Column(db.String(254))
mat_tech = db.Column(db.String(254))
mat_prop = db.Column(db.String(254))
llrs = db.Column(db.String(254))
llrs_duct = db.Column(db.String(254))
height = db.Column(db.String(254))
height2 = db.Column(db.String(254))
height_1 = db.Column(db.Integer)
height2_1 = db.Column(db.Integer)
yr_built = db.Column(db.String(254))
year_1 = db.Column(db.Integer)
year_2 = db.Column(db.Integer)
occupy = db.Column(db.String(254))
occupy_dt = db.Column(db.String(254))
position = db.Column(db.String(254))
plan_shape = db.Column(db.String(254))
str_irreg = db.Column(db.String(254))
str_irreg_2 = db.Column(db.String(254))
str_irreg_dt = db.Column(db.String(254))
str_irreg_dt_2 = db.Column(db.String(254))
str_irreg_type = db.Column(db.String(254))
str_irreg_type_2 = db.Column(db.String(254))
nonstrcexw = db.Column(db.String(254))
roof_shape = db.Column(db.String(254))
roofcovmat = db.Column(db.String(254))
roofsysmat = db.Column(db.String(254))
roofsystyp = db.Column(db.String(254))
roof_conn = db.Column(db.String(254))
floor_mat = db.Column(db.String(254))
floor_type = db.Column(db.String(254))
floor_conn = db.Column(db.String(254))
foundn_sys = db.Column(db.String(254))
rrvs_status = db.Column(db.String(254))
vuln = db.Column(db.String(254))
comment = db.Column(db.String(254))
the_geom = db.Column(Geometry(geometry_type='POLYGON', srid=4326))
class pan_imgs(db.Model):
"""
Holds panoramic images from the database
NOTE: right now only name of file on hdd
"""
__tablename__ = "img"
__table_args__ = {'schema':'image'}
gid = db.Column(db.Integer, primary_key=True)
gps = db.Column(db.Integer)
repository = db.Column(db.String(255))
filename = db.Column(db.String(100))
frame_id = db.Column(db.Integer)
source = db.Column(db.String(254))
class gps(db.Model):
"""
Holds locations of the panoramic images from the database
"""
__tablename__ = "gps"
__table_args__ = {'schema':'image'}
gid = db.Column(db.Integer, primary_key=True)
#img_id = db.Column(db.Integer)
azimuth = db.Column(db.Float)
the_geom = db.Column(Geometry(geometry_type='POINT',srid=4326))
class t_object(db.Model):
"""
Holds object from the asset schema.
"""
__tablename__ = "object"
__table_args__ = {'schema':'asset'}
#building id
gid = db.Column(db.Integer, primary_key=True)
#the geometry
the_geom = db.Column(Geometry(geometry_type='POLYGON', srid=4326))
class object_attribute(db.Model):
"""
Holds object_attribute from the asset schema.
"""
__tablename__ = "object_attribute"
__table_args__ = {'schema':'asset'}
#gid of attribute
gid = db.Column(db.Integer, primary_key=True)
#building id
object_id = db.Column(db.Integer)
#attribute code
attribute_type_code = db.Column(db.String(254))
#attribute value
attribute_value = db.Column(db.String(254))
#attribute numeric value
attribute_numeric_1 = db.Column(db.Numeric)
|
sserrot/champion_relationships | venv/Lib/site-packages/win32/Demos/win32wnet/testwnet.py | Python | mit | 3,461 | 0.030338 | import win32api
import win32wnet
import sys
from winnetwk import *
import os
possible_shares = []
def _doDumpHandle(handle, level = 0):
indent = " " * level
while 1:
items = win32wnet.WNetEnumResource(handle, 0)
if len(items)==0:
break
for item in items:
try:
if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE:
print(indent + "Have share with name:", item.lpRemoteName)
possible_shares.append(item)
elif item.dwDisplayType == RESOURCEDISPLAYTYPE_GENERIC:
print(indent + "Have generic resource with name:", item.lpRemoteName)
else:
# Try generic!
print(indent + "Enumerating " + item.lpRemoteName, end=' ')
k = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY,0,item)
print()
_doDumpHandle(k, level + 1)
win32wnet.WNetCloseEnum(k) # could do k.Close(), but this is a good test!
except win32wnet.error as details:
print(indent + "Couldn't enumerate this resource: " + details.strerror)
def TestOpenEnum():
print("Enumerating all resources on the network - this may take some time...")
handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET,RESOURCETYPE_ANY,0,None)
try:
_doDumpHandle(handle)
finally:
handle.Close()
print("Finished dumping all resources.")
def findUnusedDriveLetter():
existing = [x[0].lower() for x in win32api.GetLogicalDriveStrings().split('\0') if x]
handle = win32wnet.WNetOpenEnum(RESOURCE_REMEMBERED,RESOURCETYPE_DISK,0,None)
try:
while 1:
items = win32wnet.WNetEnumR | esource(handle, 0)
if len(items)==0:
break
xtra = [i.lpLocalName[0].lower() for i in items if i.lpLocalName]
existing.extend(xtra)
finally:
handle.Close()
for maybe in 'defghijklmnopqrstuvwxyz':
if maybe not in existing:
return maybe
raise RuntimeError("All drive mappings are taken?")
def Tes | tConnection():
if len(possible_shares)==0:
print("Couldn't find any potential shares to connect to")
return
localName = findUnusedDriveLetter() + ':'
for share in possible_shares:
print("Attempting connection of", localName, "to", share.lpRemoteName)
try:
win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName)
except win32wnet.error as details:
print("Couldn't connect: " + details.strerror)
continue
# Have a connection.
try:
fname = os.path.join(localName + "\\", os.listdir(localName + "\\")[0])
try:
print("Universal name of '%s' is '%s'" % (fname, win32wnet.WNetGetUniversalName(fname)))
except win32wnet.error as details:
print("Couldn't get universal name of '%s': %s" % (fname, details.strerror))
print("User name for this connection is", win32wnet.WNetGetUser(localName))
finally:
win32wnet.WNetCancelConnection2(localName, 0, 0)
# and do it again, but this time by using the more modern
# NETRESOURCE way.
nr = win32wnet.NETRESOURCE()
nr.dwType = share.dwType
nr.lpLocalName = localName
nr.lpRemoteName = share.lpRemoteName
win32wnet.WNetAddConnection2(nr)
win32wnet.WNetCancelConnection2(localName, 0, 0)
# and one more time using WNetAddConnection3
win32wnet.WNetAddConnection3(0, nr)
win32wnet.WNetCancelConnection2(localName, 0, 0)
# Only do the first share that succeeds.
break
def TestGetUser():
u = win32wnet.WNetGetUser()
print("Current global user is", repr(u))
if u != win32wnet.WNetGetUser(None):
raise RuntimeError("Default value didnt seem to work!")
TestGetUser()
TestOpenEnum()
TestConnection()
|
EdDev/vdsm | lib/vdsm/throttledlog.py | Python | gpl-2.0 | 3,750 | 0 | #
# Copyright 2016-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import logging
from vdsm.common.time import monotonic_time
_DEFAULT_TIMEOUT_SEC = 3600
_logger = logging.getLogger('throttled')
_periodic = {}
class _Periodic(object):
def __init__(self, interval, timeout):
self._interval = interval
self._timeout = timeout
self._counter = 0
self._last_time = 0
def tick(self):
now = monotonic_time()
result = self._result(now)
self._counter = (self._counter + 1) % self._interval
if result:
self._last_time = now
return result
def _result(self, now):
return (self._counter == 0 or
(now - self._last_time) >= self._timeout)
def throttle(name, interval, timeout=_DEFAULT_TIMEOUT_SEC):
"""
Throttle log messages for `name`, logging at most one message per
`interval` calls or always after `timeout` seconds of silence. Throttling
applies only to logging performed via `log()` function of this module. The
first call of `log()` never throttles the log, following calls are
throttled according to the given parameters.
If this function has already been called for `name`, replace the throttling
parameters for `name` with the new ones given here and start throttling
from beginning.
:param name: Arbitrary identifier to be matched in `log()` calls.
:type name: basestring
:param interval: The number of `log()` calls that should log at least once.
:type interval: int
:param timeout: The number of seconds without log emitted after which
`log()` should always unthrottle the next message.
:type timeout: int
"""
_periodic[name] = _Periodic(interval, timeout)
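# Example (illustrative): let 'stats' log at most one message per 100 calls,
# unthrottling after the default hour of silence:
#   throttle('stats', 100)
#   for i in range(1000):
#       debug('stats', 'processed %d items', i)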
def log(name, level, message, *args):
"""
Log `message` and `args` if throttling settings for `name` allow it.
See `throttle()` for information about throttling and `name`.
`level`, `message` and `args` are passed to `logging.Logger.log()`
unchanged.
:param name: Arbitrary identifier to be matched by `throttle()` settings.
:type name: basestring
.. note::
|
Depending on throttling settings and the | current logging level `message`
and `args` may not be logged at all. So don't perform expensive
preprocessing of `args` before calling this function. If you need to
modify it before logging it, you may want to use something like
`vdsm.common.logutils.Suppressed` or its subclasses.
"""
try:
periodic = _periodic[name]
except KeyError:
pass # unthrottled
else:
if not periodic.tick():
return
_logger.log(level, message, *args)
def debug(name, message, *args):
log(name, logging.DEBUG, message, *args)
def info(name, message, *args):
log(name, logging.INFO, message, *args)
def warning(name, message, *args):
log(name, logging.WARNING, message, *args)
|
Tvlistings/tuxtrax | penguicontrax/api/functions.py | Python | gpl-3.0 | 593 | 0 | from flask import g
def return_null_if_not_logged_in(func):
def return_none(*args, **kwargs):
if g.user is None:
return "You must be logged in to perform this action.", 401
return func(*args, **kwargs)
return return_none
def return_null_if_not_staff(func):
def return_ | none(*args, **kwargs) | :
if g.user is None:
return "You must be logged in to perform this action.", 401
if not g.user.staff:
return "You must be staff to perform this action.", 403
return func(*args, **kwargs)
return return_none
|
Blue-Labs/utf8-percentage-complete-bar | completion-status-bar.py | Python | apache-2.0 | 3,154 | 0.011097 | #!/usr/bin/env python
'''
Need to put your tty into utf8 mode? see utf-8(7) man page
The official ESC sequence to switch from an ISO 2022 encoding scheme (as used for
instance by VT100 terminals) to UTF-8 is ESC % G ("\x1b%G"). The corresponding
return sequence from UTF-8 to ISO 2022 is ESC % @ ("\x1b%@"). Other ISO 2022
sequences (such as for switching the G0 and G1 sets) are not applicable in UTF-8
mode.
if not set as such already, you can set your terminal coding to utf8 mode by:
printf "\033%s" "%G" > $(tty)
be mindful that your environment needs to support utf8 encoding. check that a utf8
encoding is marked available in /etc/locale.gen such as en_US.UTF-8. if commented, then
uncomment it and run locale-gen. your $LANG environment variable (or other LC*) will
also need to be set to a utf8 variant such as en_US.UTF-8.
glyph references: http://unicode-table.com/en/sections/block-elements/
'''
__version__ = '1.0'
__author__ = 'david ford <david@blue-labs.org> (also: firefighterblu3@gmail.com, rarely read)'
__copyright__ = '2014 '+__author__
__license__ = 'Apache 2.0'
__released__ = '2014 July 25'
import sys, time
import codecs, locale
bfill = [' ', '\u258f', '\u258e', '\u258d', '\u258c', '\u258b', '\u258a', '\u2589', '\u2588']
def draw_completion_status_bar(percent_complete, text=''):
''' draw a granular text representation of a graphical 100% status bar. this uses utf8
glyphs to fairly accurately represent a single 1% change across the range. however,
there are only 8 glyphs to do this with instead of 10 so we jump at the first 1% as
it looks better with a "burst" at the beginning instead of lacking the last 3 bars.
this only takes a dozen character cells on your tty to represent a rather accurate
view (it still takes 39 bytes).
'''
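    # (clarifying comment) e.g. 100% -> divmod(103, 8) = (12, 7): twelve full
    # blocks plus the widest partial glyph, while 1% -> bfill[4], the half
    # block - the "burst" described above.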
q,r = divmod(percent_complete and percent_complete+3 or 0, 8)
box = '\u2588'*q # how many solid boxes to draw
fc = bfill[r] # choose the partially filled box
pad = ' '*(11-q+1) # pad with spaces to the end
pct = str(percent_complete).rjust(3)+' ' # right justify the numeric value
# draw the bar with a mild white background and bright white foreground
s = '\r\x1b[1;37;47m' + box + fc + pad +'\x1b[0m ' + pct + text
sys.stdout.write(s)
def main():
draw_completion_status_bar(0, 'status lin | e #0')
print()
draw_completion_status_bar(1, 'status line #1')
print()
for n in range(19):
draw_completion_status_bar(n, 'status line #2')
time.sleep(0.1)
print()
draw_completion_status_bar(57, 'status line #3')
print()
draw_completion_status_bar(100, 'status line | #4')
print()
if __name__ == '__main__':
if False: # want to see what your current encoding information is?
        print('STDOUT Encoding: %s, isatty(%s), locale.getpreferredencoding(%s), sys.getfilesystemencoding(%s)' %(
sys.stdout.encoding, sys.stdout.isatty(), locale.getpreferredencoding(), sys.getfilesystemencoding()))
main()
|
RussTheAerialist/zensitting | daemon/zend/__init__.py | Python | gpl-2.0 | 20 | 0 | _ | _author__ = 'rhay'
| |
pchaigno/grr | gui/http_api_test.py | Python | apache-2.0 | 6,014 | 0.004656 | #!/usr/bin/env python
"""Tests for HTTP API."""
import json
from grr.gui import api_aff4_object_renderers
from grr.gui import api_call_renderers
from grr.gui import http_api
from grr.lib import flags
from grr.lib import registry
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.rdfvalues import structs as rdf_structs
from grr.proto import tests_pb2
class SampleGetRendererArgs(rdf_structs.RDFProtoStruct):
protobuf = tests_pb2.SampleGetRendererArgs
class SampleGetRenderer(api_call_renderers.ApiCallRenderer):
args_type = SampleGetRendererArgs
def Render(self, args, token=None):
return {
"method": "GET",
"path": args.path,
"foo": args.foo
}
class SampleGetRendererWithAdditionalArgsArgs(rdf_structs.RDFProtoStruct):
protobuf = tests_pb2.SampleGetRendererWithAdditionalArgsArgs
class SampleGetRendererWithAdditionalArgs(api_call_renderers.ApiCallRenderer):
args_type = SampleGetRendererWithAdditionalArgsArgs
additional_args_types = {
"AFF4Object": api_aff4_objec | t_renderers.Api | AFF4ObjectRendererArgs,
"RDFValueCollection": (api_aff4_object_renderers.
ApiRDFValueCollectionRendererArgs)
}
def Render(self, args, token=None):
result = {
"method": "GET",
"path": args.path,
"foo": args.foo
}
if args.additional_args:
rendered_additional_args = []
for arg in args.additional_args:
rendered_additional_args.append(str(arg))
result["additional_args"] = rendered_additional_args
return result
class TestHttpRoutingInit(registry.InitHook):
def RunOnce(self):
http_api.RegisterHttpRouteHandler(
"GET", "/test_sample/<path:path>", SampleGetRenderer)
http_api.RegisterHttpRouteHandler(
"GET", "/test_sample_with_additional_args/<path:path>",
SampleGetRendererWithAdditionalArgs)
class RenderHttpResponseTest(test_lib.GRRBaseTest):
"""Test for api_call_renderers.RenderHttpResponse logic."""
def _CreateRequest(self, method, path, query_parameters=None):
if not query_parameters:
query_parameters = {}
request = utils.DataObject()
request.method = method
request.path = path
request.scheme = "http"
request.environ = {
"SERVER_NAME": "foo.bar",
"SERVER_PORT": 1234
}
request.user = "test"
if method == "GET":
request.GET = query_parameters
request.META = {}
return request
def _RenderResponse(self, request):
response = http_api.RenderHttpResponse(request)
if response.content.startswith(")]}'\n"):
response.content = response.content[5:]
return response
def testReturnsRendererMatchingUrlAndMethod(self):
renderer, _ = http_api.GetRendererForHttpRequest(
self._CreateRequest("GET", "/test_sample/some/path"))
self.assertTrue(isinstance(renderer, SampleGetRenderer))
def testPathParamsAreReturnedWithMatchingRenderer(self):
_, path_params = http_api.GetRendererForHttpRequest(
self._CreateRequest("GET", "/test_sample/some/path"))
self.assertEqual(path_params, {"path": "some/path"})
def testRaisesIfNoRendererMatchesUrl(self):
self.assertRaises(api_call_renderers.ApiCallRendererNotFoundError,
http_api.GetRendererForHttpRequest,
self._CreateRequest("GET",
"/some/missing/path"))
def testRendersGetRendererCorrectly(self):
response = self._RenderResponse(
self._CreateRequest("GET", "/test_sample/some/path"))
self.assertEqual(
json.loads(response.content),
{"method": "GET",
"path": "some/path",
"foo": ""})
self.assertEqual(response.status_code, 200)
def testQueryParamsArePassedIntoRendererArgs(self):
response = self._RenderResponse(
self._CreateRequest("GET", "/test_sample/some/path",
query_parameters={"foo": "bar"}))
self.assertEqual(
json.loads(response.content),
{"method": "GET",
"path": "some/path",
"foo": "bar"})
def testRouteArgumentTakesPrecedenceOverQueryParams(self):
response = self._RenderResponse(
self._CreateRequest("GET", "/test_sample/some/path",
query_parameters={"path": "foobar"}))
self.assertEqual(
json.loads(response.content),
{"method": "GET",
"path": "some/path",
"foo": ""})
def testAdditionalArgumentsAreParsedCorrectly(self):
additional_args = http_api.FillAdditionalArgsFromRequest(
{
"AFF4Object.limit_lists": "10",
"RDFValueCollection.with_total_count": "1"
}, {
"AFF4Object": api_aff4_object_renderers.ApiAFF4ObjectRendererArgs,
"RDFValueCollection":
api_aff4_object_renderers.ApiRDFValueCollectionRendererArgs
})
additional_args = sorted(additional_args, key=lambda x: x.name)
self.assertListEqual(
[x.name for x in additional_args],
["AFF4Object", "RDFValueCollection"])
self.assertListEqual(
[x.type for x in additional_args],
["ApiAFF4ObjectRendererArgs", "ApiRDFValueCollectionRendererArgs"])
self.assertListEqual(
[x.args for x in additional_args],
[api_aff4_object_renderers.ApiAFF4ObjectRendererArgs(limit_lists=10),
api_aff4_object_renderers.ApiRDFValueCollectionRendererArgs(
with_total_count=True)])
def testAdditionalArgumentsAreFoundAndPassedToTheRenderer(self):
response = self._RenderResponse(
self._CreateRequest("GET",
"/test_sample_with_additional_args/some/path",
query_parameters={"foo": "42"}))
self.assertEqual(
json.loads(response.content),
{"method": "GET",
"path": "some/path",
"foo": "42"})
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
|
indrajitr/ansible | lib/ansible/modules/cron.py | Python | gpl-3.0 | 25,911 | 0.00274 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Dane Summers <dsummers@pinedesk.biz>
# Copyright: (c) 2013, Mike Grozak <mike.grozak@gmail.com>
# Copyright: (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
# Copyright: (c) 2015, Evan Kaufman <evan@digitalflophouse.com>
# Copyright: (c) 2015, Luca Berruti <nadirio@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: cron
short_description: Manage cron.d and crontab entries
description:
- Use this module to manage crontab and environment variables entries. This module allows
you to create environment variables and named crontab entries, update, or delete them.
- 'When crontab jobs are managed: the module includes one line with the description of the
crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module,
which is used by future ansible/module calls to find/check the state. The "name"
parameter should be unique, and changing the "name" value will result in a new cron
task being created (or a different one being removed).'
- When environment variables are managed, no comment line is added, but, when the module
needs to find/check the state, it uses the "name" parameter to find the environment
variable definition line.
- When using symbols such as %, they must be properly escaped.
version_added: "0.9"
options:
name:
description:
| - Description | of a crontab entry or, if env is set, the name of environment variable.
- Required if C(state=absent).
- Note that if name is not set and C(state=present), then a
new crontab entry will always be created, regardless of existing ones.
- This parameter will always be required in future releases.
type: str
user:
description:
- The specific user whose crontab should be modified.
- When unset, this parameter defaults to using C(root).
type: str
job:
description:
- The command to execute or, if env is set, the value of environment variable.
- The command should not contain line breaks.
- Required if C(state=present).
type: str
aliases: [ value ]
state:
description:
- Whether to ensure the job or environment variable is present or absent.
type: str
choices: [ absent, present ]
default: present
cron_file:
description:
- If specified, uses this file instead of an individual user's crontab.
- If this is a relative path, it is interpreted with respect to I(/etc/cron.d).
- If it is absolute, it will typically be I(/etc/crontab).
- Many linux distros expect (and some require) the filename portion to consist solely
of upper- and lower-case letters, digits, underscores, and hyphens.
- To use the C(cron_file) parameter you must specify the C(user) as well.
type: str
backup:
description:
- If set, create a backup of the crontab before it is modified.
The location of the backup is returned in the C(backup_file) variable by this module.
type: bool
default: no
minute:
description:
- Minute when the job should run ( 0-59, *, */2, etc )
type: str
default: "*"
hour:
description:
- Hour when the job should run ( 0-23, *, */2, etc )
type: str
default: "*"
day:
description:
- Day of the month the job should run ( 1-31, *, */2, etc )
type: str
default: "*"
aliases: [ dom ]
month:
description:
- Month of the year the job should run ( 1-12, *, */2, etc )
type: str
default: "*"
weekday:
description:
- Day of the week that the job should run ( 0-6 for Sunday-Saturday, *, etc )
type: str
default: "*"
aliases: [ dow ]
reboot:
description:
- If the job should be run at reboot. This option is deprecated. Users should use special_time.
version_added: "1.0"
type: bool
default: no
special_time:
description:
- Special time specification nickname.
type: str
choices: [ annually, daily, hourly, monthly, reboot, weekly, yearly ]
version_added: "1.3"
disabled:
description:
- If the job should be disabled (commented out) in the crontab.
- Only has effect if C(state=present).
type: bool
default: no
version_added: "2.0"
env:
description:
- If set, manages a crontab's environment variable.
- New variables are added on top of crontab.
- C(name) and C(value) parameters are the name and the value of environment variable.
type: bool
default: no
version_added: "2.1"
insertafter:
description:
- Used with C(state=present) and C(env).
- If specified, the environment variable will be inserted after the declaration of specified environment variable.
type: str
version_added: "2.1"
insertbefore:
description:
- Used with C(state=present) and C(env).
- If specified, the environment variable will be inserted before the declaration of specified environment variable.
type: str
version_added: "2.1"
requirements:
- cron (or cronie on CentOS)
author:
- Dane Summers (@dsummersl)
- Mike Grozak (@rhaido)
- Patrick Callahan (@dirtyharrycallahan)
- Evan Kaufman (@EvanK)
- Luca Berruti (@lberruti)
'''
EXAMPLES = r'''
- name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
cron:
name: "check dirs"
minute: "0"
hour: "5,2"
job: "ls -alh > /dev/null"
- name: 'Ensure an old job is no longer present. Removes any job that is prefixed by "#Ansible: an old job" from the crontab'
cron:
name: "an old job"
state: absent
- name: Creates an entry like "@reboot /some/job.sh"
cron:
name: "a job for reboot"
special_time: reboot
job: "/some/job.sh"
- name: Creates an entry like "PATH=/opt/bin" on top of crontab
cron:
name: PATH
env: yes
job: /opt/bin
- name: Creates an entry like "APP_HOME=/srv/app" and insert it after PATH declaration
cron:
name: APP_HOME
env: yes
job: /srv/app
insertafter: PATH
- name: Creates a cron file under /etc/cron.d
cron:
name: yum autoupdate
weekday: "2"
minute: "0"
hour: "12"
user: root
job: "YUMINTERACTIVE=0 /usr/sbin/yum-autoupdate"
cron_file: ansible_yum-autoupdate
- name: Removes a cron file from under /etc/cron.d
cron:
name: "yum autoupdate"
cron_file: ansible_yum-autoupdate
state: absent
- name: Removes "APP_HOME" environment variable from crontab
cron:
name: APP_HOME
env: yes
state: absent
'''
import os
import platform
import pwd
import re
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import shlex_quote
class CronTabError(Exception):
pass
class CronTab(object):
"""
CronTab object to write time based crontab file
user - the user of the crontab (defaults to root)
cron_file - a cron file under /etc/cron.d, or an absolute path
"""
def __init__(self, module, user=None, cron_file=None):
self.module = module
self.user = user
self.root = (os.getuid() == 0)
self.lines = None
self.ansible = "#Ansible: "
self.existing = ''
self.cron_cmd = self.module.get_bin_path('crontab', required=True)
if cron_file:
if os.path.isabs(cron_file):
self.cron_file = cron_file
else:
self.cron_file = os.path.join('/etc/cron.d', cron_file)
else:
self.cron_file = None
self.read()
def read(self):
# Read in the crontab from the system
self.lines = []
if self.cron_file:
# read the cronfile
try:
f = open(self.cron_file, 'r')
self.existing = f.read()
self.lines = self.existing.splitlines()
f.close()
except IOError:
|
jfkirk/tensorrec | test/test_representation_graphs.py | Python | apache-2.0 | 2,973 | 0.004709 | from nose_parameterized import parameterized
from unittest import TestCase
from tensorrec import TensorRec
from tensorrec.representation_graphs import (
LinearRepresentationGraph, NormalizedLinearRepresentationGraph, FeaturePassThroughRepresentationGraph,
WeightedFeaturePassThroughRepresentationGraph, ReLURepresentationGraph
)
from tensorrec.util import generate_dummy_data
class RepresentationGraphTestCase(TestCase):
@parameterized.expand([
["linear", LinearRepresentationGraph, LinearRepresentationGraph, 50, 60, 20],
["norm_lin", NormalizedLinearRepresentationGraph, | NormalizedLinearRepresentationGraph, 50, 60, 20],
["fpt_user", FeaturePassThroughRepresentationGraph, NormalizedLinearRepresentationGraph, 50, 60, 50],
["fpt_item", NormalizedLin | earRepresentationGraph, FeaturePassThroughRepresentationGraph, 50, 60, 60],
["fpt_both", FeaturePassThroughRepresentationGraph, FeaturePassThroughRepresentationGraph, 50, 50, 50],
["weighted_fpt", WeightedFeaturePassThroughRepresentationGraph, WeightedFeaturePassThroughRepresentationGraph,
50, 50, 50],
["relu", ReLURepresentationGraph, ReLURepresentationGraph, 50, 60, 20],
])
def test_fit(self, name, user_repr, item_repr, n_user_features, n_item_features, n_components):
interactions, user_features, item_features = generate_dummy_data(
num_users=15, num_items=30, interaction_density=.5, num_user_features=n_user_features,
num_item_features=n_item_features, n_features_per_user=20, n_features_per_item=20, pos_int_ratio=.5
)
model = TensorRec(n_components=n_components,
user_repr_graph=user_repr(),
item_repr_graph=item_repr())
model.fit(interactions, user_features, item_features, epochs=10)
# Ensure that the nodes have been built
self.assertIsNotNone(model.tf_prediction)
class IdentityRepresentationGraphTestCase(TestCase):
def test_fit_fail_on_bad_dims(self):
interactions, user_features, item_features = generate_dummy_data(
num_users=15, num_items=30, interaction_density=.5, num_user_features=30,
num_item_features=20, n_features_per_user=20, n_features_per_item=20, pos_int_ratio=.5
)
with self.assertRaises(ValueError):
model = TensorRec(n_components=25,
user_repr_graph=FeaturePassThroughRepresentationGraph(),
item_repr_graph=LinearRepresentationGraph())
model.fit(interactions, user_features, item_features, epochs=10)
with self.assertRaises(ValueError):
model = TensorRec(n_components=25,
user_repr_graph=LinearRepresentationGraph(),
item_repr_graph=FeaturePassThroughRepresentationGraph())
model.fit(interactions, user_features, item_features, epochs=10)
|
google/upvote_py2 | upvote/gae/utils/user_utils.py | Python | apache-2.0 | 901 | 0.00333 | # Copyright 2017 Google Inc. All Righ | ts Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICEN | SE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resources for mapping between user emails and host usernames/owners."""
from upvote.gae import settings
def UsernameToEmail(username):
if '@' in username:
return username
return '@'.join((username, settings.USER_EMAIL_DOMAIN))
def EmailToUsername(email):
return email.partition('@')[0]
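# Examples (illustrative):
#   UsernameToEmail('jdoe')  -> 'jdoe@' + settings.USER_EMAIL_DOMAIN
#   EmailToUsername('jdoe@example.com') -> 'jdoe'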
|
mudragada/util-scripts | PyProblems/LogFileProcessing/logFileParser.py | Python | mit | 1,318 | 0.002276 | """ 1. Parse log file of a webserver
2. Print the filename and number of bytes delivered for 200 responses
"""
import re
import sys
from os import path
import operator
import itertools
log_file_path = "server.log"
log_data = []
pattern = re.compile(r'\[(?P<time>.+)\](\s+\")(?P<requestType>\w+)(\s+)(?P<fileName>.*?)(\sHTTP)\/(?P<httpVersion>.*?)\"\s+(?P<httpResponse>\d+)\s(?P<bytes>\d+)')
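# Example of a line the pattern above matches (illustrative):
#   [01/Jan/2020:00:00:01] "GET /index.html HTTP/1.1" 200 5120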
fileDict = dict()
with open(log_file_path, "r") as file:
for line in file:
pattern_match = pattern.match(line)
log_data.append(pattern_match.groupdict())
dedup_log_data = []
for i in log_data:
if i not in dedup_log_data:
dedup_log_data.append(i)
for item in dedup_log_data:
key = item['fileName']
value = int(it | em['bytes'])
respCode = item['httpResponse']
if (respCode == '200'):
if key not in fileDict.keys():
fileDict[key] = value
else:
oldValue = int(fileDict.get(key))
value = oldValue+value
fileDict[key] = value
print(fileDict)
print( | dict(sorted(fileDict.items(), key=operator.itemgetter(1))))
sorted_fileDict = dict(sorted(fileDict.items(), key=operator.itemgetter(1)))
out_Dict = dict(itertools.islice(sorted_fileDict.items(), 10))
for k, v in out_Dict.items():
print (str(k) + " " + str(v))
|
gutomaia/nesasm_py | nesasm/__init__.py | Python | bsd-3-clause | 985 | 0 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import argparse
from nesasm.compiler import compile_file
def main(argv=None):
parser = argparse.ArgumentParser(
prog="nesasm",
description='NESasm - NES Assembly Compiler',
epilog='')
subparsers = parser.add_subparsers(
title="subcommands", description="utilities", help="aditional help")
asm_cmd = su | bparsers.add_parser('asm') # TODO, aliases=['asm'])
asm_cmd.add_argument('input', nargs='?', metavar='INPUT',
help="input c6502 asm file")
asm_cmd.add_argument('-o', '--output', metavar='OUTPUT',
help="output NES file")
asm_cmd.add_argument('-p', '--path', metavar='PATH',
help="path for assets")
asm_cmd.set_defaults(func=exec_asm)
    args = parser.parse_args(argv[1:] if argv is not None else None)
args.func(args)
def exec_asm(args):
compile_file(args.input, output=args.output, path=args.path)
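# CLI usage (illustrative): nesasm asm input.asm -o output.nes -p assets/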
|
hyphaltip/cndtools | util/rotateTabDelim.py | Python | gpl-2.0 | 1,070 | 0.001869 | #!/usr/bin/env python
# Copyright (c) 2006
# Colin Dewey (University of Wisconsin-Madison)
# cdewey@biostat.wisc.edu
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that | it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, | write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
rows = [line[:-1].split('\t') for line in sys.stdin]
maxLen = max(map(len, rows))
for row in rows:
if len(row) < maxLen:
row.extend(["NA"] * (maxLen - len(row)))
rows = zip(*rows)
for row in rows:
print '\t'.join(row)
|
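A small before/after sketch of the transpose: ragged rows are padded with "NA" to the longest row, then columns become rows (tabs shown as '\t'):

# stdin:                stdout:
#   a\tb\tc               a\td
#   d\te                  b\te
#                         c\tNA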
puttarajubr/commcare-hq | custom/ilsgateway/tanzania/handlers/keyword.py | Python | bsd-3-clause | 786 | 0.001272 | from corehq.apps.sms.api import send_sms_to_verified_number
from corehq.util.translation import localize
from django.utils.transla | tion import ugettext as _
class KeywordHandler(object):
def __init__(self, user, domain, args, verified_contact, msg):
self.user = user
self.domain = domain
self.args = args
self.verified_contact = verified_contact
self.msg = msg
def handle(self):
raise NotImplementedError("Not implemented yet")
def help(self):
raise NotImplementedError("Not implemented yet")
def respond(self, message, **kwargs):
owner = self.verifie | d_contact.owner
with localize(owner.get_language_code()):
send_sms_to_verified_number(self.verified_contact, _(message) % kwargs)
|
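A minimal subclass sketch showing how the two abstract hooks are meant to be filled in; the class name and message text are invented for illustration:

class StopHandler(KeywordHandler):
    # Hypothetical handler, not part of the ILSGateway codebase.
    def help(self):
        self.respond("Send 'stop' to unsubscribe.")

    def handle(self):
        # self.args holds the words following the keyword; respond() localizes
        # the message for the contact's owner before sending.
        self.respond("You have been unsubscribed from %(domain)s.", domain=self.domain)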
vishnupriyam/review-classification | classify.py | Python | cc0-1.0 | 1,294 | 0.016229 | import sys
import pickle
from nltk.tokenize import word_tokenize
from cleaneddata.remove_stopwords_nltk import read_words, clean_review
from model.generateModel import generatemodel
from validation.validate import predict
review = raw_input("Enter a review to classify: ")
try:
paramfile = open("model/savedmodel.p", "r")
print("Found a saved model in model/savedmodel.p\nTo generate a new model, please delete the model/savedmodel.p file and re-run the program.\n")
except IOError:
print("No saved model found...\nGenerating a new model...\n")
reviewfile = raw_input("Enter the review training set file path : ")
vocabfile = raw_input("Enter the vocabulary file path : ")
generatemodel(reviewfile, vocabfile, "model/savedmodel.p")
#model
(PP,PN,positive_probabilities,negative_probabilities,unseen_pos_prob,unseen_neg_prob) = pickle.load( open("model/savedmodel.p","rb") )
#clean the input review
review = review.lower()
review = word_tokenize(review)
stopwords = read | _words("cleaneddata/stopwords.txt")
review = clean_review(review,stopwords)
#predict the class of the review
testreview = []
testreview.append(review)
predicted_class = predict(testreview,PP,PN,positive_probabilities,negative_probabilities,unseen_pos_prob,unseen_neg_prob)
print(predicted_cla | ss)
|
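The unpickled tuple (class priors PP/PN, per-word probability dicts, and unseen-word fallbacks) suggests a standard naive Bayes rule; a sketch of what predict() presumably computes, where the function name and log-space formulation are assumptions rather than the actual validation/validate.py code:

import math

def naive_bayes_class(words, PP, PN, pos_probs, neg_probs, unseen_pos, unseen_neg):
    # Work in log space to avoid underflow; unknown words fall back to the
    # smoothed "unseen" probabilities carried in the saved model.
    pos = math.log(PP) + sum(math.log(pos_probs.get(w, unseen_pos)) for w in words)
    neg = math.log(PN) + sum(math.log(neg_probs.get(w, unseen_neg)) for w in words)
    return 'positive' if pos > neg else 'negative'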
Opentrons/labware | api/tests/opentrons/protocols/execution/test_execute_python.py | Python | apache-2.0 | 2,472 | 0 | import pytest
from opentrons.protocol_api import ProtocolContext
from opentrons.protocols.execution import execute, execute_python
from opentrons.protocols.parse import parse
def test_api2_runfunc():
def noargs():
pass
with pytest.raises(SyntaxError):
execute_python._runfunc_ok(noargs)
def twoargs(a, b):
pass
with pytest.raises(SyntaxError):
execute_python._runfunc_ok(twoargs)
def two_with_default(a, b=2):
pass
# making sure this doesn't raise
execute_python._runfunc_ok(two_with_default)
def one_with_default(a=2):
pass
# shouldn't raise
execute_python._runfunc_ok(one_with_default)
def starargs(*args):
pass
# shouldn't raise
execute_python._runfunc_ok(starargs)
@pytest.mark.parametrize('protocol_file', ['testosaur_v2.py'])
def test_execute_ok(protocol, protocol_file, loop):
proto = parse(protocol.text, protocol.filename)
ctx = ProtocolContext(loop)
execu | te.run_protocol(p | roto, context=ctx)
def test_bad_protocol(loop):
ctx = ProtocolContext(loop)
no_args = parse('''
metadata={"apiLevel": "2.0"}
def run():
pass
''')
with pytest.raises(execute_python.MalformedProtocolError) as e:
execute.run_protocol(no_args, context=ctx)
assert "Function 'run()' does not take any parameters" in str(e.value)
many_args = parse('''
metadata={"apiLevel": "2.0"}
def run(a, b):
pass
''')
with pytest.raises(execute_python.MalformedProtocolError) as e:
execute.run_protocol(many_args, context=ctx)
assert "must be called with more than one argument" in str(e.value)
def test_proto_with_exception(loop):
ctx = ProtocolContext(loop)
exc_in_root = '''metadata={"apiLevel": "2.0"}
def run(ctx):
raise Exception("hi")
'''
protocol = parse(exc_in_root)
with pytest.raises(execute_python.ExceptionInProtocolError) as e:
execute.run_protocol(
protocol,
context=ctx)
assert 'Exception [line 4]: hi' in str(e.value)
nested_exc = '''
import ast
def this_throws():
raise Exception("hi")
def run(ctx):
this_throws()
metadata={"apiLevel": "2.0"};
'''
protocol = parse(nested_exc)
with pytest.raises(execute_python.ExceptionInProtocolError) as e:
execute.run_protocol(
protocol,
context=ctx)
assert '[line 5]' in str(e.value)
assert 'Exception [line 5]: hi' in str(e.value)
|
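For contrast with the failing cases above, a minimal protocol that satisfies the signature check; this mirrors the shape the tests expect (run() callable with exactly one positional argument) rather than reproducing testosaur_v2.py:

metadata = {"apiLevel": "2.0"}

def run(ctx):
    # Exactly one required parameter is the shape _runfunc_ok accepts:
    # zero-argument and two-required-argument signatures both raise.
    pass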
robertoalotufo/ia636 | ia636/iadctmatrix.py | Python | bsd-3-clause | 322 | 0.021739 | # -*- encoding: utf-8 -*-
# Module iadctmatrix
from numpy import *
def iad | ctmatrix(N):
from iameshgrid import iameshgrid
x, u = iameshgrid(range(N), range(N)) # (u,x)
alpha = ones((N,N)) * sqrt(2./N)
alp | ha[0,:] = sqrt(1./N) # alpha(u,x)
A = alpha * cos((2*x+1)*u*pi / (2.*N)) # Cn(u,x)
return A
|
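The function builds the orthonormal DCT-II basis, A[u, x] = alpha(u) * cos((2x+1) * u * pi / (2N)); a quick property check using plain numpy in place of the ia636 mesh helper:

import numpy as np

N = 8
x, u = np.meshgrid(np.arange(N), np.arange(N))   # same (u, x) layout as above
alpha = np.full((N, N), np.sqrt(2.0 / N))
alpha[0, :] = np.sqrt(1.0 / N)
A = alpha * np.cos((2 * x + 1) * u * np.pi / (2.0 * N))
# Rows are orthonormal, so A times its transpose is (numerically) the identity.
assert np.allclose(A @ A.T, np.eye(N))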
Francis-Liu/animated-broccoli | nova/utils.py | Python | apache-2.0 | 53,116 | 0.00032 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import copy
import datetime
import errno
import functools
import hashlib
import hmac
import inspect
import logging as std_logging
import os
import pyclbr
import random
import re
import shutil
import socket
import struct
import sys
import tempfile
import time
from xml.sax import saxutils
import eventlet
import netaddr
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_context import context as common_context
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from six.moves import range
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
notify_decorator = 'nova.notifications.notify_decorator'
monkey_patch_opts = [
cfg.BoolOpt('monkey_patch',
default=False,
help='Whether to log monkey patching'),
cfg.ListOpt('monkey_patch_modules',
default=[
'nova.api.ec2.cloud:%s' % (notify_decorator),
'nova.compute.api:%s' % (notify_decorator)
],
help='List of modules/decorators to monkey patch'),
]
utils_opts = [
cfg.IntOpt('password_length',
default=12,
help='Length of generated instance admin passwords'),
cfg.StrOpt('instance_usage_audit_period',
default='month',
help='Time period to generate instance usages for. '
'Time period must be hour, day, month or year'),
cfg.BoolOpt('use_rootwrap_daemon', default=False,
help="Start and use a daemon that can run the commands that "
"need to be run with root privileges. This option is "
"usually enabled on nodes that run nova compute "
"processes"),
cfg.StrOpt('rootwrap_config',
default="/etc/nova/rootwrap.conf",
help='Path to the rootwrap configuration file to use for '
'running commands as root'),
cfg.StrOpt('tempdir',
help='Explicitly specify the temporary working directory'),
]
workarounds_opts = [
cfg.BoolOpt('disable_rootwrap',
default=False,
help='This option allows a fallback to sudo for performance '
'reasons. For example see '
'https://bugs.launchpad.net/nova/+bug/1415106'),
cfg.BoolOpt('disable_libvirt_livesnapshot',
default=True,
help='When using libvirt 1.2.2 live snapshots fail '
'intermittently under load. This config option provides '
'a mechanism to enable live snapshot while this is '
'resolved. See '
'https://bugs.launchpad.net/nova/+bug/1334398'),
cfg.BoolOpt('destroy_after_evacuate',
default=True,
deprecated_for_removal=True,
help='DEPRECATED: Whether to destroy '
'instances on startup when we suspect '
'they have previously been evacuated. This can result in '
'data loss if undesired. See '
'https://launchpad.net/bugs/1419785'),
cfg.BoolOpt('handle_virt_lifecycle_events',
default=True,
help="Whether or not to handle events raised from the compute "
"driver's 'emit_event' method. These are lifecycle "
"events raised from compute drivers that implement the "
"method. An example of a lifecycle event is an instance "
"starting or stopping. If the instance is going through "
"task state changes due to an API operation, like "
"resize, the events are ignored. However, this is an "
"advanced feature which allows the hypervisor to signal "
"to the compute service that an unexpected state change "
"has occurred in an instance and the instance can be "
"shutdown automatically - which can inherently race in "
"reboot operations or when the compute service or host "
"is rebooted, either planned or due to an unexpected "
"outage. Care should be taken when using this and "
"sync_power_state_interval is negative since then if any "
"instances are out of sync between the hypervisor and "
"the Nova database they will have to be synchronized "
"manually. See https://bugs.launchpad.net/bugs/1444630"),
]
""" The workarounds_opts group is for very specific reasons.
If you're:
- Working around an issue in a system tool (e.g. libvirt or qemu) where the
fix is in flight/discussed in that community.
- The tool can be/is fixed in some distributions and rather than patch the
code those distributions can trivially set a config option to get the
"correct" behavior.
Then this is a good place for your workaround.
.. warning::
Please use with care! Document the BugID that your workaround is paired with.
"""
CONF = cfg.CONF
CONF.register_opts(monkey_patch_opts)
CONF.register_opts(utils_opts)
CONF.import_opt('network_api_class', 'nova.network')
CONF.register_opts(workarounds_opts, group='workarounds')
LOG = logging.getLogger(__name__)
# used in limits
TIME_UNITS = {
'SECOND': 1,
'MINUTE': 60,
'HOUR': 3600,
'DAY': 86400
}
_IS_NEUTRON = None
synchronized = lockutils.synchronized_with_prefix('nova-')
SM_IMAGE_PROP_PREFIX = "image_"
SM_INHERITABLE_KEYS = (
'min_ram', 'min_disk', 'disk_format', 'container_format',
)
# Keys which hold large structured data that won't fit in the
# size constraints of the system_metadata table, so we avoid
# storing and/or loading them.
SM_SKIP_KEYS = (
# Legacy names
'mappings', 'block_device_mapping',
# Modern names
'img_mappings', 'img_block_device_mapping',
)
# Image attributes which Cinder stores in volume image metadata
# as regular properties
VIM_IMAGE_ATTRIBUTES = (
'image_id', 'image_name', 'size', 'checksum',
'container_format', 'disk_format', 'min_ram', 'min_disk',
)
_FILE_CACHE = {}
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns Boolean indicating whether the vpn_server is listen | ing.
Basic packet structure is below.
Client packet (14 bytes)::
0 1 8 9 13
| +-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
0 1 8 9 13 14 21 22 25
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest |
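Given the documented layout, the 14-byte client packet could be assembled roughly as below; this is a sketch from the docstring's byte map, not vpn_ping's actual body, and the session id value is a placeholder:

import struct

session_id = 0x1122334455667788   # placeholder 64-bit client identifier
# '!BQ5x': network byte order, 1-byte id 0x38, 8-byte cli_id, 5 pad bytes == 14.
data = struct.pack('!BQ5x', 0x38, session_id)
assert len(data) == 14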
camradal/ansible | lib/ansible/modules/system/runit.py | Python | gpl-3.0 | 9,034 | 0.007084 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
# This is a modification of @bcoca's `svc` module
DOCUMENTATION = '''
---
module: runit
author: "James Sumners (@jsumners)"
version_added: "2.3"
short_description: Manage runit services.
description:
- Controls runit services on remote hosts using the sv utility.
options:
name:
required: true
description:
- Name of the service to manage.
state:
required: false
choices: [ started, stopped, restarted, killed, reloaded, once ]
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service (sv restart) and C(killed) will always bounce the service (sv force-stop).
C(reloaded) will send a HUP (sv reload).
C(once) will run a normally downed sv once (sv once), not really
an idempotent operation.
enabled:
required: false
choices: [ "yes", "no" ]
description:
- Whether the service is enabled or not; if disabled, it also implies stopped.
service_dir:
required: false
default: /var/service
description:
- directory runsv watches for services
service_src:
required: false
default: /etc/sv
description:
- directory where services are defined, the source of symlinks to service_dir.
'''
EXAMPLES = '''
# Example action to start sv dnscache, if not running
- sv:
name: dnscache
state: started
# Example action to stop sv dnscache, if running
- sv:
name: dnscache
state: stopped
# Example action to kill sv dnscache, in all cases
- sv:
name: dnscache
state: killed
# Example action to restart sv dnscache, in all cases
- sv:
name: dnscache
state: restarted
# Example action to reload sv dnscache, in all cases
- sv:
name: dnscache
state: reloaded
# Example using alt sv directory location
- sv:
name: dnscache
state: reloaded
service_dir: /run/service
'''
import platform
import shlex
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.basic import *
def _load_dist_subclass(cls, *args, **kwargs):
'''
Used for derivative implementations
'''
subclass = None
    distro = kwargs['module'].params['dist']  # key matches the 'dist' option in argument_spec
# get the most specific superclass for this platform
if distro is not None:
for sc in cls.__subclasses__():
if sc.distro is not None and sc.distro == distro:
subclass = sc
if subclass is None:
subclass = cls
return super(cls, subclass).__new__(subclass)
class Sv(object):
"""
    Main class that handles runit's sv, can be subclassed and overridden in case
we want to use a 'derivative' like encore, s6, etc
"""
#def __new__(cls, *args, **kwargs):
# return _load_dist_subclass(cls, args, kwargs)
def __init__(self, module):
self.extra_paths = [ ]
self.report_vars = ['state', 'enabled', 'svc_full', 'src_full', 'pid', 'duration', 'full_state']
self.module = module
self.name = module.params['name']
self.service_dir = module.params['service_dir']
self.service_src = module.params['service_src']
self.enabled = None
self.full_state = None
self.state = None
self.pid = None
self.duration = None
self.svc_cmd = module.get_bin_path('sv | ', opt_dirs=self.extra_paths)
self.svstat_cmd = module.get_bin_path('sv', opt_dirs=self.extra_paths)
self.svc_full = '/'.join([ self.service_dir, self.name ])
self.src_full = '/'.join([ self.service_src, self.name ])
self.enabled = os.path.lexists(self.svc_full) |
if self.enabled:
self.get_status()
else:
self.state = 'stopped'
def enable(self):
if os.path.exists(self.src_full):
try:
os.symlink(self.src_full, self.svc_full)
except OSError:
e = get_exception()
self.module.fail_json(path=self.src_full, msg='Error while linking: %s' % str(e))
else:
self.module.fail_json(msg="Could not find source for service to enable (%s)." % self.src_full)
def disable(self):
self.execute_command([self.svc_cmd,'force-stop',self.src_full])
try:
os.unlink(self.svc_full)
except OSError:
e = get_exception()
self.module.fail_json(path=self.svc_full, msg='Error while unlinking: %s' % str(e))
def get_status(self):
(rc, out, err) = self.execute_command([self.svstat_cmd, 'status', self.svc_full])
if err is not None and err:
self.full_state = self.state = err
else:
self.full_state = out
m = re.search('\(pid (\d+)\)', out)
if m:
self.pid = m.group(1)
m = re.search(' (\d+)s', out)
if m:
self.duration = m.group(1)
if re.search('run:', out):
self.state = 'started'
elif re.search('down:', out):
self.state = 'stopped'
else:
self.state = 'unknown'
return
def started(self):
return self.start()
def start(self):
return self.execute_command([self.svc_cmd, 'start', self.svc_full])
def stopped(self):
return self.stop()
def stop(self):
return self.execute_command([self.svc_cmd, 'stop', self.svc_full])
def once(self):
return self.execute_command([self.svc_cmd, 'once', self.svc_full])
def reloaded(self):
return self.reload()
def reload(self):
return self.execute_command([self.svc_cmd, 'reload', self.svc_full])
def restarted(self):
return self.restart()
def restart(self):
return self.execute_command([self.svc_cmd, 'restart', self.svc_full])
def killed(self):
return self.kill()
def kill(self):
return self.execute_command([self.svc_cmd, 'force-stop', self.svc_full])
def execute_command(self, cmd):
try:
(rc, out, err) = self.module.run_command(' '.join(cmd))
except Exception:
e = get_exception()
self.module.fail_json(msg="failed to execute: %s" % str(e))
return (rc, out, err)
def report(self):
self.get_status()
states = {}
for k in self.report_vars:
states[k] = self.__dict__[k]
return states
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(choices=['started', 'stopped', 'restarted', 'killed', 'reloaded', 'once']),
enabled = dict(required=False, type='bool'),
dist = dict(required=False, default='runit'),
service_dir = dict(required=False, default='/var/service'),
service_src = dict(required=False, default='/etc/sv'),
),
supports_check_mode=True,
)
module.run_command_env |
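get_status() keys entirely off the text that 'sv status' prints; a sketch of the parsing against a representative output line (the sample string is illustrative, not captured from a real host):

import re

sample = 'run: dnscache: (pid 1234) 56s; run: log: (pid 1235) 56s'
assert re.search(r'\(pid (\d+)\)', sample).group(1) == '1234'   # -> self.pid
assert re.search(r' (\d+)s', sample).group(1) == '56'           # -> self.duration
assert re.search('run:', sample)                                # -> state 'started'
# 'down: dnscache: 3s, normally up' would match 'down:' -> state 'stopped'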
efiring/numpy-work | numpy/core/setupscons.py | Python | bsd-3-clause | 4,414 | 0.004304 | import os
import sys
import glob
from os.path import join, basename
from numpy.distutils import log
from numscons import get_scons_build_dir
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration,dot_join
from numpy.distutils.command.scons import get_scons_pkg_build_dir
from numpy.distutils.system_info import get_info, default_lib_dirs
config = Configuration('core',parent_package,top_path)
local_dir = config.local_path
header_dir = 'include/numpy' # this is relative to config.path_in_package
config.add_subpackage('code_generators')
# List of files to register to numpy.distutils
dot_blas_src = [join('blasdot', '_do | tblas.c'),
join('blasdot', 'cblas.h')]
api_definition = [join('code_generators', 'array_api_order.txt'),
join('code_generators', 'multiarray_api_order.txt'),
join('code_generators', 'ufunc_api_order.txt')]
core_src = [join('src', basename(i)) for i in glob.glob(join(local_dir,
'src',
' | *.c'))]
core_src += [join('src', basename(i)) for i in glob.glob(join(local_dir,
'src',
'*.src'))]
source_files = dot_blas_src + api_definition + core_src + \
[join(header_dir, 'numpyconfig.h.in')]
# Add generated files to distutils...
def add_config_header():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
target = join(get_scons_pkg_build_dir(config.name), 'config.h')
incl_dir = os.path.dirname(target)
if incl_dir not in config.numpy_include_dirs:
config.numpy_include_dirs.append(incl_dir)
def add_numpyconfig_header():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
target = join(get_scons_pkg_build_dir(config.name),
'include/numpy/numpyconfig.h')
incl_dir = os.path.dirname(target)
if incl_dir not in config.numpy_include_dirs:
config.numpy_include_dirs.append(incl_dir)
config.add_data_files((header_dir, target))
def add_array_api():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
h_file = join(get_scons_pkg_build_dir(config.name), '__multiarray_api.h')
t_file = join(get_scons_pkg_build_dir(config.name), 'multiarray_api.txt')
config.add_data_files((header_dir, h_file),
(header_dir, t_file))
def add_ufunc_api():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
h_file = join(get_scons_pkg_build_dir(config.name), '__ufunc_api.h')
t_file = join(get_scons_pkg_build_dir(config.name), 'ufunc_api.txt')
config.add_data_files((header_dir, h_file),
(header_dir, t_file))
def add_generated_files(*args, **kw):
add_config_header()
add_numpyconfig_header()
add_array_api()
add_ufunc_api()
config.add_sconscript('SConstruct',
post_hook = add_generated_files,
source_files = source_files)
config.add_data_files('include/numpy/*.h')
config.add_include_dirs('src')
config.numpy_include_dirs.extend(config.paths('include'))
# Don't install fenv unless we need them.
if sys.platform == 'cygwin':
config.add_data_dir('include/numpy/fenv')
config.add_data_dir('tests')
config.make_svn_version_py()
return config
if __name__=='__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|