| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 distinct value) | license (string, 15 distinct values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
NMTHydro/Recharge
|
utils/TAW_optimization_subroutine/disagg_tester.py
|
Python
|
apache-2.0
| 15,034
| 0.002993
|
# ===============================================================================
# Copyright 2019 Jan Hendrickx and Gabriel Parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import yaml
import numpy as np
import gdal
from gdalconst import GDT_Float32
# ============= standard library imports ========================
def write_raster(array, geotransform, output_path, output_filename, dimensions, projection, flip_arr=False):
"""
Writes a GeoTIFF raster to the specified location.
:param array: the array to be written as a raster
:param geotransform: a six-element sequence of numbers describing the origin, pixel size and rotation of the raster
:param output_path: directory where the raster should be written
:param output_filename: name of the output file
:param dimensions: x and y dimensions of the raster as a tuple
:param projection: geographic projection string
:param flip_arr: if True, flip the array vertically before writing
:return: None
"""
filename = os.path.join(output_path, output_filename)
print 'writing to location {}'.format(filename)
driver = gdal.GetDriverByName('GTiff')
# path, cols, rows, bandnumber, data type (if not specified, as below, the default is GDT_Byte)
output_dataset = driver.Create(filename, dimensions[0], dimensions[1], 1, GDT_Float32)
# we write TO the output band
output_band = output_dataset.GetRasterBand(1)
if flip_arr:
array = np.flipud(array)
print 'shape of flipped array', array.shape
# we don't need to do an offset
output_band.WriteArray(array, 0, 0)
print 'done writing.'
# set the geo
|
transform in order to georeference the image
output_dataset.SetGeoTransform(geotransform)
# set the projection
output_dataset.SetProjection(projection)
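# A minimal sketch of how write_raster might be invoked (the array, paths and
# the wgs84_wkt_string name below are illustrative assumptions, not values
# from this module):
# example_arr = np.ones((2, 2), dtype=np.float32)
# example_gt = [-106.0, 0.1, 0.0, 34.0, 0.0, -0.1]  # origin x, pixel w, rot, origin y, rot, -pixel h
# write_raster(example_arr, example_gt, '/tmp', 'example.tif',
#              dimensions=(2, 2), projection=wgs84_wkt_string)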
def numpy_to_geotiff(array, geo_info, output_path, output_name):
""""""
trans = geo_info['geotransform']
dim = geo_info['dimensions']
proj = geo_info['projection']
print 'transform', trans
print 'dimensions', dim
print '
|
projections', proj
write_raster(array, geotransform=trans, output_path=output_path, output_filename=output_name,
dimensions=dim, projection=proj)
def geotiff_output(taw_vals, rss_arrs, geo_info, namekey, outpath):
""""""
for arr, taw_val in zip(rss_arrs, taw_vals):
outname = '{}_image_taw_{}.tif'.format(namekey, taw_val)
numpy_to_geotiff(arr, geo_info, outpath, outname)
def optimize_taw_disaggregate(rss_path, output_path, geo_info, big_arr=False, test_mode=False, hair_trigger=False):
"""
:param rss_path:
:param output_path:
:param geo_info:
:param big_arr:
:param test_mode:
:param hair_trigger: if the error reduction ever falls below the specified threshold, we take the corresponding TAW.
If false, we take TAW beyond which every error reduction is below the specified threshold.
:return:
"""
if test_mode:
test_path = '/Users/dcadol/Desktop/academic_docs_II/JPL_Data/taw_calibration_disaggregated/grassland_test.csv'
with open(test_path, 'r') as rfile:
taw_vals = []
rss_vals = []
for line in rfile:
taw_rss = line.split(',')
taw = int(taw_rss[0])
rss = float(taw_rss[1])
taw_vals.append(taw)
rss_vals.append(rss)
# get the average daily rss in mm
rss_vals_avg_daily = [((rss / 11.0) / 365.0) for rss in rss_vals]
print 'the rss avg daily error \n', rss_vals_avg_daily
error_reduced_lst = []
for i in range(len(rss_vals_avg_daily)):
# print 'i', i
if i == 0:
error_reduced_lst.append('')
elif i > 0:
# calculate the error reduced by each taw step
error_reduced = rss_vals_avg_daily[i] - rss_vals_avg_daily[i-1]
error_reduced_lst.append(error_reduced)
# elif i == len(rss_vals_avg_daily)
print 'the error reduced list \n', error_reduced_lst
# set the first value of the list to the second value
error_reduced_lst[0] = error_reduced_lst[1]
print 'the error reduced list \n', error_reduced_lst
# round the values to the 2nd decimal place
error_reduced_lst = [round(i, 2) for i in error_reduced_lst]
# # select the TAW after which error reduced is no longer greater than 0.01
# for taw, reduced_error in zip(taw_vals, error_reduced_lst):
# print 'taw {}, re {}'.format(taw, reduced_error)
indx_lst = []
for i, re in enumerate(error_reduced_lst):
if abs(re) <= 0.01:
indx_lst.append(i)
print 'the index list\n', indx_lst
consecutives = []
for i in range(len(indx_lst)+1):
if i > 0 and i < (len(indx_lst)-1):
print i
if indx_lst[i + 1] == indx_lst[i] + 1:
consecutives.append(indx_lst[i])
elif i == len(indx_lst)-1:
if indx_lst[i] -1 == indx_lst[i-1]:
consecutives.append(indx_lst[i-1])
consecutives.append(indx_lst[i])
print 'consecutives \n', consecutives
# take the first index after which the reduced error is consistently less than or equal to 0.01
target_index = consecutives[0]
# taw at the target index is the optimum taw
optimum_taw = taw_vals[target_index]
print 'optimum taw', optimum_taw
else:
print 'running'
# open rss dict from yml file for testing
with open(rss_path, 'r') as rfile:
rss = yaml.load(rfile)
print 'optimizing taw'
# get taw, rss arrays out.
taw_vals = rss['taw']
rss_arrs = rss['rss']
# # slice the array for testing so you can see it change or not...
# rss_arrs = [rss[200:220, 200:220] for rss in rss_arrs]
print 'len of rss arrs', len(rss_arrs)
# get the average daily rss in mm for an 11 year time period todo - these outputs look strange
rss_vals_avg_daily = [((rss / 11.0) / 365.0) for rss in rss_arrs]
# output average daily rss as images for better visualization
geotiff_output(taw_vals, rss_vals_avg_daily, geo_info, namekey='daily_rss', outpath=output_path)
print 'the rss avg daily error \n', len(rss_vals_avg_daily)
error_reduced_lst = []
for i in range(len(rss_vals_avg_daily)):
print 'i', i
if i == 0:
error_reduced_lst.append('')
elif i > 0:
# calculate the error reduced by each taw step todo - these should be positive if error is DECREASING
error_reduced = rss_vals_avg_daily[i] - rss_vals_avg_daily[i - 1]
error_reduced_lst.append(error_reduced)
# elif i == len(rss_vals_avg_daily)
print 'the error reduced list \n', error_reduced_lst
# set the first value of the list to the second value
error_reduced_lst[0] = error_reduced_lst[1]
print 'the error reduced list \n', error_reduced_lst
# output ERROR_REDUCED as images
geotiff_output(taw_vals, error_reduced_lst, geo_info, namekey='error_reduced', outpath=output_path)
# make all errors positive by taking the absolute value todo - what are the implications of taking the absolute value? It may mess up the algorithm
error_reduced_lst = [np.absolute(i) for i in error_reduced_lst]
# output ERROR_REDUCED as images
geoti
|
githubutilities/LeetCode
|
Python/intersection-of-two-arrays.py
|
Python
|
mit
| 2,721
| 0.002205
|
# Time: O(m + n)
# Space: O(min(m, n))
# Given two arrays, write a function to compute their intersection.
#
# Example:
# Given nums1 = [1, 2, 2, 1], nums2 = [2, 2], return [2].
#
# Note:
# Each element in the result must be unique.
# The result can be in any order.
# Hash solution.
class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2:
|
List[int]
:rtype: List[int]
"""
if len(nums1) > len(nums2):
return self.intersection(nums2, nums1)
lookup = set()
for i in nums1:
lookup.add(i)
res = []
for i in nums2:
if i in lookup:
res += i,
lookup.discard(i)
return res
def intersection2(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: Li
|
st[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
# Time: O(max(m, n) * log(max(m, n)))
# Space: O(1)
# Binary search solution.
class Solution2(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
if len(nums1) > len(nums2):
return self.intersection(nums2, nums1)
def binary_search(compare, nums, left, right, target):
while left < right:
mid = left + (right - left) / 2
if compare(nums[mid], target):
right = mid
else:
left = mid + 1
return left
nums1.sort(), nums2.sort()
res = []
left = 0
for i in nums1:
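# first search finds the leftmost slot in nums2 with value >= i; after a
# match, the second search (strict >) skips past any duplicates of i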
left = binary_search(lambda x, y: x >= y, nums2, left, len(nums2), i)
if left != len(nums2) and nums2[left] == i:
res += i,
left = binary_search(lambda x, y: x > y, nums2, left, len(nums2), i)
return res
# Time: O(max(m, n) * log(max(m, n)))
# Space: O(1)
# Two pointers solution.
class Solution3(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
nums1.sort(), nums2.sort()
res = []
it1, it2 = 0, 0
while it1 < len(nums1) and it2 < len(nums2):
if nums1[it1] < nums2[it2]:
it1 += 1
elif nums1[it1] > nums2[it2]:
it2 += 1
else:
if not res or res[-1] != nums1[it1]:
res += nums1[it1],
it1 += 1
it2 += 1
return res
|
koeninger/spark
|
python/examples/wordcount.py
|
Python
|
bsd-3-clause
| 555
| 0
|
import sys
from operator import add
from pyspark import SparkContext
|
if __name__ == "__main__":
if len(sys.argv) < 3:
print >> sys.stderr, \
"Usage: PythonWordCount <master> <file>"
exit(-1)
sc = SparkContext(sys.argv[1], "PythonWordCount")
lines = sc.textFile(sys.argv[2], 1)
counts = lines.flatMap(lambda x: x.split(' ')) \
.map(lambda x: (x, 1)) \
.reduceByKey(add)
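# e.g. the line "a b a" becomes [("a", 1), ("b", 1), ("a", 1)] after
# flatMap/map, and reduceByKey(add) folds that into [("a", 2), ("b", 1)]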
output = counts.collect()
for (word, count) in output:
print "%s : %i" % (word, count)
| |
motobyus/moto
|
module_django/jstest/jstest/settings.py
|
Python
|
mit
| 3,255
| 0.001536
|
"""
Django settings for jstest project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(n=5&yvpo-9!=db58cbix!za-$30^osiq1i42o42xh8)9j81i1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'samplepage',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'jstest.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'jstest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
|
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# ht
|
tps://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'samplepage/statics'),
)
|
shincling/MemNN_and_Varieties
|
DataCoupus/list_document/namelist_answer.py
|
Python
|
bsd-3-clause
| 2,870
| 0.004476
|
# -*- coding: utf8 -*-
__author__ = 'shin'
import jieba
namelist_answer=[]
'''
namelist_answer.append('[slot_name]。')
namelist_answer.append('叫[slot_name]。')
namelist_answer.append('姓名是[slot_name]。')
namelist_answer.append('我是[slot_name]。')
namelist_answer.append('您好,我叫[slot_name]。')
namelist_answer.append('[slot_name]')
namelist_answer.append('我的名字是[slot_name]。')
namelist_answer.append('我大名唤作[slot_name]。')
namelist_answer.append('哦,我的名字就是[slot_name]啊。')
namelist_answer.append('名叫[slot_name]。')
namelist_answer.append('叫[slot_name]。')
namelist_answer.append('没问题,我叫[slot_name]。')
namelist_answer.append('好的,名字是[slot_name]。')
namelist_answer.append('我的全名就是[slot_name]。')
namelist_answer.append('姓名是[slot_name]。')
namelist_answer.append('[slot_name]是我的名字。')
namelist_answer.append('我名叫[slot_name]。')
namelist_answer.append('我是[slot_name]啊。')
'''
namelist_answer.append('周杰伦。')
namelist_answer.append('叫周杰伦。')
namelist_answer.append('姓名是周杰伦。')
namelist_answer.append('我是周杰伦。')
namelist_answer.append('您好,我叫周杰伦。')
namelist_answer.append('周杰伦')
namelist_answer.append('我的名字是周杰伦。')
namelist_answer.append('我大名唤作周杰伦。')
namelist_answer.append('哦,我的名字就是周杰伦啊。'
|
)
namelist_answer.append('名叫周杰伦。')
namelist_answer.append('叫周杰伦。')
namelist_answer.append('没问题,我叫周杰伦。')
namelist_answer.append('好的,名字是周杰伦。')
namelist_answer.append('我的全名就是周杰伦。')
namelist_answer.append('姓名是周杰伦。')
namelist_answer.append('周杰伦是我的名字。')
namelist_answer.append('我名叫周杰伦。')
namelist_answer.append('我是周杰伦啊。')
namelist_answer.append('我叫周杰伦')
namelist_answer.append('周杰伦')
namelist_answer.append('我的名字是周杰伦')
namelist_answer.append('我的姓名是周杰伦')
namelist_answer.append('姓名周杰伦')
namelist_answer.app
|
end('名字叫周杰伦。')
namelist_answer.append('您好,我叫周杰伦。')
namelist_answer.append('好的。您记一下。周杰伦。')
namelist_answer.append('名是周杰伦。')
namelist_answer.append('名叫周杰伦。')
namelist_answer.append('我叫周杰伦。')
namelist_answer.append('我是周杰伦。')
namelist_answer.append('名字是周杰伦。')
namelist_answer.append('我的名字是周杰伦。')
namelist_answer_cut=[]
for ans in namelist_answer:
w_sent=''
sent=jieba._lcut(ans)
for word in sent:
w_sent +=' '
w_sent +=word
w_sent += '\n'
w_sent=w_sent.replace('周杰伦'.decode('utf8'),'[slot_name]')
namelist_answer_cut.append(w_sent)
pass
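# e.g. '我叫周杰伦。' is segmented (roughly) to ' 我 叫 周杰伦 。\n' and the name
# is then replaced, yielding ' 我 叫 [slot_name] 。\n'; the answers are all
# variants of "My name is Jay Chou"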
|
jenix21/DarunGrim
|
Src/Scripts/Test/ListDirectories.py
|
Python
|
bsd-3-clause
| 1,288
| 0.045031
|
import dircache
import os.path
from sqlalchemy import create_engine,Table,Column,Integer,String,ForeignKey,MetaData
from sqlalchemy.orm import mapper
from sqlalchemy.orm import sessionmaker
from Files import *
def SearchDirectory(session,directory,whitelist):
for file in dircache.listdir(directory):
if file in whitelist:
continue
full_path=os.path.join(directory,file)
if os.path.isdir(full_path):
#print 'Directory',full_path
SearchDirectory(session,full_path,whitelist)
else:
try:
fd=open(full_path)
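# 'MZ' is the magic number at the start of DOS/PE executables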
if fd.read(2)=='MZ':
path_elements=full_path.split('\\')
filename=path_elements[-1]
version=path_elements[-2]
print filename.lower(),ve
|
rsion,full_path
session.add(Files(filename,version,full_path))
fd.close()
except:
pass
engine=create_engine('sqlite:///Files.db',echo=True)
"""
metadata=MetaData()
FilesTable=Table('Files',metadata,
Column('id',Integer,primary_key=True),
Column('Filename',S
|
tring),
Column('Version',String),
Column('FullPath',String))
mapper(Files,FilesTable)
"""
metadata=Base.metadata
metadata.create_all(engine)
Session=sessionmaker(bind=engine)
session=Session()
SearchDirectory(session,r'T:\mat\Projects\Binaries',['.svn'])
session.commit()
|
plotly/python-api
|
packages/python/plotly/plotly/validators/heatmapgl/_visible.py
|
Python
|
mit
| 517
| 0.001934
|
import _plotly_utils.basevalidators
class VisibleValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="visible", parent_name="heatmapgl", **kwargs):
super(VisibleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_
|
name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"
|
),
values=kwargs.pop("values", [True, False, "legendonly"]),
**kwargs
)
|
willthames/ansible-lint
|
test/TestAnsibleSyntax.py
|
Python
|
mit
| 409
| 0
|
"""Test Ansible Syntax.
This module
|
contains tests that validate that linter does not produce errors
when encountering what counts as valid Ansible syntax.
"""
PB_WITH_NULL_TASKS = '''
- hosts: all
tasks:
'''
def test_null_tasks(default_text_runner):
"""Assure we do not fail when encountering null tasks."""
results = default_text_run
|
ner.run_playbook(PB_WITH_NULL_TASKS)
assert not results
|
mindm/2017Challenges
|
challenge_3/python/sarcodian/src/challenge_3.py
|
Python
|
mit
| 218
| 0.009174
|
def majority(array0):
store = {
|
}
for i in array0:
store[i] = store.get(i,0) + 1
for i in store.keys():
if store[i] > len(array0)//2:
|
return i
print('No majority found')
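if __name__ == '__main__':
    # quick sanity check with a hypothetical input: 3 occurs 4 times out of 6
    print(majority([3, 3, 4, 2, 3, 3]))  # -> 3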
|
alexlo03/ansible
|
lib/ansible/utils/plugin_docs.py
|
Python
|
gpl-3.0
| 4,053
| 0.002714
|
# Copyright: (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSet, MutableSequence
from ansible.parsing.plugin_docs import read_docstring, read_docstub
from ansible.parsing.yaml.loader import AnsibleLoader
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
# modules that are ok that they do not have documentation strings
BLACKLIST = {
'MODULE': frozenset(('async_wrapper',)),
'CACHE': frozenset(('base',)),
}
def merge_fragment(target, source):
for key, value in source.items():
if key in target:
# assumes both structures have same type
if isinstance(target[key], MutableMapping):
value.update(target[key])
elif isinstance(target[key], MutableSet):
value.add(target[key])
elif isinstance(target[key], MutableSequence):
value = sorted(frozenset(value + target[key]))
else:
raise Exception("Attempt to extend a documentation fragement, invalid type for %s" % key)
target[key] = value
def add_fragments(doc, filename, fragment_loader):
fragments = doc.pop('extends_documentation_fragment', [])
if isinstance(fragments, string_types):
fragments = [fragments]
# Allow the module to specify a var other than DOCUMENTATION
# to pull the fragment from, using dot notation as a separator
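# e.g. 'foo.bar' yields fragment_name='foo', fragment_var='BAR', while a
# bare 'foo' falls back to fragment_var='DOCUMENTATION'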
for fragment_slug in fragments:
fragment_slug = fragment_slug.lower()
if '.' in fragment_slug:
fragment_name, fragment_var = fragment_slug.split('.', 1)
fragment_var = fragment_var.upper()
else:
fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'
fragment_class = fragment_loader.get(fragment_name)
if fragment_class is None:
raise AnsibleAssertionError('fragment_class is None')
fragment_yaml = getattr(fragment_class, fragment_var, '{}')
fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()
if 'notes' in fragment:
notes = fragment.pop('notes')
if notes:
if 'notes' not in doc:
doc['notes'] = []
doc['notes'].extend(notes)
if 'options' not in fragment:
raise Exception("missing options in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))
# ensure options themselves are directly merged
if 'options' in doc:
try:
merge_fragment(doc['options'], fragment.pop('options'))
except Exception as e:
raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
else:
doc['options'] = fragment.pop('options')
# merge rest of the sections
try:
merge_fragment(doc, fragment)
except Exception as e:
raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False):
"""
DOCUMENTATION can be extended using documentation fragm
|
ents loaded by the PluginLoader from the module_docs_fragments directory.
"""
data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)
# add fragments to documentation
if data.get('doc
|
', False):
add_fragments(data['doc'], filename, fragment_loader=fragment_loader)
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
|
log2timeline/plaso
|
tests/cli/helpers/parsers.py
|
Python
|
apache-2.0
| 2,477
| 0.002826
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the parsers CLI arguments helper."""
import argparse
import unittest
from plaso.cli import tools
from plaso.cli.helpers import parsers
from plaso.lib import errors
from tests.cli import test_lib as cli_test_lib
class ParsersArgumentsHelperTest
|
(cli_test_lib.CLIToolTestCase):
"""Tests for the parsers CLI arguments helper."""
# pylint: disable=no-member,protected-access
_EXPECTED_OUTPUT = """\
usage: cli_helper.py [--parsers PARSER_FILTER_EXPRESSION]
Test argument parser.
{0:s}:
--parsers PARSER_FILTER_EXPRESSION
Define which presets, parsers and/or plugin
|
s to use,
or show possible values. The expression is a comma
separated string where each element is a preset,
parser or plugin name. Each element can be prepended
with an exclamation mark to exclude the item. Matching
is case insensitive. Examples: "linux,!bash_history"
enables the linux preset, without the bash_history
parser. "sqlite,!sqlite/chrome_history" enables all
sqlite plugins except for chrome_history".
"win7,syslog" enables the win7 preset, as well as the
syslog parser. Use "--parsers list" or "--info" to
list available presets, parsers and plugins.
""".format(cli_test_lib.ARGPARSE_OPTIONS)
def testAddArguments(self):
"""Tests the AddArguments function."""
argument_parser = argparse.ArgumentParser(
prog='cli_helper.py', description='Test argument parser.',
add_help=False,
formatter_class=cli_test_lib.SortedArgumentsHelpFormatter)
parsers.ParsersArgumentsHelper.AddArguments(argument_parser)
output = self._RunArgparseFormatHelp(argument_parser)
self.assertEqual(output, self._EXPECTED_OUTPUT)
def testParseOptions(self):
"""Tests the ParseOptions function."""
options = cli_test_lib.TestOptions()
options.parsers = 'winevt'
test_tool = tools.CLITool()
parsers.ParsersArgumentsHelper.ParseOptions(options, test_tool)
self.assertEqual(test_tool._parser_filter_expression, options.parsers)
with self.assertRaises(errors.BadConfigObject):
parsers.ParsersArgumentsHelper.ParseOptions(options, None)
if __name__ == '__main__':
unittest.main()
|
smurfix/pybble
|
TEST.py
|
Python
|
gpl-3.0
| 466
| 0.038627
|
# -*- coding: utf-8 -*-
## This is a minima
|
l config file for testing.
TESTING=True # this file only works in test mode
sql_driver="sqlite"
sql_database=":memory:" ## overridden when running tests
##
SECRET_KEY="fbfzkar2ihf3ulqhelg8srlzg7resibg748wifgbz478"
#TRACE=True
#M
|
EDIA_PATH="/var/tmp/pybble"
## set by the test run script
ADMIN_EMAIL="smurf@smurf.noris.de"
URLFOR_ERROR_FATAL=False
REDIS_HOST='localhost'
REDIS_DB=3 ## a db number not used in production
|
jpbonson/SBBReinforcementLearner
|
SBB/environments/default_environment.py
|
Python
|
bsd-2-clause
| 2,122
| 0.010839
|
import abc
from default_metrics import DefaultMetrics
class DefaultEnvironment(object):
"""
Abstract class for environments. All environments must implement these
methods to be able to work with SBB.
"""
__metaclass__ = abc.ABCMeta
def __init__(self):
self.metrics_ = DefaultMetrics(self)
@abc.abstractmethod
def reset(self):
"""
Method that is called at the beginning of each run by SBB, to reset the
variables that will be used by the generations.
"""
@abc.abstractmethod
def setup(self, teams_population):
"""
Method that is called at the beginning of each generation by SBB, to set the
variables that will be used by the generation and remove the ones that are no
longer being used.
"""
@abc.abstractmethod
def evaluate_point_population(self, teams_population):
"""
Evalu
|
ate the fitness of the point population, to define which points will be removed
or added in the next generation, when setup_point_population() is executed.
"""
|
@abc.abstractmethod
def evaluate_teams_population_for_training(self, teams_population):
"""
Evaluate all the teams using the evaluate_team() method, and sets metrics. Used only
for training.
"""
@abc.abstractmethod
def evaluate_team(self, team, mode):
"""
Evaluate the team using the environment inputs. May be executed in the training
or the test mode.
This method must set the attribute results_per_points of the team, if you intend to
use pareto.
"""
@abc.abstractmethod
def validate(self, current_generation, teams_population):
"""
For classification:
- Return the best team for the teams_population using the champion set.
For reinforcement:
- All teams go against the validation set, and then the best one go against the champion set
"""
def hall_of_fame(self):
return []
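# A minimal concrete subclass would implement each abstract method, e.g.
# (hypothetical sketch):
# class MyEnvironment(DefaultEnvironment):
#     def reset(self): ...
#     def setup(self, teams_population): ...
#     def evaluate_point_population(self, teams_population): ...
#     def evaluate_teams_population_for_training(self, teams_population): ...
#     def evaluate_team(self, team, mode): ...
#     def validate(self, current_generation, teams_population): ...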
|
nlproc/splunkml
|
bin/mcpredict.py
|
Python
|
apache-2.0
| 2,357
| 0.026729
|
#!env python
import os
import sys
sys.path.append(
os.path.join(
os.environ.get( "SPLUNK_HOME", "/opt/splunk/6.1.3" ),
"etc/apps/framework/contrib/splunk-sdk-python/1.3.0",
)
)
from collections import Counter, OrderedDict
from math import log
from nltk import tokenize
import execnet
import json
from splunklib.searchcommands import Configuration, Option
from splunklib.searchcommands import dispatch, validators
from remote_commands import OptionRemoteStreamingCommand, ValidateLocalFile
@Configuration(clear_required_fields=False)
class MCPredict(OptionRemoteStreamingCommand):
model = Option(require=True, validate=ValidateLocalFile(mode='r',extension="pkl",subdir='classifiers',nohandle=True))
code = """
import os, sys, itertools, collections, numbers
try:
import cStringIO as StringIO
except:
import StringIO
import numpy as np
import scipy.sparse as sp
from multiclassify import process_records
from gensim.models import LsiModel, TfidfModel, LdaModel
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import LabelEncoder
from sklearn.externals import joblib
if __name__ == "__channelexec__":
args = channel.receive()
records = []
for record in channel:
if not record:
break
records.append(record)
if records:
records = np.array(records)
# Try loading existing model
try:
model = joblib.load(args['model'])
encoder = model['encoder']
est = model['est']
target = model['target']
fields = model['fields']
if model.get('text'):
if model['text'] == 'lsi':
textmodel = LsiModel.load(args['model'].replace(".pkl",".%s" % model['text']))
elif model['text'] == 'tfidf':
textmodel = TfidfModel.load(args['model'].replace(".pkl",".%s" % model['text']))
else:
textmodel = model['text']
except Exc
|
eption as e:
print >> sys.stderr, "ERROR", e
channel.send({ 'error': "Couldn't find model %s" % args['model']})
else:
X, y_labels, textmodel = process_records(records, fields, target, textmodel=textmodel)
print >> sys.stderr, X.shape
y = est.predict(X)
y_labels = encoder.inverse_transform(y)
|
for i, record in enumerate(records):
record['%s_predicted' % target] = y_labels.item(i)
channel.send(record)
"""
def __dir__(self):
return ['model']
dispatch(MCPredict, sys.argv, sys.stdin, sys.stdout, __name__)
|
psychopy/versions
|
psychopy/hardware/labjacks.py
|
Python
|
gpl-3.0
| 1,270
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Too
|
ls Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
"""This provides a basic ButtonBox class, and imports the
`ioLab python library <http://github.com/ioLab/python-ioLabs>`_.
"""
from __future__ import absolute_import, division, print_function
try:
from labjack import u3
except ImportError:
import u3
# Could not load the Exodriver driver
# "dlopen(liblabjackusb.dylib, 6): image not found"
class U3(u3.U3):
def setData(self, by
|
te, endian='big', address=6701):
"""Write 1 byte of data to the U3 port
parameters:
- byte: the value to write (must be an integer 0:255)
- endian: ['big' or 'small'] ignored from 1.84 onwards; automatic?
- address: the memory address to send the byte to
- 6700 = FIO
- 6701 (default) = EIO (the DB15 connector)
- 6702 = CIO
"""
# Upper byte is the writemask, lower byte is the 8 lines/bits to set.
# Bit 0 = line 0, bit 1 = line 1, bit 2 = line 2, etc.
self.writeRegister(address, 0xFF00 + (byte & 0xFF))
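# e.g. setData(0b00000101) sets lines 0 and 2 high (and all others low) on
# the EIO port, since the 0xFF00 write mask enables all eight bits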
|
agry/NGECore2
|
scripts/mobiles/rori/dreaded_vir_vir.py
|
Python
|
lgpl-3.0
| 1,491
| 0.028169
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Ve
|
ctor
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('dreaded_vir_vir')
mobileTemplate.setLevel(40)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(True)
|
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Avian Meat")
mobileTemplate.setMeatAmount(25)
mobileTemplate.setBoneType("Avian Bones")
mobileTemplate.setBoneAmount(16)
mobileTemplate.setSocialGroup("vir vur")
mobileTemplate.setAssistRange(2)
mobileTemplate.setStalker(True)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_vir_vur.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_3')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('dreaded_vir_vir', mobileTemplate)
return
|
Jacy-Wang/MyLeetCode
|
MaxDepthBinTree104Recursion.py
|
Python
|
gpl-2.0
| 678
| 0.001475
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def maxDepth(self, root):
"""
:type root: TreeNode
:rtype: int
"""
if root:
self.cand = []
self.find(0, root
|
)
return max(self.cand)
else:
return 0
def find(self, length, node):
if node.left:
self.cand.append(self.find(length + 1, node.left))
if node.right:
self.cand.append(self.find(length + 1, node.right))
self.
|
cand.append(length + 1)
|
Venefyxatu/phennyfyxata
|
phennyfyxata/scores/models.py
|
Python
|
bsd-2-clause
| 1,030
| 0.000971
|
from django.db import models
class Writer(models.Model):
alias = models.ForeignKey('Writer', blank=True, null=True)
nick = models.CharField(unique=True, max_length=16)
class War(models.Model):
id = models.AutoField(primary_key=True)
starttim
|
e = models.DateTimeField()
endtime = models.DateTimeField()
finished = models.BooleanField(default=False)
def __unicode__(self):
return "War %s: %s tot %s (%s minuten)" % (self.id, self.starttime.strftime("%H:%M"), self.endtime.strftime("%H:%M"), (self.endtime - self.starttime).seconds / 60)
class ParticipantScore(models.Model):
writer = models.ForeignKey(Writer)
war = models.Foreign
|
Key(War)
score = models.IntegerField(default=0, blank=True)
class WriterStats(models.Model):
warcount = models.IntegerField()
wordcount = models.IntegerField()
wpm = models.DecimalField(max_digits=5, decimal_places=2)
class WarParticipants(models.Model):
war = models.ForeignKey(War)
participant = models.ForeignKey(Writer)
|
alexallah/django
|
tests/postgres_tests/models.py
|
Python
|
bsd-3-clause
| 5,090
| 0.000393
|
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from .fields import (
ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField,
DateRangeField, DateTimeRangeField, FloatRangeField, HStoreField,
IntegerRangeField, JSONField, SearchVectorField,
)
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection, context):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = 'postgresql'
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=[], blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), blank=True, null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
|
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class O
|
therTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField())
uuids = ArrayField(models.UUIDField())
decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2))
tags = ArrayField(TagField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
class CharFieldModel(models.Model):
field = models.CharField(max_length=16)
class TextFieldModel(models.Model):
field = models.TextField()
def __str__(self):
return self.field
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
scene = models.CharField(max_length=255)
setting = models.CharField(max_length=255)
def __str__(self):
return self.scene
class Character(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class CITestModel(PostgreSQLModel):
name = CICharField(primary_key=True, max_length=255)
email = CIEmailField()
description = CITextField()
def __str__(self):
return self.name
class Line(PostgreSQLModel):
scene = models.ForeignKey('Scene', models.CASCADE)
character = models.ForeignKey('Character', models.CASCADE)
dialogue = models.TextField(blank=True, null=True)
dialogue_search_vector = SearchVectorField(blank=True, null=True)
dialogue_config = models.CharField(max_length=100, blank=True, null=True)
def __str__(self):
return self.dialogue or ''
class RangesModel(PostgreSQLModel):
ints = IntegerRangeField(blank=True, null=True)
bigints = BigIntegerRangeField(blank=True, null=True)
floats = FloatRangeField(blank=True, null=True)
timestamps = DateTimeRangeField(blank=True, null=True)
dates = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
integer = models.IntegerField(blank=True, null=True)
big_integer = models.BigIntegerField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
date = models.DateField(blank=True, null=True)
class JSONModel(models.Model):
field = JSONField(blank=True, null=True)
field_custom = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
class Meta:
required_db_features = ['has_jsonb_datatype']
class ArrayFieldSubclass(ArrayField):
def __init__(self, *args, **kwargs):
super().__init__(models.IntegerField())
class AggregateTestModel(models.Model):
"""
To test postgres-specific general aggregation functions
"""
char_field = models.CharField(max_length=30, blank=True)
integer_field = models.IntegerField(null=True)
boolean_field = models.NullBooleanField()
class StatTestModel(models.Model):
"""
To test postgres-specific aggregation functions for statistics
"""
int1 = models.IntegerField()
int2 = models.IntegerField()
related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
uuid = models.UUIDField(default=None, null=True)
|
mtils/ems
|
ems/qt4/services/modelupdate.py
|
Python
|
mit
| 590
| 0.013559
|
'''
Created on 04.10.2012
@author: michi
'''
from PyQt4.QtCore import pyqtSignal
from ems.qt4.applicationservice import ApplicationService #@UnresolvedImport
class ModelUpdateService(ApplicationService):
objectIdsUpdated = pyqtSignal(str, list)
objectsUpdated = pyqtSignal(str)
modelUpdated = pyqtSignal()
def triggerUpdate(self, modelObjectName, keys=None):
if key
|
s is not None:
self.objectIdsUpdated.emit(modelObjectName, keys)
else:
self.objectsUpdated.emit(modelObjectName)
self.model
|
Updated.emit()
|
nicolashainaux/mathmaker
|
tests/integration/mental_calculation/04_yellow1/test_04_yellow1_multi_divi_10_100_1000.py
|
Python
|
gpl-3.0
| 1,673
| 0
|
# -*- coding: utf-8 -*-
# Mathmaker creates automatically maths exercises sheets
# with their answers
# Copyright 2006-2018 Nicolas Hainaux <nh.techn@gmail.com>
# This file is part of Mathmaker.
# Mathmaker is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
# Mathmaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Mathmaker; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from mathmaker.lib import shared
from mathmaker.lib.document.frames import Sheet
def test_multi_divi_10_100_1000(
|
):
"""Check this sheet is generated without any error."""
shared.machine.write_out(str(Sheet('mental_calculation',
'04_yellow1',
'multi_divi_10_100_1000')),
pdf_output=True)
def test_multi_divi_10_100_1000_embedding_js():
"""Check this sheet is generated without any error."""
|
shared.machine.write_out(str(Sheet('mental_calculation',
'04_yellow1',
'multi_divi_10_100_1000',
enable_js_form=True)),
pdf_output=True)
|
sandvine/horizon
|
horizon/forms/fields.py
|
Python
|
apache-2.0
| 15,925
| 0.000063
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import re
import netaddr
import six
import uuid
from django.core.exceptions import ValidationError # noqa
from django.core import urlresolvers
from django.forms import fields
from django.forms import forms
from django.forms.utils import flatatt # noqa
from django.forms import widgets
from django.template import Context # noqa
from django.template.loader import get_template # noqa
from django.utils.encoding import force_text
from django.utils.functional import Promise # noqa
from django.utils import html
from django.utils.safestring import mark_safe # noqa
from django.utils.translation import ugettext_lazy as _
ip_allowed_symbols_re = re.compile(r'^[a-fA-F0-9:/\.]+$')
IPv4 = 1
IPv6 = 2
class IPField(fields.Field):
"""Form field for entering IP/range values, with validation.
Supports IPv4/IPv6 in the format:
.. xxx.xxx.xxx.xxx
.. xxx.xxx.xxx.xxx/zz
.. ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
.. ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/zz
and all compressed forms. Also the short forms
are supported:
xxx/yy
xxx.xxx/yy
.. attribute:: version
Specifies which IP version to validate,
valid values are 1 (fields.IPv4), 2 (fields.IPv6) or
both - 3 (fields.IPv4 | fields.IPv6).
Defaults to IPv4 (1)
.. attribute:: mask
Boolean flag to validate subnet masks along with IP address.
E.g: 10.0.0.1/32
.. attribute:: mask_range_from
Subnet range limitation, e.g. 16
That means the input mask will be checked to be in the range
16:max_value. Useful to limit the subnet ranges
to A/B/C-class networks.
"""
invalid_format_message = _("Incorrect format for IP address")
invalid_version_message = _("Invalid version for IP address")
invalid_mask_message = _("Invalid subnet mask")
max_v4_mask = 32
max_v6_mask = 128
def __init__(self, *args, **kwargs):
self.mask = kwargs.pop("mask", None)
self.min_mask = kwargs.pop("mask_range_from", 0)
self.version = kwargs.pop('version', IPv4)
super(IPField, self).__init__(*args, **kwargs)
def validate(self, value):
super(IPField, self).validate(value)
if not value and not self.required:
return
try:
if self.mask:
self.ip = netaddr.IPNetwork(value)
else:
self.ip = netaddr.IPAddress(value)
except Exception:
raise ValidationError(self.invalid_format_message)
if not any([self.version & IPv4 > 0 and self.ip.version == 4,
self.version & IPv6 > 0 and self.ip.version == 6]):
raise ValidationError(self.invalid_version_message)
if self.mask:
if self.ip.version == 4 and \
not self.min_mask <= self.ip.prefixlen <= self.max_v4_mask:
raise ValidationError(self.invalid_mask_message)
if self.ip.version == 6 and \
not self.min_mask <= self.ip.prefixlen <= self.max_v6_mask:
raise ValidationError(self.invalid_mask_message)
def clean(self, value):
super(IPField, self).clean(value)
return str(getattr(self, "ip", ""))
class MultiIPField(IPField):
"""Extends IPField to allow comma-separated lists of addresses."""
def validate(self, value):
self.addresses = []
if value:
addresses = value.split(',')
for ip in addresses:
super(MultiIPField, self).validate(ip)
self.addresses.append(ip)
else:
super(MultiIPField, self).validate(value)
def clean(self, value):
super(MultiIPField, self).clean(value)
return str(','.join(getattr(self, "addresses", [])))
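# Usage sketch (values are illustrative): IPField(version=IPv4 | IPv6,
# mask=True) accepts '10.0.0.0/24' or 'fe80::/64' but rejects '10.0.0.0/33',
# and MultiIPField additionally accepts '10.0.0.1,10.0.0.2'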
class SelectWidget(widgets.Select):
"""Customizable select widget, that allows to render
data-xxx attributes from choices. This widget also
allows user to specify additional html attributes
for choices.
.. attribute:: data_attrs
Specifies object properties to serialize as
data-xxx attribute. If passed ('id', ),
this will be rendered as:
<option data-id="123">option_value</option>
where 123 is the value of choice_value.id
.. attribute:: transform
A callable used to render the display value
from the option object.
.. attribute:: transform_html_attrs
A callable used to render additional HTML attributes
for the option object. It returns a dictionary
containing the html attributes and their values.
For example, to define a title attribute for the
choices::
helpText = { 'Apple': 'This is a fruit',
'Carrot': 'This is a vegetable' }
def get_title(data):
text = helpText.get(data, None)
if text:
return {'title': text}
else:
return {}
....
....
widget=forms.ThemableSelect( attrs={'class': 'switchable',
'data-slug': 'source'},
transform_html_attrs=get_title )
self.fields[<field name>].choices =
([
('apple','Apple'),
('carrot','Carrot')
])
"""
def __init__(self, attrs=None, choices=(), data_attrs=(), transform=None,
transform_html_attrs=None):
self.data_attrs = data_attrs
self.transform = transform
self.transform_html_attrs = transform_html_attrs
super(SelectWidget, self).__init__(attrs, choices)
def render_option(self, selected_choices, option_value, option_label):
option_value = force_text(option_value)
other_html = (u' selected="selected"'
if option_value in selected_choices else '')
other_html += self.transform_option_html_attrs(option_label)
data_attr_html = self.get_data_attrs(option_label)
if data_attr_html:
other_html += ' ' + data_attr_html
option_label = self.transform_option_label(option_label)
return u'<option value="%s"%s>%s</option>' % (
|
html.escape(option_value), other_html, option_label)
def get_data_attrs(self, option_label):
other_html = []
if
|
not isinstance(option_label, (six.string_types, Promise)):
for data_attr in self.data_attrs:
data_value = html.conditional_escape(
force_text(getattr(option_label,
data_attr, "")))
other_html.append('data-%s="%s"' % (data_attr, data_value))
return ' '.join(other_html)
def transform_option_label(self, option_label):
if (not isinstance(option_label, (six.string_types, Promise)) and
callable(self.transform)):
option_label = self.transform(option_label)
return html.conditional_escape(force_text(option_label))
def transform_option_html_attrs(self, option_label):
if not callable(self.transform_html_attrs):
return ''
return flatatt(self.transform_html_attrs(option_label))
class ThemableSelectWidget(SelectWidget):
"""Bootstrap base select field widget."""
def render(self, name, value, attrs=None, choices=()):
# NOTE(woodnt): Currently the "attrs" contents are being added to the
#
|
deanishe/alfred-fakeum
|
src/libs/faker/providers/phone_number/tr_TR/__init__.py
|
Python
|
mit
| 389
| 0
|
from __future__ import unicode_literals
from .. import Provider as PhoneNumberProvider
class Provider(PhoneNumberProvider):
form
|
ats = (
'+90(###)#######',
'+90 (###) #######',
'0### ### ## ##',
'0##########',
'0###-### ####',
'(###)### ####',
'### # ###',
'+90(###)###-####x###',
'+90(###)###
|
-####x####',
)
|
KiChjang/servo
|
tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_handlers.py
|
Python
|
mpl-2.0
| 16,970
| 0.00165
|
import json
import os
import sys
import unittest
import uuid
import pytest
from urllib.error import HTTPError
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer, TestUsingH2Server, doc_root
from .base import TestWrapperHandlerUsingServer
from serve import serve
class TestFileHandler(TestUsingServer):
def test_GET(self):
resp = self.request("/document.txt")
self.assertEqual(200, resp.getcode())
self.assertEqual("text/plain", resp.info()["Content-Type"])
self.assertEqual(open(os.path.join(doc_root, "document.txt"), 'rb').read(), resp.read())
def test_headers(self):
resp = self.request("/with_headers.txt")
self.assertEqual(200, resp.getcode())
self.assertEqual("text/html", resp.info()["Content-Type"])
self.assertEqual("PASS", resp.info()["Custom-Header"])
# This will fail if it isn't a valid uuid
uuid.UUID(resp.info()["Another-Header"])
self.assertEqual(resp.info()["Same-Value-Header"], resp.info()["Another-Header"])
self.assert_multiple_headers(resp, "Double-Header", ["PA", "SS"])
def test_range(self):
resp = self.request("/document.txt", headers={"Range":"bytes=10-19"})
self.assertEqual(206, resp.getcode())
data = resp.read()
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(10, len(data))
self.assertEqual("bytes 10-19/%i" % len(expected), resp.info()['Content-Range'])
self.assertEqual("10", resp.info()['Content-Length'])
self.assertEqual(expected[10:20], data)
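# Note: HTTP byte ranges are inclusive on both ends, so "bytes=10-19"
# selects exactly ten bytes and the 206 response advertises
# "Content-Range: bytes 10-19/<total>"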
def test_range_no_end(self):
resp = self.request("/document.txt", headers={"Range":"bytes=10-"})
self.assertEqual(206, resp.getcode())
data = resp.read()
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(len(expected) - 10, len(data))
self.assertEqual("bytes 10-%i/%i" % (len(expected) - 1, len(expected)), resp.info()['Content-Range'])
self.assertEqual(expected[10:], data)
def test_range_no_start(self):
resp = self.request("/document.txt", headers={"Range":"bytes=-10"})
self.assertEqual(206, resp.getcode())
data = resp.read()
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(10, len(data))
self.assertEqual("bytes %i-%i/%i" % (len(expected) - 10, len(expected) - 1, len(expected)),
resp.info()['Content-Range'])
self.assertEqual(expected[-10:], data)
def test_multiple_ranges(self):
resp = self.request("/document.txt", headers={"Range":"bytes=1-2,5-7,6-10"})
self.assertEqual(206, resp.getcode())
data = resp.read()
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertTrue(resp.info()["Content-Type"].startswith("multipart/byteranges; boundary="))
boundary = resp.info()["Content-Type"].split("boundary=")[1]
parts = data.split(b"--" + boundary.encode("ascii"))
self.assertEqual(b"\r\n", parts[0])
self.assertEqual(b"--", parts[-1])
expected_parts = [(b"1-2", expected[1:3]), (b"5-10", expected[5:11])]
for expected_part, part in zip(expected_parts, parts[1:-1]):
header_string, body = part.split(b"\r\n\r\n")
headers = dict(item.split(b": ", 1) for item in header_string.split(b"\r\n") if item.strip())
self.assertEqual(headers[b"Content-Type"], b"text/plain")
self.assertEqual(headers[b"Content-Range"], b"bytes %s/%i" % (expected_part[0], len(expected)))
self.assertEqual(expected_part[1] + b"\r\n", body)
def test_range_invalid(self):
with self.assertRaises(HTTPError) as cm:
self.request("/document.txt", headers={"Range":"bytes=11-10"})
self.assertEqual(cm.exception.code, 416)
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
with self.assertRaises(HTTPError) as cm:
self.request("/document.txt", headers={"Range":"bytes=%i-%i" % (len(expected), len(expected) + 10)})
self.assertEqual(cm.exception.code, 416)
def test_sub_config(self):
resp = self.request("/sub.sub.txt")
expected = b"localhost localhost %i" % self.server.port
assert resp.read().rstrip() == expected
def test_sub_headers(self):
resp = self.request("/sub_headers.sub.txt", headers={"X-Test": "PASS"})
expected = b"PASS"
assert resp.read().rstrip() == expected
def test_sub_params(self):
resp = self.request("/sub_params.txt", query="plus+pct-20%20pct-3D%3D=PLUS+PCT-20%20PCT-3D%3D&pipe=sub")
expected = b"PLUS PCT-20 PCT-3D="
assert resp.read().rstrip() == expected
class TestFunctionHandler(TestUsingServer):
def test_string_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return "test data"
route = ("GET", "/test/test_string_rv", handler)
self.server.router.register(*route)
resp = self.request(route[1])
self.assertEqual(200, resp.getcode())
self.assertEqual("9", resp.info()["Content-Length"])
self.assertEqual(b"test data", resp.read())
def test_tuple_1_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return ()
route = ("GET", "/test/test_tuple_1_rv", handler)
self.server.router.register(*route)
with pytest.raises(HTTPError) as cm:
self.request(route[1])
assert cm.value.code == 500
def test_tuple_2_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return [("Content-Length", 4), ("test-header", "test-value")], "test data"
route = ("GET", "/test/test_tuple_2_rv", handler)
self.server.router.register(*route)
resp = self.request(route[1])
self.assertEqual(200, resp.getcode())
self.assertEqual("4", resp.info()["Content-Length"])
self.assertEqual("test-value", resp.info()["test-header"])
self.assertEqual(b"test", resp.read())
def test_tuple_3_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return 202, [("test-header", "test-value")], "test data"
route = ("GET", "/test/test_tuple_3_rv", handler)
self.server.router.register(*route)
resp = self.request(route[1])
self.assertEqual(202, resp.getcode())
self.assertEqual("test-value", resp.info()["test-header"])
self.assertEqual(b"te
|
st data", resp.read())
def test_tuple_3_rv_1(self):
@wptserve.handlers.handler
def handler(request, response):
return (202, "Some Status"), [("test-header", "test-value")], "test data"
route = ("GET", "/test/test_tuple_3_rv_1", handler)
self.server.router.register(*route)
resp = self.request(route[1])
|
self.assertEqual(202, resp.getcode())
self.assertEqual("Some Status", resp.msg)
self.assertEqual("test-value", resp.info()["test-header"])
self.assertEqual(b"test data", resp.read())
def test_tuple_4_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return 202, [("test-header", "test-value")], "test data", "garbage"
route = ("GET", "/test/test_tuple_1_rv", handler)
self.server.router.register(*route)
with pytest.raises(HTTPError) as cm:
self.request(route[1])
assert cm.value.code == 500
def test_none_rv(self):
@wptserve.handlers.handler
def handler(request, response):
return None
route = ("GET", "/test/test_none_rv", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert "Content-Length" not in resp.info()
assert resp.read() == b""
class TestJSONHandler(TestUsingServer):
def test_json_0(self):
@wptserve.handlers.json_handler
|
SNoiraud/gramps
|
gramps/plugins/docgen/cairodoc.py
|
Python
|
gpl-2.0
| 12,315
| 0.002436
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007 Zsolt Foldvari
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2013 Vassilii Khachaturov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""PS/PDF output generator based on Cairo.
"""
#------------------------------------------------------------------------
#
# Python modules
#
#------------------------------------------------------------------------
import logging
#-------------------------------------------------------------------------
#
# GTK modules
#
#-------------------------------------------------------------------------
import gi
gi.require_version('Pango', '1.0')
gi.require_version('PangoCairo', '1.0')
from gi.repository import Pango, PangoCairo
import cairo
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.constfunc import is_quartz
import gramps.plugins.lib.libcairodoc as libcairodoc
from gramps.gen.plug.docgen import INDEX_TYPE_ALP, INDEX_TYPE_TOC
from gramps.gen.errors import ReportError
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
LOG = logging.getLogger(".cairodoc")
#------------------------------------------------------------------------
#
# Constants
#
#------------------------------------------------------------------------
# resolution
DPI = 72.0
#------------------------------------------------------------------------
#
# CairoDocgen class
#
#------------------------------------------------------------------------
class CairoDocgen(libcairodoc.CairoDoc):
"""Render the document into a file using a Cairo surface.
"""
def create_cairo_surface(self, fobj, width_in_points, height_in_points):
# See
# http://cairographics.org/documentation/pycairo/3/re
|
ference/surfaces.html#class-ps
|
surface-surface
# for the arg semantics.
raise "Missing surface factory override!!!"
def run(self):
"""Create the output file.
The derived class overrides EXT and create_cairo_surface
"""
# get paper dimensions
paper_width = self.paper.get_size().get_width() * DPI / 2.54
paper_height = self.paper.get_size().get_height() * DPI / 2.54
page_width = round(self.paper.get_usable_width() * DPI / 2.54)
page_height = round(self.paper.get_usable_height() * DPI / 2.54)
left_margin = self.paper.get_left_margin() * DPI / 2.54
top_margin = self.paper.get_top_margin() * DPI / 2.54
# create cairo context and pango layout
filename = self._backend.filename
# Cairo can't reliably handle unicode filenames on Linux or
# Windows, so open the file for it.
with open(filename, 'wb') as fd:
try:
surface = self.create_cairo_surface(fd, paper_width,
paper_height)
surface.set_fallback_resolution(300, 300)
cr = cairo.Context(surface)
fontmap = PangoCairo.font_map_new()
fontmap.set_resolution(DPI)
pango_context = fontmap.create_context()
options = cairo.FontOptions()
options.set_hint_metrics(cairo.HINT_METRICS_OFF)
if is_quartz():
PangoCairo.context_set_resolution(pango_context, 72)
PangoCairo.context_set_font_options(pango_context, options)
layout = Pango.Layout(pango_context)
PangoCairo.update_context(cr, pango_context)
# paginate the document
self.paginate_document(layout, page_width, page_height,
DPI, DPI)
body_pages = self._pages
# build the table of contents and alphabetical index
toc_page = None
index_page = None
toc = []
index = {}
for page_nr, page in enumerate(body_pages):
if page.has_toc():
toc_page = page_nr
if page.has_index():
index_page = page_nr
for mark in page.get_marks():
if mark.type == INDEX_TYPE_ALP:
if mark.key in index:
if page_nr + 1 not in index[mark.key]:
index[mark.key].append(page_nr + 1)
else:
index[mark.key] = [page_nr + 1]
elif mark.type == INDEX_TYPE_TOC:
toc.append([mark, page_nr + 1])
# paginate the table of contents
rebuild_required = False
if toc_page is not None:
toc_pages = self.__generate_toc(layout, page_width,
page_height, toc)
offset = len(toc_pages) - 1
if offset > 0:
self.__increment_pages(toc, index, toc_page, offset)
rebuild_required = True
if index_page and toc_page < index_page:
index_page += offset
else:
toc_pages = []
# paginate the index
if index_page is not None:
index_pages = self.__generate_index(layout, page_width,
page_height, index)
offset = len(index_pages) - 1
if offset > 0:
self.__increment_pages(toc, index, index_page, offset)
rebuild_required = True
if toc_page and toc_page > index_page:
toc_page += offset
else:
index_pages = []
# rebuild the table of contents and index if required
if rebuild_required:
if toc_page is not None:
toc_pages = self.__generate_toc(layout, page_width,
page_height, toc)
if index_page is not None:
index_pages = self.__generate_index(layout, page_width,
page_height, index)
# render the pages
if toc_page is not None:
body_pages = body_pages[:toc_page] + toc_pages + \
body_pages[toc_page+1:]
if index_page is not None:
body_pages = body_pages[:index_page] + index_pages + \
body_pages[index_page+1:]
self._pages = body_pages
for page_nr in range(len(self._pages)):
cr.save()
cr.translate(left_margin, top_margin)
self.draw_page(page_nr, cr, layout,
page_width, page_height,
DPI, DPI)
cr.show_page()
cr.restore()
                # close the surface
|
heynemann/level
|
tests/unit/test_app.py
|
Python
|
mit
| 6,332
| 0.000632
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of level.
# https://github.com/heynemann/level
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2016, Bernardo Heynemann <heynemann@gmail.com>
from importer import Importer
from preggy import expect
from tornado.testing import gen_test
from level.app import LevelApp
from level.config import Config
from level.json import dumps, loads
from level.context import Context, ServerParameters
from level.services import BaseService
from tests.unit.base import TestCase, WebTestCase
class AppTestCase(TestCase):
def setUp(self):
super(AppTestCase, self).setUp()
self.server_parameters = ServerParameters(
io_loop=self.io_loop,
host='localhost',
port=8888,
config_path='./tests/fixtures/test-valid.conf',
log_level='INFO',
debug=True,
)
self.config = Config()
self.importer = Importer()
self.importer.load(
dict(key='service_classes', module_names=self.config.SERVICES, class_name='Service'),
) # load all modules here
services = []
for service_class in self.importer.service_classes:
srv = service_class()
srv.name = service_class.__module__
services.append(srv)
self.importer.services = services
self.context = Context(self.server_parameters, self.config, self.importer)
@gen_test
async def test_can_create_app(self):
app = await LevelApp.create(self.context)
expect(app).not_to_be_null()
expect(app.context).to_equal(self.context)
@gen_test
async def test_can_initialize_services(self):
class TestService(BaseService):
def __init__(self, *args, **kw):
super(TestService, self).__init__(*args, **kw)
self.initialized = False
self.name = 'TestService'
self.app = None
async def initialize_service(self, app):
await super(TestService, self).initialize_service(app)
self.initialized = True
s = TestService()
self.context.importer.services = [s]
app = LevelApp(self.context, [])
expect(app).not_to_be_null()
await app.initialize()
expect(s.initialized).to_be_true()
expect(s.app).to_equal(app)
@gen_test
async def test_can_get_handlers_from_services(self):
class TestService(BaseService):
def __init__(self):
self.initialized = False
self.name = 'TestService'
self.app = None
async def initialize_service(self, app):
await super(TestService, self).initialize_service(app)
self.initialized = True
async def get_handlers(self):
return (
('/test', None),
)
s = TestService()
self.context.importer.services = [s]
app = LevelApp(self.context, [])
expect(app).not_to_be_null()
handlers = await app.get_handlers()
expect(handlers).to_length(2)
expect(handlers[1]).to_be_like(
('/test', None),
)
class WebSocketTestCase(WebTestCase):
def setUp(self):
super(WebSocketTestCase, self).setUp()
class TestService(BaseService):
def __init__(self):
self.message = None
self.name = 'TestService'
async def on_message(self, message):
if message['type'] == 'ping':
await self.publish_message(message['socket_id'], 'pong', message['payload'])
else:
self.message = message
self.socket_id = message['socket_id']
self.service = TestService()
self.service.app = self.app
self.context.importer.services = [self.service]
@gen_test
async def test_can_receive_open_message(self):
await self.websocket_connect('/ws')
expect(self.ws).not_to_be_null()
await self.wait_for(lambda: self.service.message is not None)
expect(self.service.socket_id).not_to_be_null()
expect(self.service.message).to_be_like({
'type': 'core.connection.open',
'socket_id': self.service.socket_id,
'payload': {},
})
@gen_test
async def test_can_receive_close_message(self):
await self.websocket_connect('/ws')
expect(self.ws).not_to_be_null()
# wait for open
await self.wait_for(lambda: self.service.message is not None)
self.service.message = None
self.websocket_close()
await self.wait_for(lambda: self.service.message is not None)
expect(self.service.socket_id).not_to_be_null()
expect(self.service.message).to_be_like({
'type': 'core.connection.close',
'socket_id': self.service.socket_id,
'payload': {},
})
@gen_test
async def test_can_receive_message(self):
await self.websocket_connect('/ws')
expect(self.ws).not_to_be_null()
await self.ws.write_message(dumps({
'type': 'custom.message',
'qwe': 123,
}))
await self.wait_for(lambda: self.service.message is not None and self.service.message['type'] == 'custom.message')
expect(self.service.socket_id).not_to_be_null()
expect(self.service.message).to_be_like({
'type': 'custom.message',
'socket_id': self.service.socket_id,
'payload': {
'qwe': 123,
},
})
@gen_test
async def test_can_publish_message(self):
await self.websocket_connect('/ws')
expect(self.ws).not_to_be_null()
await self.write_ws_message(dumps({
'type': 'ping',
'msg': 'woot?!',
}))
response = await self.read_ws_message()
expect(response).not_to_be_null()
obj = loads(response)
expect(obj).to_equal({
'type': 'pong',
'socket_id': self.service.socket_id,
'payload': {
'msg': 'woot?!',
}
})
|
rtfd/readthedocs.org
|
readthedocs/core/fields.py
|
Python
|
mit
| 218
| 0
|
# -*- coding: utf-8 -*-
"""Shar
|
ed model fields and defaults."""
import
|
binascii
import os
def default_token():
"""Generate default value for token field."""
return binascii.hexlify(os.urandom(20)).decode()
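# Illustrative only: default_token() yields a 40-character hex string,
# e.g. '3f786850e387550fdab836ed7e6dc881de23001b' (20 random bytes, hex-encoded).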
|
BhallaLab/moose-thalamocortical
|
pymoose/gui/moosetree.py
|
Python
|
lgpl-2.1
| 3,777
| 0.018268
|
# moosetree.py ---
#
# Filename: moosetree.py
# Description:
# Author: subhasis ray
# Maintainer:
# Created: Tue Jun 23 18:54:14 2009 (+0530)
# Version:
# Last-Updated: Sun Jul 5 01:35:11 2009 (+0530)
# By: subhasis ray
# Update #: 137
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import moose
import sys
from PyQt4 import QtCore, QtGui
class MooseTreeItem(QtGui.QTreeWidgetItem):
def __init__(self, *args):
QtGui.QTreeWidgetItem.__init__(self, *args)
self.mooseObj_ = None
def setMooseObject(self, mooseObject):
if isinstance(mooseObject, moose.Id):
self.mooseObj_ = moose.Neutral(mooseObject)
elif isinstance(mooseObject, moose.PyMooseBase):
self.mooseObj_ = mooseObject
else:
            raise TypeError('expected moose.Id or moose.PyMooseBase, got %s' % type(mooseObject))
self.setText(0, QtCore.QString(self.mooseObj_.name))
self.setToolTip(0, QtCore.QString('class:' + self.mooseObj_.className))
def getMooseObject(self):
return self.mooseObj_
def updateSlot(self, text):
self.setText(0, QtCore.QString(self.mooseObj_.name))
class MooseTreeWidget(QtGui.QTreeWidget):
def __init__(self, *args):
QtGui.QTreeWidget.__init__(self, *args)
self.rootObject = moose.Neutral('/')
self.itemList = []
self.setupTree(self.rootObject, self, self.itemList)
self.setCurrentItem(self.itemList[0]) # Make root the default item
def setupTree(self, mooseObject, parent, itemlist):
item = MooseTreeItem(parent)
item.setMooseObject(mooseObject)
itemlist.append(item)
for child in mooseObject.children():
childObj = moose.Neutral(child)
self.setupTree(childObj, item, itemlist)
return item
def recreateTree(self):
self.clear()
self.itemList = []
self.setupTree(moose.Neutral('/'), self, self.itemList)
def insertMooseObjectSlot(self, class_name):
try:
class_name = str(class_name)
class_obj = eval('moose.' + class_name)
current = self.currentItem()
new_item = MooseTreeItem(current)
parent = current.getMooseObject()
# print 'creating new', class_name, 'under', parent.path
new_obj = class_obj(class_name, parent)
new_item.setMooseObject(new_obj)
current.addChild(new_item)
self.itemList.append(new_item)
except AttributeError:
print class_name, ': no such class in module moose'
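        # Editorial note: getattr(moose, class_name) would be a safer
        # equivalent of the eval() above, since it cannot execute arbitrary
        # expressions; behaviour is otherwise identical.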
if __name__ == '__main__':
c = moose.Compartment("c")
d = moose.HHChannel("chan", c)
app = QtGui.QApplication(sys.argv)
widget = MooseTreeWidget()
# widget = QtGui.QTreeWidget()
# items = []
# root = moose.Neutral('/')
# parent = widget
# item = setupTree(root, widget, items)
# while stack:
# mooseObject = stack.pop()
# item = QtGui.QTreeWidgetItem(parent)
# item.setText(0, widget.tr(mooseObject.name))
# parent = item
# for child in mooseObject.children():
# stack.append(moose.Neutral(child))
widget.show()
sys.exit(app.exec_())
#
# moosetree.py ends here
|
VitalPet/bank-statement-import
|
account_bank_statement_import/__openerp__.py
|
Python
|
agpl-3.0
| 573
| 0
|
# -*- encoding: utf-8 -*-
{
'name': 'Account Bank Statement Import',
'category': 'Banking addons',
'version': '8.0.1.0.1',
'author': 'OpenERP SA,'
'Odoo Community Association (OCA)',
    'website': 'https://github.com/OCA/bank-statement-import',
'depends': ['account'],
'data': [
"views/account_config_settings.xml",
'views/account_bank_statement_import_view.xml',
],
'demo': [
'demo/fiscalyear_period.xml',
'demo/partner_bank.xml',
],
'auto_install': False,
'installable': False,
}
|
cjhdev/lora_device_lib
|
vendor/cmocka/.ycm_extra_conf.py
|
Python
|
mit
| 3,399
| 0.028832
|
import os
import ycm_core
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-x', 'c',
'-Iinclude',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = 'obj'
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
        break
if flag.startswith( path_flag ):
        path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
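# Worked example (illustrative, not part of the original config): with
# working_directory '/proj',
#   MakeRelativePathsInFlagsAbsolute( [ '-Iinclude', '-x', 'c' ], '/proj' )
# returns [ '-I/proj/include', '-x', 'c' ].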
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
# NOTE: This is just for YouCompleteMe; it's highly likely that your project
# does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR
# ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.
try:
final_flags.remove( '-stdlib=libc++' )
except ValueError:
pass
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
|
Tecnativa/website
|
website_event_register_free/model/__init__.py
|
Python
|
agpl-3.0
| 1,041
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import ir_ui_view
from . import event_registration
|
mohamedhagag/community-addons
|
project_scrum/tests/test_project_scrum.py
|
Python
|
agpl-3.0
| 196
| 0.020408
|
# -*- coding: utf-8 -*-
from openerp.tests import common
class TestProjectScrum(common.TransactionCase):
    def test_project_scrum(self):
env = self.env
record = env['project_scrum.0'].create({})
|
homoludens/EventMap
|
hello/forms.py
|
Python
|
agpl-3.0
| 1,156
| 0.006055
|
from flask_wtf import Form
from wtforms import TextField, DecimalField, TextAreaField, DateField, validators, PasswordField, BooleanField
class CommentForm(Form):
text = TextField('Title', [validators.Required()])
text2 = TextAreaField('Body')
longitude = DecimalField('Longitude')
    latitude = DecimalField('Latitude')
date = DateField('Date')
class SignupForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(), validators.EqualTo('confirm', message='Passwords must match')])
confirm = PasswordField('Confirm Password', [validators.Required()])
email = TextField('eMail', [validators.Required(),validators.Email()])
#accept_tos = BooleanField('I accept the TOS', [validators.Required])
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
class PasswordResetForm(Form):
username = TextField('Username')
email = TextField('eMail')
class PasswordChangeForm(Form):
password = PasswordField('Password', [validators.Required()])
|
mhoffma/micropython
|
tests/basics/struct_micropython.py
|
Python
|
mit
| 332
| 0.009036
|
# test MicroPython-specific features of struct
try:
import ustruct as struct
except:
try:
import struct
except ImportError:
import sys
print("SKIP")
sys.exit()
class A():
pass
# pack and unpack objects
o = A()
s = struct.pack("<O", o)
o2 = struct.unpack("<O", s)
print(o is o2[0])
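# Editorial note: "<O" is a MicroPython extension that packs a raw pointer
# to the object, so the round trip above should print True.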
|
simontakite/sysadmin
|
pythonscripts/thinkpython/thread.py
|
Python
|
gpl-2.0
| 439
| 0.006834
|
"""Example code using Python threads.
Copyright 2010 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from threading import Thread
from time import sleep
def counter(xs, delay=1):
for x in xs:
print x
sleep(delay)
# one thread counts backwards, fast
t = Thread(target=counter, args=[range(100, 1, -1), 0.25])
t.start()
# the other thread counts forwards, slow
counter(range(1, 100), 1)
|
mrok/ircAntiFloodBot
|
src/antiFloodBot.py
|
Python
|
apache-2.0
| 2,562
| 0.010929
|
import os
import time
import json
import pprint
from util import hook
def readConfig():
### Read config json and parse it
confJson = None
with open(os.getcwd() + '/antiFloodBotConfig.json', 'r') as confFile:
confJson = confFile.read()
return json.loads(confJson)
inputs = {} #store time (unixtimestamp in sec) of every entry sent by user in map where key is user nickname
kicked = [] #store nicknames of kicked users
conf = readConfig()
timeIntervalScope = conf['timeIntervalScope'] # interval when entries are collected [sec]
entryThreshold = conf['entryThreshold'] #how many entries are allowed in timeIntervalScope
logFile = conf['logFile']
@hook.event('PRIVMSG')
def antiFlood(inp, nick=None, msg=None, conn=None, chan=None):
if (nick not in inputs):
inputs[nick] = []
currentTime = time.time()
timeThreshold = currentTime - timeIntervalScope
inputs[nick].append(currentTime)
    inputs[nick] = filter(lambda x: x > timeThreshold, inputs[nick]) #drop every entry older than timeIntervalScope seconds
if len(inputs[nick]) >= entryThreshold: #if user has good day, kick one
explanationMessage = conf['kickMessage']
file = open(logFile, 'a')
file.write('Trying to kick %s on channel %s \n' % (nick, chan))
if nick in kicked:
explanationMessage = conf['banMessage']
out = "MODE %s +b %s" % (chan, nick)
conn.send(out)
            file.write('%s is kicked with ban \n' % (nick))
out = "KICK %s %s : %s" % (chan, nick, explanationMessage)
conn.send(out)
kicked.append(nick)
file.close()
#todo
    #if the same user joins again within 24 hours and keeps spamming, temp ban for XX time.
    #step 3) if the same user joins after the removal of the ban and spams, permanent ban.
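    #illustrative numbers: with timeIntervalScope=8 and entryThreshold=5, a
    #user whose last 5 messages all arrived inside the past 8 seconds gets
    #kicked; the filter() above drops older timestamps, so the window slides.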
@hook.event('PRIVMSG')
def paramDump(inp, nick=None, msg=None, conn=None, chan=None):
def saveToFile(file, label, obj):
file.write("===== " + label + " ======== \n")
file.write("type " + str(type (obj)) + " ========\n")
file.write("methods " + str(dir(obj)) + " ========\n")
file.write("properties ========\n")
pprint.pprint(obj, file)
file.write("\n\n\n")
file = open(logFile, 'a')
saveToFile(file, "inp", inp)
saveToFile(file, "nick", nick)
saveToFile(file, "msg", msg)
saveToFile(file, "chan", chan)
saveToFile(file, "conn", conn)
file.close()
@hook.event("004")
def onConnect(param, conn=None, raw=None):
conn.send("Antiflod bot is ready")
|
oldmanmike/minecraftd
|
minecraftd/tests/test.py
|
Python
|
gpl-3.0
| 812
| 0.002463
|
import random
import unittest
from minecraftd.common import tmux_id
"""
def tmux_id(id_list):
random.seed()
new_id = random.randint(1,100)
while new_id in id_list:
new_id = random.randint(1,100)
return new_id
"""
class CommonTest(unittest.TestCase):
def setUp(self):
self.id_list = random.sample(range(1000), 10)
def test(self):
new_id_list = list(self.id_list)
print(new_id_list)
old_id_list = list(self.id_list)
print(old_id_list)
new_id = tmux_id(self.id_list)
print(new_id)
        new_id_list.append(new_id)
print(self.id_list)
print(new_id_list)
        print(old_id_list)
self.assertEqual(len(set(new_id_list)), (len(set(old_id_list)) + 1))
if __name__ == '__main__':
unittest.main()
|
jzcxer/0Math
|
python/test.py
|
Python
|
gpl-3.0
| 286
| 0.045455
|
l=int(input())
d=list(input().split())
s={0:0}
for i in range(l):
for j in range(i+1,l):
diff=abs(int(d[i])-int(d[j]))
if diff not in s:
s[abs(diff)]=1
else:
s[diff]=s[diff]+1
f = lambda x: print(str(x)+" "+str(s[x]))
f(max(s.keys()))
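# Worked example (illustrative): input "3" then "1 5 2" gives pairwise
# differences 4, 1 and 3, so the script prints "4 1" (the largest
# difference followed by how many pairs produce it).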
|
koturn/FiveProgrammingProblems
|
Python/problem05.py
|
Python
|
mit
| 602
| 0.001661
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def problem05(nrlst, oplst, answer):
if len(nrlst) == 0:
return []
else:
exprlst = []
def _problem05(expr, i):
if i < len(nrlst):
for op in oplst:
_problem05(expr + op + str(nrlst[i]), i + 1)
elif eval(expr) == answer:
exprlst.append(expr)
_problem05(str(nrlst[0]), 1)
return exprlst
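# One expression the search above finds (illustrative) is
# '1 + 2 + 3 - 4 + 5 + 6 + 78 + 9', which eval()s to 100; the '78' comes
# from joining adjacent digits with the empty-string "operator".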
if __name__ == '__main__':
ANSWER = 100
    for expr in problem05(range(1, 10), [' + ', ' - ', ''], ANSWER):
print expr, '=', ANSWER
|
mosen/salt-osx
|
_modules/app.py
|
Python
|
mit
| 2,741
| 0.001094
|
# -*- coding: utf-8 -*-
'''
Manage running applications.
Similar to `ps`, you can treat running applications as unix processes.
On OS X, there is a higher level Cocoa functionality (see NSApplication) which responds to events sent through the
notification center. This module operates at that level.
:maintainer: Mosen <mosen@github.com>
:maturity: beta
:depends: objc
:platform: darwin
'''
import logging
import salt.utils
log = logging.getLogger(__name__)
__virtualname__ = 'app'
HAS_LIBS = False
try:
from Cocoa import NSWorkspace
HAS_LIBS = True
except ImportError:
log.debug('Execution module not suitable because one or more imports failed.')
def __virtual__():
'''
Only load module if we are running on OS X.
'''
return __virtualname__ if HAS_LIBS else False
def quit(appname, blocking=False):
'''
Ask an application to quit.
Does not guarantee that the application will quit without user interaction.
Does not block until the application quits.
CLI Example::
salt '*' app.quit 'Safari'
'''
workSpace = NSWorkspace.sharedWorkspace()
applications = workSpace.runningApplications()
for app in applications:
if app.localizedName() == appname:
acknowledged = app.terminate()
return acknowledged
return None
def force_quit(appname, blocking=False):
'''
Force an application to quit aka `Force Quit`.
Does not block until the application quits.
CLI Example::
salt '*' app.force_quit 'Safari'
'''
workSpace = NSWorkspace.sharedWorkspace()
applications = workSpace.runningApplications()
for app in applications:
if app.localizedName() == appname:
acknowledged = app.forceTerminate()
return acknowledged
return None
def launch(application):
'''
Open an Application by name.
This does not need to be the full path to the application, and does not need to have an .app extension.
CLI Example::
salt '*' app.launch 'TextEdit'
'''
workSpace = NSWorkspace.sharedWorkspace()
status = workSpace.launchApplication_(application)
    return status
def processes():
'''
Get a list of running processes in the user session
TODO: optional get by bundle ID
TODO: optional get hidden
'''
    workSpace = NSWorkspace.sharedWorkspace()
appList = workSpace.runningApplications()
names = [app.localizedName() for app in appList]
names.sort()
return names
def frontmost():
'''
Get the name of the frontmost application
'''
workSpace = NSWorkspace.sharedWorkspace()
app = workSpace.frontmostApplication()
return app.localizedName()
|
godiard/sugar-toolkit-gtk3
|
src/sugar3/graphics/animator.py
|
Python
|
lgpl-2.1
| 7,131
| 0
|
# Copyright (C) 2007, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""
The animator module provides a simple framework to create animations.
Example:
Animate the size of a window::
from gi.repository import Gtk
from sugar3.graphics.animator import Animator, Animation
# Construct a 5 second animator
animator = Animator(5)
# Construct a window to animate
w = Gtk.Window()
w.connect('destroy', Gtk.main_quit)
# Start the animation when the window is shown
w.connect('realize', lambda self: animator.start())
w.show()
# Create an animation subclass to animate the widget
class SizeAnimation(Animation):
def __init__(self):
# Tell the animation to give us values between 20 and
# 420 during the animation
Animation.__init__(self, 20, 420)
def next_frame(self, frame):
size = int(frame)
w.resize(size, size)
        # Add the animation to the animator
animation = SizeAnimation()
animator.add(animation)
# The animation needs to run inside a GObject main loop
Gtk.main()
STABLE.
"""
import time
from gi.repository import GObject
from gi.repository import GLib
EASE_OUT_EXPO = 0
EASE_IN_EXPO = 1
class Animator(GObject.GObject):
'''
    The animator class manages the timing for calling the
animations. The animations can be added using the `add` function
and then started with the `start` function. If multiple animations
are added, then they will be played back at the same time and rate
as each other.
The `completed` signal is emitted upon the completion of the
animation and also when the `stop` function is called.
Args:
duration (float): the duration of the animation in seconds
fps (int, optional): the number of animation callbacks to make
per second (frames per second)
easing (int): the desired easing mode, either `EASE_OUT_EXPO`
or `EASE_IN_EXPO`
.. note::
When creating an animation, take into account the limited cpu power
        on some devices, such as the XO. Setting the fps too high can
        cause significant cpu usage on the XO.
'''
__gsignals__ = {
'completed': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, duration, fps=20, easing=EASE_OUT_EXPO):
GObject.GObject.__init__(self)
self._animations = []
self._duration = duration
self._interval = 1.0 / fps
self._easing = easing
self._timeout_sid = 0
self._start_time = None
def add(self, animation):
'''
Add an animation to this animator
Args:
animation (:class:`sugar3.graphics.animator.Animation`):
the animation instance to add
'''
self._animations.append(animation)
def remove_all(self):
'''
Remove all animations and stop this animator
'''
self.stop()
self._animations = []
def start(self):
'''
Start the animation running. This will stop and restart the
animation if the animation is currently running
'''
if self._timeout_sid:
self.stop()
self._start_time = time.time()
self._timeout_sid = GLib.timeout_add(
int(self._interval * 1000), self._next_frame_cb)
def stop(self):
'''
Stop the animation and emit the `completed` signal
'''
if self._timeout_sid:
GObject.source_remove(self._timeout_sid)
self._timeout_sid = 0
self.emit('completed')
def _next_frame_cb(self):
current_time = min(self._duration, time.time() - self._start_time)
current_time = max(current_time, 0.0)
for animation in self._animations:
animation.do_frame(current_time, self._duration, self._easing)
if current_time == self._duration:
self.stop()
return False
else:
return True
class Animation(object):
'''
The animation class is a base class for creating an animation.
It should be subclassed. Subclasses should specify a `next_frame`
function to set the required properties based on the animation
progress. The range of the `frame` value passed to the `next_frame`
function is defined by the `start` and `end` values.
Args:
start (float): the first `frame` value for the `next_frame` method
end (float): the last `frame` value for the `next_frame` method
.. code-block:: python
# Create an animation subclass
class MyAnimation(Animation):
def __init__(self, thing):
# Tell the animation to give us values between 0.0 and
# 1.0 during the animation
Animation.__init__(self, 0.0, 1.0)
self._thing = thing
def next_frame(self, frame):
# Use the `frame` value to set properties
self._thing.set_green_value(frame)
'''
def __init__(self, start, end):
self.start = start
self.end = end
def do_frame(self, t, duration, easing):
'''
        This method is called by the animator class every frame. This
        method calculates the `frame` value and then calls `next_frame`.
Args:
t (float): the current time elapsed of the animation in seconds
duration (float): the length of the animation in seconds
easing (int): the easing mode passed to the animator
'''
start = self.start
change = self.end - self.start
if t == duration:
# last frame
frame = self.end
else:
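            # Exponential easing (descriptive note): EASE_OUT_EXPO starts
            # fast and decelerates, EASE_IN_EXPO starts slow and
            # accelerates; both map t in [0, duration] onto [start, end].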
if easing == EASE_OUT_EXPO:
frame = change * (-pow(2, -10 * t / duration) + 1) + start
elif easing == EASE_IN_EXPO:
frame = change * pow(2, 10 * (t / duration - 1)) + start
self.next_frame(frame)
def next_frame(self, frame):
'''
        This method is called every frame and should be overridden by
subclasses.
Args:
frame (float): a value between `start` and `end` representing
the current progress in the animation
'''
pass
|
huzq/scikit-learn
|
sklearn/datasets/_svmlight_format_io.py
|
Python
|
bsd-3-clause
| 19,022
| 0.000473
|
"""This module implements a loader and dumper for the svmlight format
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable to
predict.
This format is used as the default format for both svmlight and the
libsvm command line programs.
"""
# Authors: Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck
# Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
from contextlib import closing
import io
import os.path
import numpy as np
import scipy.sparse as sp
from .. import __version__
from ..utils import check_array, IS_PYPY
if not IS_PYPY:
from ._svmlight_format_fast import _load_svmlight_file
else:
def _load_svmlight_file(*args, **kwargs):
raise NotImplementedError(
"load_svmlight_file is currently not "
"compatible with PyPy (see "
"https://github.com/scikit-learn/scikit-learn/issues/11543 "
"for the status updates)."
)
def load_svmlight_file(
f,
*,
n_features=None,
dtype=np.float64,
multilabel=False,
zero_based="auto",
query_id=False,
offset=0,
length=-1,
):
"""Load datasets in the svmlight / libsvm format into sparse CSR matrix
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable
to predict.
This format is used as the default format for both svmlight and the
libsvm command line programs.
    Parsing a text based source can be expensive. When working
    repeatedly on the same dataset, it is recommended to wrap this
loader with joblib.Memory.cache to store a memmapped backup of the
CSR results of the first call and benefit from the near instantaneous
loading of memmapped structures for the subsequent calls.
In case the file contains a pairwise preference constraint (known
as "qid" in the svmlight format) these are ignored unless the
query_id parameter is set to True. These pairwise preference
    constraints can be used to constrain the combination of samples
when using pairwise loss functions (as is the case in some
learning to rank problems) so that only pairs with the same
query_id value are considered.
This implementation is written in Cython and is reasonably fast.
However, a faster API-compatible loader is also available at:
https://github.com/mblondel/svmlight-loader
Parameters
----------
f : str, file-like or int
(Path to) a file to load. If a path ends in ".gz" or ".bz2", it will
be uncompressed on the fly. If an integer is passed, it is assumed to
be a file descriptor. A file-like or file descriptor will not be closed
by this function. A file-like object must be opened in binary mode.
n_features : int, default=None
The number of features to use. If None, it will be inferred. This
argument is useful to load several files that are subsets of a
bigger sliced dataset: each subset might not have examples of
every feature, hence the inferred shape might vary from one
slice to another.
n_features is only required if ``offset`` or ``length`` are passed a
non-default value.
dtype : numpy data type, default=np.float64
Data type of dataset to be loaded. This will be the data type of the
output numpy arrays ``X`` and ``y``.
multilabel : bool, default=False
Samples may have several labels each (see
https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
zero_based : bool or "auto", default="auto"
Whether column indices in f are zero-based (True) or one-based
(False). If column indices are one-based, they are transformed to
zero-based to match Python/NumPy conventions.
If set to "auto", a heuristic check is applied to determine this from
the file contents. Both kinds of files occur "in the wild", but they
are unfortunately not self-identifying. Using "auto" or True should
always be safe when no ``offset`` or ``length`` is passed.
If ``offset`` or ``length`` are passed, the "auto" mode falls back
to ``zero_based=True`` to avoid having the heuristic check yield
inconsistent results on different segments of the file.
query_id : bool, default=False
If True, will return the query_id array for each file.
offset : int, default=0
Ignore the offset first bytes by seeking forward, then
discarding the following bytes up until the next new line
character.
length : int, default=-1
If strictly positive, stop reading any new line of data once the
position in the file has reached the (offset + length) bytes threshold.
Returns
-------
X : scipy.sparse matrix of shape (n_samples, n_features)
    y : ndarray of shape (n_samples,), or, in the multilabel case, a list of
tuples of length n_samples.
query_id : array of shape (n_samples,)
query_id for each sample. Only returned when query_id is set to
True.
See Also
--------
load_svmlight_files : Similar function for loading multiple files in this
format, enforcing the same number of features/columns on all of them.
Examples
--------
To use joblib.Memory to cache the svmlight file::
from joblib import Memory
from .datasets import load_svmlight_file
mem = Memory("./mycache")
@mem.cache
def get_data():
data = load_svmlight_file("mysvmlightfile")
return data[0], data[1]
X, y = get_data()
"""
return tuple(
load_svmlight_files(
[f],
n_features=n_features,
dtype=dtype,
multilabel=multilabel,
zero_based=zero_based,
query_id=query_id,
offset=offset,
length=length,
)
)
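# Illustrative only (hypothetical file contents): each svmlight line reads
#   <target> <index>:<value> <index>:<value> ...
# e.g. "1 3:0.5 7:1.2". Loading such a file returns a sparse CSR matrix and
# a label array:
#
#     X, y = load_svmlight_file("train.svm")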
def _gen_open(f):
if isinstance(f, int): # file descriptor
return io.open(f, "rb", closefd=False)
elif not isinstance(f, str):
raise TypeError("expected {str, int, file-like}, got %s" % type(f))
_, ext = os.path.splitext(f)
if ext == ".gz":
import gzip
return gzip.open(f, "rb")
elif ext == ".bz2":
from bz2 import BZ2File
return BZ2File(f, "rb")
else:
return open(f, "rb")
def _open_and_load(f, dtype, multilabel, zero_based, query_id, offset=0, length=-1):
if hasattr(f, "read"):
actual_dtype, data, ind, indptr, labels, query = _load_svmlight_file(
f, dtype, multilabel, zero_based, query_id, offset, length
)
else:
with closing(_gen_open(f)) as f:
            actual_dtype, data, ind, indptr, labels, query = _load_svmlight_file(
f, dtype, multilabel, zero_based, query_id, offset, length
)
# convert from array.array, give data the right dtype
if not multilabel:
labels = np.frombuffer(labels, np.float64)
data = np.frombuffer(data, actual_dtype)
indices = np.frombuffer(ind, np.longlong)
indptr = np.frombuffer(indptr, dtype=np.longlong) # never empty
query = np.frombuffer(query, np.int64)
data = np.asarray(data, dtype=dtype) # no-op for float{32,64}
return data, indices, indptr, labels, query
def load_svmlight_files(
files,
*,
n_features=None,
dtype=np.float64,
multilabel=False,
zero_based="auto",
query_id=False,
offset=0,
length=-1,
):
"""Load dataset from multiple files in SVMlight format
This function is equivalent to mapping load_svmlight_file over a list of
files, except that the results are concatenated into a single, flat list
and the samples vectors are constrained to all have the same number of
features.
In case the file contains a pairwise preference constraint (known
|
beeftornado/sentry
|
src/sentry/constants.py
|
Python
|
bsd-3-clause
| 16,659
| 0.00102
|
"""
These settings act as the default (base) settings for the Sentry-provided
web-server
"""
from __future__ import absolute_import, print_function
import logging
import os.path
import six
from datetime import timedelta
from collections import OrderedDict, namedtuple
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from sentry.utils.integrationdocs import load_doc
from sentry.utils.geo import rust_geoip
import sentry_relay
def get_all_languages():
results = []
for path in os.listdir(os.path.join(MODULE_ROOT, "locale")):
if path.startswith("."):
continue
if "_" in path:
pre, post = path.split("_", 1)
path = u"{}-{}".format(pre, post.lower())
results.append(path)
return results
MODULE_ROOT = os.path.dirname(__import__("sentry").__file__)
DATA_ROOT = os.path.join(MODULE_ROOT, "data")
BAD_RELEASE_CHARS = "\n\f\t/"
MAX_VERSION_LENGTH = 200
MAX_COMMIT_LENGTH = 64
COMMIT_RANGE_DELIMITER = ".."
SORT_OPTIONS = OrderedDict(
(
("priority", _("Priority")),
("date", _("Last Seen")),
("new", _("First Seen")),
("freq", _("Frequency")),
)
)
SEARCH_SORT_OPTIONS = OrderedDict(
(("score", _("Score")), ("date", _("Last Seen")), ("new", _("First Seen")))
)
# XXX: Deprecated: use GroupStatus instead
STATUS_UNRESOLVED = 0
STATUS_RESOLVED = 1
STATUS_IGNORED = 2
STATUS_CHOICES = {
"resolved": STATUS_RESOLVED,
"unresolved": STATUS_UNRESOLVED,
"ignored": STATUS_IGNORED,
# TODO(dcramer): remove in 9.0
"muted": STATUS_IGNORED,
}
# Normalize counts to the 15 minute marker. This value MUST be less than 60. A
# value of 0 would store counts for every minute, and is the lowest level of
# accuracy provided.
MINUTE_NORMALIZATION = 15
MAX_TAG_KEY_LENGTH = 32
MAX_TAG_VALUE_LENGTH = 200
MAX_CULPRIT_LENGTH = 200
MAX_EMAIL_FIELD_LENGTH = 75
ENVIRONMENT_NAME_PATTERN = r"^[^\n\r\f\/]*$"
ENVIRONMENT_NAME_MAX_LENGTH = 64
SENTRY_APP_SLUG_MAX_LENGTH = 64
# Maximum number of results we are willing to fetch when calculating rollup
# Clients should adapt the interval width based on their display width.
MAX_ROLLUP_POINTS = 10000
# Team slugs which may not be used. Generally these are top level URL patterns
# which we don't want to worry about conflicts on.
RESERVED_ORGANIZATION_SLUGS = frozenset(
(
"admin",
"manage",
"login",
"account",
"register",
"api",
"accept",
"organizations",
"teams",
"projects",
"help",
"docs",
"logout",
"404",
"500",
"_static",
"out",
"debug",
"remote",
"get-cli",
"blog",
"welcome",
"features",
"customers",
"integrations",
"signup",
"pricing",
"subscribe",
"enterprise",
"about",
"jobs",
"thanks",
"guide",
"privacy",
"security",
"terms",
"from",
"sponsorship",
"for",
"at",
"platforms",
"branding",
"vs",
"answers",
"_admin",
"support",
"contact",
"onboarding",
"ext",
"extension",
"extensions
|
",
"plugins",
"themonitor",
"settings",
"legal",
"avatar",
"organization-avatar",
"project-avatar",
"team-avatar",
"careers",
"_experiment",
"sentry-apps",
"resources",
"integration-platform",
"trust",
"legal",
"community",
)
)
RESERVED_PROJECT_SLUGS = frozenset(
(
"api-keys",
"audit-log",
"auth",
"members",
"projects",
"rate-limits",
"repos",
"settings",
"teams",
"billing",
"payments",
"legal",
"subscription",
"support",
"integrations",
"developer-settings",
"usage",
)
)
LOG_LEVELS = {
logging.NOTSET: "sample",
logging.DEBUG: "debug",
logging.INFO: "info",
logging.WARNING: "warning",
logging.ERROR: "error",
logging.FATAL: "fatal",
}
DEFAULT_LOG_LEVEL = "error"
DEFAULT_LOGGER_NAME = ""
LOG_LEVELS_MAP = {v: k for k, v in six.iteritems(LOG_LEVELS)}
# Default alerting threshold values
DEFAULT_ALERT_PROJECT_THRESHOLD = (500, 25) # 500%, 25 events
DEFAULT_ALERT_GROUP_THRESHOLD = (1000, 25) # 1000%, 25 events
# Default sort option for the group stream
DEFAULT_SORT_OPTION = "date"
# Setup languages for only available locales
_language_map = dict(settings.LANGUAGES)
LANGUAGES = [(k, _language_map[k]) for k in get_all_languages() if k in _language_map]
del _language_map
# TODO(dcramer): We eventually want to make this user-editable
TAG_LABELS = {
"exc_type": "Exception Type",
"sentry:user": "User",
"sentry:release": "Release",
"sentry:dist": "Distribution",
"os": "OS",
"url": "URL",
"server_name": "Server",
}
PROTECTED_TAG_KEYS = frozenset(["environment", "release", "sentry:release"])
# Don't use this variable directly. If you want a list of rules that are registered in
# the system, access them via the `rules` registry in sentry/rules/__init__.py
_SENTRY_RULES = (
"sentry.mail.actions.NotifyEmailAction",
"sentry.rules.actions.notify_event.NotifyEventAction",
"sentry.rules.actions.notify_event_service.NotifyEventServiceAction",
"sentry.rules.conditions.every_event.EveryEventCondition",
"sentry.rules.conditions.first_seen_event.FirstSeenEventCondition",
"sentry.rules.conditions.regression_event.RegressionEventCondition",
"sentry.rules.conditions.reappeared_event.ReappearedEventCondition",
"sentry.rules.conditions.tagged_event.TaggedEventCondition",
"sentry.rules.conditions.event_frequency.EventFrequencyCondition",
"sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyCondition",
"sentry.rules.conditions.event_attribute.EventAttributeCondition",
"sentry.rules.conditions.level.LevelCondition",
"sentry.rules.filters.age_comparison.AgeComparisonFilter",
"sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",
"sentry.rules.filters.assigned_to.AssignedToFilter",
"sentry.rules.filters.latest_release.LatestReleaseFilter",
# The following filters are duplicates of their respective conditions and are conditionally shown if the user has issue alert-filters
"sentry.rules.filters.event_attribute.EventAttributeFilter",
"sentry.rules.filters.tagged_event.TaggedEventFilter",
"sentry.rules.filters.level.LevelFilter",
)
MIGRATED_CONDITIONS = frozenset(
[
"sentry.rules.conditions.tagged_event.TaggedEventCondition",
"sentry.rules.conditions.event_attribute.EventAttributeCondition",
"sentry.rules.conditions.level.LevelCondition",
]
)
TICKET_ACTIONS = frozenset(
[
"sentry.integrations.jira.notify_action.JiraCreateTicketAction",
"sentry.integrations.vsts.notify_action.AzureDevopsCreateTicketAction",
]
)
# methods as defined by http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html + PATCH
HTTP_METHODS = ("GET", "POST", "PUT", "OPTIONS", "HEAD", "DELETE", "TRACE", "CONNECT", "PATCH")
# See https://github.com/getsentry/relay/blob/master/relay-general/src/protocol/constants.rs
VALID_PLATFORMS = sentry_relay.VALID_PLATFORMS
OK_PLUGIN_ENABLED = _("The {name} integration has been enabled.")
OK_PLUGIN_DISABLED = _("The {name} integration has been disabled.")
OK_PLUGIN_SAVED = _("Configuration for the {name} integration has been saved.")
WARN_SESSION_EXPIRED = "Your session has expired." # TODO: translate this
# Maximum length of a symbol
MAX_SYM = 256
# Known debug information file mimetypes
KNOWN_DIF_FORMATS = {
"text/x-breakpad": "breakpad",
"application/x-mach-binary": "macho",
"application/x-elf-binary": "elf",
"application/x-dosexec": "pe",
"application/x-ms-pdb": "pdb",
"text/x-proguard+plain": "proguard",
"application/x-sentry-bundle+zip": "sourcebundle",
}
NATIVE_UNKNOWN_STRING
|
supersu097/Mydailytools
|
converter.py
|
Python
|
gpl-3.0
| 905
| 0.01105
|
#!/usr/bin/env python
# coding=utf-8
import sys
import argparse
parser = argparse.ArgumentParser(
    description='convert a non-standard hostname pattern like xx-xx-[1-3] '
                'to its expanded form',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Sample:
$ ./converter.py xxx-xxx-\[1-3\]
xxx-xxx-1
xxx-xxx-2
xxx-xxx-3
Tips: You can pass several arguments after the command; don't forget to
escape the [ and ] characters.
""")
parser.add_argument(
'hostname_pattern',
help='',
type=str,
nargs='+')
args = parser.parse_args()
if __name__ == '__main__':
for arg in args.hostname_pattern:
        basestr=arg.split('-')
prefix='-'.join(basestr[:-2])
range_li=basestr[-2:]
start_num=int(range_li[0][1:])
end_num=int(range_li[1][:-1])
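        # e.g. (illustrative) 'xxx-xxx-[1-3]' yields prefix 'xxx-xxx',
        # start_num 1 and end_num 3, so the loop below prints
        # xxx-xxx-1, xxx-xxx-2 and xxx-xxx-3.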
        for i in range(start_num,end_num+1):
print prefix + '-' + str(i)
|
jiadaizhao/LeetCode
|
0201-0300/0287-Find the Duplicate Number/0287-Find the Duplicate Number.py
|
Python
|
mit
| 357
| 0.002801
|
class Solution:
def findDuplicate(self, nums: List[int]) -> int:
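        # Floyd's cycle detection: treating nums as a function i -> nums[i],
        # a repeated value forces a cycle whose entry point is the duplicate;
        # the second loop below walks two pointers to that entry.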
slow = nums[0]
        fast = nums[nums[0]]
while slow != fast:
slow = nums[slow]
fast = nums[nums[fast]]
slow2 = 0
while slow != slow2:
slow = nums[slow]
slow2 = nums[slow2]
return slow
|
dougwig/acos-client
|
acos_client/v21/partition.py
|
Python
|
apache-2.0
| 1,617
| 0
|
# Copyright 2014, Doug Wiegley, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import base
class Partition(base.BaseV21):
def exists(self, name):
if name == 'shared':
return True
try:
self._post("system.partition.search", {'name': name})
return True
except acos_errors.NotFound:
return False
def active(self, name='shared'):
if self.client.current_partition != name:
self._post("system.partition.active", {'name': name})
self.client.current_partition = name
def create(self, name):
params = {
'partition': {
'max_aflex_file': 32,
'network_partition': 0,
'name': name
}
}
if name != 'shared':
self._post("system.partition.create", params)
def delete(self, name):
if name != 'shared':
self.client.session.close()
self._post("system.partition.delete", {"name": name})
|
dadisigursveinn/VEF-Lokaverkefni
|
photos/views.py
|
Python
|
bsd-3-clause
| 991
| 0.002018
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from .models import Document
from .forms import DocumentForm
def list(request):
# Handle file upload
if request.method == 'POST':
form = DocumentForm(request.POST, request.FILES)
if form.is_valid():
newdoc = Document(docfile=request.FILES['docfile'])
newdoc.save()
# Redirect to the document list after POST
return HttpResponseRedirect(reverse('ljosmyndasida.photos.views.list'))
else:
        form = DocumentForm() # An empty, unbound form
# Load documents for the list page
    documents = Document.objects.all()
# Render list page with the documents and the form
return render_to_response(
'list.html',
{'documents': documents, 'form': form},
context_instance=RequestContext(request)
)
|
wilkerwma/codeschool
|
src/cs_core/migrations/0007_auto_20160619_2154.py
|
Python
|
gpl-3.0
| 729
| 0.001372
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-20 00:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('cs_core', '0006_auto_20160619_2151'),
]
operations = [
migrations.RemoveField(
model_name='responsecontext',
name='parent',
),
migrations.AlterField(
model_name='responsecontext',
name='delayed_feedback',
            field=models.BooleanField(default=False, help_text='If set, students will only be able to see the feedback after the activity deadline expires.', verbose_name='delayed feedback'),
),
]
|
tofu-rocketry/apel
|
test/test_blah.py
|
Python
|
apache-2.0
| 6,667
| 0.00855
|
import datetime
import unittest
from iso8601 import ParseError
from apel.parsers import BlahParser
from apel.db.records.record import InvalidRecordException
class ParserBlahTest(unittest.TestCase):
'''
    Test case for the BLAH parser
'''
def setUp(self):
self.parser = BlahParser('testSite', 'testHost')
def test_parse(self):
line1 = ('"timestamp=2012-05-20 23:59:47" '
+'"userDN=/O=GermanGrid/OU=UniWuppertal/CN=Torsten Harenberg" '
+'"userFQAN=/atlas/Role=production/Capability=NULL" '
+'"ceID=cream-2-fzk.gridka.de:8443/cream-pbs-atlasXL" '
+'"jobID=CREAM410741480" "lrmsID=9575064.lrms1" "localUser=11999"')
line1_values = {"TimeStamp": datetime.datetime(2012, 5, 20, 23, 59, 47),
"GlobalUserName":"/O=GermanGrid/OU=UniWuppertal/CN=Torsten Harenberg",
"FQAN": "/atlas/Role=production/Capability=NULL",
"CE": "cream-2-fzk.gridka.de:8443/cream-pbs-atlasXL",
"GlobalJobId": "CREAM410741480",
"LrmsId": "9575064.lrms1",
}
cases = {}
cases[line1] = line1_values
for line in cases.keys():
record = self.parser.parse(line)
cont = record._record_content
# Keys presence in record
self.assertTrue(cont.has_key("TimeStamp"))
self.assertTrue(cont.has_key("GlobalUserName"))
self.assertTrue(cont.has_key("FQAN"))
self.assertTrue(cont.has_key("CE"))
self.assertTrue(cont.has_key("GlobalJobId"))
self.assertTrue(cont.has_key("LrmsId"))
for key in cases[line].keys():
self.assertEqual(cont[key], cases[line][key], "%s != %s for key %s" % (cont[key], cases[line][key], key))
def test_invalid_timestamp(self):
'''
Test if parser raises exception for invalid timestamp
'''
line_invalidtimestamp = ('"timestamp=2012-05-20A23:59:47" '
+'"userDN=/O=GermanGrid/OU=UniWuppertal/CN=Torsten Harenberg" '
+'"userFQAN=/atlas/Role=production/Capability=NULL" '
+'"ceID=cream-2-fzk.gridka.de:8443/cream-pbs-atlasXL" '
+'"jobID=CREAM410741480" "lrmsID=9575064.lrms1" "localUser=11999"')
# Should raise an exception - we have 'A' between date and time
try:
# iso8601 >= 0.1.9 version of test (now stricter in what it accepts)
self.assertRaises(ParseError, self.parser.parse, line_invalidtimestamp)
except:
# iso8601 <= 0.1.8 version of test (should be deprecated)
self.assertRaises(InvalidRecordException, self.parser.parse, line_invalidtimestamp)
def test_invalid_record_line(self):
line_invalid = ('"timestamp=2012-05-20 23:59:47" '
+'"userDN=/O=GermanGrid/OU=UniWuppertal/CN=Torsten Harenberg" '
+'"userFQAN=/atlas&Role=production/Capability=NULL" '
+'"ceID=cream-2-fzk.gridka.de:8443/cream-pbs-atlasXL" '
+'"jobID=CREAM410741480"&sd"lrmsID=9575064.lrms1" "localUser=11999"')
self.assertRaises(ValueError, self.parser.parse, line_invalid)
def test_multiple_fqans(self):
"""The parser should take the first FQAN to be the primary FQAN."""
lines = (
'"timestamp=2014-05-18 00:00:58" "userDN=/C=CA/O=Grid/OU=triumf.ca/'
'CN=Asoka De Silva GC1" "userFQAN=/atlas/Role=pilot/Capability=NULL'
'" "userFQAN=/atlas/Role=NULL/Capability=NULL" "userFQAN=/atlas/ca/'
'Role=NULL/Capability=NULL" "userFQAN=/atlas/lcg1/Role=NULL/Capabil'
'ity=NULL" "ceID=ce1.triumf.ca:8443/cream-pbs-atlas" "jobID=CREAM66'
'3276716" "lrmsID=15876368.ce1.triumf.ca" "localUser=41200" "client'
'ID=cream_663276716"',
'"timestamp=2014-05-18 00:03:00" "userDN=/DC=ch/DC=cern/OU=Organic '
'Units/OU=Users/CN=atlpilo2/CN=531497/CN=Robot: ATLAS Pilot2" "user'
'FQAN=/atlas/Role=pilot/Capability=NULL" "userFQAN=/atlas/Role=NULL'
'/Capability=NULL" "userFQAN=/atlas/lcg1/Role=NULL/Capability=NULL"'
' "userFQAN=/atlas/usatlas/Role=NULL/Capability=NULL" "ceID=ce1.tri'
'umf.ca:8443/cream-pbs-atlas" "jobID=CREAM503347888" "lrmsID=158764'
'80.ce1.triumf.ca" "localUser=41200" "clientID=cream_503347888"',
)
values = (
(datetime.datetime(2014, 5, 18, 00, 00, 58),
'/C=CA/O=Grid/OU=triumf.ca/CN=Asoka De Silva GC1',
'/atlas/Role=pilot/Capability=NULL', # primary FQAN is first one
'atlas',
'/atlas',
'Role=pilot',
'ce1.triumf.ca:8443/cream-pbs-atlas',
'CREAM663276716',
'15876368.ce1.triumf.ca',
datetime.datetime(2014, 5, 17, 00, 00, 58),
datetime.datetime(2014, 6, 15, 00, 00, 58),
0
),
(datetime.datetime(2014, 5, 18, 00, 03, 00),
'/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=atlpilo2/CN=531497/CN'
'=Robot: ATLAS Pilot2',
'/atlas/Role=pilot/Capability=NULL', # primary FQAN is first one
'atlas',
'/atlas',
'Role=pilot',
'ce1.triumf.ca:8443/cream-pbs-atlas',
'CREAM503347888',
'15876480.ce1.triumf.ca',
datetime.datetime(2014, 5, 17, 00, 03, 00),
datetime.datetime(2014, 6, 15, 00, 03, 00),
0
),
)
fields = ('TimeStamp', 'GlobalUserName', 'FQAN', 'VO', 'VOGroup',
'VORole', 'CE', 'GlobalJobId', 'LrmsId', 'ValidFrom',
'ValidUntil', 'Processed')
cases = {}
for line, value in zip(lines, values):
cases[line] = dict(zip(fields, value))
for line in cases.keys():
record = self.parser.parse(line)
            cont = record._record_content
# Check that 'Site' has been set
self.assertEqual(cont['Site'], 'testSite')
for key in cases[line].keys():
# Check all fields are present
self.assertTrue(key in cont, "Key '%s' not in record." % key)
# Check values are correct
                self.assertEqual(cont[key], cases[line][key],
"'%s' != '%s' for key '%s'" %
(cont[key], cases[line][key], key))
if __name__ == '__main__':
unittest.main()
|
mbroome/ogslb
|
bin/backend-test.py
|
Python
|
gpl-2.0
| 2,025
| 0.017284
|
#!/usr/bin/python
# Open Global Server Load Balancer (ogslb)
# Copyright (C) 2010 Mitchell Broome
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys, os
from subprocess import *
scriptPath = os.path.realpath(os.path.dirname(sys.argv[0]))
# account for where we live
sys.path.append(scriptPath + '/..')
sys.path.append(scriptPath + '/../lib')
import pprint
pp = pprint.PrettyPrinter(indent=4)
# this is a very basic program to test backend.py. We effectively emulate the
# PowerDNS pipe protocol, writing queries to the backend's stdin and reading
# replies from its stdout. Basically, just call this with a hostname as an argument.
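# A pipe-backend exchange typically looks like this (illustrative):
#   -> HELO\t1                                 <- OK\t<banner>
#   -> Q\t<qname>\tIN\tANY\t-1\t<remote-ip>    <- DATA\t... lines, then END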
if __name__ == '__main__':
host = ''
try:
host = sys.argv[1]
except:
print "need a hostname to lookup";
sys.exit()
try:
scriptName = scriptPath + '/backend.py'
p = Popen(scriptName, shell=True, bufsize=256, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
# p = Popen(scriptName, shell=True, bufsize=256, stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdin, child_stdout) = (p.stdin, p.stdout)
child_stdin.write('HELO\t1\n');
child_stdin.flush()
l = child_stdout.readline()
print l
child_stdin.write('Q\t%s\tIN\tANY\t-1\t127.0.0.1\n' % host);
child_stdin.flush()
l = child_stdout.readline()
print l
p.close()
except:
''' '''
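# For reference, a sketch of the PowerDNS pipe-backend exchange this script
# drives (ABI version 1; the exact banner text depends on the backend):
#   -> HELO\t1
#   <- OK\t<banner>
#   -> Q\t<qname>\tIN\tANY\t-1\t127.0.0.1
#   <- DATA\t<qname>\t<class>\t<type>\t<ttl>\t<id>\t<content>  (zero or more)
#   <- END
# Note: Popen objects have no .close() method, so the p.close() above raises
# AttributeError, which the bare except silently swallows.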
|
zepheir/pySrv_sipai
|
srv/src/sipaiSampleServer.py
|
Python
|
apache-2.0
| 4,049
| 0.013844
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2012-2-5
@author: zepheir
'''
import sys
sys.path.append('/app/srv/src')
from binascii import b2a_hex
try:
from twisted.internet import epollreactor
epollreactor.install()
except:
pass
from twisted.internet import reactor
from twisted.python import log
from twisted.application import service
from zhyDB import ZhyDB
import Zoro
from ussop import sipai as Sipai
import time
import config
from config import *
def ReceiveData(*data):
if DEBUG: print 'print data----------------', data
# 常量
# ZDB = SipaiDB()
zhy = ZhyDB()
SipaiModsDict = zhy.listSipaiMods(allSDS=None)
# factoryDict = {}
# modules = {}
class SampleServer(object):
"""docstring for SampleServer"""
def __init__(self, *sds):
super(SampleServer, self).__init__()
self.sds = sds
self.host,self.port = self.sds[0], int(self.sds[1])
self.modules = []
self.mod = object
self.nowtype=''
self.factory = Zoro.SetupModbusConnect(self.host, self.port, self.ReceiveData, reConnectMode=False)
self.factory.spendtime = 0.3
self.setup()
def setup(self):
self.modules += SipaiModsDict[self.sds]
self.sampletimer = SipaiSampleTimer
if ECHO: print "*********** Time pass from start: %s"%(time.ctime()), self.factory.connection.getDestination(),self.factory.getState()
def ReceiveData(self, *data):
if DEBUG: print ' ===> Received Data:', data, b2a_hex(data[2])
# global zhy
_result = self.mod.dealdata(data[2])
print '----------result---------',_result
print data[0],data[1],zhy.updateSipaiResults(
ip=data[1][0],
port=data[1][1],
addr=data[0],
type=self.nowtype,
# value=b2a_hex(data[2])
value=_result
)
def update(self):
if DEBUG: print "[",self.sds,"] starting in the SampleServer Class!"
if len(self.modules)>0:
modinfo=self.modules.pop(0)
self.nowtype = modinfo['type']
self.mod = Sipai.createspm(type=modinfo['type'], address=modinfo['addr'])
_cmd = self.mod.cmd(self.mod.CMD_READDATA)
zhy.setSipaiModState(
ip=self.host,
port=str(self.port),
addr=modinfo['addr'],
type=self.nowtype,
state='reading'
)
if DEBUG: print "===> Output command:",b2a_hex(_cmd)
reactor.callLater(0.1, self.factory.protocol.SendCmd, _cmd)
reactor.callLater(self.factory.spendtime, self.update)
self.sampletimer-=self.factory.spendtime
else:
if SERVERRECONNECT:
reactor.callLater(self.factory.spendtime, self.factory.connection.disconnect)
reactor.callLater(SdsConnectTimer,self.factory.connection.connect)
reactor.callLater(SdsConnectTimer,self.setup)
reactor.callLater(self.sampletimer-SdsConnectTimer, self.update)
# reactor.callLater(SdsConnectTimer+self.factory.spendtime, self.update)
servs ={}
def main():
for sds in SipaiModsDict:
servs[sds]=SampleServer(sds[0],sds[1])
servs[sds].update()
# time.sleep(0.2)
# if DEBUG:
# # servs1=SampleServer('130.139.200.50','6020')
# servs2=SampleServer('130.139.200.51','10001')
# # servs3=SampleServer('130.139.200.56','10001')
#
# servs1.update()
# servs2.update()
# # servs3.update()
# else:
# for sds in SipaiModsDict:
# servs[sds]=SampleServer(sds[0],sds[1])
# servs[sds].update()
# time.sleep(0.2)
if __name__ == '__main__':
import sys
main()
reactor.run()
print 'reactor stopped!'
sys.exit(1)
elif __name__ =="__builtin__":
import sys
main()
application = service.Application("SI
|
PAI")
|
endlessm/chromium-browser
|
third_party/llvm/lldb/test/API/lang/objc/conflicting-definition/TestConflictingDefinition.py
|
Python
|
bsd-3-clause
| 1,681
| 0.000595
|
"""Test that types defined in shared libraries work correctly."""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestRealDefinition(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipUnlessDarwin
def test_frame_var_after_stop_at_implementation(self):
"""Test that we can find the implementation for an objective C type"""
if self.getArchitecture() == 'i386':
self.skipTest("requires modern objc runtime")
self.build()
self.shlib_names = ["libTestExt.dylib", "libTest.dylib"]
self.common_setup()
line = line_number('TestExt/TestExt.m', '// break here')
lldbutil.run_break_set_by_file_and_line(
self, 'TestExt.m', line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
self.expect("breakpoint list -f", BREAKPOINT_HIT_ONCE,
substrs=[' resolved, hit count = 1'])
# This should display correctly.
self.expect(
"expr 42",
"A simple expression should execute correctly",
substrs=["42"])
def common_setup(self):
exe = self.getBuildArtifact("a.out")
target = self.dbg.CreateTarget(exe)
self.registerSharedLibrariesWithTarget(target, self.shlib_names)
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
|
hmustafamail/digitalimageprocessing
|
HW 3 - Spatial Filtering/spatialFiltering.py
|
Python
|
gpl-2.0
| 8,567
| 0.023345
|
# Mustafa Hussain
# Digital Image Processing with Dr. Anas Salah Eddin
# FL Poly, Spring 2015
#
# Homework 3: Spatial Filtering
#
# USAGE NOTES:
#
# Written in Python 2.7
#
# Please ensure that the script is run from the same directory as the images
# directory!
import cv2
import copy
#import matplotlib.pyplot as plt
import numpy
import math
#from skimage import exposure
INPUT_DIRECTORY = 'input/'
OUTPUT_DIRECTORY = 'output/'
IMAGE_FILE_EXTENSION = '.JPG'
MAX_INTENSITY = 255 # 8-bit images
def averagingFilter(image):
"""Each pixel becomes the average of its immediately surrounding pixels.
We are doing a simple 3x3 box blur.
Referencing below zero wraps around, so top and left sides will be blurred.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
# Avoid right, bottom edges.
for i in range(width - 1):
for j in range(height - 1):
total = 0.0
for i1 in range(i - 1, i + 2):
for j1 in range(j - 1, j + 2):
total = total + float(image[i1][j1])
filteredImage[i][j] = float(total) / float(9)
return filteredImage
def gaussianFilter(image):
"""Each pixel becomes the Gaussian-weighted average of nearby pixels.
Referencing below zero wraps around, so top and left sides will be blurred.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
# Avoid right, bottom edges.
for i in range(width - 2):
for j in range(height - 2):
# Mask from homepages.inf.ed.ac.uk/rbf/HIPR2/gsmooth.htm
total = 0.0
total += 1 * float(image[i-2][j+2])
total += 4 * float(image[i-1][j+2])
total += 7 * float(image[i-0][j+2])
total += 4 * float(image[i+1][j+2])
total += 1 * float(image[i+2][j+2])
total += 4 * float(image[i-2][j+1])
total += 16 * float(image[i-1][j+1])
total += 26 * float(image[i-0][j+1])
total += 16 * float(image[i+1][j+1])
total += 4 * float(image[i+2][j+1])
total += 7 * float(image[i-2][j+0])
total += 26 * float(image[i-1][j+0])
total += 41 * float(image[i-0][j+0])
total += 26 * float(image[i+1][j+0])
total += 7 * float(image[i+2][j+0])
total += 4 * float(image[i-2][j-1])
total += 16 * float(image[i-1][j-1])
total += 26 * float(image[i-0][j-1])
total += 16 * float(image[i+1][j-1])
total += 4 * float(image[i+2][j-1])
total += 1 * float(image[i-2][j-2])
total += 4 * float(image[i-1][j-2])
total += 7 * float(image[i-0][j-2])
total += 4 * float(image[i+1][j-2])
total += 1 * float(image[i+2][j-2])
filteredImage[i][j] = total / float(273)
return filteredImage
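# Note: the hard-coded mask above is the standard 5x5 integer approximation
# of a Gaussian (sigma ~ 1.0) from the HIPR2 reference cited above; its 25
# weights sum to 273, which is why the accumulated total is divided by 273.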
def medianFilter(image):
"""Each pixel becomes the median of its immediately surrounding pixels.
We are doing a simple 5x5 median blur.
Referencing below zero wraps around, so top and left sides will be blurred.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
# Avoid right, bottom edges.
for i in range(width - 2):
for j in range(height - 2):
neighborhood = list()
for i1 in range(i - 2, i + 3):
for j1 in range(j - 2, j + 3):
neighborhood.append(image[i1][j1])
filteredImage[i][j] = numpy.median(neighborhood)
return filteredImage
def laplacianFilter(image):
"""Approximates the second derivative, bringing out edges.
Referencing below zero wraps around, so top and left sides will be sharpened.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
originalImage = copy.deepcopy(image)
# Avoid right, bottom edges.
for i in range(width - 1):
for j in range(height - 1):
# Mask from homepages.inf.ed.ac.uk/rbf/HIPR2/log.htm
total = 0.0
total += -1 * float(image[i][j + 1])
total += -1 * float(image[i - 1][j])
total += 4 * float(image[i][j])
total += -1 * float(image[i + 1][j])
total += -1 * float(image[i][j - 1])
filteredImage[i][j] = originalImage[i][j] + (1.5 * (total / 4.0))
return filteredImage
def sobelXFilter(image):
"""Performs a horizontal Sobel operation.
Referencing below zero wraps around, so top and left sides will be sharpened.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
#originalImage = copy.deepcopy(image)
# Remove some noise before we begin.
image = medianFilter(image)
# Avoid right, bottom edges.
for i in range(width - 1):
for j in range(height - 1):
# Mask from en.wikipedia.org/wiki/Sobel_operator
total = 0.0
total += -1 * float(image[i - 1][j - 1])
total += 1 * float(image[i + 1][j - 1])
total += -2 * float(image[i - 1][j])
total += 2 * float(image[i + 1][j])
total += -1 * float(image[i - 1][j + 1])
total += 1 * float(image[i + 1][j + 1])
#filteredImage[i][j] = originalImage[i][j] + (total / 6.0)
filteredImage[i][j] = total / 6.0
return filteredImage
def sobelYFilter(image):
"""Performs a vertical Sobel operation.
Referencing below zero wraps around, so top and left sides will be sharpened.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
#originalImage = copy.deepcopy(image)
# Remove some noise before we begin.
image = medianFilter(image)
# Avoid right, bottom edges.
for i in range(width - 1):
for j in range(height - 1):
# Mask from en.wikipedia.org/wiki/Sobel_operator
total = 0.0
total += -1 * float(image[i - 1][j - 1])
total += -2 * float(image[i + 0][j - 1])
total += -1 * float(image[i + 1][j - 1])
total += 1 * float(image[i - 1][j + 1])
total += 2 * float(image[i - 0][j + 1])
total += 1 * float(image[i + 1][j + 1])
#filteredImage[i][j] = originalImage[i][j] + (total / 6.0)
filteredImage[i][j] = total / 6.0
return filteredImage
def sobelXYFilter(image):
"""
Combines the Sobel X and Y filters to find all edges.
"""
width, height = image.shape
xFiltered = sobelXFilter(copy.deepcopy(image))
yFiltered = sobelYFilter(copy.deepcopy(image))
for i in range(width):
for j in range(height):
x = xFiltered[i][j]
y = yFiltered[i][j]
image[i][j] = math.sqrt((x ** 2) + (y ** 2))
return image
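# The combined response is the gradient magnitude |G| = sqrt(Gx^2 + Gy^2),
# computed per pixel from the horizontal and vertical Sobel responses above.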
def saveImage(image, filename):
"""Saves the image in the output directory with the filename given.
"""
cv2.imwrite(OUTPUT_DIRECTORY + filename + IMAGE_FILE_EXTENSION, image)
def openImage(fileName):
"""Opens the image in the input directory with the filename given.
"""
return cv2.imread(INPUT_DIRECTORY + fileName + IMAGE_FILE_EXTENSION, 0)
# Input images
inputForBlurring = 'fabio'
inputForSharpening = 'bball'
# Import image.
imageForBlurring = openImage(inputForBlurring)
imageForSharpening = openImage(inputForSharpening)
## Run filters on image, save.
#print("Averaging Filter...")
#saveImage(averagingFilter(imageForBlurring), inputForBlurring + 'Averaging')
#
#print("Gaussian Filter...")
#saveImage(gaussianFilter(imageForBlurring), inputForBlurring + 'Gauss')
#
#print("Median Filter...")
#saveImage(medianFilter(imageForBlurring), inputForBlurring + 'Median')
print("Laplacian Filter...")
saveImage(laplacianFilter(imageForSharpening), inputForSharpening + 'Laplacian')
|
Devyani-Divs/pagure
|
pagure/lib/__init__.py
|
Python
|
gpl-2.0
| 44,784
| 0.000357
|
# -*- coding: utf-8 -*-
"""
(c) 2014-2015 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <pingou@pingoured.fr>
"""
import datetime
import os
import shutil
import tempfile
import uuid
import sqlalchemy
import sqlalchemy.schema
from datetime import timedelta
from sqlalchemy import func
from sqlalchemy.orm import aliased
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import SQLAlchemyError
import pygit2
import pagure.exceptions
import pagure.lib.git
import pagure.lib.login
import pagure.lib.notify
from pagure.lib import model
def __get_user(session, key):
""" Searches for a user in the database for a given username or email.
"""
user_obj = search_user(session, username=key)
if not user_obj:
user_obj = search_user(session, email=key)
if not user_obj:
raise pagure.exceptions.PagureException(
'No user "%s" found' % key
)
return user_obj
def create_session(db_url, debug=False, pool_recycle=3600):
''' Create the Session object to use to query the database.
:arg db_url: URL used to connect to the database. The URL contains
information with regards to the database engine, the host to connect
to, the user and password and the database name.
ie: <engine>://<user>:<password>@<host>/<dbname>
:kwarg debug: a boolean specifying whether we should have the verbose
output of sqlalchemy or not.
:return a Session that can be used to query the database.
'''
engine = sqlalchemy.create_engine(
db_url, echo=debug, pool_recycle=pool_recycle)
scopedsession = scoped_session(sessionmaker(bind=engine))
return scopedsession
def get_next_id(session, projectid):
""" Returns the next identifier of a project ticket or pull-request
based on the identifier already in the database.
"""
q1 = session.query(
func.max(model.Issue.id)
).filter(
model.Issue.project_id == projectid
)
q2 = session.query(
func.max(model.PullRequest.id)
).filter(
model.PullRequest.project_id == projectid
)
nid = max([el[0] for el in q1.union(q2).all()]) or 0
return nid + 1
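# Note: taking the max over the union of the two queries above means issue
# and pull-request identifiers share a single number sequence per project.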
def search_user(session, username=None, email=None, token=None, pattern=None):
''' Searches the database for the user or users matching the given
criteria.
:arg session: the session to use to connect to the database.
:kwarg username: the username of the user to look for.
:type username: string or None
:kwarg email: the email or one of the email of the user to look for
:type email: string or None
:kwarg token: the token of the user to look for
:type token: string or None
:kwarg pattern: a pattern to search the users with.
:type pattern: string or None
:return: A single User object if any of username, email or token is
specified, a list of User objects otherwise.
:rtype: User or [User]
'''
query = session.query(
model.User
)
if username is not None:
query = query.filter(
model.User.user == username
)
if email is not None:
query = query.filter(
model.UserEmail.user_id == model.User.id
).filter(
model.UserEmail.email == email
)
if token is not None:
query = query.filter(
model.User.token == token
)
if pattern:
pattern = pattern.replace('*', '%')
query = query.filter(
model.User.user.like(pattern)
)
if any([username, email, token]):
output = query.first()
else:
output = query.all()
return output
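# A minimal usage sketch (hypothetical session and values):
#   user = search_user(session, username='pingou')   # single User or None
#   users = search_user(session, pattern='pin*')     # list of matching Users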
def add_issue_comment(session, issue, comment, user, ticketfolder,
notify=True):
''' Add a comment to an issue. '''
user_obj = __get_user(session, user)
issue_comment = model.IssueComment(
issue_uid=issue.uid,
comment=comment,
user_id=user_obj.id,
)
session.add(issue_comment)
# Make sure we won't have SQLAlchemy error before we create the repo
session.commit()
pagure.lib.git.update_git(
issue, repo=issue.project, repofolder=ticketfolder)
if notify:
pagure.lib.notify.notify_new_comment(issue_comment, user=user_obj)
if not issue.private:
pagure.lib.notify.fedmsg_publish(
'issue.comment.added',
dict(
issue=issue.to_json(),
project=issue.project.to_json(),
agent=user_obj.username,
)
)
return 'Comment added'
def add_issue_tag(session, issue, tags, user, ticketfolder):
''' Add a tag to an issue. '''
user_obj = __get_user(session, user)
if isinstance(tags, basestring):
tags = [tags]
msgs = []
added_tags = []
for issue_tag in tags:
known = False
for tag_issue in issue.tags:
if tag_issue.tag == issue_tag:
known = True
if known:
continue
tagobj = get_tag(session, issue_tag)
if not tagobj:
tagobj = model.Tag(tag=issue_tag)
session.add(tagobj)
session.flush()
issue_tag = model.TagIssue(
issue_uid=issue.uid,
tag=tagobj.tag,
)
session.add(issue_tag)
# Make sure we won't have SQLAlchemy error before we create the repo
session.flush()
added_tags.append(tagobj.tag)
pagure.lib.git.update_git(
issue, repo=issue.project, repofolder=ticketfolder)
if not issue.private:
pagure.lib.notify.fedmsg_publish(
'issue.tag.added',
dict(
issue=issue.to_json(),
project=issue.project.to_json(),
tags=added_tags,
agent=user_obj.username,
)
)
if added_tags:
return 'Tag added: %s' % ', '.join(added_tags)
else:
return 'Nothing to add'
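# A usage sketch (hypothetical session, issue and path):
#   add_issue_tag(session, issue, ['easyfix', 'bug'], 'pingou', '/srv/tickets')
#   # -> 'Tag added: easyfix, bug' (or 'Nothing to add' if already tagged)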
def add_issue_assignee(session, issue, assignee, user, ticketfolder):
''' Add an assignee to an issue, in other words, assigned an issue. '''
user_obj = __get_user(session, user)
if assignee is None and issue.assignee != None:
issue.assignee_id = None
session.add(issue)
session.commit()
pagure.lib.git.update_git(
issue, repo=issue.project, repofolder=ticketfolder)
pagure.lib.notify.notify_assigned_issue(issue, None, user_obj)
if not issue.private:
pagure.lib.notify.fedmsg_publish(
'issue.assigned.reset',
dict(
issue=issue.to_json(),
project=issue.project.to_json(),
agent=user_obj.username,
)
)
return 'Assignee reset'
elif assignee is None and issue.assignee == None:
return
# Validate the assignee
assignee_obj = __get_user(session, assignee)
if issue.assignee_id != assignee_obj.id:
issue.assignee_id = assignee_obj.id
session.add(issue)
session.flush()
pagure.lib.git.update_git(
issue, repo=issue.project, repofolder=ticketfolder)
pagure.lib.notify.notify_assigned_issue(
issue, assignee_obj, user_obj)
if not issue.private:
pagure.lib.notify.fedmsg_publish(
'issue.assigned.added',
dict(
issue=issue.to_json(),
project=issue.project.to_json(),
agent=user_obj.username,
)
)
return 'Issue assigned'
def add_issue_dependency(session, issue, issue_blocked, user, ticketfolder):
''' Add a dependency between two issues. '''
user_obj = __get_user(session, user)
if issue.uid == issue_blocked.uid:
raise pagure.exceptions.PagureException(
'An issue cannot depend on itself'
)
if issue_blocked not in issue.children:
i2i = model.IssueToIssue(
parent_issue_id=issue_blocked.uid,
|
Thingee/cinder
|
cinder/api/contrib/backups.py
|
Python
|
apache-2.0
| 13,643
| 0.000073
|
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The backups api."""
import webob
from webob import exc
from cinder.api import common
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.views import backups as backup_views
from cinder.api import xmlutil
from cinder import backup as backupAPI
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import utils
LOG = logging.getLogger(__name__)
def make_backup(elem):
elem.set('id')
elem.set('status')
elem.set('size')
elem.set('container')
elem.set('volume_id')
elem.set('object_count')
elem.set('availability_zone')
elem.set('created_at')
elem.set('name')
elem.set('description')
elem.set('fail_reason')
def make_backup_restore(elem):
elem.set('backup_id')
elem.set('volume_id')
def make_backup_export_import_record(elem):
elem.set('backup_service')
elem.set('backup_url')
class BackupTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('backup', selector='backup')
make_backup(root)
alias = Backups.alias
namespace = Backups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class BackupsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('backups')
elem = xmlutil.SubTemplateElement(root, 'backup', selector='backups')
make_backup(elem)
alias = Backups.alias
namespace = Backups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class BackupRestoreTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('restore', selector='restore')
make_backup_restore(root)
alias = Backups.alias
namespace = Backups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class BackupExportImportTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('backup-record',
selector='backup-record')
make_backup_export_import_record(root)
alias = Backups.alias
namespace = Backups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class CreateDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
backup = self._extract_backup(dom)
return {'body': {'backup': backup}}
def _extract_backup(self, node):
backup = {}
backup_node = self.find_first_child_named(node, 'backup')
attributes = ['container', 'display_name',
'display_description', 'volume_id']
for attr in attributes:
if backup_node.getAttribute(attr):
backup[attr] = backup_node.getAttribute(attr)
return backup
class RestoreDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
restore = self._extract_restore(dom)
return {'body': {'restore': restore}}
def _extract_restore(self, node):
restore = {}
restore_node = self.find_first_child_named(node, 'restore')
if restore_node.getAttribute('volume_id'):
restore['volume_id'] = restore_node.getAttribute('volume_id')
return restore
class BackupImportDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
backup = self._extract_backup(dom)
retval = {'body': {'backup-record': backup}}
return retval
def _extract_backup(self, node):
backup = {}
backup_node = self.find_first_child_named(node, 'backup-record')
attributes = ['backup_service', 'backup_url']
for attr in attributes:
if backup_node.getAttribute(attr):
backup[attr] = backup_node.getAttribute(attr)
return backup
class BackupsController(wsgi.Controller):
"""The Backups API controller for the OpenStack API."""
_view_builder_class = backup_views.ViewBuilder
def __init__(self):
self.backup_api = backupAPI.API()
super(BackupsController, self).__init__()
@wsgi.serializers(xml=BackupTemplate)
def show(self, req, id):
"""Return data about the given backup."""
LOG.debug(_('show called for member %s'), id)
context = req.environ['cinder.context']
try:
backup = self.backup_api.get(context, backup_id=id)
except exception.BackupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return self._view_builder.detail(req, backup)
def delete(self, req, id):
"""Delete a backup."""
LOG.debug(_('delete called for member %s'), id)
context = req.environ['cinder.context']
LOG.audit(_('Delete backup with id: %s'), id, context=context)
try:
self.backup_api.delete(context, id)
except exception.BackupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.InvalidBackup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=202)
@wsgi.serializers(xml=BackupsTemplate)
def index(self, req):
"""Returns a summary list of backups."""
return self._get_backups(req, is_detail=False)
@wsgi.serializers(xml=BackupsTemplate)
def detail(self, req):
"""Returns a detailed list of backups."""
return self._get_backups(req, is_detail=True)
def _get_backups(self, req, is_detail):
"""Returns a list of backups, transformed through view builder."""
context = req.environ['cinder.context']
backups = self.backup_api.get_all(context)
limited_list = common.limited(backups, req)
if is_detail:
backups = self._view_builder.detail_list(req, limited_list)
else:
backups = self._view_builder.summary_list(req, limited_list)
return backups
# TODO(frankm): Add some checks here including
# - whether requested volume_id exists so we can return some errors
# immediately
# - maybe also do validation of swift container name
@wsgi.response(202)
@wsgi.serializers(xml=BackupTemplate)
@wsgi.deserializers(xml=CreateDeserializer)
def create(self, req, body):
"""Create a new backup."""
LOG.debug(_('Creating new backup %s'), body)
if not self.is_valid_body(body, 'backup'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context']
try:
backup = body['backup']
volume_id = backup['volume_id']
except KeyError:
msg = _("Incorrect request body format")
raise exc.HTTPBadRequest(explanation=msg)
container = backup.get('container', None)
name = backup.get('name', None)
description = backup.get('description', None)
LOG.audit(_("Creating backup of volume %(volume_id)s in container"
" %(container)s"),
{'volume_id': volume_id, 'container': container},
context=context)
try:
new_backup = self.backup_api.create(context, name, description,
volume_id, container)
|
timbrom/lightshow
|
scripts/flash_and_debug.py
|
Python
|
apache-2.0
| 2,657
| 0.004893
|
#!/usr/bin/env python
import telnetlib
import subprocess
import signal
import time
###############################################################
# This script will automatically flash and start a GDB debug
# session to the STM32 discovery board using OpenOCD. It is
# meant to be called from the rake task "debug" (execute
# rake debug) and the working directory is assumed to be the
# project root
###############################################################
###############################################################
# We need to be able to send a SIGTERM (ctrl-c) to GDB
# without killing openocd or this script. Set up a custom
# signal handler here that essentially ignores SIGTERM
###############################################################
def signal_handler(signal, frame):
pass # do nothing
###############################################################
# Start up the openocd thread
###############################################################
# We need gdb to respond to a SIGINT (ctrl-c), but by default,
# that will cause every other child process to die, including
# openocd. Disable sigint, then re-enable it after the child
# spawns. The child inherits the current state of signal
# handlers.
signal.signal(signal.SIGINT, signal.SIG_IGN)
openocd = subprocess.Popen(["openocd"])
time.sleep(2) # Wait for this to start up
# Set up a custom signal handler so that SIGINT doesn't kill
# this script
signal.signal(signal.SIGINT, signal_handler)
###############################################################
# Flash the new image to the development board
###############################################################
# Create the flashable image
subprocess.call(["arm-none-eabi-objcopy", "-Obinary", "build/flash.elf", "build/flash.bin"])
# Flash the image
tn = telnetlib.Telnet("127.0.0.1", "4444")
tn.read_until("> ")
tn.write("poll\n")
tn.read_until("> ")
tn.write("reset halt\n")
tn.read_until("> ")
tn.write("flash probe 0\n")
tn.read_until("> ")
tn.write("flash write_image erase build/flash.bin 0x08000000\n")
tn.read_until("> ")
tn.write("reset\n")
tn.read_until("> ")
tn.write("exit\n")
tn.close()
###############################################################
# Start the gdb session
###############################################################
time.sleep(2)
gdb_proc = subprocess.Popen(["arm-none-eabi-gdb", "-ex", "target remote localhost:3333", "build/flash.elf", "-ex", "set remote hardware-breakpoint-limit 6", "-ex", "set remote hardware-watchpoint-limit 4"])
# Spin until GDB is exited
while gdb_proc.poll() == None:
time.sleep(1)
# Gracefully exit openocd
openocd.terminate()
|
RCMRD/geonode
|
geonode/upload/views.py
|
Python
|
gpl-3.0
| 26,130
| 0.000651
|
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
"""
Provide views for doing an upload.
The upload process may be multi step so views are all handled internally here
by the view function.
The pattern to support separation of view/logic is each step in the upload
process is suffixed with "_step". The view for that step is suffixed with
"_step_view". The goal of separation of view/logic is to support various
programmatic uses of this API. The logic steps should not accept request objects
or return response objects.
State is stored in an UploaderSession object kept in the user's session.
This needs to be made more stateful by adding a model.
"""
import gsimporter
import json
import logging
import os
import traceback
from httplib import BadStatusLine
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.html import escape
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.views.generic import CreateView, DeleteView
from geonode.base.enumerations import CHARSETS
from geonode.upload import forms, upload, files
from geonode.upload.forms import LayerUploadForm, UploadFileForm
from geonode.upload.models import Upload, UploadFile
from geonode.utils import json_response as do_json_response
from geonode.geoserver.helpers import ogc_server_settings
logger = logging.getLogger(__name__)
_SESSION_KEY = 'geonode_upload_session'
_ALLOW_TIME_STEP = getattr(settings, 'UPLOADER', False)
if _ALLOW_TIME_STEP:
_ALLOW_TIME_STEP = _ALLOW_TIME_STEP.get(
'OPTIONS',
False).get(
'TIME_ENABLED',
False)
_ASYNC_UPLOAD = True if ogc_server_settings and ogc_server_settings.DATASTORE else False
# at the moment, the various time support transformations require the database
if _ALLOW_TIME_STEP and not _ASYNC_UPLOAD:
raise Exception(
"To support the time step, you must enable the OGC_SERVER DATASTORE option")
_geoserver_down_error_msg = """
GeoServer is not responding. Please try again later and sorry for the inconvenience.
"""
_unexpected_error_msg = """
An error occurred while trying to process your request. Our administrator has
been notified, but if you'd like, please note this error code
below and details on what you were doing when you encountered this error.
That information can help us identify the cause of the problem and help us with
fixing it. Thank you!
"""
def _is_async_step(upload_session):
return _ASYNC_UPLOAD and get_next_step(upload_session, offset=2) == 'run'
def _progress_redirect(step):
return json_response(dict(
success=True,
redirect_to=reverse('data_upload', args=[step]),
progress=reverse('data_upload_progress')
))
def json_response(*args, **kw):
if 'exception' in kw:
logger.warn(traceback.format_exc(kw['exception']))
return do_json_response(*args, **kw)
class JSONResponse(HttpResponse):
"""JSON response class."""
def __init__(self,
obj='',
json_opts={},
mimetype="application/json", *args, **kwargs):
content = json.dumps(obj, **json_opts)
super(JSONResponse, self).__init__(content, mimetype, *args, **kwargs)
def _error_response(req, exception=None, errors=None, force_ajax=True):
if exception:
logger.exception('Unexpected error in upload step')
else:
logger.error('upload error: %s', errors)
if req.is_ajax() or force_ajax:
content_type = 'text/html' if not req.is_ajax() else None
return json_response(exception=exception, errors=errors,
content_type=content_type, status=400)
# not sure if any responses will (ideally) ever be non-ajax
if errors:
exception = "<br>".join(errors)
return render_to_response(
'upload/layer_upload_error.html',
RequestContext(
req,
{
'error_msg': 'Unexpected error : %s,' %
exception}))
def _next_step_response(req, upload_session, force_ajax=True):
# if the current step is the view POST for this step, advance one
if req.method == 'POST':
if upload_session.completed_step:
advance_step(req, upload_session)
else:
upload_session.completed_step = 'save'
next = get_next_step(upload_session)
if next == 'time':
# @TODO we skip time steps for coverages currently
import_session = upload_session.import_session
store_type = import_session.tasks[0].target.store_type
if store_type == 'coverageStore':
upload_session.completed_step = 'time'
return _next_step_response(req, upload_session, force_ajax)
if next == 'time' and (
upload_session.time is None or not upload_session.time):
upload_session.completed_step = 'time'
return _next_step_response(req, upload_session, force_ajax)
if next == 'time' and force_ajax:
import_session = upload_session.import_session
url = reverse('data_upload') + "?id=%s" % import_session.id
return json_response(
{'url': url,
'status': 'incomplete',
'success': True,
'redirect_to': '/upload/time',
}
)
if next == 'srs' and force_ajax:
import_session = upload_session.import_session
url = reverse('data_upload') + "?id=%s" % import_session.id
return json_response(
{'url': url,
'status': 'incomplete',
'success': True,
'redirect_to': '/upload/srs',
}
)
if next == 'csv' and force_ajax:
import_session = upload_session.import_session
url = reverse('data_upload') + "?id=%s" % import_session.id
return json_response(
{'url': url,
'status': 'incomplete',
'success': True,
'redirect_to': '/upload/csv',
}
)
# @todo this is not handled cleanly - run is not a real step in that it
# has no corresponding view served by the 'view' function.
if next == 'run':
upload_session.completed_step = next
if _ASYNC_UPLOAD and req.is_ajax():
return run_response(req, upload_session)
else:
# on sync we want to run the import and advance to the next step
run_import(upload_session, async=False)
return _next_step_response(req, upload_session,
force_ajax=force_ajax)
if req.is_ajax() or force_ajax:
content_type = 'text/html' if not req.is_ajax() else None
return json_response(redirect_to=reverse('data_upload', args=[next]),
content_type=content_type)
# return HttpResponseRedirect(reverse('data_upload', args=[next]))
def _create_time_form(import_session, form_data):
feature_type = import_session.tasks[0].layer
def filter_type(b):
return [att.name for att in feature_type.attributes if att.binding == b]
args = dict(
time_names=filter_type('java.util.Date'),
text_names=filter_type('java.lang.String'),
year_n
|
jmarcelogimenez/petroFoam
|
initialConditions.py
|
Python
|
gpl-2.0
| 6,385
| 0.010807
|
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 25 13:08:19 2015
@author: jgimenez
"""
from PyQt4 import QtGui, QtCore
from initialConditions_ui import Ui_initialConditionsUI
import os
from utils import *
from PyFoam.RunDictionary.BoundaryDict import BoundaryDict
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from utils import types
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
unknowns = ['U','p','p_rgh','alpha','k','epsilon','omega','nut','nuTilda']
class initialConditionsUI(QtGui.QScrollArea, Ui_initialConditionsUI):
def __init__(self, parent=None, f=QtCore.Qt.WindowFlags()):
QtGui.QScrollArea.__init__(self, parent)
self.setupUi(self)
class initialConditionsWidget(initialConditionsUI):
def __init__(self,folder):
self.currentFolder = folder
initialConditionsUI.__init__(self)
[self.timedir,self.fields,currtime] = currentFields(self.currentFolder)
self.pushButton.setEnabled(False)
self.addTabs()
def addTabs(self,ipatch=None):
for itab in range(self.tabWidget.count()):
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
self.clearLayout(layout,0)
self.tabWidget.clear()
for ifield in self.fields:
if ifield not in unknowns:
continue
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout2 = QtGui.QHBoxLayout()
cb = QtGui.QComboBox()
cb.addItems(['uniform','nonuniform'])
layout2.addWidget(cb)
if types[ifield]=='scalar':
ledit = QtGui.QLineEdit()
ledit.setValidator(QtGui.QDoubleValidator())
QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
layout2.addWidget(ledit)
else:
for j in range(3):
ledit = QtGui.QLineEdit()
ledit.setValidator(QtGui.QDoubleValidator())
layout2.addWidget(ledit)
QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
layout.addLayout(layout2)
if ifield=='U':
qbutton = QtGui.QCheckBox()
qbutton.setText('Initialize from potential flow')
layout.addWidget(qbutton)
QtCore.QObject.connect(qbutton, QtCore.SIGNAL(_fromUtf8("stateChanged(int)")), self.onPotentialFlow)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
self.tabWidget.addTab(widget, ifield)
self.tabWidget.setTabText(self.tabWidget.count(),ifield)
def onPotentialFlow(self):
for itab in range(self.tabWidget.count()):
ifield = self.tabWidget.tabText(itab)
if ifield=='U':
print ifield
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
cb = self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0]
layout2 = layout.itemAt(0).layout()
for i in range(layout2.count()):
if isinstance(layout2.itemAt(i), QtGui.QWidgetItem):
layout2.itemAt(i).widget().setEnabled(not cb.isChecked())
def clearLayout(self, layout, dejar):
for i in reversed(range(layout.count())):
if i>= dejar:
item = layout.itemAt(i)
if isinstance(item, QtGui.QWidgetItem):
item.widget().close()
item.widget().deleteLater()
# or
# item.widget().setParent(None)
elif isinstance(item, QtGui.QSpacerItem):
None
# no need to do extra stuff
else:
self.clearLayout(item.layout(),0)
# remove the item from layout
layout.removeItem(item)
def setConditions(self):
runPotentialFlow = 0
for itab in range(self.tabWidget.count()):
ifield = self.tabWidget.tabText(itab)
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
filename = '%s/%s'%(self.timedir,ifield)
parsedData = ParsedParameterFile(filename,createZipped=False)
layout2 = layout.itemAt(0).layout()
if layout2.count()==2:
parsedData['internalField'] = '%s %s'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text())
else:
if ifield == 'U' and self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0].isChecked():
runPotentialFlow = 1
parsedData['internalField'] = '%s (%s %s %s)'%('uniform',0,0,0)
else:
parsedData['internalField'] = '%s (%s %s %s)'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text(),layout2.itemAt(2).widget().text(),layout2.itemAt(3).widget().text())
parsedData.writeFile()
self.pushButton.setEnabled(False)
if runPotentialFlow:
QtGui.QMessageBox.about(self, "ERROR", 'Debe simularse con potentialFoam, hacer!!')
return
def checkData(self):
ready = True
for itab in range(self.tabWidget.count()):
edits = self.tabWidget.widget(itab).findChildren(QtGui.QLineEdit)
for E in edits:
if E.isEnabled():
if not E.text():
ready = False
if ready:
self.pushButton.setEnabled(True)
else:
self.pushButton.setEnabled(False)
|
scholer/pptx-downsizer
|
pptx_downsizer/utils.py
|
Python
|
gpl-3.0
| 3,492
| 0.003436
|
import os
import sys
import zipfile
def zip_directory(directory, targetfn=None, relative=True, compress_type=zipfile.ZIP_DEFLATED, verbose=1):
"""Zip all files and folders in a directory.
Args:
directory: The directory whose contents should be zipped.
targetfn: Output filename of the zipped archive.
relative: If True, make the arcname relative to the input directory.
compress_type: Which kind of compression to use. See zipfile package.
verbose: How much information to print to stdout while creating the archive.
Returns:
The filename of the zipped archive.
"""
assert os.path.isdir(directory)
if targetfn is None:
targetfn = directory + ".zip"
filecount = 0
if verbose and verbose > 0:
print("Creating archive %r from directory %r:" % (targetfn, directory))
with zipfile.ZipFile(targetfn, mode="w") as zipfd:
for dirpath, dirnames, filenames in os.walk(directory):
for fname in filenames:
fpath = os.path.join(dirpath, fname)
arcname = os.path.relpath(fpath, start=directory) if relative else fpath
if verbose and verbose > 0:
print(" - adding %r" % (arcname,))
zipfd.write(fpath, arcname=arcname, compress_type=compress_type)
filecount += 1
if verbose and verbose > 0:
print("\n%s files written to archive %r" % (filecount, targetfn))
return targetfn
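# A minimal usage sketch (hypothetical directory name):
#   archive = zip_directory("extracted_pptx", verbose=0)
#   assert archive == "extracted_pptx.zip"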
def convert_str_to_int(s, do_float=True, do_eval=True):
try:
return int(s)
except ValueError as e:
if do_float:
try:
return convert_str_to_int(float(s), do_float=False, do_eval=False)
except ValueError as e:
try:
import humanfriendly
except ImportError:
print((
"Warning, the `humanfriendly` package is not available."
"If you want to use e.g. \"500kb\" as filesize, "
"please install the `humanfriendly` package:\n"
" pip install humanfriendly\n"))
pass
humanfriendly = None
else:
try:
return humanfriendly.parse_size(s)
except humanfriendly.InvalidSize:
pass
if do_eval:
try:
return convert_str_to_int(eval(s), do_float=do_float, do_eval=False)
except (ValueError, SyntaxError) as e:
print("Error, could not parse/convert string %r as integer. " % (s,))
raise e
else:
print("Error, could not parse/convert string %r as integer. " % (s,))
raise e
else:
print("Error, could not parse/convert string %r as integer. " % (s,))
raise e
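# Usage sketch (the "500kb" form needs the optional humanfriendly package):
#   convert_str_to_int("42")     # -> 42
#   convert_str_to_int("2.5e3")  # -> 2500 (via the float fallback)
#   convert_str_to_int("500kb")  # -> 500000 (via humanfriendly.parse_size)
#   convert_str_to_int("2**20")  # -> 1048576 (via the eval fallback)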
def open_pptx(fpath):
"""WIP: Open a pptx presentation in PowerPoint on any platform."""
import subprocess
import shlex
if 'darwin' in sys.platform:
exec = 'open -a "Microsoft PowerPoint"'
else:
raise NotImplementedError("Opening pptx files not yet supported on Windows.")
# TODO: The right way to do this is probably to search the registry using _winreg package.
p = subprocess.Popen(shlex.split(exec) + [fpath])
|
mozilla/lumbergh
|
careers/base/templatetags/helpers.py
|
Python
|
mpl-2.0
| 1,025
| 0
|
import datetime
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from django_jinja import library
from django.utils.http import urlencode
@library.global_function
def thisyear():
"""The current year."""
return datetime.date.today().year
@library.filter
def urlparams(url_, hash=None, **query):
"""Add a fragment and/or query paramaters to a URL.
New query params will be appended to exising parameters, except duplicate
names, which will be replaced.
"""
url = urlparse.urlparse(url_)
fragment = hash if hash is not None else url.fragment
# Use dict(parse_qsl) so we don't get lists of values.
query_dict = dict(urlparse.parse_qsl(url.query))
query_dict.update(query)
query_string = urlencode(
[(k, v) for k, v in query_dict.items() if v is not None])
new = urlparse.ParseResult(url.scheme, url.netloc, url.path, url.params,
query_string, fragment)
return new.geturl()
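# Usage sketch (query parameter order may vary, since a dict is used):
#   urlparams('/search?q=a&page=1', hash='top', page=2)
#   # -> '/search?q=a&page=2#top'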
|
google/grr
|
grr/server/grr_response_server/prometheus_stats_collector_test.py
|
Python
|
apache-2.0
| 514
| 0.005837
|
#!/usr/bin/env python
# Lint as: python3
"""Tests for PrometheusStatsCollector."""
from absl import app
from grr_response_core.stats import stats_test_utils
from grr_response_server import prometheus_stats_collector
from grr.test_lib import test_lib
class PrometheusStatsCollectorTest(stats_test_utils.StatsCollectorTest):
def _CreateStatsCollector(self):
return prometheus_stats_collector.PrometheusStatsCollector()
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
app.run(main)
| |
prculley/gramps
|
gramps/gen/filters/rules/person/_changedsince.py
|
Python
|
gpl-2.0
| 1,921
| 0.006247
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Person/_ChangedSince.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._changedsincebase import ChangedSinceBase
#-------------------------------------------------------------------------
#
# ChangedSince
#
#-------------------------------------------------------------------------
class ChangedSince(ChangedSinceBase):
"""Rule that checks for persons changed since a specific time."""
labels = [ _('Changed after:'), _('but before:') ]
name = _('Persons changed after <date time>')
description = _("Matches person records changed after a specified "
"date-time (yyyy-mm-dd hh:mm:ss) or in the range, if a second "
"date-time is given.")
|
zenodo/zenodo-migrator
|
zenodo_migrator/serializers/schemas/__init__.py
|
Python
|
gpl-2.0
| 1,061
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Deposit serialization schemas."""
from __future__ import absolute_import, print_function
|
tuxlifan/moneyguru
|
qt/controller/panel.py
|
Python
|
gpl-3.0
| 5,750
| 0.003826
|
# Copyright 2016 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (
QDialog, QLineEdit, QSpinBox, QComboBox, QCheckBox, QPlainTextEdit
)
class Panel(QDialog):
# A list of two-sized tuples (QWidget's name, model field name).
FIELDS = []
# Name to use for serialization of persistent data about this panel (geometry).
# XXX At the time of this writing (ticket #364), there's already a separate system in Cocoa
# to persist dialog frames. A "clean" implementation would do like we do with the main window
# and implement frame save/restore in core, but I fear that I'll needlessly complicate things
# doing so, so for now, I limit myself to a qt-only solution. Later, we should re-evaluate
# whether it could be a good idea to push this implementation to the core.
PERSISTENT_NAME = None
def __init__(self, mainwindow):
# The flags we pass are that so we don't get the "What's this" button in the title bar
QDialog.__init__(self, mainwindow, Qt.WindowTitleHint | Qt.WindowSystemMenuHint)
self._widget2ModelAttr = {}
self.mainwindow = mainwindow
def _changeComboBoxItems(self, comboBox, newItems):
# When a combo box's items are changed, its currentIndex changed with a currentIndexChanged
# signal, and if that signal results in the model being updated, it messes the model.
# We thus have to disconnect the combo box's signal before changing the items.
if comboBox in self._widget2ModelAttr:
comboBox.currentIndexChanged.disconnect(self.comboBoxCurrentIndexChanged)
index = comboBox.currentIndex()
comboBox.clear()
comboBox.addItems(newItems)
comboBox.setCurrentIndex(index)
if comboBox in self._widget2ModelAttr:
comboBox.currentIndexChanged.connect(self.comboBoxCurrentIndexChanged)
def _connectSignals(self):
for widgetName, modelAttr in self.FIELDS:
widget = getattr(self, widgetName)
self._widget2ModelAttr[widget] = modelAttr
if isinstance(widget, QComboBox):
widget.currentIndexChanged.connect(self.comboboxChanged)
elif isinstance(widget, QSpinBox):
widget.valueChanged.connect(self.spinboxChanged)
elif isinstance(widget, QLineEdit):
widget.editingFinished.connect(self.lineeditChanged)
elif isinstance(widget, QPlainTextEdit):
widget.textChanged.connect(self.plaintexteditChanged)
elif isinstance(widget, QCheckBox):
widget.stateChanged.connect(self.checkboxChanged)
def _loadFields(self):
for widgetName, modelAttr in self.FIELDS:
widget = getattr(self, widgetName)
value = getattr(self.model, modelAttr)
if isinstance(widget, QComboBox):
widget.setCurrentIndex(value)
elif isinstance(widget, QSpinBox):
widget.setValue(value)
elif isinstance(widget, QLineEdit):
widget.setText(value)
elif isinstance(widget, QPlainTextEdit):
widget.setPlainText(value)
elif isinstance(widget, QCheckBox):
widget.setChecked(value)
def _saveFields(self):
pass
def _loadGeometry(self):
if self.PERSISTENT_NAME:
self.mainwindow.app.prefs.restoreGeometry('%sGeometry' % self.PERSISTENT_NAME, self)
def _saveGeometry(self):
if self.PERSISTENT_NAME:
self.mainwindow.app.prefs.saveGeometry('%sGeometry' % self.PERSISTENT_NAME, self)
def accept(self):
# The setFocus() call is to force the last edited field to "commit". When the save button
# is clicked, accept() is called before the last field to have focus has a chance to emit
# its edition signal.
self.setFocus()
self.model.save()
self._saveGeometry()
QDialog.accept(self)
def reject(self):
self._saveGeometry()
super().reject()
# --- Event Handlers
def _widgetChanged(self, sender, newvalue):
modelAttr = self._widget2ModelAttr[sender]
setattr(self.model, modelAttr, newvalue)
def comboboxChanged(self):
sender = self.sender()
self._widgetChanged(sender, sender.currentIndex())
def spinboxChanged(self):
sender = self.sender()
self._widgetChanged(sender, sender.value())
def lineeditChanged(self):
sender = self.sender()
self._widgetChanged(sender, sender.text())
def plaintexteditChanged(self):
sender = self.sender()
self._widgetChanged(sender, sender.toPlainText())
def checkboxChanged(self):
sender = self.sender()
self._widgetChanged(sender, sender.isChecked())
# --- model --> view
def pre_load(self):
self._loadGeometry()
def pre_save(self):
self._saveFields()
def post_load(self):
if not self._widget2ModelAttr: # signal not connected yet
self._connectSignals()
self._loadFields()
self.show()
# For initial text edits to have their text selected, we *have to* first select the dialog,
# then setFocus on it with qt.TabFocusReason. Don't ask, I don't know why either...
self.setFocus()
focus = self.nextInFocusChain()
while focus.focusPolicy() == Qt.NoFocus:
focus = focus.nextInFocusChain()
focus.setFocus(Qt.TabFocusReason)
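# A minimal (hypothetical) subclass sketch: FIELDS maps widget names on the
# dialog to attribute names on the model, and PERSISTENT_NAME keys the saved
# window geometry.
#   class AccountPanel(Panel):
#       FIELDS = [('nameEdit', 'name'), ('typeComboBox', 'type_index')]
#       PERSISTENT_NAME = 'accountPanel'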
|
macks22/nsf-award-data
|
util/num_cpus.py
|
Python
|
mit
| 2,737
| 0
|
import os
import re
import subprocess
def available_cpu_count():
""" Number of available virtual or physical CPUs on this system, i.e.
user/real as output by time(1) when called with an optimally scaling
userspace-only program"""
# cpuset
# cpuset may restrict the number of *available* processors
try:
m = re.search(r'(?m)^Cpus_allowed:\s*(.*)$',
open('/proc/self/status').read())
if m:
res = bin(int(m.group(1).replace(',', ''), 16)).count('1')
if res > 0:
return res
except IOError:
pass
# Python 2.6+
try:
import multiprocessing
return multiprocessing.cpu_count()
except (ImportError, NotImplementedError):
pass
# http://code.google.com/p/psutil/
try:
import psutil
return psutil.NUM_CPUS
except (ImportError, AttributeError):
pass
# POSIX
try:
res = int(os.sysconf('SC_NPROCESSORS_ONLN'))
if res > 0:
return res
except (AttributeError, ValueError):
pass
# Windows
try:
res = int(os.environ['NUMBER_OF_PROCESSORS'])
if res > 0:
return res
except (KeyError, ValueError):
pass
# jython
try:
from java.lang import Runtime
runtime = Runtime.getRuntime()
res = runtime.availableProcessors()
if res > 0:
return res
except ImportError:
pass
# BSD
try:
sysctl = subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
stdout=subprocess.PIPE)
scStdout = sysctl.communicate()[0]
res = int(scStdout)
if res > 0:
return res
except (OSError, ValueError):
pass
# Linux
try:
res = open('/proc/cpuinfo').read().count('processor\t:')
if res > 0:
return res
except IOError:
pass
# Solaris
try:
pseudoDevices = os.listdir('/devices/pseudo/')
res = 0
for pd in pseudoDevices:
if re.match(r'^cpuid@[0-9]+$', pd):
res += 1
if res > 0:
return res
except OSError:
pass
# Other UNIXes (heuristic)
try:
try:
dmesg = open('/var/run/dmesg.boot').read()
except IOError:
dmesgProcess = subprocess.Popen(['dmesg'], stdout=subprocess.PIPE)
dmesg = dmesgProcess.communicate()[0]
res = 0
while '\ncpu' + str(res) + ':' in dmesg:
res += 1
if res > 0:
return res
except OSError:
pass
raise Exception('Can not determine number of CPUs on this system')
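# Usage sketch:
#   n = available_cpu_count()  # e.g. 8 on a quad-core machine with SMT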
|
geoscixyz/em_examples
|
em_examples/Loop.py
|
Python
|
mit
| 6,800
| 0.032206
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from SimPEG import Mesh, Utils
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from scipy.sparse import spdiags,csr_matrix, eye,kron,hstack,vstack,eye,diags
import copy
from scipy.constants import mu_0
from SimPEG import SolverLU
from scipy.sparse.linalg import spsolve,splu
from SimPEG.EM import TDEM
from SimPEG.EM.Analytics.TDEM import hzAnalyticDipoleT,hzAnalyticCentLoopT
from scipy.interpolate import interp2d,LinearNDInterpolator
from scipy.special import ellipk,ellipe
def rectangular_plane_layout(mesh,corner, closed = False,I=1.):
"""
corner: sorted list of four corners (x,y,z)
2--3
| |
1--4
y
|
|--> x
Output:
Js
"""
Jx = np.zeros(mesh.nEx)
Jy = np.zeros(mesh.nEy)
Jz = np.zeros(mesh.nEz)
indy1 = np.logical_and( \
np.logical_and( \
np.logical_and(mesh.gridEy[:,0]>=corner[0,0],mesh.gridEy[:,0]<=corner[1,0]), \
np.logical_and(mesh.gridEy[:,1] >=corner[0,1] , mesh.gridEy[:,1]<=corner[1,1] )),
(mesh.gridEy[:,2] == corner[0,2]
)
)
indx1 = np.logical_and( \
np.logical_and( \
np.logical_and(mesh.gridEx[:,0]>=corner[1,0],mesh.gridEx[:,0]<=corner[2,0]), \
np.logical_and(mesh.gridEx[:,1] >=corner[1,1] , mesh.gridEx[:,1]<=corner[2,1] )),
(mesh.gridEx[:,2] == corner[1,2]
)
)
indy2 = np.logical_and( \
np.logical_and( \
np.logical_and(mesh.gridEy[:,0]>=corner[2,0],mesh.gridEy[:,0]<=corner[3,0]), \
np.logical_and(mesh.gridEy[:,1] <=corner[2,1] , mesh.gridEy[:,1]>=corner[3,1] )),
(mesh.gridEy[:,2] == corner[2,2]
)
)
if closed:
indx2 = np.logical_and( \
np.logical_and( \
np.logical_and(mesh.gridEx[:,0]>=corner[0,0],mesh.gridEx[:,0]<=corner[3,0]), \
np.logical_and(mesh.gridEx[:,1] >=corner[0,1] , mesh.gridEx[:,1]<=corner[3,1] )),
(mesh.gridEx[:,2] == corner[0,2]
)
)
else:
indx2 = []
Jy[indy1] = -I
Jx[indx1] = -I
Jy[indy2] = I
Jx[indx2] = I
J = np.hstack((Jx,Jy,Jz))
J = J*mesh.edge
return J
def BiotSavart(locs,mesh,Js):
"""
Compute the magnetic field generated by current discretized on a mesh using Biot-Savart law
Input:
locs: observation locations
mesh: mesh on which the current J is discretized
Js: discretized source current in A-m (Finite Volume formulation)
Output:
B: magnetic field [Bx,By,Bz]
"""
c = mu_0/(4*np.pi)
nwire = np.sum(Js!=0.)
ind= np.where(Js!=0.)
ind = ind[0]
B = np.zeros([locs.shape[0],3])
gridE = np.vstack([mesh.gridEx,mesh.gridEy,mesh.gridEz])
for i in range(nwire):
# x wire
if ind[i]<mesh.nEx:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.ones([locs.shape[0],1]),np.zeros([locs.shape[0],1]),np.zeros([locs.shape[0],1])])
cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
# y wire
elif ind[i]<mesh.nEx+mesh.nEy:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.zeros([locs.shape[0],1]),np.ones([locs.shape[0],1]),np.zeros([locs.shape[0],1])])
cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
# z wire
elif ind[i]<mesh.nEx+mesh.nEy+mesh.nEz:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.zeros([locs.shape[0],1]),np.zeros([locs.shape[0],1]),np.ones([locs.shape[0],1])])
            cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
else:
print('error: index of J out of bounds (number of edges in the mesh)')
return B
def analytic_infinite_wire(obsloc,wireloc,orientation,I=1.):
"""
Compute the response of an infinite wire with orientation 'orientation'
    and current I at the observation locations obsloc
Output:
    B: magnetic field [Bx,By,Bz]
"""
n,d = obsloc.shape
t,d = wireloc.shape
d = np.sqrt(np.dot(obsloc**2.,np.ones([d,t]))+np.dot(np.ones([n,d]),(wireloc.T)**2.)
- 2.*np.dot(obsloc,wireloc.T))
distr = np.amin(d, axis=1, keepdims = True)
idxmind = d.argmin(axis=1)
r = obsloc - wireloc[idxmind]
orient = np.c_[[orientation for i in range(obsloc.shape[0])]]
B = (mu_0*I)/(2*np.pi*(distr**2.))*np.cross(orientation,r)
return B
def mag_dipole(m,obsloc):
"""
Compute the response of an infinitesimal mag dipole at location (0,0,0)
with orientation X and magnetic moment 'm'
    at the observation locations obsloc
Output:
B: magnetic field [Bx,By,Bz]
"""
loc = np.r_[[[0.,0.,0.]]]
n,d = obsloc.shape
t,d = loc.shape
d = np.sqrt(np.dot(obsloc**2.,np.ones([d,t]))+np.dot(np.ones([n,d]),(loc.T)**2.)
- 2.*np.dot(obsloc,loc.T))
d = d.flatten()
ind = np.where(d==0.)
d[ind] = 1e6
x = obsloc[:,0]
y = obsloc[:,1]
z = obsloc[:,2]
#orient = np.c_[[orientation for i in range(obsloc.shape[0])]]
Bz = (mu_0*m)/(4*np.pi*(d**3.))*(3.*((z**2.)/(d**2.))-1.)
By = (mu_0*m)/(4*np.pi*(d**3.))*(3.*(z*y)/(d**2.))
Bx = (mu_0*m)/(4*np.pi*(d**3.))*(3.*(x*z)/(d**2.))
B = np.vstack([Bx,By,Bz]).T
return B
def circularloop(a,obsloc,I=1.):
"""
From Simpson, Lane, Immer, Youngquist 2001
Compute the magnetic field B response of a current loop
of radius 'a' with intensity 'I'.
    Input:
    a: radius in m
    obsloc: observation locations
Output:
B: magnetic field [Bx,By,Bz]
"""
x = np.atleast_2d(obsloc[:,0]).T
y = np.atleast_2d(obsloc[:,1]).T
z = np.atleast_2d(obsloc[:,2]).T
r = np.linalg.norm(obsloc,axis=1)
loc = np.r_[[[0.,0.,0.]]]
n,d = obsloc.shape
r2 = x**2.+y**2.+z**2.
rho2 = x**2.+y**2.
alpha2 = a**2.+r2-2*a*np.sqrt(rho2)
beta2 = a**2.+r2+2*a*np.sqrt(rho2)
k2 = 1-(alpha2/beta2)
lbda = x**2.-y**2.
C = mu_0*I/np.pi
Bx = ((C*x*z)/(2*alpha2*np.sqrt(beta2)*rho2))*\
((a**2.+r2)*ellipe(k2)-alpha2*ellipk(k2))
Bx[np.isnan(Bx)] = 0.
By = ((C*y*z)/(2*alpha2*np.sqrt(beta2)*rho2))*\
((a**2.+r2)*ellipe(k2)-alpha2*ellipk(k2))
By[np.isnan(By)] = 0.
Bz = (C/(2.*alpha2*np.sqrt(beta2)))*\
((a**2.-r2)*ellipe(k2)+alpha2*ellipk(k2))
Bz[np.isnan(Bz)] = 0.
#print(Bx.shape)
#print(By.shape)
#print(Bz.shape)
B = np.hstack([Bx,By,Bz])
return B
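# Editor's note: a hedged self-check, not part of the original file. On the loop
# axis (x = y = 0) the Simpson et al. (2001) expressions should reduce to the
# textbook on-axis field Bz = mu_0*I*a^2 / (2*(a^2 + z^2)^(3/2)); the radius,
# current and observation heights below are arbitrary test values.
if __name__ == '__main__':
    a, I = 1., 1.
    z = np.array([0.5, 1., 2.])
    obs = np.c_[np.zeros(3), np.zeros(3), z]
    Bz_loop = circularloop(a, obs, I)[:, 2]
    Bz_axis = mu_0*I*a**2./(2.*(a**2. + z**2.)**1.5)
    print(Bz_loop, Bz_axis)  # the two arrays should agree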
|
davelab6/pyfontaine
|
fontaine/charsets/internals/google_greek_ancient_musical_symbols.py
|
Python
|
gpl-3.0
| 371
| 0.008086
|
# -*- coding: utf-8 -*-
from fontaine.namelist import codepointsInNamelist
class Charset:
common_name = u'Google Fonts: Greek Ancient Musical Symbols'
native_name = u''
abbreviation = 'GREK'
def glyphs(self):
        glyphs = codepointsInNamelist("charsets/internals/google_glyphsets/Greek/GF-greek-ancient-musical-symbols.nam")
return glyphs
|
hirokazumiyaji/pundler
|
pundler/commands/install.py
|
Python
|
mit
| 258
| 0
|
# coding: utf-8
from __future__ import absolute_import
from .base import Base
class Install(Base):
def __init__(self, config):
self.config = config
def run(self):
for package in self.config.packages:
            package.install()
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/py-traceback2/package.py
|
Python
|
lgpl-2.1
| 1,692
| 0.001182
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyTraceback2(PythonPackage):
"""Backports of the traceback module"""
homepage = "https://github.com/testing-cabal/traceback2"
url = "https://pypi.io/packages/source/t/traceback2/traceback2-1.4.0.tar.gz"
version('1.4.0', '9e9723f4d70bfc6308fa992dd193c400')
depends_on('py-setuptools', type='build')
depends_on('py-linecache2', type=('build', 'run'))
depends_on('py-pbr', type=('build', 'run'))
|
kfeiWang/pythonUtils
|
wordProbability.py
|
Python
|
mit
| 1,782
| 0.003064
|
# -*- coding:utf8 -*-
from __future__ import division
import codecs
import re
def calWordProbability(infile, outfile):
'''
    Compute word probabilities: the probability that a source-language word
    translates into a given target-language word. One source word may map to
    several target words; each mapping is assigned the uniform average.
    infile: input file, format: source word \t target word
    outfile: source word \t target word \t probability
'''
with codecs.open(infile, 'r', 'utf8') as fin:
        # dict used to hold the source -> target word mappings
wordDic = {}
        line = fin.readline()
linNum = 1
while line:
linNum += 1
if linNum % 10001 == 1:
print(linNum, line.encode('utf8'))
            line = line.strip()  # strip whitespace from both ends
            wArr = re.split(r'[ \t]', line)  # split on spaces/tabs; the '|' in the old pattern was a stray literal
            if len(wArr) >= 2:
                key = wArr[0]  # source-language word
                val = wArr[1]  # target-language word
if key in wordDic:
                    wordDic[key][val] = 1
else:
valMap = dict()
valMap[val] = 1
wordDic[key] = valMap
line = fin.readline()
with codecs.open(outfile, 'w', 'utf8') as fout:
print('start write')
wCount = 0
for key in wordDic.keys():
wCount += 1
if(wCount % 1001 == 0):
print('writing', wCount)
if len(key.split(' ')) > 1:
continue
valMap = wordDic[key]
valLen = len(valMap)
for val in valMap.keys():
fout.write(key)
fout.write('\t')
fout.write(val)
fout.write('\t')
fout.write(str(1/valLen))
fout.write('\n')
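# Editor's note: a hedged usage sketch, not part of the original script; the
# file names are placeholders. The input holds one "source<TAB>target" pair per
# line; the output adds a third column with the uniform probability 1/n.
if __name__ == '__main__':
    calWordProbability('word_pairs.txt', 'word_probs.txt')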
|
OCA/bank-statement-reconcile
|
base_transaction_id/models/invoice.py
|
Python
|
agpl-3.0
| 1,321
| 0
|
# Copyright 2011-2012 Nicolas Bessi (Camptocamp)
# Copyright 2012-2015 Yannick Vaucher (Camptocamp)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields, api
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
transaction_id = fields.Char(string='Transaction ID',
index=True,
copy=False,
help="Transaction ID from the "
"financial institute")
    @api.multi
def finalize_invoice_move_lines(self, move_lines):
"""Propagate the transaction_id from the invoice to the move lines.
The transaction ID is written on the move lines only if the account is
        the same as the invoice's.
"""
move_lines = super(AccountInvoice, self).finalize_invoice_move_lines(
move_lines)
for invoice in self:
if invoice.transaction_id:
invoice_account_id = invoice.account_id.id
for line in move_lines:
# line is a tuple (0, 0, {values})
if invoice_account_id == line[2]['account_id']:
line[2]['transaction_ref'] = invoice.transaction_id
return move_lines
|
pstrinkle/drf-coupons
|
coupons/views.py
|
Python
|
apache-2.0
| 8,022
| 0.001247
|
from django.conf import settings
from django.contrib.auth.decorators import user_passes_test
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters, status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from coupons.filters import CouponFilter
from coupons.models import Coupon, ClaimedCoupon
from coupons.serializers import CouponSerializer, ClaimedCouponSerializer
# based on https://djangosnippets.org/snippets/1703/
def group_required(api_command):
"""
This is implemented such that it's default open.
"""
def in_groups(u):
if u.is_authenticated():
            # superusers can do anything
if u.is_superuser:
return True
# coupons have permissions set (I think I may set them by default to remove this check)
if settings.COUPON_PERMISSIONS and api_command in settings.COUPON_PERMISSIONS:
group_names = settings.COUPON_PERMISSIONS[api_command]
# but no group specified, so anyone can.
if len(group_names) == 0:
return True
# group specified, so only those in the group can.
if bool(u.groups.filter(name__in=group_names)):
return True
return False
return user_passes_test(in_groups)
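# Editor's note: an illustrative settings sketch with hypothetical values, not
# part of this module. group_required() and get_redeemed_queryset() read
# settings.COUPON_PERMISSIONS: an empty group list leaves the command open,
# while a non-empty list restricts it to members of the named groups.
#
# COUPON_PERMISSIONS = {
#     'CREATE': ['managers'],  # only members of 'managers' may create coupons
#     'DELETE': ['managers'],
#     'REDEEMED': [],          # empty list: default per-user behavior
# }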
def get_redeemed_queryset(user, coupon_id=None):
"""
    Return a consistent queryset of redeemed coupons across the two endpoints.
"""
api_command = 'REDEEMED'
    # If a coupon isn't specified, get them all.
if coupon_id is None:
qs_all = ClaimedCoupon.objects.all()
qs_some = ClaimedCoupon.objects.filter(user=user.id)
else:
qs_all = ClaimedCoupon.objects.filter(coupon=coupon_id)
qs_some = ClaimedCoupon.objects.filter(coupon=coupon_id, user=user.id)
if user.is_superuser:
return qs_all
if settings.COUPON_PERMISSIONS and api_command in settings.COUPON_PERMISSIONS:
group_names = settings.COUPON_PERMISSIONS[api_command]
        # The setting is left empty, so fall back to the default behavior.
if len(group_names) == 0:
return qs_some
# group specified, so only those in the group can.
if bool(user.groups.filter(name__in=group_names)):
return qs_all
return qs_some
class CouponViewSet(viewsets.ModelViewSet):
"""
API endpoint that lets you create, delete, retrieve coupons.
"""
filter_backends = (filters.SearchFilter, DjangoFilterBackend)
filter_class = CouponFilter
search_fields = ('code', 'code_l')
serializer_class = CouponSerializer
def get_queryset(self):
"""
Return a subset of coupons or all coupons depending on who is asking.
"""
api_command = 'LIST'
qs_all = Coupon.objects.all()
qs_some = Coupon.objects.filter(bound=True, user=self.request.user.id)
if self.request.user.is_superuser:
return qs_all
# This is different from the normal check because it's default closed.
if settings.COUPON_PERMISSIONS and api_command in settings.COUPON_PERMISSIONS:
group_names = settings.COUPON_PERMISSIONS[api_command]
            # The setting is left empty, so fall back to the default behavior.
if len(group_names) == 0:
return qs_some
# group specified, so only those in the group can.
if bool(self.request.user.groups.filter(name__in=group_names)):
return qs_all
return qs_some
@method_decorator(group_required('CREATE'))
def create(self, request, **kwargs):
"""
Create a coupon
"""
serializer = CouponSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@method_decorator(group_required('DELETE'))
def destroy(self, request, pk=None, **kwargs):
"""
Delete the coupon.
"""
coupon = get_object_or_404(Coupon.objects.all(), pk=pk)
coupon.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def partial_update(self, request, pk=None, **kwargs):
return Response(status=status.HTTP_404_NOT_FOUND)
def retrieve(self, request, pk=None, **kwargs):
"""
Anybody can retrieve any coupon.
"""
value_is_int = False
try:
pk = int(pk)
value_is_int = True
except ValueError:
pass
if value_is_int:
coupon = get_object_or_404(Coupon.objects.all(), pk=pk)
else:
coupon = get_object_or_404(Coupon.objects.all(), code_l=pk.lower())
serializer = CouponSerializer(coupon, context={'request': request})
return Response(serializer.data)
@method_decorator(group_required('UPDATE'))
def update(self, request, pk=None, **kwargs):
"""
This forces it to return a 202 upon success instead of 200.
"""
coupon = get_object_or_404(Coupon.objects.all(), pk=pk)
serializer = CouponSerializer(coupon, data=request.data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@detail_route(methods=['get'])
def redeemed(self, request, pk=None, **kwargs):
"""
Convenience endpoint for getting list of claimed instances for a coupon.
"""
coupon = get_object_or_404(Coupon.objects.all(), pk=pk)
qs = get_redeemed_queryset(self.request.user, coupon.id)
serializer = ClaimedCouponSerializer(qs, many=True, context={'request': request})
return Response(serializer.data)
@detail_route(methods=['put'])
def redeem(self, request, pk=None, **kwargs):
"""
Convenience endpoint for redeeming.
"""
queryset = Coupon.objects.all()
coupon = get_object_or_404(queryset, pk=pk)
# Maybe should do coupon.redeem(user).
# if data['expires'] < now():
data = {
'coupon': pk,
'user': self.request.user.id,
}
serializer = ClaimedCouponSerializer(data=data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ClaimedCouponViewSet(viewsets.ModelViewSet):
"""
API endpoint that lets you retrieve claimed coupon details.
"""
filter_backends = (DjangoFilterBackend,)
filter_fields = ('user',)
serializer_class = ClaimedCouponSerializer
def get_queryset(self):
return get_redeemed_queryset(self.request.user)
def create(self, request, **kwargs):
return Response(status=status.HTTP_404_NOT_FOUND)
@method_decorator(group_required('DELETE'))
def destroy(self, request, pk=None, **kwargs):
"""
Basically un-redeem a coupon.
"""
redeemed = get_object_or_404(ClaimedCoupon.objects.all(), pk=pk)
redeemed.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def partial_update(self, request, pk=None, **kwargs):
return Response(status=status.HTTP_404_NOT_FOUND)
def retrieve(self, request, pk=None, **kwargs):
return Response(status=status.HTTP_404_NOT_FOUND)
def update(self, request, pk=None, **kwargs):
return Response(status=status.HTTP_404_NOT_FOUND)
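# Editor's note: a hedged usage sketch, not part of this module; the URL prefix
# below is an assumption about how the viewsets are wired into a router.
# from rest_framework.test import APIClient
# client = APIClient()
# client.force_authenticate(user=some_user)
# response = client.put('/coupons/1/redeem/')  # hypothetical route
# assert response.status_code == 201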
|
botify-labs/moto
|
moto/cloudwatch/urls.py
|
Python
|
apache-2.0
| 164
| 0
|
from .responses import CloudWatchResponse
url_bases = [
"https?://monitoring.(.+).amazonaws.com",
]
url_paths = {
    '{0}/$': CloudWatchResponse.dispatch,
}
|
meatballhat/ansible-inventory-hacks
|
ansible_inventory_hacks/filters/instance_filter.py
|
Python
|
mit
| 1,066
| 0
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
import argparse
import json
import sys
import os
def main(sysargs=sys.argv[:]):
    parser = argparse.ArgumentParser()
parser.add_argument(
'instream', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument(
'-f', '--output-format', choices=['text', 'json'],
default=os.environ.get('FORMAT', 'json'))
args = parser.parse_args(sysargs[1:])
instance_mapped_inv = filter_json(json.load(args.instream))
if args.output_format == 'text':
for key, value in sorted(instance_mapped_inv.items()):
sys.stdout.write('{} {}\n'.format(key, value))
else:
json.dump(instance_mapped_inv, sys.stdout, indent=2)
sys.stdout.write('\n')
return 0
def filter_json(inv):
instance_mapped_inv = {}
for key, values in inv.items():
if not key.startswith('i-'):
continue
instance_mapped_inv[values[0]] = key
return instance_mapped_inv
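# Editor's note: a small worked example, not in the original file. filter_json
# keeps only instance-id keys ('i-...') and maps each group's first host back
# to its instance id; the sample inventory is made up.
# >>> filter_json({'i-abc123': ['10.0.0.5'], 'webservers': ['10.0.0.5']})
# {'10.0.0.5': 'i-abc123'}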
if __name__ == '__main__':
sys.exit(main())
|
tdyas/pants
|
contrib/node/src/python/pants/contrib/node/register.py
|
Python
|
apache-2.0
| 2,805
| 0.001426
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Support for JavaScript and Node.js."""
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.task_registrar import TaskRegistrar as task
from pants.contrib.node.subsystems.resolvers.node_preinstalled_module_resolver import (
NodePreinstalledModuleResolver,
)
from pants.contrib.node.subsystems.resolvers.npm_resolver import NpmResolver
from pants.contrib.node.target_types import (
NodeBundle,
NodeModule,
NodePreinstalledModule,
NodeRemoteModule,
NodeTest,
)
from pants.contrib.node.targets.node_bundle import NodeBundle as NodeBundleV1
from pants.contrib.node.targets.node_module import NodeModule as NodeModuleV1
from pants.contrib.node.targets.node_preinstalled_module import (
NodePreinstalledModule as NodePreinstalledModuleV1,
)
from pants.contrib.node.targets.node_remote_module import NodeRemoteModule as NodeRemoteModuleV1
from pants.contrib.node.targets.node_test import NodeTest as NodeTestTargetV1
from pants.contrib.node.tasks.javascript_style import JavascriptStyleFmt, JavascriptStyleLint
from pants.contrib.node.tasks.node_build import NodeBuild
from pants.contrib.node.tasks.node_bundle import NodeBundle as NodeBundleTask
from pants.contrib.node.tasks.node_install import NodeInstall
from pants.contrib.node.tasks.node_repl import NodeRepl
from pants.contrib.node.tasks.node_resolve import NodeResolve
from pants.contrib.node.tasks.node_run import NodeRun
from pants.contrib.node.tasks.node_test import NodeTest as NodeTestTask
def build_file_aliases():
return BuildFileAliases(
targets={
"node_bundle": NodeBundleV1,
"node_module": NodeModuleV1,
"node_preinstalled_module": NodePreinstalledModuleV1,
"node_remote_module": NodeRemoteModuleV1,
"node_test": NodeTestTargetV1,
},
)
def register_goals():
# Register tasks.
task(name="node", action=NodeRepl).install("repl")
task(name="node", action=NodeResolve).install("resolve")
task(name="node", action=NodeRun).install("run")
task(name="node", action=NodeBuild).install("compile", first=True)
task(name="node", action=NodeTestTask).install("test")
task(name="node", action=NodeBundleTask).install("bundle")
task(name="node-install", action=NodeInstall).install()
# Linting
task(name="javascriptstyle", action=JavascriptStyleLint).install("lint")
task(name="javascriptstyle", action=JavascriptStyleFmt).install("fmt")
def global_subsystems():
return (NodePreinstalledModuleResolver, NpmResolver)
def target_types():
return [NodeBundle, NodeModule, NodePreinstalledModule, NodeRemoteModule, NodeTest]
|
tcstewar/ev3_demo
|
udp_base.py
|
Python
|
gpl-2.0
| 3,050
| 0.00623
|
import nengo
from nengo.dists import Uniform
import nstbot
import numpy as np
import joystick_node
import udp
import time
use_bot = False
if use_bot:
bot = nstbot.EV3Bot()
#bot.connect(nstbot.connection.Socket('192.168.1.160'))
bot.connect(nstbot.connection.Socket('10.162.177.187'))
time.sleep(1)
bot.connection.send('!M+\n')
bot.activate_sensor([1, 2, 3, 4], period=0.05)
synapse = 0.006
msg_period = 0.1
model = nengo.Network(label='EV3 Demo')
with model:
joystick = nengo.Node([0,0,0,0,0,0])#joystick_node.Joystick())
control = nengo.networks.EnsembleArray(n_ensembles = 4, n_neurons=100)
nengo.Connection(joystick[:4], control.input, synapse=None)
motor = nengo.networks.EnsembleArray(n_ensembles = 4, n_neurons=100)
    for ens in motor.ensembles:
ens.intercepts = Uniform(0.05, 0.9)
omni_transform = np.array([[-1, 0, -1], [0.5, 1, -0.5], [1, -1, -1]]).T
nengo.Connection(control.output[[1, 0, 2]], motor.input[:3],
transform=omni_transform * 2, synapse=synapse)
nengo.Connection(control.output[3], motor.input[3], transform=-1,
synapse=synapse)
def bot_motor(t, x):
if use_bot:
bot.motor(1, x[0], msg_period=msg_period)
bot.motor(0, x[1], msg_period=msg_period)
bot.motor(2, x[2], msg_period=msg_period)
if abs(x[3]) > 0:
bot.motor(3, x[3]*0.2, msg_period=msg_period)
else:
bot.motor(3, 0, msg_period=msg_period)
motor_node = nengo.Node(bot_motor, size_in=4)
nengo.Connection(motor.output, motor_node, synapse=synapse)
def sensors(t):
#left = (bot.lego_sensors[0] + bot.lego_sensors[1]) * 0.5
#right = (bot.lego_sensors[2] + bot.lego_sensors[3]) * 0.5
#joystick.output.joystick.set_vibration(left, right)
if use_bot:
return bot.lego_sensors
else:
return [0, 0, 0, 0]
sensor_node = nengo.Node(sensors, size_out=4)
udp_node = nengo.Node(udp.UDP(size_in=4, size_out=4, address='localhost',
in_port=8889, out_port=8888),
size_in=4, size_out=4)
nengo.Connection(sensor_node, udp_node, synapse=None)
nengo.Connection(udp_node, control.input, synapse=synapse)
'''
avoid_inhibit = nengo.Ensemble(n_neurons=50, dimensions=1,
intercepts=Uniform(0.2, 0.9))
nengo.Connection(joystick[5], avoid_inhibit, synapse=None)
nengo.Connection(avoid_inhibit, sensors_ir.neurons, transform=[[-1]]*200,
synapse=0.1)
nengo.Connection(avoid_inhibit, sensors_us.neurons, transform=[[-1]]*200,
synapse=0.1)
'''
if True:
import nengo_viz
viz = nengo_viz.Viz(model)
viz.slider(sensor_node)
viz.value(control.output)
viz.value(motor.output)
viz.raster(motor.ensembles[0].neurons, n_neurons=50)
viz.raster(control.ensembles[0].neurons, n_neurons=10)
viz.start()
|
mitschabaude/nanopores
|
nanopores/geo2xml.py
|
Python
|
mit
| 6,483
| 0.004782
|
import subprocess
from importlib import import_module
import os
import dolfin
import nanopores
from nanopores.tools.utilities import Log
#FIXME: deprecated because of license conflict -> import from dolfin
#from nanopores.meshconvert import convert2xml
MESHDIR = "/tmp/nanopores"
def geofile2geo(code, meta, name=None, clscale=1.):
pid = str(os.getpid())
meshdir = (MESHDIR + "/" + name) if name is not None else MESHDIR
if not os.path.exists(meshdir):
os.makedirs(meshdir)
inputfile = "%s/input%s.geo" % (meshdir, pid)
outfile = "%s/out%s.msh" % (meshdir, pid)
meshfile = "%s/mesh%s.xml" % (meshdir, pid)
xml_sub = "%s/mesh%s_physical_region.xml" % (meshdir, pid)
xml_bou = "%s/mesh%s_facet_region.xml" % (meshdir, pid)
if os.path.exists(xml_sub): os.remove(xml_sub)
if os.path.exists(xml_bou): os.remove(xml_bou)
with Log("executing gmsh..."):
# save code to .geo file
with open(inputfile, "w") as f:
f.write(code)
# after writing the geo file, call gmsh
#gmsh_out = subprocess.call(["gmsh", "-3", "-v", "1",
gmsh_out = subprocess.call(["./gmsh", "-3", "-v", "1", "-format", "msh2",
"-clscale", "%f" %clscale, inputfile, "-o", outfile, "-optimize"])
if gmsh_out != 0:
raise RuntimeError("Gmsh failed in generating this geometry")
with Log("converting to dolfin..."):
subprocess.check_output(["dolfin-convert", outfile, meshfile])
# for debugging:
# convert2xml(outfile, meshfile)
mesh = dolfin.Mesh(meshfile)
with open('%s/meta%s.txt' % (meshdir, pid), 'w') as f:
f.write(repr(meta))
pdom = meta.pop("physical_domain")
pbou = meta.pop("physical_boundary")
subdomains = dolfin.MeshFunction("size_t", mesh, xml_sub) if pdom else None
boundaries = dolfin.MeshFunction("size_t", mesh, xml_bou) if pbou else None
geo = nanopores.Geometry(None, mesh, subdomains, boundaries, pdom, pbou)
return geo
def reconstructgeo(name=None, pid=None, params=None):
    # if pid is None, simply take latest mesh
# if params is not None, check if they agree with meta["params"]
# throw error if no matching mesh is available
    meshdir = (MESHDIR + "/" + name) if name is not None else MESHDIR
if not os.path.exists(meshdir):
raise EnvironmentError("Geometry folder does not exist yet.")
if pid is None:
# get pid of latest mesh
files = os.listdir(meshdir)
mfiles = [f for f in files if f.startswith("input")]
if not mfiles:
raise EnvironmentError("No existing mesh files found.")
latest = max(mfiles, key=lambda f: os.path.getmtime(meshdir + "/" + f))
pid = latest.lstrip("input").rstrip(".geo")
meshfile = "%s/mesh%s.xml" % (meshdir, pid)
if not os.path.exists(meshfile):
raise EnvironmentError(
"No existing mesh files found with pid %s." % pid)
print "Found existing mesh file with pid %s." % pid
with open('%s/meta%s.txt' % (meshdir, pid), "r") as f:
meta = eval(f.read())
if params is not None:
if not params == meta["params"]:
#mparams = meta["params"]
#print {k: v for k, v in params.items() if k not in mparams or mparams[k] != v}
#print {k: v for k, v in mparams.items() if k not in params or params[k] != v}
raise EnvironmentError(
"Mesh file does not have compatible parameters.")
print "Mesh file has compatible parameters."
print "Reconstructing geometry from %s." % meshfile
xml_sub = "%s/mesh%s_physical_region.xml" % (meshdir, pid)
xml_bou = "%s/mesh%s_facet_region.xml" % (meshdir, pid)
mesh = dolfin.Mesh(meshfile)
pdom = meta.pop("physical_domain")
pbou = meta.pop("physical_boundary")
subdomains = dolfin.MeshFunction("size_t", mesh, xml_sub) if pdom else None
boundaries = dolfin.MeshFunction("size_t", mesh, xml_bou) if pbou else None
geo = nanopores.Geometry(None, mesh, subdomains, boundaries, pdom, pbou)
return geo
def generate_mesh(clscale, gid, xml=True, pid="", dim=3, optimize=True, **params):
"""
    Python function that writes a .geo file for the given geometry and an .xml mesh for FEniCS
Input: clscale... scaling of characteristic length in gmsh [float]
gid ... geometry identifier [string]
pid ... optional process id to prevent file access clashes
...
Out: geo_dict... file identifier dictionary + geo_dict
"""
pid = str(os.getpid())
inputfile = "input%s.geo" %pid
outfile = "out%s.msh" %pid
meshfile = "mesh%s.xml" %pid
py4geo = "nanopores.geometries.%s.py4geo" %gid
#exec('from %s import get_geo' %py4geo)
mod = import_module(py4geo)
get_geo = mod.get_geo
# create path/to/nanoporesdata/gid/mesh if not already there
meshdir = os.path.join(nanopores.DATADIR, gid, "mesh")
if not os.path.exists(meshdir):
os.makedirs(meshdir)
fid_dict = {"fid_geo": os.path.join(meshdir, inputfile),
"fid_msh": os.path.join(meshdir, outfile),
}
# save code to .geo file
geo_dict = get_geo(**params)
fobj = open(fid_dict["fid_geo"], "w")
fobj.write(geo_dict["geo_code"])
fobj.close()
del geo_dict["geo_code"]
# after writing the geo file, call gmsh
callstr = ["gmsh", "-%s" %dim, "-v", "1","-clscale", "%f" %clscale,
fid_dict["fid_geo"], "-o", fid_dict["fid_msh"]]
if optimize:
callstr.append("-optimize")
gmsh_out = subprocess.call(callstr)
if gmsh_out != 0:
raise RuntimeError('Gmsh failed in generating this geometry')
if xml:
fid_dict["fid_xml"] = os.path.join(meshdir, meshfile)
subprocess.check_output(["dolfin-convert", fid_dict["fid_msh"], fid_dict["fid_xml"]])
# for debugging:
#convert2xml(fid_dict["fid_msh"], fid_dict["fid_xml"])
# optionally, write metadata to file ("meta" should be dict)
if "meta" in geo_dict:
save(geo_dict["meta"], meshdir, "meta%s" %pid)
geo_dict.update(fid_dict)
return geo_dict
def save(data, dir=".", name="file"):
with open('%s/%s.txt' % (dir,name), 'w') as f:
f.write(repr(data))
# -----
# to test script run '>> python -m nanopores.geo2xml'
if __name__ == '__main__':
params = {"x0": None}
print(generate_mesh(
clscale=7.0, gid="W_2D_geo", xml=False, **params)
)
|
cooncesean/remotestatus
|
remotestatus/urls.py
|
Python
|
mit
| 328
| 0.009146
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('remotestatus.views',
    url(r'^remote-box/(?P<remote_box_id>[0-9]+)/$', 'remote_box_detail', name='rs-remote-box-detail'),
    url(r'^(?P<call_round_id>[0-9]+)/$', 'dashboard', name='rs-dashboard'),
url(r'^$', 'dashboard', name='rs-dashboard'),
)
|
sih4sing5hong5/django-allauth
|
allauth/account/views.py
|
Python
|
mit
| 27,368
| 0.000512
|
from django.core.urlresolvers import reverse, reverse_lazy
from django.http import (HttpResponseRedirect, Http404,
HttpResponsePermanentRedirect)
from django.views.generic.base import TemplateResponseMixin, View, TemplateView
from django.views.generic.edit import FormView
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout as auth_logout
from django.shortcuts import redirect
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.decorators import method_decorator
from ..exceptions import ImmediateHttpResponse
from ..utils import get_form_class, get_request_param, get_current_site
from .utils import (get_next_redirect_url, complete_signup,
get_login_redirect_url, perform_login,
passthrough_next_redirect_url, url_str_to_user_pk)
from .forms import (
AddEmailForm, ChangePasswordForm,
LoginForm, ResetPasswordKeyForm,
    ResetPasswordForm, SetPasswordForm, SignupForm, UserTokenForm)
from .utils import sync_user_email_addresses
from .models import EmailAddress, EmailConfirmation
from . import signals
from . import app_settings
from .adapter import get_adapter
try:
from django.contrib.auth import update_session_auth_hash
except ImportError:
update_session_auth_hash = None
sensitive_post_parameters_m = method_decorator(
sensitive_post_parameters('password', 'password1', 'password2'))
def _ajax_response(request, response, form=None):
if request.is_ajax():
if (isinstance(response, HttpResponseRedirect)
or isinstance(response, HttpResponsePermanentRedirect)):
redirect_to = response['Location']
else:
redirect_to = None
response = get_adapter().ajax_response(request,
response,
form=form,
redirect_to=redirect_to)
return response
class RedirectAuthenticatedUserMixin(object):
def dispatch(self, request, *args, **kwargs):
# WORKAROUND: https://code.djangoproject.com/ticket/19316
self.request = request
# (end WORKAROUND)
if request.user.is_authenticated():
redirect_to = self.get_authenticated_redirect_url()
response = HttpResponseRedirect(redirect_to)
return _ajax_response(request, response)
else:
response = super(RedirectAuthenticatedUserMixin,
self).dispatch(request,
*args,
**kwargs)
return response
def get_authenticated_redirect_url(self):
redirect_field_name = self.redirect_field_name
return get_login_redirect_url(self.request,
url=self.get_success_url(),
redirect_field_name=redirect_field_name)
class AjaxCapableProcessFormViewMixin(object):
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = self.get_form(form_class)
if form.is_valid():
response = self.form_valid(form)
else:
response = self.form_invalid(form)
return _ajax_response(self.request, response, form=form)
class LoginView(RedirectAuthenticatedUserMixin,
AjaxCapableProcessFormViewMixin,
FormView):
form_class = LoginForm
template_name = "account/login.html"
success_url = None
redirect_field_name = "next"
@sensitive_post_parameters_m
def dispatch(self, request, *args, **kwargs):
return super(LoginView, self).dispatch(request, *args, **kwargs)
def get_form_class(self):
return get_form_class(app_settings.FORMS, 'login', self.form_class)
def form_valid(self, form):
success_url = self.get_success_url()
try:
return form.login(self.request, redirect_url=success_url)
except ImmediateHttpResponse as e:
return e.response
def get_success_url(self):
# Explicitly passed ?next= URL takes precedence
ret = (get_next_redirect_url(self.request,
self.redirect_field_name)
or self.success_url)
return ret
def get_context_data(self, **kwargs):
ret = super(LoginView, self).get_context_data(**kwargs)
signup_url = passthrough_next_redirect_url(self.request,
reverse("account_signup"),
self.redirect_field_name)
redirect_field_value = get_request_param(self.request,
self.redirect_field_name)
site = get_current_site(self.request)
ret.update({"signup_url": signup_url,
"site": site,
"redirect_field_name": self.redirect_field_name,
"redirect_field_value": redirect_field_value})
return ret
login = LoginView.as_view()
class CloseableSignupMixin(object):
template_name_signup_closed = "account/signup_closed.html"
def dispatch(self, request, *args, **kwargs):
# WORKAROUND: https://code.djangoproject.com/ticket/19316
self.request = request
# (end WORKAROUND)
try:
if not self.is_open():
return self.closed()
except ImmediateHttpResponse as e:
return e.response
return super(CloseableSignupMixin, self).dispatch(request,
*args,
**kwargs)
def is_open(self):
return get_adapter().is_open_for_signup(self.request)
def closed(self):
response_kwargs = {
"request": self.request,
"template": self.template_name_signup_closed,
}
return self.response_class(**response_kwargs)
class SignupView(RedirectAuthenticatedUserMixin, CloseableSignupMixin,
AjaxCapableProcessFormViewMixin, FormView):
template_name = "account/signup.html"
form_class = SignupForm
redirect_field_name = "next"
success_url = None
@sensitive_post_parameters_m
def dispatch(self, request, *args, **kwargs):
return super(SignupView, self).dispatch(request, *args, **kwargs)
def get_form_class(self):
return get_form_class(app_settings.FORMS, 'signup', self.form_class)
def get_success_url(self):
# Explicitly passed ?next= URL takes precedence
ret = (get_next_redirect_url(self.request,
self.redirect_field_name)
or self.success_url)
return ret
def form_valid(self, form):
user = form.save(self.request)
return complete_signup(self.request, user,
app_settings.EMAIL_VERIFICATION,
self.get_success_url())
def get_context_data(self, **kwargs):
form = kwargs['form']
form.fields["email"].initial = self.request.session \
.get('account_verified_email', None)
ret = super(SignupView, self).get_context_data(**kwargs)
login_url = passthrough_next_redirect_url(self.request,
reverse("account_login"),
self.redirect_field_name)
redirect_field_name = self.redirect_field_name
redirect_field_value = get_request_param(self.request,
redirect_field_name)
ret.update({"login_url": login_url,
"redirect_field_name": redirect_field_name,
"redirect_field_value": redirect_field_value})
return ret
signup = SignupView.as_view()
class ConfirmEmailView(TemplateResponseMixin, View):
def get_template_names(self):
        if self.request.method == 'POST':
|
seraphln/onedrop
|
onedrop/odtasks/tests.py
|
Python
|
gpl-3.0
| 72
| 0.017241
|
# coding=utf8
#
"""
Test cases for the odtasks module
"""
import unittest
|
zellahenderson/PennApps2013
|
src/prettydate.py
|
Python
|
mit
| 1,307
| 0.004591
|
def pretty_date(time=False):
"""
    Get a datetime object or an int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
from datetime import datetime
now = datetime.now()
if type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time,datetime):
diff = now - time
elif not time:
diff = now - now
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
return str( second_diff / 60 ) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
return str( second_diff / 3600 ) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
if day_diff < 31:
return str(day_diff/7) + " weeks ago"
if day_diff < 365:
        return str(day_diff/30) + " months ago"
return str(day_diff/365) + " years ago"
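# Editor's note: a hedged usage sketch, not part of the original file.
if __name__ == '__main__':
    from datetime import datetime, timedelta
    print pretty_date(datetime.now() - timedelta(minutes=5))  # "5 minutes ago"
    print pretty_date(datetime.now() - timedelta(days=2))     # "2 days ago"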
|
alabarga/SocialLearning
|
SocialLearning/pbl.py
|
Python
|
gpl-3.0
| 1,607
| 0.009956
|
enlaces_iniciales = ['http://www.edutopia.org/project-based-learning-history',
'http://bie.org/about/why_pbl',
'http://es.wikipedia.org/wiki/Aprendizaje_basado_en_proyectos',
'http://en.wikipedia.org/wiki/Project-based_learning',
'https://www.youtube.com/watch?v=LMCZvGesRz8',
'http://www.learnnc.org/lp/pages/4753',
'http://www.ascd.org/publications/educational_leadership/sept10/vol68/num01/seven_essentials_for_project-based_learning.aspx',
'http://eric.ed.gov/?q=%22%22&ff1=subActive+Learning',
'http://eric.ed.gov/?q=%22%22&ff1=subStudent+Projects']
from learningobjects.utils.alchemyapi import AlchemyAPI
from learningobjects.utils.parsers import *
from learningobjects.utils.search import *
from ftfy import fix_text
import urllib
url = enlaces_iniciales[0]
texto = ''
tags = set()
for url in enlaces_iniciales:
gp_desc = GooseParser(url).describe()
texto += gp_desc.text
for tag in gp_desc.tags:
tags.add(tag.strip())
texto = fix_text(texto)
more_links = set()
alchemyapi = AlchemyAPI()
response = alchemyapi.keywords("text", texto)
concept = response['keywords'][0]['text']
wiki = Wikipedia()
for res in wiki.search(concept):
more_links.add(res)
google = Google()
for res in google.search('related:'+url):
more_links.add(res)
if len(more_links) > 30:
break
duck = DuckDuckGo()
for link in enlaces_iniciales:
for res in duck.search_related(link):
more_links.add(res)
"""
response = alchemyapi.entities("text", texto)
if response['status'] == 'OK':
noticia.entities = response["entities"]
else:
print response['statusInfo']
"""
|
dudochkin-victor/contextkit
|
sandbox/multithreading-tests/stress-test/provider.py
|
Python
|
lgpl-2.1
| 946
| 0.013742
|
#!/usr/bin/env python2.5
"""A test provider for the stress testing."""
# change registry this often [msec]
registryChangeTimeout = 2017
from ContextKit.flexiprovider import *
import gobject
import time
import os
def update():
t = time.time()
dt = int(1000*(t - round(t)))
gobject.timeout_add(1000 - dt, update)
v = int(round(t))
fp.set('test.int', v)
fp.set('test.int2', v)
print t
return False
pcnt = 0
def chgRegistry():
global pcnt
pcnt += 1
if pcnt % 2:
print "1 provider"
os.system('cp 1provider.cdb tmp.cdb; mv tmp.cdb cache.cdb')
else:
print "2 providers"
os.system('cp 2providers.cdb tmp.cdb; mv tmp.cdb cache.cdb')
return True
gobject.timeout_add(1000, update)
# uncomment this to see the "Bus error" XXX
gobject.timeout_add(registryChangeTimeout, chgRegistry)
fp = Flexiprovider([INT('test.int'), INT('test.int2')], 'my.test.provider', 'session')
fp.run()
|
albertosalmeronunefa/tuconsejocomunal
|
addons/tcc_communal_council/models/family.py
|
Python
|
gpl-3.0
| 24,178
| 0.009822
|
# -*- coding: utf-8 -*-
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import AccessDenied, AccessError, UserError, ValidationError
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT as DF
from odoo.http import request
import uuid
import urlparse
class TccFamily(models.Model):
_name = "tcc.family"
_rec_name = 'name'
_description = 'Familia'
@api.multi
def default_communal_council(self):
list_group_name = []
for name_goup in self.env.user.groups_id:
list_group_name.append(name_goup.name)
if 'Consejo Comunal' in list_group_name:
return self.env['tcc.communal.council'].search([('user_id', '=', self.env.uid)]).id
if 'Vocero' in list_group_name:
return self.env['tcc.communal.council'].search([('communal_council_id.user_id', '=', self.env.user.communal_council_id.user_id.id)]).id
if 'Residente del Consejo Comunal' in list_group_name:
return self.env['tcc.communal.council'].search([('communal_council_id.user_id', '=', self.env.uid)]).id
@api.onchange('name')
def title_string(self):
if self.name:
self.name = self.name.title()
tenancy_data=[
('Propia', 'Propia'),
('Alquilada', 'Alquilada'),
('Compartida','Compartida'),
('Invadida','Invadida'),
('Traspasada','Traspasada'),
('Prestada','Prestada'),
('Other','Otro'),
]
type_dwelling_data=[
('Quinta', 'Quinta'),
('Casa', 'Casa'),
('Apartamento', 'Apartamento'),
('Rancho', 'Rancho'),
('Barraca', 'Barraca'),
        ('Habitacion', 'Habitación'),
('Other','Otro'),
]
name = fields.Char(
string='Nombre de la familia',
readonly=True,
)
code_family = fields.Char(
string='Código de la familia',
readonly=True,
)
    communal_council_id = fields.Many2one(
'tcc.communal.council',
string='Consejo comunal',
default=default_communal_council,
readonly=True,
)
apartment = fields.Char(
string='Apartamento',
)
floor = fields.Char(
string='Piso',
)
house_id = fields.Many2one(
'tcc.dwelling.house',
string='Casa',
)
edifice_id = fields.Many2one(
'tcc.dwelling.edifice',
string='Edificio',
)
tenancy = fields.Selection(
tenancy_data,
string='Forma de Tenencia',
default='Propia',
)
type_dwelling = fields.Selection(
type_dwelling_data,
string='Tipo de Vivienda',
default='Casa',
)
terreno_propio = fields.Selection(
[('Si', 'Si'),
('No', 'No'),],
string='Terreno propio',
)
pertenece_ocv = fields.Selection(
[('Si', 'Si'),
('No', 'No'),],
string='Pertenece a (OCV)',
)
type_walls_ids = fields.Many2many(
'tcc.family.type.walls',
'tcc_family_type_walls_rel',
'family_id',
'type_walls_id',
string='Tipo de pared'
)
type_roof_ids = fields.Many2many(
'tcc.family.type.roof',
'tcc_family_type_roof_rel',
'family_id',
'type_roof_id',
string='Tipo de techo'
)
equipment_dwelling_ids = fields.Many2many(
'tcc.family.dwelling.equipment',
'tcc_family_equipment_dwelling_rel',
'family_id',
'equipment_id',
string='Enseres de la vivienda'
)
salubrity_id = fields.Many2one(
'tcc.family.dwelling.salubrity',
string='Salud de vivienda',
)
pests_dwelling_ids = fields.Many2many(
'tcc.family.dwelling.pests',
'tcc_family_dwelling_pests_rel',
'family_id',
'pest_id',
string='Insectos y roedores'
)
pets_ids = fields.Many2many(
'tcc.family.dwelling.pets',
'tcc_family_dwelling_pets_rel',
'family_id',
'pest_id',
string='Animales domésticos'
)
room_ids = fields.Many2many(
'tcc.family.dwelling.room',
'tcc_family_dwelling_room_rel',
'family_id',
'room_id',
string='Áreas de la vivienda'
)
cant_room = fields.Integer(string='cantidad de habitaciones', )
disease_ids = fields.Many2many(
'tcc.family.disease',
'tcc_family_disease_rel',
'family_id',
'disease_id',
string='Enfermedades en la familia'
)
need_help = fields.Selection(
[('Si', 'Si'),
('No', 'No'),],
string='Necesita ayuda',
help="Necesita ayuda para familiares enfermos"
)
name_help = fields.Char(
string='¿Cuáles ayudas?',
)
commercial_activity_hose = fields.Selection(
[('Si', 'Si'),
('No', 'No'),],
string='Actividad commercial en la vivienda',
)
commercial_activity_ids = fields.Many2many(
'tcc.family.commercial.activity',
'tcc_family_commercial_activity_rel',
'family_id',
'commercial_activity_id',
string='Venta de:'
)
family_income_id = fields.Many2one(
'tcc.family.income',
string='Ingreso familiar',
)
arrival_date = fields.Date(
string='Fecha de llegada a la comunidad',
required=True,
)
person_ids = fields.One2many(
'tcc.persons',
'family_id',
string='Grupo familiar',
help="Casas ubicadas en el sector del Consejo comunal.",
)
children_street = fields.Boolean(
default=False,
string='Niños en la calle'
)
quantity_children_street = fields.Integer(
string='¿Cuántos niños?',
)
indigent = fields.Boolean(
default=False,
string='Indigentes'
)
quantity_indigent = fields.Integer(
string='¿Cuántos indigentes?',
)
terminally_patient = fields.Boolean(
default=False,
string='Enfermos terminales'
)
quantity_terminally_patient = fields.Integer(
string='¿Cuántos enfermos terminales?',
)
handicapped = fields.Boolean(
default=False,
string='Discapacitados'
)
quantity_handicapped = fields.Integer(
string='¿Cuántos Discapacitados?',
)
water_white_ids = fields.Many2many(
'tcc.family.white.water',
'tcc_family_white_water_rel',
'family_id',
'water_id',
string='Aguas Blancas'
)
water_meter = fields.Boolean(
string='Medidor de agua',
)
wastewater_ids = fields.Many2many(
'tcc.family.waste.water',
|
vmalloc/flux
|
flux/timeline.py
|
Python
|
bsd-3-clause
| 5,399
| 0.001297
|
import contextlib
import datetime
import functools
import heapq
import time
from numbers import Number
class Timeline(object):
def __init__(self, start_time=None):
super(Timeline, self).__init__()
current_time = self._real_time()
self._forced_time = None
self._scheduled = []
self._time_factor = 1
self._time_correction = None
if start_time is not None:
self._correct_time(base=start_time)
def is_modified(self):
return self._time_correction is not None
def _real_sleep(self, seconds):
time.sleep(seconds)
def _real_time(self):
return time.time()
def set_time_factor(self, factor):
"""
Sets the time factor -- the factor by which the virtual time advances compared to the real
        time. If set to 0, the virtual time does not progress at all until
        sleeps are performed.
"""
if factor < 0:
raise ValueError("Cannot set negative time factor")
self._correct_time()
self._time_factor = factor
def get_time_factor(self):
"""
Retrieves the current time factor
"""
return self._time_factor
def freeze(self):
"""
Shortcut for :func:`.set_time_factor`(0)
"""
self.set_time_factor(0)
def _correct_time(self, base=None):
current_time = self._real_time()
if base is None:
base = current_time
if self._time_correction is None:
self._time_correction = TimeCorrection(base, current_time)
self._time_correction.virtual_time = self.time()
self._time_correction.real_time = self._real_time()
# shift stems from the previous correction...
self._time_correction.shift = 0
def sleep(self, seconds):
"""
Sleeps a given number of seconds in the virtual timeline
"""
if not isinstance(seconds, Number):
raise ValueError(
"Invalid number of seconds specified: {0!r}".format(seconds))
if seconds < 0:
raise ValueError("Cannot sleep negative number of seconds")
if self._time_factor == 0:
self.set_time(self.time() + seconds)
else:
end_time = self.time() + seconds
while self.time() < end_time:
self._real_sleep(max(0, (end_time - self.time()) / self._time_factor))
self.trigger_past_callbacks()
def sleep_wait_all_scheduled(self):
"""
Sleeps enough time for all scheduled callbacks to occur
"""
while self._scheduled:
self.sleep(max(0, self._scheduled[0].time - self.time()))
def sleep_stop_first_scheduled(self, sleep_seconds):
"""
Sleeps the given amount of time, but wakes up if a scheduled event exists before the destined end time
"""
if self._scheduled:
sleep_seconds = min(
max(0, self._scheduled[0].time - self.time()), sleep_seconds)
self.sleep(sleep_seconds)
def trigger_past_callbacks(self):
current_time = self.time()
while self._scheduled and self._scheduled[0].time <= current_time:
scheduled = heapq.heappop(self._scheduled)
with self._get_forced_time_context(scheduled.time):
scheduled.callback()
def set_time(self, time, allow_backwards=False):
delta = time - self.time()
if delta < 0 and not allow_backwards:
# Can't move time backwards. Not an exception, if using threads.
return
self._time_correction.shift += delta
def time(self):
"""
Gets the virtual time
"""
if self._forced_time is not None:
return self._forced_time
returned = self._real_time()
if self._time_correction is not None:
returned = self._time_correction.virtual_time + self._time_correction.shift + (returned - self._time_correction.real_time) * self._time_factor
return returned
@contextlib.contextmanager
def _get_forced_time_context(self, time):
prev_forced_time = self._forced_time
self._forced_time = time
try:
yield
finally:
self._forced_time = prev_forced_time
def schedule_callback(self, delay, callback, *args, **kwargs):
if delay < 0:
raise ValueError("Cannot schedule negative delays")
item = ScheduledItem(self.time() + delay, functools.partial(callback, *args, **kwargs))
heapq.heappush(self._scheduled, item)
def __repr__(self):
return "<Timeline (@{})>".format(datetime.datetime.fromtimestamp(self.time()).ctime())
class ScheduledItem(object):
def __init__(self, time, callback):
super(ScheduledItem, self).__init__()
self.time = time
self.callback = callback
def __lt__(self, other):
if not isinstance(other, ScheduledItem):
return NotImplemented
return self.time < other.time
class TimeCorrection(object):
"""
Utility class used for keeping records of time shifts or corrections
"""
def __init__(self, virtual_time, real_time):
super(TimeCorrection, self).__init__()
self.virtual_time = virtual_time
self.real_time = real_time
self.shift = 0
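# Editor's note: a hedged usage sketch, not part of the original module. With
# the factor frozen at 0, virtual time advances only through sleep(), so the
# scheduled callback fires without any real waiting.
if __name__ == '__main__':
    timeline = Timeline()
    timeline.freeze()
    fired = []
    timeline.schedule_callback(10, fired.append, 'done')
    timeline.sleep(11)  # jumps virtual time forward and triggers the callback
    assert fired == ['done']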
|
BurtBiel/azure-cli
|
src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/mgmt_avail_set/lib/models/__init__.py
|
Python
|
mit
| 1,439
| 0.00417
|
#---------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#---------------------------------------------------------------------------------------------
#pylint: skip-file
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .deployment_avail_set import DeploymentAvailSet
from .template_link import TemplateLink
from .parameters_link import ParametersLink
from .provider_resource_type import ProviderResourceType
from .provider import Provider
from .basic_dependency import BasicDependency
from .dependency import Dependency
from .deployment_properties_extended import DeploymentPropertiesExtended
from .deployment_extended import DeploymentExtended
from .avail_set_creation_client_enums import (
DeploymentMode,
)
__all__ = [
'DeploymentAvailSet',
'TemplateLink',
'ParametersLink',
'ProviderResourceType',
'Provider',
'BasicDependency',
'Dependency',
'DeploymentPropertiesExtended',
'DeploymentExtended',
'DeploymentMode',
]
|
dsweet04/rekall
|
rekall-agent/rekall_agent/client_actions/tsk_test.py
|
Python
|
gpl-2.0
| 1,298
| 0
|
from rekall import resources
from rekall_agent import testlib
from rekall_agent.client_actions import files
from rekall_agent.client_actions import tsk
class TestTSK(testlib.ClientAcionTest):
def setUp(self):
super(TestTSK, self).setUp()
# Add a fake mount point to the image.
mount_tree_hook = files.MountPointHook(session=self.session)
mount_tree = {}
mount_tree_hook._add_to_tree(
mount_tree, "/mnt/",
resources.get_resource("winexec_img.dd",
package="rekall_agent",
prefix="test_data"),
"ext2")
        self.session.SetParameter("mount_points", mount_tree)
def testTSK(self):
action = tsk.TSKListDirectoryAction(session=self.session)
action.path = "/mnt/a"
action.vfs_location = self.get_test_location("test")
self.assert_baseline("testTSK", list(action.collect()))
def testTSKRecursive(self):
action = tsk.TSKListDirectoryAction(session=self.session)
action.path = "/mnt/a"
action.depth = 2
action.vfs_location = self.get_test_location("test")
self.assert_baseline("testTSKRecursive", list(action.collect()))
if __name__ == "__main__":
testlib.main()
|
danking/hail
|
hail/python/setup-hailtop.py
|
Python
|
mit
| 848
| 0
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='hailtop',
version="0.0.1",
author="Hail Team",
author_email="hail@broadinstitute.org",
description="Top level Hail module.",
url="https://hail.is",
project_urls={
'Documentation': 'https://hail.is/docs/0.2/',
'Repository': 'https://github.com/hail-is/hail',
},
packages=find_packages('.'),
package_dir={
'hailtop': 'hailtop'},
package_data={
'hailtop.hailctl': ['hail_version', 'deploy.yaml']},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
],
python_requires=">=3.6",
entry_points={
'console_scripts': ['hailctl = hailtop.hailctl.__main__:main']
},
setup_requires=["pytest-runner", "wheel"]
)
|
andreaso/ansible
|
lib/ansible/modules/system/ohai.py
|
Python
|
gpl-3.0
| 1,863
| 0.002684
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ohai
short_description: Returns inventory data from I(Ohai)
description:
- Similar to the M(facter) module, this runs the I(Ohai) discovery program
(U(http://wiki.opscode.com/display/chef/Ohai)) on the remote host and
returns JSON inventory data.
I(Ohai) data is a bit more verbose and nested than I(facter).
version_added: "0.6"
options: {}
notes: []
requirements: [ "ohai" ]
author:
- "Ansible Core Team"
- "Michael DeHaan (@mpdehaan)"
'''
EXAMPLES = '''
# Retrieve (ohai) data from all Web servers and store in one-file per host
ansible webservers -m ohai --tree=/tmp/ohaidata
'''
def main():
module = AnsibleModule(
argument_spec = dict()
)
cmd = ["/usr/bin/env", "ohai"]
rc, out, err = module.run_command(cmd, check_rc=True)
module.exit_json(**json.loads(out))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
vimilimiv/weibo-popularity_judge-and-content_optimization
|
数据处理/get_keyword_feature.py
|
Python
|
mit
| 3,446
| 0.016007
|
#-------------------------------------------------------------------------------
# coding=utf8
# Name: Module 1
# Purpose:
#
# Author: zhx
#
# Created: 10/05/2016
# Copyright: (c) zhx 2016
# Licence: <your licence>
#-------------------------------------------------------------------------------
import openpyxl
import jieba
threshold = 2140
popular = 0
def main():
cctv_data = openpyxl.load_workbook("cctv.xlsx")
cctv_keywords = openpyxl.load_workbook("cctv_keywords.xlsx")
cctv_new = openpyxl.Workbook()
new_sheet = cctv_new.active
#print cctv_data.get_sheet_names()
sheet1 = cctv_keywords["Sheet"]
sheet2 = cctv_data["Sheet"]
words = {}
for r in xrange(1,36003):
word = sheet1.cell(row=r,column=1).value
word_min = sheet1.cell(row=r,column=2).value
word_max = sheet1.cell(row=r,column=3).value
word_mean = sheet1.cell(row=r,column=4).value
words[word] = [word_min,word_max,word_mean]
for r in xrange(2,4749):
print r
content = sheet2.cell(row=r,column=3).value
time = sheet2.cell(row=r,column=11).value
like = sheet2.cell(row=r,column=5).value
repost = sheet2.cell(row=r,column=6).value
        if like == '赞':  # '赞' is the untranslated "Like" label
            like = '0'
        if repost == '转发':  # '转发' is the untranslated "Repost" label
            repost = '0'
like_repost = int(like)+int(repost)
if like_repost>threshold:
popular =1
else:
popular =0
hour = int(time[1:3])
minute =int (time[4:])
time = hour*60 + minute
new_sheet.cell(row=r,column=10).value = time
new_sheet.cell(row=r,column=11).value = like_repost
if content ==None:
continue
print r
seg_list = jieba.cut(content, cut_all = True)
wordsplite = ' '.join(seg_list)
wordsplite = wordsplite.split(' ')
maxlike = 0
max_word =''
min_word =''
mean_word=''
minlike = 9999999
tmplist = []
tmpdic ={}
for w in wordsplite:
if words.has_key(w):
tmpdic[w] =int(words[w][2])
tmplist.append(int(words[w][2]))
likes = int(words[w][2])
if likes<minlike:
minlike = likes
min_word = w
if likes>maxlike:
maxlike = likes
max_word = w
else:
continue
if len(tmplist)!=0:
tmplist.sort()
mean = tmplist[int(len(tmplist)/2)]
for w in tmpdic:
if tmpdic[w]==mean:
mean_word =w
if min_word!='':
new_sheet.cell(row=r,column=1).value = words[min_word][0]
new_sheet.cell(row=r,column=2).value = words[min_word][1]
new_sheet.cell(row=r,column=3).value = words[min_word][2]
if max_word!='':
new_sheet.cell(row=r,column=4).value = words[max_word][0]
new_sheet.cell(row=r,column=5).value = words[max_word][1]
new_sheet.cell(row=r,column=6).value = words[max_word][2]
if mean_word!='':
new_sheet.cell(row=r,column=7).value = words[mean_word][0]
new_sheet.cell(row=r,column=8).value = words[mean_word][1]
new_sheet.cell(row=r,column=9).value = words[mean_word][2]
cctv_new.save("train_feature_keyword_reg.xlsx")
main()
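# Sketch of the row layout written above (derived from the cell assignments):
# columns 1-3 hold the min/max/mean like-counts of the word with the lowest
# mean, columns 4-6 the same stats for the word with the highest mean, and
# columns 7-9 for the median word; column 10 is the post time in minutes and
# column 11 the like+repost total that is compared against `threshold` to set
# the `popular` flag.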
|
pythonistas-tw/academy
|
web-api/tonypythoneer/db-exercise/v2/app/views/users/auth.py
|
Python
|
gpl-2.0
| 2,194
| 0.000456
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @first_date 20160129
# @date 20160129
# @version 0.0
"""auth for Users API
"""
from flask import abort
from flask.views import MethodView
from flask.ext.login import login_required, current_user
from sqlalchemy.exc import IntegrityError
from webargs.flaskparser import use_args
from . import users_bp
from ..mixins import RestfulViewMixin
from ...models.users import User
from ...schemas.users import SignupSchema, LoginSchema, ResetPasswordSchema
from ...error_handlers import user_errors
class SignupView(RestfulViewMixin, MethodView):
@use_args(SignupSchema, locations=('json',))
def post(self, args):
user = User(**args)
try:
user.add()
except IntegrityError as err:
err.data = user_errors.USER_ERR_1001_REGISTERED_ACC
raise
return self.get_response(status=201)
class LoginView(RestfulViewMixin, MethodView):
@use_args(LoginSchema, locations=('json',))
def post(self, args):
user = User.authenticate(**args)
if not user:
abort(401)
key = user.login() # It will return key
return self.get_response({"key": key}, status=200)
class LogoutView(RestfulViewMixin, MethodView):
decorators = (login_required,)
def post(self):
user = current_user
user.logout()
return self.get_response(status=200)
class ResetPasswordView(RestfulViewMixin, MethodView):
decorators = (login_required,)
    @use_args(ResetPasswordSchema, locations=('json',))
def put(self, args):
user = current_user
if not user.check_password(args['old_password']):
abort(401)
user.set_password(args['new_password'])
user.update()
return self.get_response(status=200)
# Url patterns: To register views in blueprint
users_bp.add_url_rule('/signup', view_func=SignupView.as_view('signup'))
users_bp.add_url_rule('/login', view_func=LoginView.as_view('login'))
users_bp.add_url_rule('/logout', view_func=LogoutView.as_view('logout'))
users_bp.add_url_rule('/reset_password', view_func=ResetPasswordView.as_view('reset-password'))
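# Illustrative client calls (the URL prefix depends on how users_bp is
# registered on the app, and the JSON field names on SignupSchema/LoginSchema):
#
#   curl -X POST <prefix>/signup -H 'Content-Type: application/json' -d '{...}'
#   curl -X POST <prefix>/login  -H 'Content-Type: application/json' -d '{...}'  # -> {"key": "..."}
#   curl -X POST <prefix>/logout   # requires an authenticated session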
|
LuyaoHuang/patchwatcher
|
patchwatcher2/patchwork/patchwork.py
|
Python
|
lgpl-3.0
| 3,188
| 0.00345
|
import os
import re
import lxml.etree as etree
import subprocess
import urllib2
""" TODO: move patchwatcher in this dir """
def improvemailaddr(strings):
if '<' in strings and '>' in strings:
tmplist = strings[strings.find('<')+1:strings.find('>')].split()
retstrings = "%s@" % tmplist[0]
first = 0
for n in tmplist[1:]:
if first == 0:
first = 1
retstrings += n
else:
retstrings += '.%s' % n
return '%s <%s>' % (strings[:strings.find('<')], retstrings)
def createpatch(htmllink):
returnstr = ""
strings = urllib2.urlopen(htmllink).read().decode("utf-8")
xml = etree.HTML(strings)
try:
lilist = xml.xpath('/html/body/ul/li')
pre = xml.xpath('/html/body/pre')[0]
except:
raise Exception("Fail to parse html")
for i in lilist:
if i.getchildren()[0].text == "From":
author = i.getchildren()[0].tail[2:]
elif i.getchildren()[0].text == "Subject":
subject = i.getchildren()[0].tail[2:]
elif i.getchildren()[0].text == "Date":
date = i.getchildren()[0].tail[2:]
tmpstr = improvemailaddr(author)
if '\r\n\t' in subject:
subject = subject.replace('\r\n\t', ' ')
if '\r\n' in subject:
subject = subject.replace('\r\n', '')
if '\t' in subject:
subject = subject.replace('\t', ' ')
returnstr += 'From: %s\n' % tmpstr
returnstr += 'Date: %s\n' % date
returnstr += 'Subject: %s\n\n' % subject
if pre.getchildren() == []:
if pre.text:
returnstr += pre.text
else:
if pre.text:
returnstr += pre.text
for n in pre.getchildren():
if n.text:
returnstr += n.text
returnstr += n.tail
if "diff --git" not in returnstr:
#this is not a patch
return None, None
return returnstr, subject
def create_patch_set(html_link_list):
def _parseSubject(subject):
""" TODO: move utils in a right place and use it here """
info = ''
labels = []
cleansubj = subject.split(']')[-1]
|
[1:]
if "PATCH" not in subject:
return [info, cleansubj, labels]
for i in re.findall('\[[^\[\]]+\]', subject):
tmplist = i[1:-1].replace('PATCH', ' ').split()
for n in tmplist:
if '/' in n:
info = n
labels.append(n)
return [info, cleansubj, labels]
ret_patch = ''
patch_dict = {}
for html_link in html_link_list:
tmppatch, tmpsubject = createpatch(html_link)
if not tmpsubject:
continue
index, _, _ = _parseSubject(tmpsubject)
if index == '':
""" not sure what happened """
ret_patch += tmppatch
continue
patch_dict[str(index)] = tmppatch
queue = patch_dict.keys()
queue.sort()
for i in queue:
ret_patch += patch_dict[i]
return ret_patch
if __name__ == '__main__':
print createpatch("https://www.redhat.com/archives/libvir-list/2016-June/msg01022.html")
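# Sketch: create_patch_set() stitches an ordered mail series into one patch
# text; the second archive link below is a placeholder for a real follow-up.
#
# series = create_patch_set([
#     "https://www.redhat.com/archives/libvir-list/2016-June/msg01022.html",
#     "https://www.redhat.com/archives/libvir-list/2016-June/msg01023.html",
# ])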
|
snakeleon/YouCompleteMe-x86
|
third_party/ycmd/third_party/JediHTTP/vendor/jedi/test/completion/definition.py
|
Python
|
gpl-3.0
| 1,072
| 0.028918
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
"""Parenthesis closed at next line."""
# Ignore these definitions for a little while, not sure if we really want them.
# python <= 2.5
#? isinstance
isinstance(
)
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
"""Parenthesis closed at same line."""
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
"""Unclosed parenthesis."""
#? isinstance
isinstance(
def x(): pass # acts like EOF
##? isinstance
isinstance(
def x(): pass # acts like EOF
#? isinstance
isinstance(None,
def x(): pass # acts like EOF
##? isinstance
isinstance(None,
|
wei0831/fileorganizer
|
fileorganizer/fanhaorename.py
|
Python
|
mit
| 2,156
| 0.000928
|
#!/usr/bin/python
""" fanhaorename.py
"""
import os
import os.path
import logging
import fileorganizer
from fileorganizer import _helper
from fileorganizer.replacename import _replacename
__author__ = "Jack Chang <wei0831@gmail.com>"
def _tagHelper(tag):
""" TODO
"""
result = ""
for c in tag:
if c.isalpha():
result += "[{0}{1}]".format(c.lower(), c.upper())
else:
result += c
return result
def fanhaorename(work_dir,
tag,
exclude=None,
mode=0,
wetrun=False,
this_name=os.path.basename(__file__)):
""" Batch Rename Fanhao
\b
Args:
work_dir (str): Working Directory
tag (str): Fanhao tag
        exclude (str, optional): Regex string to exclude in matches
mode (int, optional): 0=FILE ONLY, 1=FOLDER ONLY, 2=BOTH
wetrun (bool, optional): Test Run or not
"""
_find_dir = r"(.*)({0})(-|_| )*(\d\d\d)(.*)".format(_tagHelper(tag))
_replace_dir = r"{0}-\4".format(tag)
_find_file = _find_dir + r"(\.(.*))"
_replace_file = _replace_dir + r"\6"
_helper.init_loger()
this_run = "WET" if wetrun else "DRY"
loger = logging.getLogger(this_name)
loger.info("[START] === %s [%s RUN] ===", this_name, this_run)
loger.info("[DO] Rename \"%s\" fanhao in \"%s\"; Mode %s", tag, work_dir,
mode)
if mode in (0, 2): # mode 0 and 2
for item in _replacename(_find_file, _replace_file, work_dir, 0,
exclude):
item.commit() if wetrun else loger.info("%s", item)
    if mode in (1, 2): # mode 1 and 2
for item in _replacename(_find_dir, _replace_dir, work_dir, 1,
exclude):
item.commit() if wetrun else loger.info("%s", item)
loger.info("[END] === %s [%s RUN] ===", this_name, this_run)
if __name__ == "__main
|
__":
fileorganizer.cli.cli_fanhaorename()
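# Sketch of direct use (directory and tag are placeholders): the default is a
# DRY run that only logs planned renames; wetrun=True commits them.
#
# fanhaorename("/path/to/files", "ABC")                        # files only, dry run
# fanhaorename("/path/to/files", "ABC", mode=2, wetrun=True)   # files + folders, commit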
|
kohr-h/odl
|
odl/contrib/tensorflow/examples/tensorflow_layer_ray_transform.py
|
Python
|
mpl-2.0
| 1,582
| 0
|
"""Example of how to convert a RayTransform operator to a tensorflow layer.
This example is similar to ``tensorflow_layer_matrix``, but demonstrates how
more advanced operators, such as a ray transform, can be handled.
"""
from __future__ import print_function
import tensorflow as tf
import numpy as np
import odl
import odl.contrib.tensorflow
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
space = odl.uniform_discr([-64, -64], [64, 64], [128, 128],
dtype='float32')
geometry = odl.tomo.parallel_beam_geometry(space)
ray_transform = odl.tomo.RayTransform(space, geometry)
x = tf.constant(np.asarray(ray_transform.domain.one()))
z = tf.constant(np.asarray(ray_transform.range.one()))
# Create tensorflow layer from odl operator
odl_op_layer = odl.contrib.tensorflow.as_tensorflow_layer(
ray_transform, 'RayTransform')
# Add empty axes for batch and channel
x_reshaped = x[None, ..., None]
z_reshaped = z[None, ..., None]
# Lazily apply operator in tensorflow
y = odl_op_layer(x_reshaped)
# Evaluate using tensorflow
print(y.eval())
# Compare result with pure ODL
print(ray_transform(x.eval()))
# Evaluate the adjoint of the derivative, called gradient in tensorflow
# We need to scale by cell size to get correct value since the derivative
# in tensorflow uses unweighted spaces.
scale = ray_transform.range.cell_volume / ray_transform.domain.cell_volume
print(tf.gradients(y, [x_reshaped], z_reshaped)[0].eval() * scale)
# Compare result with pure ODL
print(ray_transform.derivative(x.eval()).adjoint(z.eval()))
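# The layer behaves like any other tensorflow op, so it can be embedded in a
# larger graph. A minimal sketch of one gradient-descent-style update (the
# step size 0.001 is an arbitrary illustrative choice):
#
# adjoint_applied = tf.gradients(odl_op_layer(x_reshaped), [x_reshaped], z_reshaped)[0]
# x_updated = x_reshaped - 0.001 * scale * adjoint_applied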
|
crypotex/taas
|
taas/user/migrations/0003_change_user_manager.py
|
Python
|
gpl-2.0
| 447
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import taas.user.models
class Migration(migrations.Migration):
dependencies = [
        ('user', '0002_remove_username'),
]
operations = [
migrations.AlterModelManagers(
name='user',
managers=[
                ('objects', taas.user.models.CustomUserManager()),
],
),
]
|
unioslo/cerebrum
|
Cerebrum/modules/tsd/ResourceService.py
|
Python
|
gpl-2.0
| 5,210
| 0.001536
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2013 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Webservice functionality for Resource management in the TSD project.
Resources are registered in Cerebrum, but they are administered by other
systems. To let those systems retrieve the information, we expose it through a
SOAP webservice.
"""
# TODO: check if something could be removed from here:
import random, hashlib
import string, pickle
from mx.DateTime import RelativeDateTime, now
import twisted.python.log
import cereconf
from Cerebrum import Errors
from Cerebrum.Utils import Factory
from Cerebrum.modules.dns import Utils, Subnet, AAAARecord, IPv6Number
from Cerebrum.modules.cis import Utils
log = Utils.SimpleLogger()
class ResourceService(object):
"""The functionality for the Resource service.
Note that this main class should be independent of what server we use. It is
important that each thread gets its own instance of this class, to avoid
race conditions.
Another thing to remember is that database connections should be closed.
This is to avoid having old and idle database connections, as the garbage
collector can't destroy the instances, due to twisted's reuse of threads.
"""
# The default DNS zone to use:
default_zone = 'tsd.usit.no.'
def __init__(self, operator_id):
"""Constructor. Since we are using access control, we need the
authenticated entity's ID as a parameter.
"""
self.db = Factory.get('Database')()
self.db.cl_init(change_program='resource_service')
self.co = Factory.get('Constants')(self.db)
self.finder = Utils.Find(self.db, self.default_zone)
        self.subnet = Subnet.Subnet(self.db)
self.aaaa = AAAARecord.AAAARecord(self.db)
self.ip = IPv6Number.IPv6Number(self.db)
# TODO: could we save work by only using a single, shared object of
# the auth class? It is supposed to be thread safe.
#self.ba = BofhdAuth(self.db)
self.operator_id = operator_id
def close(self):
"""Explicitly close this instance, as python's garbage collector can't
        close the database connections when Twisted is reusing the threads.
"""
if hasattr(self, 'db'):
try:
self.db.close()
except Exception, e:
log.warning("Problems with db.close: %s" % e)
else:
# TODO: this could be removed later, when it is considered stable
log.warning("db doesn't exist")
def search_mac_addresses(self, hostname, mac_address):
"""Search for hostnames and their MAC addresses."""
m_id = a_id = None
if hostname:
a_id = self.finder.find_a_record(hostname)
self.aaaa.clear()
self.aaaa.find(a_id)
if not self.aaaa.mac:
return ()
if mac_address and mac_address != self.aaaa.mac:
return ()
return ((self.aaaa.name, self.aaaa.mac),)
# Return either the complete list of hosts and their MAC addresses, or
# only the host with the given MAC address:
# TODO: What is used? The element 'mac' or IPNumber's 'mac_adr'?
return ((row['name'], row['mac']) for row in self.aaaa.list_ext() if
row['mac'] and (not mac_address or (row['mac'] == mac_address)))
def register_mac_address(self, hostname, mac_address):
"""Register a MAC address for a given host."""
self.aaaa.clear()
a_id = self.finder.find_a_record(hostname)
self.aaaa.find(a_id)
# TODO: do any validation on the MAC address?
self.aaaa.mac = mac_address
self.aaaa.write_db()
self.db.commit()
return True
def get_vlan_info(self, hostname):
"""Get the VLAN info about a given host.
The needed details are VLAN number and net category.
"""
self.subnet.clear()
# Check if hostname is rather an IP address or subnet:
if ':' in hostname:
self.subnet.find(hostname)
else:
a_id = self.finder.find_a_record(hostname)
self.aaaa.clear()
self.aaaa.find(a_id)
self.ip.clear()
self.ip.find(a_id.ip_number)
# bah, now we have the ip address
self.subnet.find(self.ip.aaaa_ip)
return (self.subnet.vlan_number, self.subnet.subnet_mask)
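# Sketch of the intended per-thread use (operator id, hostname and MAC are
# placeholders); each thread builds its own instance and must close it, as the
# class docstring requires:
#
#   service = ResourceService(operator_id=123)
#   try:
#       service.register_mac_address('myhost', 'aa:bb:cc:dd:ee:ff')
#   finally:
#       service.close()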
|
ajinabraham/Mobile-Security-Framework-MobSF
|
MalwareAnalyzer/views/domain_check.py
|
Python
|
gpl-3.0
| 4,055
| 0.002466
|
# -*- coding: utf_8 -*-
# Module for Malware Analysis
from urllib.parse import urlparse
import logging
import shutil
import io
import os
import re
import tempfile
import requests
from django.conf import settings
from MobSF.utils import (
PrintException,
isInternetAvailable,
upstream_proxy,
sha256
)
logger = logging.getLogger(__name__)
# PATH
MALWARE_DB_DIR = TOOLS_DIR = os.path.join(
settings.BASE_DIR, 'MalwareAnalyzer/malwaredb/')
def update_malware_db():
"""Check for update in malware DB"""
try:
proxies, verify = upstream_proxy('http')
except:
PrintException("[ERROR] Setting upstream proxy")
try:
url = "http://www.malwaredomainlist.com/mdlcsv.php"
response = requests.get(url, timeout=3, proxies=proxies, verify=verify)
data = response.content
tmp_dwd = tempfile.NamedTemporaryFile()
tmp_dwd.write(data)
mal_db = os.path.join(MALWARE_DB_DIR, 'malwaredomainlist')
tmp_dwd.seek(0)
# Check1: SHA256 Change
if sha256(tmp_dwd.name) != sha256(mal_db):
# DB needs update
# Check2: DB Syntax Changed
line = tmp_dwd.readline().decode("utf-8", "ignore")
lst = line.split('",')
if len(lst) == 10:
# DB Format is not changed. Let's update DB
logger.info("Updating Malware Database....")
shutil.copyfile(tmp_dwd.name, mal_db)
else:
logger.info("Malware Database format from malwaredomainlist.com changed. Database is not updated. "
"Please report to: https://github.com/MobSF/Mobile-Security-Framework-MobSF/issues")
else:
logger.info("Malware Database is up-to-date.")
tmp_dwd.close()
except:
PrintException("[ERROR] Malware DB Update")
def malware_check(urllist):
result = {}
try:
if settings.DOMAIN_MALWARE_SCAN == False:
logger.info("Domain Malware Check disabled in settings")
return result
domainlist = get_domains(urllist)
if domainlist:
if isInternetAvailable():
update_malware_db()
else:
logger.warning(
"No Internet Connection. Skipping Malware Database Update.")
            mal_db = os.path.join(MALWARE_DB_DIR, 'malwaredomainlist')
with io.open(mal_db, mode='r', encoding="utf8", errors="ignore") as flip:
entry_list = flip.readlines()
for entry in entry_list:
enlist = entry.split('","')
if len(enlist) > 5:
details_dict = dict()
details_dict["domain_or_url"] = enlist[1]
details_dict["ip"] = enlist[2]
details_dict["desc"] = enlist[4]
details_dict["bad"] = "yes"
for domain in domainlist:
if (details_dict["domain_or_url"].startswith(domain) or
details_dict["ip"].startswith(domain)):
result[domain] = details_dict
# Good Domains
for domain in domainlist:
if domain not in result:
tmp_d = dict()
tmp_d["bad"] = "no"
result[domain] = tmp_d
except:
PrintException("[ERROR] Performing Malware Check")
return result
# Helper Functions
def get_domains(urls):
"""Get Domains"""
try:
domains = []
for url in urls:
parsed_uri = urlparse(url)
domain = '{uri.netloc}'.format(uri=parsed_uri)
if ((domain not in domains) and
(len(domain) > 2) and
("." in domain) and
(domain.endswith(".") is False and re.search('[a-zA-Z0-9]', domain))):
domains.append(domain)
return domains
except:
PrintException("[ERROR] Extracting Domain form URL")
|
jstacoder/pycrm
|
level2_pycrm/__init__.py
|
Python
|
bsd-3-clause
| 9,286
| 0.010015
|
# -*- coding: utf-8 -*-
import wtforms
from werkzeug import OrderedMultiDict
from sample_data import ContactsDashboard
from flask import Flask, redirect, url_for, render_template, session, request, flash
from functools import wraps
from flask_dashed.views import get_next_or
from flask_dashed.admin import Admin
from flask_dashed.ext.sqlalchemy import ModelAdminModule, model_form
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.orm import aliased, contains_eager
from login_utils import encrypt_password, check_password
from flask.views import MethodView
app = Flask('level2_pycrm')
app.config['SECRET_KEY'] = 'secret'
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
app.jinja_env.trim_blocks = True
db = SQLAlchemy(app)
db_session = db.session
#session['logged_in'] = False
def is_email(data):
    'returns True if the given data looks like an email address'
    return '@' in data and '.com' in data
def is_unique(itm):
    'returns True if itm is a unique email or name in the database'
def login_required(test):
@wraps(test)
def wrap(*args,**kwargs):
if 'logged_in' in session: #and not session['logged_in']:
return test(*args,**kwargs)
else:
return redirect(url_for('login',next=request.url))
return wrap
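# Illustrative use of the decorator on a protected view (route and template
# names are placeholders):
#
# @app.route('/dashboard')
# @login_required
# def dashboard():
#     return render_template('dashboard.html')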
# view functions
@app.route('/login',methods=["POST","GET"])
def login():
if request.method.upper() == "POST":
pw = encrypt_password(request.form['password'])
username = request.form['username']
session['logged_in'] = True
return redirect(url_for('redirect_to_admin'))
return render_template('login.html')
@app.route('/register',methods=["POST","GET"])
def register():
if request.method.upper() == "POST":
username = request.form['username']
# check_unique_username(username)
# error if not
email = request.form['email']
# same verification above
pw1 = encrypt_password(request.form['password'])
pw2 = request.form['confirm']
if not check_password(pw2,pw1):
            flash("Passwords didn't match, try again")
return redirect(url_for('register'))
else:
attrs = (
('username',username),('email',email),
)
return render_template("verify_registration.html",attrs=attrs)
return render_template('register.html')
@app.route('/verified')
def verified():
session['logged_in'] = True
    flash('Thank you for signing up')
return redirect(url_for('redirect_to_admin'))
@app.teardown_request
def auto_logout(exception=None):
if session.get('logged_in',False):
del(session['logged_in'])
# define model classes for Modules
#class Worker(db.Model):
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
send_info_email = db.Column(db.Boolean())
zone = db.Column(db.Integer,nullable=False)
is_active = db.Column(db.Boolean())
account_num = db.Column(db.String(20),nullable=False)
password = db.Column(db.String(255))
create_password = db.Column(db.Boolean())
def __unicode__(self):
return self.username
def __str__(self):
return self.__unicode__()
class Company(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), unique=True, nullable=False)
zone = db.Column(db.Integer,nullable=False)
account_num = db.Column(db.String(15),nullable=False)
main_contact_id = db.Column(db.Integer,db.ForeignKey(User.id))
contacts_group = db.relationship(User,backref=db.backref("Agencys"))
main_phone = db.Column(db.String(15))
alt_phone = db.Column(db.String(15))
#main_contact_email = db.Column(db.Integer,db.ForeignKey(User.email))
date_created = db.Column(db.String(10))
date_modified = db.Column(db.String(10))
contract_start = db.Column(db.String(10))
contract_end = db.Column(db.String(10))
def __unicode__(self):
return unicode(self.name)
def __repr__(self):
return '<Agency %r>' % self.name
class Warehouse(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
company_id = db.Column(db.Integer, db.ForeignKey(Company.id))
company = db.relationship(Company, backref=db.backref("warehouses"))
def __unicode__(self):
return self.name
def __repr__(self):
return '<Warehouse %r>' % self.name
class Profile(db.Model):
id = db.Column(db.Integer, db.ForeignKey(User.id), primary_key=True)
name = db.Column(db.String(255), nullable=False)
location = db.Column(db.String(255))
company_id = db.Column(db.Integer, db.ForeignKey(Company.id),
nullable=True)
user = db.relationship(User, backref=db.backref("profile",
        remote_side=id, uselist=False, cascade="all, delete-orphan"))
company = db.relationship(Company, backref=db.backref("staff"))
def __unicode__(self):
return self.user.username
user_group = db.Table(
'user_group', db.Model.metadata,
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('group_id', db.Integer, db.ForeignKey('group.id'))
)
class Group(db.Model):
id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), unique=True, nullable=False)
users = db.relationship("User", secondary=user_group,
backref=db.backref("groups", lazy='dynamic'))
def __unicode__(self):
return unicode(self.name)
def __repr__(self):
return '<Group %r>' % self.name
db.drop_all()
db.create_all()
group = Group(name="admins")
db_session.add(group)
company = Company(name="Level 2 Designs",zone=1,main_phone='714-783-6369',account_num="4565")
user = User(username="kyle",zone=1,account_num="222",email="kyle@level2designs.com",password="14wp88",is_active=True)
db_session.add(user)
db_session.add(company)
db_session.commit()
UserForm = model_form(User, db_session)#,exclude=['password'])
CompanyForm = model_form(Company, db_session, exclude=['main_contact_id','date_modified'])
class UserForm(UserForm):
# Embeds OneToOne as FormField
profile = wtforms.FormField(
model_form(Profile, db_session, exclude=['user'],
base_class=wtforms.Form))
class UserModule(ModelAdminModule):
model = User
db_session = db_session
profile_alias = aliased(Profile)
list_fields = OrderedMultiDict((
('id', {'label': 'id', 'column': User.id}),
('username', {'label': 'username', 'column': User.username}),
('email', {'label': 'email address', 'column': User.email}),
('zone', {'label':'Zone', 'column': User.zone}),
('account_num',{'label' : 'Account Number','column': User.account_num}),
('profile.name', {'label': 'name', 'column': profile_alias.name}),
('profile.location', {'label': 'location',
'column': profile_alias.location}),
))
list_title = 'User list'
searchable_fields = ['username', 'profile.name', 'zone','account_num','email'] #,'role']
order_by = ('id', 'desc')
list_query_factory = model.query\
.outerjoin(profile_alias, 'profile')\
        .options(contains_eager('profile', alias=profile_alias))
form_class = UserForm
detail_title = 'User Details'
def create_object(self):
user = self.model()
user.profile = Profile()
return user
class CompanyForm(CompanyForm):
contact = wtforms.FormField(
model_form(User, db_session, exclude=['account_num','profile.name','profile.location'],
base_class=wtforms.Form))
class ContactModule(ModelAdminModule):
model = User
db_session = db_session
form_class = model_form(User, db_session)
class GroupModule(ModelAdminModule):
model = Group
db_session = db_session
form_class = model_form(Group, db_session, only=['name'])
class WarehouseModule(ModelAdminModule):
model = Warehouse
    db_session = db_session
|
flopezag/fiware-backlog
|
kernel/DataFactory.py
|
Python
|
apache-2.0
| 5,015
| 0.002991
|
__author__ = "Manuel Escriche <mev@tid.es>"
import os, pickle, base64, requests
from datetime import datetime
from kconfig import trackersBook, trackersBookByKey
from kconfig import tComponentsBook
from kernel.Jira import JIRA
class DataEngine:
class DataObject:
def __init__(self, name, storage):
self.storage = storage
self.name = name
def save(self, data):
timestamp = datetime.now().strftime("%Y%m%d-%H%M")
filename = 'FIWARE.Engine.{}.{}.pkl'.format(self._type, self.name)
longFilename = os.path.join(self.storage, filename)
with open(longFilename, 'wb') as f:
pickle.dump((timestamp, data), f, pickle.HIGHEST_PROTOCOL)
return filename
def load(self):
filename = 'FIWARE.Engine.{}.{}.pkl'.format(self._type, self.name)
try:
f = open(os.path.join(self.storage, filename), 'rb')
timestamp, data = pickle.load(f)
except FileNotFoundError:
raise
else:
f.close()
return data, timestamp
class Tracker(DataObject):
_type = 'Tracker'
def __init__(self, trackername, storage):
super().__init__(trackername, storage)
class Comp(DataObject):
_type = 'Component'
def __init__(self, cmpname, storage):
super().__init__(cmpname, storage)
class Query(DataObject):
_type = 'Query'
def __init__(self, name, storage):
            super().__init__(name, storage)
def __init__(self, storage):
self.storage = storage
#self.jira = JIRA()
@classmethod
def snapshot(cls, storage):
jira = JIRA()
files = list()
for trackername in trackersBook:
tracker = trackersBook[trackername]
data = jira.getTrackerData(tracker.keystone)
filename = DataEngine.Tracker(trackername, storage).save(data)
files.append(filename)
return files
def getTrackerData(self, tracker_id):
tracker = trackersBookByKey[tracker_id]
return DataEngine.Tracker(tracker.name, self.storage).load()
def saveTrackerData(self, tracker_id, data):
tracker = trackersBookByKey[tracker_id]
DataEngine.Tracker(tracker.name, self.storage).save(data)
def getComponentData(self, cmp_id):
comp = tComponentsBook[cmp_id]
name = '{}-{}'.format(comp.name, cmp_id)
try:
return DataEngine.Comp(name, self.storage).load()
except Exception:
tracker = trackersBookByKey[comp.tracker]
trackerData, timestamp = DataEngine.Tracker(tracker.name, self.storage).load()
data = list()
for item in trackerData:
                try:
                    key = item['fields']['components'][0]['id']
                except Exception:
                    continue
                if cmp_id == key:
                    data.append(item)
return data, timestamp
def saveComponentData(self, cmp_id, data):
cmp = tComponentsBook[cmp_id]
name = '{}-{}'.format(cmp.name, cmp_id)
DataEngine.Comp(name, self.storage).save(data)
def getQueryData(self, name):
return DataEngine.Query(name, self.storage).load()
def saveQueryData(self, name, data):
DataEngine.Query(name, self.storage).save(data)
class DataFactory:
def __init__(self, storage):
self.engine = DataEngine(storage)
def getTrackerData(self, tracker_id):
data, timestamp = self.engine.getTrackerData(tracker_id)
source = 'store'
return data, timestamp, source
def getComponentData(self, cmp_id):
try:
data = JIRA().getComponentData(cmp_id)
self.engine.saveComponentData(cmp_id, data)
timestamp = datetime.now().strftime("%Y%m%d-%H%M")
source = 'jira'
except Exception:
data, timestamp = self.engine.getComponentData(cmp_id)
source = 'store'
return data, timestamp, source
def getQueryData(self, name, jql):
try:
data = JIRA().getQuery(jql)
self.engine.saveQueryData(name, data)
timestamp = datetime.now().strftime("%Y%m%d-%H%M")
source = 'jira'
except:
data, timestamp = self.engine.getQueryData(name)
source = 'store'
return data, timestamp, source
def getTrackerNoComponentData(self, tracker_id):
jql = 'project = {} AND component = EMPTY'.format(tracker_id)
name = '{}-NoComp'.format(tracker_id)
try:
data = JIRA().getQuery(jql)
self.engine.saveQueryData(name, data)
timestamp = datetime.now().strftime("%Y%m%d-%H%M")
source = 'jira'
except Exception:
data, timestamp = self.engine.getQueryData(name)
source = 'store'
return data, timestamp, source
if __name__ == "__main__":
pass
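# Sketch of the jira-with-store-fallback pattern the factory exposes (storage
# path and component id are placeholders):
#
# factory = DataFactory('/var/lib/backlog/store')
# data, timestamp, source = factory.getComponentData('10814')
# # source == 'jira' when the live fetch succeeded, 'store' when the pickled
# # snapshot was used instead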
|