| repo_name | ref | path | copies | content |
|---|---|---|---|---|
gushie/gushpibot
|
refs/heads/master
|
component.py
|
1
|
class Component(object):
def __init__(self):
pass
def cleanup(self):
pass
def check(self):
pass
def update_menu(self, menu):
pass
class EventHandler(object):
def __init__(self):
self.handlers = []
def add(self, func):
self.handlers.append(func)
def remove(self, func):
self.handlers.remove(func)
def fire(self, *args, **kwargs):
for handler in self.handlers:
handler(*args, **kwargs)
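# Usage sketch (added; not part of the original file). EventHandler is a
# minimal observer: fire() invokes every registered callable with its
# arguments. The names below are hypothetical.
if __name__ == "__main__":
    import sys
    events = EventHandler()
    events.add(lambda pin: sys.stdout.write("pressed %d\n" % pin))
    events.fire(17)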
|
Zeromixis/ZexGameEngine
|
refs/heads/master
|
External/freetype/src/tools/cordic.py
|
555
|
# compute arctangent table for CORDIC computations in fttrigon.c
import sys, math
#units = 64*65536.0 # don't change !!
units = 180 * 2**16
scale = units/math.pi
shrink = 1.0
comma = ""
print ""
print "table of arctan( 1/2^n ) for PI = " + repr(units/65536.0) + " units"
for n in range(1,32):
x = 0.5**n # tangent value
angle = math.atan(x) # arctangent
angle2 = round(angle*scale) # arctangent in FT_Angle units
if angle2 <= 0:
break
sys.stdout.write( comma + repr( int(angle2) ) )
comma = ", "
shrink /= math.sqrt( 1 + x*x )
print
print "shrink factor = " + repr( shrink )
print "shrink factor 2 = " + repr( int( shrink * (2**32) ) )
print "expansion factor = " + repr( 1/shrink )
print ""
|
mariansoban/ardupilot
|
refs/heads/Copter-4.0.x-sobi-phl-8m
|
Tools/autotest/build-with-disabled-features.py
|
5
|
#!/usr/bin/env python
from __future__ import print_function
'''
Build ArduPilot with various build-time options enabled or disabled
Usage is straightforward; invoke this script from the root directory
of an ArduPilot checkout:
pbarker@bluebottle:~/rc/ardupilot(build-with-disabled-features)$ ./Tools/autotest/build-with-disabled-features.py
BWFD: Building
Running: ("/home/pbarker/rc/ardupilot/Tools/autotest/autotest.py" "build.ArduCopter") in (.)
lckfile='/home/pbarker/rc/buildlogs/autotest.lck'
.
.
.
>>>> PASSED STEP: build.ArduCopter at Thu Feb 22 09:46:43 2018
check step: build.ArduCopter
BWFD: ADVANCED_FAILSAFE OK
BWFD: Successes: ['MOUNT', 'AUTOTUNE_ENABLED', 'AC_FENCE', 'CAMERA', 'RANGEFINDER_ENABLED', 'PROXIMITY_ENABLED', 'AC_RALLY', 'AC_AVOID_ENABLED', 'AC_TERRAIN', 'PARACHUTE', 'NAV_GUIDED', 'OPTFLOW', 'VISUAL_ODOMETRY_ENABLED', 'FRSKY_TELEM_ENABLED', 'ADSB_ENABLED', 'PRECISION_LANDING', 'SPRAYER', 'WINCH_ENABLED', 'ADVANCED_FAILSAFE']
BWFD: Failures: ['LOGGING_ENABLED']
pbarker@bluebottle:~/rc/ardupilot(build-with-disabled-features)$ q
''' # noqa
import re
import shutil
import subprocess
import sys
from pysim import util
class Builder():
def __init__(self, spec, autotest=False, board=None):
self.config = spec["config"]
self.autotest_build = spec["autotest_target"]
self.target_binary = spec["target_binary"]
# list other features that have to be disabled when a feature
# is disabled (recursion not done; be exhaustive):
self.reverse_deps = spec["reverse-deps"]
self.autotest = autotest
self.board = board
def description(self):
if self.autotest:
return self.autotest_build
if self.target_binary:
return "%s:%s" % (self.board, self.target_binary)
print("Bad config")
sys.exit(1)
def reverse_deps_for_var(self, var):
return self.reverse_deps.get(var, [])
def progress(self, string):
print("BWFD: %s" % string)
def get_config_variables(self):
ret = []
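# (comment added) matches active feature lines like '#define MOUNT ENABLED',
# capturing the feature name and its current state: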
r = (r' *# *define +([A-Z_]+)\s+'
r'(ENABLED|DISABLED|!HAL_MINIMIZE_FEATURES)')
with open(util.reltopdir(self.config)) as fd:
for line in fd:
match = re.match(r, line)
if match is None:
continue
if match.group(1) in ("ENABLE", "DISABLE",
"!HAL_MINIMIZE_FEATURES"):
continue
ret.append((match.group(1), match.group(2)))
return set(ret)
def disable_option_in_config(self, var):
tmpfile = util.reltopdir(self.config) + ".tmp"
shutil.move(self.config, tmpfile)
with open(self.config, 'w+') as out_fd:
with open(util.reltopdir(tmpfile)) as fd:
did_enable = False
for line in fd:
regex = r' *# *define +%s\s+(ENABLED|DISABLED|!HAL_MINIMIZE_FEATURES)' % (var[0],)
match = re.match(regex, line)
if match is not None:
if (match.group(1) in ["ENABLED",
"!HAL_MINIMIZE_FEATURES"]):
fnoo = "DISABLED"
else:
fnoo = "ENABLED"
did_enable = True
line = "#define %s %s\n" % (var[0], fnoo)
out_fd.write(line)
# turn dependencies on or off:
tmpfile = util.reltopdir(self.config) + ".tmp-deps"
shutil.move(self.config, tmpfile)
with open(self.config, 'w+') as out_fd:
with open(util.reltopdir(tmpfile)) as fd:
for line in fd:
things_to_toggle = self.reverse_deps_for_var(var[0])
for thing in things_to_toggle:
regex = r' *# *define +%s\s+(ENABLED|DISABLED|!HAL_MINIMIZE_FEATURES)' % thing
match = re.match(regex, line)
if match is not None:
if did_enable:
fnoo = "ENABLED"
else:
fnoo = "DISABLED"
line = "#define %s %s\n" % (thing, fnoo)
out_fd.write(line)
def backup_config_filepath(self):
return util.reltopdir(self.config) + ".backup"
def backup_config(self):
shutil.copy(self.config, self.backup_config_filepath())
def restore_config(self):
shutil.copy(self.backup_config_filepath(), self.config)
def build_works(self):
self.progress("Building")
if self.autotest:
return self.build_works_autotest()
try:
ret = util.run_cmd(["./waf", "configure", "--board", self.board])
except subprocess.CalledProcessError:
return False
if ret != 0:
return False
try:
ret = util.run_cmd(["./waf", "build", "--target", self.target_binary])
except subprocess.CalledProcessError:
return False
if ret != 0:
return False
return True
def build_works_autotest(self):
autotest = util.reltopdir("Tools/autotest/autotest.py")
try:
ret = util.run_cmd([autotest, self.autotest_build])
except subprocess.CalledProcessError:
return False
return ret == 0
def run(self):
self.progress("Doing: %s" % (self.autotest_build,))
self.backup_config()
successes = []
failures = []
for var in self.get_config_variables():
print("var: %s" % str(var))
self.disable_option_in_config(var)
if self.build_works():
self.progress("%s OK" % var[0])
successes.append(var[0])
else:
self.progress("%s BAD" % var[0])
failures.append(var[0])
self.restore_config()
self.successes = successes
self.failures = failures
self.progress("Successes: %s" % str(successes))
self.progress("Failures: %s" % str(failures))
class BuilderCopter(Builder):
def get_config_variables(self):
ret = []
r = r'//#define ([A-Z_]+)\s+(ENABLED|DISABLED|!HAL_MINIMIZE_FEATURES)'
with open(util.reltopdir(self.config)) as fd:
for line in fd:
print("line: %s" % line)
match = re.match(r, line)
if match is not None:
ret.append(match.group(1))
return ret
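# (comment added) unlike Builder.get_config_variables above, this variant
# scans commented-out '//#define FEATURE ...' lines, i.e. features still
# left at their defaults in the config header.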
# read reverse dep "MODE_AUTO_ENABLED": ["AC_TERRAIN", "MODE_GUIDED"] thusly:
# "if mode-auto is disabled then you must also disable terrain and guided mode"
specs = [
{
"config": 'ArduCopter/config.h',
"autotest_target": "build.ArduCopter",
"target_binary": "bin/arducopter",
"reverse-deps": {
"AC_FENCE": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"PROXIMITY_ENABLED": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"AC_RALLY": ["AC_TERRAIN"],
"MODE_AUTO_ENABLED": ["AC_TERRAIN", "MODE_GUIDED"],
"MODE_RTL_ENABLED": ["MODE_AUTO_ENABLED", "AC_TERRAIN", "MODE_SMARTRTL_ENABLED"],
"BEACON_ENABLED": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"MODE_CIRCLE_ENABLED": ["MODE_AUTO_ENABLED", "AC_TERRAIN"],
"MODE_GUIDED_ENABLED": ["MODE_AUTO_ENABLED",
"AC_TERRAIN",
"ADSB_ENABLED",
"MODE_FOLLOW_ENABLED",
"MODE_GUIDED_NOGPS_ENABLED"],
"AC_AVOID_ENABLED": ["MODE_FOLLOW_ENABLED"],
},
},
{
"config": 'ArduCopter/config.h',
"autotest_target": "build.Helicopter",
"target_binary": "bin/arducopter-heli",
"reverse-deps": {
"AC_FENCE": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"PROXIMITY_ENABLED": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"AC_RALLY": ["AC_TERRAIN"],
"MODE_AUTO_ENABLED": ["AC_TERRAIN", "MODE_GUIDED"],
"MODE_RTL_ENABLED": ["MODE_AUTO_ENABLED", "AC_TERRAIN"],
"BEACON_ENABLED": ["AC_AVOID_ENABLED", "MODE_FOLLOW_ENABLED"],
"MODE_CIRCLE_ENABLED": ["MODE_AUTO_ENABLED", "AC_TERRAIN"],
"MODE_GUIDED_ENABLED": ["MODE_AUTO_ENABLED", "AC_TERRAIN"],
"AC_AVOID_ENABLED": ["MODE_FOLLOW_ENABLED"],
},
},
{
"config": 'ArduPlane/config.h',
"autotest_target": "build.ArduPlane",
"target_binary": "bin/arduplane",
"reverse-deps": {
},
}, {
"config": 'APMrover2/config.h',
"autotest_target": "build.APMrover2",
"target_binary": "bin/ardurover",
"reverse-deps": {
},
}, {
"config": 'ArduSub/config.h',
"autotest_target": "build.ArduSub",
"target_binary": "bin/ardusub",
"reverse-deps": {
"AC_FENCE": ["AVOIDANCE_ENABLED"],
"PROXIMITY_ENABLED": ["AVOIDANCE_ENABLED"],
"AC_RALLY": ["AC_TERRAIN"],
},
}, {
"config": 'AntennaTracker/config.h',
"autotest_target": "build.AntennaTracker",
"target_binary": "bin/antennatracker",
"reverse-deps": {
},
},
]
builders = []
# append autotest builders:
for spec in specs:
builder = Builder(spec, autotest=True)
builder.run()
builders.append(builder)
# append directly-build-by-waf targets
for spec in specs:
for board in ["CubeOrange"]:
builder = Builder(spec, board=board)
builder.run()
builders.append(builder)
print("")
for builder in builders:
print("Builder: %s" % builder.description())
print(" Successes: %s" % builder.successes)
print(" Failures: %s" % builder.failures)
|
sofiabravo103/virtualfarmacy-api
|
refs/heads/master
|
tests/queries/views/test_queries_actions.py
|
1
|
import random
import pytest
import json
from django.urls import reverse
from rest_framework import status
from queries.models import Query
from queries.serializers import QuerySerializer
from tests.factories import QueryFactory, random_keyword_set
@pytest.mark.django_db
def test_create_query(client):
'''Ensure we can create a new query object.'''
url = reverse('queries-list')
keywords = random_keyword_set()
data = {
'intention': random.choice(Query.INTENTION_CHOICES),
'intersect_keywords': random.choice([True, False]),
'keywords': json.dumps(list(keywords)),
'public' : random.choice([True, False])
}
response = client.post(url, data, format='json')
assert response.status_code == status.HTTP_201_CREATED
assert Query.objects.count() == 1
assert Query.objects.get().intention == data['intention']
assert Query.objects.get().intersect_keywords == data['intersect_keywords']
assert Query.objects.get().public == data['public']
tag_names = {tag.name for tag in Query.objects.get().keywords.all()}
assert tag_names == keywords
@pytest.mark.django_db
def test_list_queries(client):
'''Ensure we can list all queries'''
list_size = random.choice(range(1,5))
for _ in range(list_size):
QueryFactory.create()
url = reverse('queries-list')
response = client.get(url)
assert response.status_code == status.HTTP_200_OK
assert len(response.data) == list_size
@pytest.mark.django_db
def test_get_query(client):
'''Ensure we can get a query object.'''
test_query = QueryFactory.create()
url = reverse('query-detail', args=([test_query.id]))
response = client.get(url)
assert response.status_code == status.HTTP_200_OK
assert response.data == QuerySerializer(test_query).data
@pytest.mark.django_db
def test_edit_query(client):
'''Ensure we can edit a query'''
test_query = QueryFactory.create()
keywords = random_keyword_set()
data = {
'intention': random.choice(Query.INTENTION_CHOICES),
'keywords': json.dumps(list(keywords))
}
url = reverse('query-detail', args=([test_query.id]))
response = client.put(url, json.dumps(data), content_type='application/json')
assert response.status_code == status.HTTP_200_OK
edited_tag_names = {tag.name for tag in Query.objects.get().keywords.all()}
assert edited_tag_names == keywords
assert Query.objects.get().intention == data['intention']
@pytest.mark.django_db
def test_delete_query(client):
'''Ensure we can delete a query'''
test_query = QueryFactory.create()
url = reverse('query-detail', args=([test_query.id]))
response = client.delete(url)
assert response.status_code == status.HTTP_204_NO_CONTENT
assert len(Query.objects.all()) == 0
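# Note (added): these tests pass format='json', which is a parameter of
# rest_framework.test.APIClient rather than Django's stock test client, so
# the 'client' fixture is presumably overridden in a conftest.py.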
|
danilito19/django
|
refs/heads/master
|
tests/staticfiles_tests/__init__.py
|
12133432
| |
camillemonchicourt/Geotrek
|
refs/heads/master
|
geotrek/authent/fixtures/__init__.py
|
12133432
| |
zvolsky/muzika
|
refs/heads/master
|
languages/my.py
|
151
|
# coding: utf8
{
'!langcode!': 'my',
'!langname!': 'Malay',
'%d days ago': '%d hari yang lalu',
'%d hours ago': '%d jam yang lalu',
'%d minutes ago': '%d minit yang lalu',
'%d months ago': '%d bulan yang lalu',
'%d seconds ago': '%d saat yang lalu',
'%d seconds from now': '%d saat dari sekarang',
'%d weeks ago': '%d minggu yang lalu',
'%d years ago': '%d tahun yang lalu',
'%s %%{row} deleted': '%s %%{row} dihapuskan',
'%s %%{row} updated': '%s %%{row} dikemas kini',
'%s selected': '%s dipilih',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'(requires internet access, experimental)': '(memerlukan akses internet, percubaan)',
'(something like "it-it")': '(sesuatu seperti "it-it")',
'1 day ago': '1 hari yang lalu',
'1 hour ago': '1 jam yang lalu',
'1 minute ago': '1 minit yang lalu',
'1 month ago': '1 bulan yang lalu',
'1 second ago': '1 saat yang lalu',
'1 week ago': '1 minggu yang lalu',
'1 year ago': '1 tahun yang lalu',
'< Previous': '< Sebelumnya',
'About': 'Mengenai',
'Add': 'Tambah',
'Admin language': 'Bahasa admin',
'Administrator Password:': 'Kata laluan Administrator:',
'Ajax Recipes': 'Resipi Ajax',
'An error occured, please %s the page': 'Kesilapan telah berlaku, sila %s laman',
'And': 'Dan',
'and rename it:': 'dan menamakan itu:',
'are not used yet': 'tidak digunakan lagi',
'Are you sure you want to delete this object?': 'Apakah anda yakin anda mahu memadam ini?',
'Back': 'Kembali',
'Buy this book': 'Beli buku ini',
'cache, errors and sessions cleaned': 'cache, kesilapan dan sesi dibersihkan',
'Cancel': 'Batal',
'Cannot be empty': 'Tidak boleh kosong',
'Change admin password': 'Tukar kata laluan admin',
'Change password': 'Tukar kata laluan',
'Clean': 'Bersihkan',
'Clear': 'Hapus',
'Clear CACHE?': 'Hapus CACHE?',
'Clear DISK': 'Hapus DISK',
'Clear RAM': 'Hapus RAM',
'Click row to expand traceback': 'Klik baris untuk mengembangkan traceback',
'Close': 'Tutup',
'Community': 'Komuniti',
'Components and Plugins': 'Komponen dan Plugin',
'contains': 'mengandung',
'Copyright': 'Hak Cipta',
'Create': 'Buat',
'create file with filename:': 'mencipta fail dengan nama:',
'created by': 'dicipta oleh',
'currently running': 'sedang berjalan',
'data uploaded': 'data diunggah',
'Delete': 'Hapus',
'Delete this file (you will be asked to confirm deletion)': 'Padam fail ini (anda akan diminta untuk mengesahkan pemadaman)',
'Delete:': 'Hapus:',
'design': 'disain',
'direction: ltr': 'arah: ltr',
'Disk Cleared': 'Disk Dihapuskan',
'Documentation': 'Dokumentasi',
"Don't know what to do?": 'Tidak tahu apa yang perlu dilakukan?',
'done!': 'selesai!',
'Download': 'Unduh',
'Duration': 'Tempoh',
'Email : ': 'Emel : ',
'Email sent': 'Emel dihantar',
'enter a valid email address': 'masukkan alamat emel yang benar',
'enter a valid URL': 'masukkan URL yang benar',
'enter a value': 'masukkan data',
'Error': 'Kesalahan',
'Errors': 'Kesalahan',
'export as csv file': 'eksport sebagai file csv',
'Export:': 'Eksport:',
'File': 'Fail',
'filter': 'menapis',
'First Name': 'Nama Depan',
'Forgot username?': 'Lupa nama pengguna?',
'Free Applications': 'Aplikasi Percuma',
'Gender': 'Jenis Kelamin',
'Group %(group_id)s created': 'Kumpulan %(group_id)s dicipta',
'Group uniquely assigned to user %(id)s': 'Kumpulan unik yang diberikan kepada pengguna %(id)s',
'Groups': 'Kumpulan',
'Hello World': 'Halo Dunia',
'Help': 'Bantuan',
'Home': 'Laman Utama',
'How did you get here?': 'Bagaimana kamu boleh di sini?',
'Image': 'Gambar',
'import': 'import',
'Import/Export': 'Import/Eksport',
'includes': 'termasuk',
'Install': 'Pasang',
'Installation': 'Pemasangan',
'Introduction': 'Pengenalan',
'Invalid email': 'Emel tidak benar',
'Language': 'Bahasa',
'languages': 'bahasa',
'Languages': 'Bahasa',
'Last Name': 'Nama Belakang',
'License for': 'lesen untuk',
'loading...': 'sedang memuat...',
'Logged in': 'Masuk',
'Logged out': 'Keluar',
'Login': 'Masuk',
'Logout': 'Keluar',
'Lost Password': 'Lupa Kata Laluan',
'Lost password?': 'Lupa kata laluan?',
'Maintenance': 'Penyelenggaraan',
'Manage': 'Menguruskan',
'Manage Cache': 'Menguruskan Cache',
'models': 'model',
'Models': 'Model',
'Modules': 'Modul',
'modules': 'modul',
'My Sites': 'Laman Saya',
'New': 'Baru',
'New password': 'Kata laluan baru',
'next 100 rows': '100 baris seterusnya',
'Next >': 'Seterusnya >',
'Next Page': 'Laman Seterusnya',
'No ticket_storage.txt found under /private folder': 'Ticket_storage.txt tidak dijumpai di bawah folder /private',
'not a Zip Code': 'bukan Pos',
'Old password': 'Kata laluan lama',
'Online examples': 'Contoh Online',
'Or': 'Atau',
'or alternatively': 'atau sebagai alternatif',
'Or Get from URL:': 'Atau Dapatkan dari URL:',
'or import from csv file': 'atau import dari file csv',
'Other Plugins': 'Plugin Lain',
'Other Recipes': 'Resipi Lain',
'Overview': 'Tinjauan',
'Pack all': 'Mengemaskan semua',
'Password': 'Kata laluan',
'Password changed': 'Kata laluan berubah',
"Password fields don't match": 'Kata laluan tidak sama',
'please input your password again': 'sila masukan kata laluan anda lagi',
'plugins': 'plugin',
'Plugins': 'Plugin',
'Powered by': 'Disokong oleh',
'Preface': 'Pendahuluan',
'previous 100 rows': '100 baris sebelumnya',
'Previous Page': 'Laman Sebelumnya',
'private files': 'fail peribadi',
'Private files': 'Fail peribadi',
'Profile': 'Profil',
'Profile updated': 'Profil dikemaskini',
'Project Progress': 'Kemajuan Projek',
'Quick Examples': 'Contoh Cepat',
'Ram Cleared': 'Ram Dihapuskan',
'Recipes': 'Resipi',
'Register': 'Daftar',
'Registration successful': 'Pendaftaran berjaya',
'reload': 'memuat kembali',
'Reload routes': 'Memuat laluan kembali',
'Remember me (for 30 days)': 'Ingat saya (selama 30 hari)',
'Request reset password': 'Meminta reset kata laluan',
'Rows selected': 'Baris dipilih',
'Running on %s': 'Berjalan pada %s',
'Save model as...': 'Simpan model sebagai ...',
'Save profile': 'Simpan profil',
'Search': 'Cari',
'Select Files to Package': 'Pilih Fail untuk Pakej',
'Send Email': 'Kirim Emel',
'Size of cache:': 'Saiz cache:',
'Solution': 'Penyelesaian',
'starts with': 'bermula dengan',
'static': 'statik',
'Static': 'Statik',
'Statistics': 'Statistik',
'Support': 'Menyokong',
'test': 'ujian',
'There are no plugins': 'Tiada plugin',
'There are no private files': 'Tiada fail peribadi',
'These files are not served, they are only available from within your app': 'Fail-fail ini tidak disampaikan, mereka hanya boleh didapati dari dalam aplikasi anda',
'These files are served without processing, your images go here': 'Ini fail disampaikan tanpa pemprosesan, imej anda di sini',
'This App': 'App Ini',
'Time in Cache (h:m:s)': 'Waktu di Cache (h: m: s)',
'Title': 'Judul',
'To create a plugin, name a file/folder plugin_[name]': 'Untuk mencipta plugin, nama fail/folder plugin_ [nama]',
'too short': 'terlalu pendek',
'Unable to download because:': 'Tidak dapat memuat turun kerana:',
'unable to parse csv file': 'tidak mampu mengurai file csv',
'update all languages': 'mengemaskini semua bahasa',
'Update:': 'Kemas kini:',
'Upgrade': 'Menaik taraf',
'Upload': 'Unggah',
'Upload a package:': 'Unggah pakej:',
'upload file:': 'unggah fail:',
'upload plugin file:': 'unggah fail plugin:',
'User %(id)s Logged-in': 'Pengguna %(id)s Masuk',
'User %(id)s Logged-out': 'Pengguna %(id)s Keluar',
'User %(id)s Password changed': 'Pengguna %(id)s Kata Laluan berubah',
'User %(id)s Password reset': 'Pengguna %(id)s Kata Laluan telah direset',
'User %(id)s Profile updated': 'Pengguna %(id)s Profil dikemaskini',
'User %(id)s Registered': 'Pengguna %(id)s Didaftarkan',
'value not allowed': 'data tidak benar',
'Verify Password': 'Pengesahan Kata Laluan',
'Version': 'Versi',
'Versioning': 'Pembuatan Sejarah',
'View': 'Lihat',
'Views': 'Lihat',
'views': 'Lihat',
'Web Framework': 'Rangka Kerja Web',
'web2py Recent Tweets': 'Tweet terbaru web2py',
'Website': 'Laman Web',
'Welcome': 'Selamat Datang',
'Welcome to web2py!': 'Selamat Datang di web2py!',
'You are successfully running web2py': 'Anda berjaya menjalankan web2py',
'You can modify this application and adapt it to your needs': 'Anda boleh mengubah suai aplikasi ini dan menyesuaikan dengan keperluan anda',
'You visited the url %s': 'Anda melawat url %s',
}
|
m4droid/Restriccion-API
|
refs/heads/master
|
tests/test_models_base_report.py
|
2
|
from .base_tests import BaseTestCase
from restriccion.models.base_report import BaseReport
class TestModelsRestriction(BaseTestCase):
@classmethod
def setUpClass(cls):
super(TestModelsRestriction, cls).setUpClass()
cls.base_report = BaseReport()
def test_get_mongo_collection_not_implemented(self):
self.assertRaises(NotImplementedError, self.base_report.get_mongo_collection)
def test_get_fields_not_implemented(self):
self.assertRaises(NotImplementedError, self.base_report.get_fields)
def test_get_unique_fields_not_implemented(self):
self.assertRaises(NotImplementedError, self.base_report.get_unique_fields)
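# Sketch (added, hypothetical): a concrete report would satisfy BaseReport
# by overriding the three methods exercised above, e.g.:
#
#   class AirQualityReport(BaseReport):
#       def get_mongo_collection(self):
#           return 'air_quality'
#       def get_fields(self):
#           return ['date', 'status']
#       def get_unique_fields(self):
#           return ['date']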
|
bongo-project/bongo
|
refs/heads/master
|
src/libs/python/bongo/external/simpletal/simpleTAL.py
|
1
|
""" simpleTAL Interpreter
Copyright (c) 2005 Colin Stewart (http://www.owlfish.com/)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
If you make any bug fixes or feature enhancements please let me know!
The classes in this module implement the TAL language, expanding
both XML and HTML templates.
Module Dependencies: logging, simpleTALES, simpleTALTemplates
"""
try:
import logging
except:
import bongo.external.simpletal.DummyLogger as logging
import xml.sax, cgi, StringIO, codecs, re, types, copy, sys
import bongo.external.simpletal.sgmlentitynames as sgmlentitynames
import bongo.external.simpletal as simpletal
import bongo.external.simpletal.FixedHTMLParser as FixedHTMLParser
__version__ = simpletal.__version__
try:
# Is PyXML's LexicalHandler available?
from xml.sax.saxlib import LexicalHandler
use_lexical_handler = 1
except ImportError:
use_lexical_handler = 0
class LexicalHandler:
pass
try:
# Is PyXML's DOM2SAX available?
import xml.dom.ext.Dom2Sax
use_dom2sax = 1
except ImportError:
use_dom2sax = 0
import bongo.external.simpletal.simpleTALES as simpleTALES
# Name-space URIs
METAL_NAME_URI="http://xml.zope.org/namespaces/metal"
TAL_NAME_URI="http://xml.zope.org/namespaces/tal"
# All commands are of the form (opcode, args, commandList)
# The numbers are the opcodes, and also the order of priority
# Argument: [(isLocalFlag (Y/n), variableName, variablePath),...]
TAL_DEFINE = 1
# Argument: expression, endTagSymbol
TAL_CONDITION = 2
# Argument: (varname, expression, endTagSymbol)
TAL_REPEAT = 3
# Argument: (replaceFlag, type, expression)
TAL_CONTENT = 4
# Not used in byte code, only ordering.
TAL_REPLACE = 5
# Argument: [(attributeName, expression)]
TAL_ATTRIBUTES = 6
# Argument: expression
TAL_OMITTAG = 7
# Argument: (originalAttributeList, currentAttributeList)
TAL_START_SCOPE = 8
# Argument: String to output
TAL_OUTPUT = 9
# Argument: None
TAL_STARTTAG = 10
# Argument: Tag, omitTagFlag
TAL_ENDTAG_ENDSCOPE = 11
# Argument: None
TAL_NOOP = 13
# METAL Starts here
# Argument: expression, slotParams, endTagSymbol
METAL_USE_MACRO = 14
# Argument: macroName, endTagSymbol
METAL_DEFINE_SLOT=15
# Only used for parsing
METAL_FILL_SLOT=16
METAL_DEFINE_MACRO=17
METAL_NAME_REGEX = re.compile ("[a-zA-Z_][a-zA-Z0-9_]*")
SINGLETON_XML_REGEX = re.compile (r'^<[^\s/>]+(?:\s*[^=>]+="[^">]+")*\s*/>')
ENTITY_REF_REGEX = re.compile (r'(?:&[a-zA-Z][\-\.a-zA-Z0-9]*[^\-\.a-zA-Z0-9])|(?:&#[xX]?[a-eA-E0-9]*[^0-9a-eA-E])')
# The list of elements in HTML that can not have end tags - done as a dictionary for fast
# lookup.
HTML_FORBIDDEN_ENDTAG = {'AREA': 1, 'BASE': 1, 'BASEFONT': 1, 'BR': 1, 'COL': 1
,'FRAME': 1, 'HR': 1, 'IMG': 1, 'INPUT': 1, 'ISINDEX': 1
,'LINK': 1, 'META': 1, 'PARAM': 1}
# List of element:attribute pairs that can use minimized form in HTML
HTML_BOOLEAN_ATTS = {'AREA:NOHREF': 1, 'IMG:ISMAP': 1, 'OBJECT:DECLARE': 1
, 'INPUT:CHECKED': 1, 'INPUT:DISABLED': 1, 'INPUT:READONLY': 1, 'INPUT:ISMAP': 1
, 'SELECT:MULTIPLE': 1, 'SELECT:DISABLED': 1
, 'OPTGROUP:DISABLED': 1
, 'OPTION:SELECTED': 1, 'OPTION:DISABLED': 1
, 'TEXTAREA:DISABLED': 1, 'TEXTAREA:READONLY': 1
, 'BUTTON:DISABLED': 1, 'SCRIPT:DEFER': 1}
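# (comment added) e.g. with minimizeBooleanAtts set, INPUT:CHECKED lets
# tagAsTextMinimizeAtts emit '<input checked>' rather than
# '<input checked="checked">'.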
class TemplateInterpreter:
def __init__ (self):
self.programStack = []
self.commandList = None
self.symbolTable = None
self.slotParameters = {}
self.commandHandler = {}
self.commandHandler [TAL_DEFINE] = self.cmdDefine
self.commandHandler [TAL_CONDITION] = self.cmdCondition
self.commandHandler [TAL_REPEAT] = self.cmdRepeat
self.commandHandler [TAL_CONTENT] = self.cmdContent
self.commandHandler [TAL_ATTRIBUTES] = self.cmdAttributes
self.commandHandler [TAL_OMITTAG] = self.cmdOmitTag
self.commandHandler [TAL_START_SCOPE] = self.cmdStartScope
self.commandHandler [TAL_OUTPUT] = self.cmdOutput
self.commandHandler [TAL_STARTTAG] = self.cmdOutputStartTag
self.commandHandler [TAL_ENDTAG_ENDSCOPE] = self.cmdEndTagEndScope
self.commandHandler [METAL_USE_MACRO] = self.cmdUseMacro
self.commandHandler [METAL_DEFINE_SLOT] = self.cmdDefineSlot
self.commandHandler [TAL_NOOP] = self.cmdNoOp
def tagAsText (self, (tag,atts), singletonFlag=0):
""" This returns a tag as text.
"""
result = ["<"]
result.append (tag)
for attName, attValue in atts:
result.append (' ')
result.append (attName)
result.append ('="')
result.append (cgi.escape (attValue, quote=1))
result.append ('"')
if (singletonFlag):
result.append (" />")
else:
result.append (">")
return "".join (result)
def initialise (self, context, outputFile):
self.context = context
self.file = outputFile
def cleanState (self):
self.scopeStack = []
self.programCounter = 0
self.movePCForward = None
self.movePCBack = None
self.outputTag = 1
self.originalAttributes = {}
self.currentAttributes = []
# Used in repeat only.
self.repeatAttributesCopy = []
self.currentSlots = {}
self.repeatVariable = None
self.tagContent = None
# tagState flag as to whether there are any local variables to pop
self.localVarsDefined = 0
# Pass in the parameters
self.currentSlots = self.slotParameters
def popProgram (self):
vars, self.commandList, self.symbolTable = self.programStack.pop()
self.programCounter,self.scopeStack,self.slotParameters,self.currentSlots, self.movePCForward,self.movePCBack,self.outputTag,self.originalAttributes,self.currentAttributes,self.repeatVariable,self.tagContent,self.localVarsDefined = vars
def pushProgram (self):
vars = (self.programCounter
,self.scopeStack
,self.slotParameters
,self.currentSlots
,self.movePCForward
,self.movePCBack
,self.outputTag
,self.originalAttributes
,self.currentAttributes
,self.repeatVariable
,self.tagContent
,self.localVarsDefined)
self.programStack.append ((vars,self.commandList, self.symbolTable))
def execute (self, template):
self.cleanState()
self.commandList, self.programCounter, programLength, self.symbolTable = template.getProgram()
cmndList = self.commandList
while (self.programCounter < programLength):
cmnd = cmndList [self.programCounter]
#print "PC: %s - Executing command: %s" % (str (self.programCounter), str (cmnd))
self.commandHandler[cmnd[0]] (cmnd[0], cmnd[1])
def cmdDefine (self, command, args):
""" args: [(isLocalFlag (Y/n), variableName, variablePath),...]
Define variables in either the local or global context
"""
foundLocals = 0
for isLocal, varName, varPath in args:
result = self.context.evaluate (varPath, self.originalAttributes)
if (isLocal):
if (not foundLocals):
foundLocals = 1
self.context.pushLocals ()
self.context.setLocal (varName, result)
else:
self.context.addGlobal (varName, result)
self.localVarsDefined = foundLocals
self.programCounter += 1
def cmdCondition (self, command, args):
""" args: expression, endTagSymbol
Conditionally continues with execution of all content contained
by it.
"""
result = self.context.evaluate (args[0], self.originalAttributes)
#~ if (result is None or (not result)):
conditionFalse = 0
if (result is None):
conditionFalse = 1
else:
if (not result): conditionFalse = 1
try:
temp = len (result)
if (temp == 0): conditionFalse = 1
except:
# Result is not a sequence.
pass
if (conditionFalse):
# Nothing to output - evaluated to false.
self.outputTag = 0
self.tagContent = None
self.programCounter = self.symbolTable[args[1]]
return
self.programCounter += 1
def cmdRepeat (self, command, args):
""" args: (varName, expression, endTagSymbol)
Repeats anything in the cmndList
"""
if (self.repeatVariable is not None):
# We are already part way through a repeat
# Restore any attributes that might have been changed.
if (self.currentAttributes != self.repeatAttributesCopy):
self.currentAttributes = copy.copy (self.repeatAttributesCopy)
self.outputTag = 1
self.tagContent = None
self.movePCForward = None
try:
self.repeatVariable.increment()
self.context.setLocal (args[0], self.repeatVariable.getCurrentValue())
self.programCounter += 1
return
except IndexError, e:
# We have finished the repeat
self.repeatVariable = None
self.context.removeRepeat (args[0])
# The locals were pushed in context.addRepeat
self.context.popLocals()
self.movePCBack = None
# Suppress the final close tag and content
self.tagContent = None
self.outputTag = 0
self.programCounter = self.symbolTable [args[2]]
# Restore the state of repeatAttributesCopy in case we are nested.
self.repeatAttributesCopy = self.scopeStack.pop()
return
# The first time through this command
result = self.context.evaluate (args[1], self.originalAttributes)
if (result is not None and result == simpleTALES.DEFAULTVALUE):
# Leave everything un-touched.
self.programCounter += 1
return
try:
# We have three options: either the result is a natural sequence, an iterator, or something that can produce an iterator.
isSequence = len (result)
if (isSequence):
# Only setup if we have a sequence with length
self.repeatVariable = simpleTALES.RepeatVariable (result)
else:
# Delete the tags and their contents
self.outputTag = 0
self.programCounter = self.symbolTable [args[2]]
return
except:
# Not a natural sequence, can it produce an iterator?
if (hasattr (result, "__iter__") and callable (result.__iter__)):
# We can get an iterator!
self.repeatVariable = simpleTALES.IteratorRepeatVariable (result.__iter__())
elif (hasattr (result, "next") and callable (result.next)):
# Treat as an iterator
self.repeatVariable = simpleTALES.IteratorRepeatVariable (result)
else:
# Just a plain object, let's not loop
# Delete the tags and their contents
self.outputTag = 0
self.programCounter = self.symbolTable [args[2]]
return
try:
curValue = self.repeatVariable.getCurrentValue()
except IndexError, e:
# The iterator ran out of values before we started - treat as an empty list
self.outputTag = 0
self.repeatVariable = None
self.programCounter = self.symbolTable [args[2]]
return
# We really do want to repeat - so lets do it
self.movePCBack = self.programCounter
self.context.addRepeat (args[0], self.repeatVariable, curValue)
# We keep the old state of the repeatAttributesCopy for nested loops
self.scopeStack.append (self.repeatAttributesCopy)
# Keep a copy of the current attributes for this tag
self.repeatAttributesCopy = copy.copy (self.currentAttributes)
self.programCounter += 1
def cmdContent (self, command, args):
""" args: (replaceFlag, structureFlag, expression, endTagSymbol)
Expands content
"""
result = self.context.evaluate (args[2], self.originalAttributes)
if (result is None):
if (args[0]):
# Only output tags if this is a content not a replace
self.outputTag = 0
# Output none of our content or the existing content, but potentially the tags
self.movePCForward = self.symbolTable [args[3]]
self.programCounter += 1
return
elif (not result == simpleTALES.DEFAULTVALUE):
# We have content, so let's suppress the natural content and output this!
if (args[0]):
self.outputTag = 0
self.tagContent = (args[1], result)
self.movePCForward = self.symbolTable [args[3]]
self.programCounter += 1
return
else:
# Default, let's just run through as normal
self.programCounter += 1
return
def cmdAttributes (self, command, args):
""" args: [(attributeName, expression)]
Add, leave, or remove attributes from the start tag
"""
attsToRemove = {}
newAtts = []
for attName, attExpr in args:
resultVal = self.context.evaluate (attExpr, self.originalAttributes)
if (resultVal is None):
# Remove this attribute from the current attributes
attsToRemove [attName]=1
elif (not resultVal == simpleTALES.DEFAULTVALUE):
# We have a value - let's use it!
attsToRemove [attName]=1
if (isinstance (resultVal, types.UnicodeType)):
escapedAttVal = resultVal
elif (isinstance (resultVal, types.StringType)):
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
escapedAttVal = unicode (resultVal, 'ascii')
else:
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
escapedAttVal = unicode (resultVal)
newAtts.append ((attName, escapedAttVal))
# Copy over the old attributes
for oldAttName, oldAttValue in self.currentAttributes:
if (not attsToRemove.has_key (oldAttName)):
newAtts.append ((oldAttName, oldAttValue))
self.currentAttributes = newAtts
# Evaluate all other commands
self.programCounter += 1
def cmdOmitTag (self, command, args):
""" args: expression
Conditionally turn off tag output
"""
result = self.context.evaluate (args, self.originalAttributes)
if (result is not None and result):
# Turn tag output off
self.outputTag = 0
self.programCounter += 1
def cmdOutputStartTag (self, command, args):
# Args: tagName
tagName, singletonTag = args
if (self.outputTag):
if (self.tagContent is None and singletonTag):
self.file.write (self.tagAsText ((tagName, self.currentAttributes), 1))
else:
self.file.write (self.tagAsText ((tagName, self.currentAttributes)))
if (self.movePCForward is not None):
self.programCounter = self.movePCForward
return
self.programCounter += 1
return
def cmdEndTagEndScope (self, command, args):
# Args: tagName, omitFlag, singletonTag
if (self.tagContent is not None):
contentType, resultVal = self.tagContent
if (contentType):
if (isinstance (resultVal, Template)):
# We have another template in the context, evaluate it!
# Save our state!
self.pushProgram()
resultVal.expandInline (self.context, self.file, self)
# Restore state
self.popProgram()
# End of the macro expansion (if any) so clear the parameters
self.slotParameters = {}
else:
if (isinstance (resultVal, types.UnicodeType)):
self.file.write (resultVal)
elif (isinstance (resultVal, types.StringType)):
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
self.file.write (unicode (resultVal, 'ascii'))
else:
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
self.file.write (unicode (resultVal))
else:
if (isinstance (resultVal, types.UnicodeType)):
self.file.write (cgi.escape (resultVal))
elif (isinstance (resultVal, types.StringType)):
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
self.file.write (cgi.escape (unicode (resultVal, 'ascii')))
else:
# THIS IS NOT A BUG!
# Use Unicode in the Context object if you are not using Ascii
self.file.write (cgi.escape (unicode (resultVal)))
if (self.outputTag and not args[1]):
# Do NOT output end tag if a singleton with no content
if not (args[2] and self.tagContent is None):
self.file.write ('</' + args[0] + '>')
if (self.movePCBack is not None):
self.programCounter = self.movePCBack
return
if (self.localVarsDefined):
self.context.popLocals()
self.movePCForward,self.movePCBack,self.outputTag,self.originalAttributes,self.currentAttributes,self.repeatVariable,self.tagContent,self.localVarsDefined = self.scopeStack.pop()
self.programCounter += 1
def cmdOutput (self, command, args):
self.file.write (args)
self.programCounter += 1
def cmdStartScope (self, command, args):
""" args: (originalAttributes, currentAttributes)
Pushes the current state onto the stack, and sets up the new state
"""
self.scopeStack.append ((self.movePCForward
,self.movePCBack
,self.outputTag
,self.originalAttributes
,self.currentAttributes
,self.repeatVariable
,self.tagContent
,self.localVarsDefined))
self.movePCForward = None
self.movePCBack = None
self.outputTag = 1
self.originalAttributes = args[0]
self.currentAttributes = args[1]
self.repeatVariable = None
self.tagContent = None
self.localVarsDefined = 0
self.programCounter += 1
def cmdNoOp (self, command, args):
self.programCounter += 1
def cmdUseMacro (self, command, args):
""" args: (macroExpression, slotParams, endTagSymbol)
Evaluates the expression, if it resolves to a SubTemplate it then places
the slotParams into currentSlots and then jumps to the end tag
"""
value = self.context.evaluate (args[0], self.originalAttributes)
if (value is None):
# Don't output anything
self.outputTag = 0
# Output none of our content or the existing content
self.movePCForward = self.symbolTable [args[2]]
self.programCounter += 1
return
if (not value == simpleTALES.DEFAULTVALUE and isinstance (value, SubTemplate)):
# We have a macro, so let's use it
self.outputTag = 0
self.slotParameters = args[1]
self.tagContent = (1, value)
# NOTE: WE JUMP STRAIGHT TO THE END TAG, NO OTHER TAL/METAL COMMANDS ARE EVALUATED.
self.programCounter = self.symbolTable [args[2]]
return
else:
# Default, let's just run through as normal
self.programCounter += 1
return
def cmdDefineSlot (self, command, args):
""" args: (slotName, endTagSymbol)
If the slotName is filled then that is used, otherwise the original content
is used.
"""
if (self.currentSlots.has_key (args[0])):
# This slot is filled, so replace us with that content
self.outputTag = 0
self.tagContent = (1, self.currentSlots [args[0]])
# Output none of our content or the existing content
# NOTE: NO FURTHER TAL/METAL COMMANDS ARE EVALUATED
self.programCounter = self.symbolTable [args[1]]
return
# Slot isn't filled, so just use our own content
self.programCounter += 1
return
class HTMLTemplateInterpreter (TemplateInterpreter):
def __init__ (self, minimizeBooleanAtts = 0):
TemplateInterpreter.__init__ (self)
self.minimizeBooleanAtts = minimizeBooleanAtts
if (minimizeBooleanAtts):
# Override the tagAsText method for this instance
self.tagAsText = self.tagAsTextMinimizeAtts
def tagAsTextMinimizeAtts (self, (tag,atts), singletonFlag=0):
""" This returns a tag as text.
"""
result = ["<"]
result.append (tag)
upperTag = tag.upper()
for attName, attValue in atts:
if (HTML_BOOLEAN_ATTS.has_key ('%s:%s' % (upperTag, attName.upper()))):
# We should output a minimised boolean value
result.append (' ')
result.append (attName)
else:
result.append (' ')
result.append (attName)
result.append ('="')
result.append (cgi.escape (attValue, quote=1))
result.append ('"')
if (singletonFlag):
result.append (" />")
else:
result.append (">")
return "".join (result)
class Template:
def __init__ (self, commands, macros, symbols, doctype = None):
self.commandList = commands
self.macros = macros
self.symbolTable = symbols
self.doctype = doctype
# Setup the macros
for macro in self.macros.values():
macro.setParentTemplate (self)
# Setup the slots
for cmnd, arg in self.commandList:
if (cmnd == METAL_USE_MACRO):
# Set the parent of each slot
slotMap = arg[1]
for slot in slotMap.values():
slot.setParentTemplate (self)
def expand (self, context, outputFile, outputEncoding=None, interpreter=None):
""" This method will write to the outputFile, using the encoding specified,
the expanded version of this template. The context passed in is used to resolve
all expressions with the template.
"""
# This method must wrap outputFile if required by the encoding, and write out
# any template pre-amble (DTD, Encoding, etc)
self.expandInline (context, outputFile, interpreter)
def expandInline (self, context, outputFile, interpreter=None):
""" Internally used when expanding a template that is part of a context."""
if (interpreter is None):
ourInterpreter = TemplateInterpreter()
ourInterpreter.initialise (context, outputFile)
else:
ourInterpreter = interpreter
try:
ourInterpreter.execute (self)
except UnicodeError, unierror:
logging.error ("UnicodeError caused by placing a non-Unicode string in the Context object.")
raise simpleTALES.ContextContentException ("Found non-unicode string in Context!")
def getProgram (self):
""" Returns a tuple of (commandList, startPoint, endPoint, symbolTable) """
return (self.commandList, 0, len (self.commandList), self.symbolTable)
def __str__ (self):
result = "Commands:\n"
index = 0
for cmd in self.commandList:
if (cmd[0] != METAL_USE_MACRO):
result = result + "\n[%s] %s" % (str (index), str (cmd))
else:
result = result + "\n[%s] %s, (%s{" % (str (index), str (cmd[0]), str (cmd[1][0]))
for slot in cmd[1][1].keys():
result = result + "%s: %s" % (slot, str (cmd[1][1][slot]))
result = result + "}, %s)" % str (cmd[1][2])
index += 1
result = result + "\n\nSymbols:\n"
for symbol in self.symbolTable.keys():
result = result + "Symbol: " + str (symbol) + " points to: " + str (self.symbolTable[symbol]) + ", which is command: " + str (self.commandList[self.symbolTable[symbol]]) + "\n"
result = result + "\n\nMacros:\n"
for macro in self.macros.keys():
result = result + "Macro: " + str (macro) + " value of: " + str (self.macros[macro])
return result
class SubTemplate (Template):
""" A SubTemplate is part of another template, and is used for the METAL implementation.
The two uses for this class are:
1 - metal:define-macro results in a SubTemplate that is the macro
2 - metal:fill-slot results in a SubTemplate that is a parameter to metal:use-macro
"""
def __init__ (self, startRange, endRangeSymbol):
""" The parentTemplate is the template for which we are a sub-template.
The startRange and endRange are indexes into the parent template's command list,
and define the range of commands that we can execute
"""
Template.__init__ (self, [], {}, {})
self.startRange = startRange
self.endRangeSymbol = endRangeSymbol
def setParentTemplate (self, parentTemplate):
self.parentTemplate = parentTemplate
self.commandList = parentTemplate.commandList
self.symbolTable = parentTemplate.symbolTable
def getProgram (self):
""" Returns a tuple of (commandList, startPoint, endPoint, symbolTable) """
return (self.commandList, self.startRange, self.symbolTable[self.endRangeSymbol]+1, self.symbolTable)
def __str__ (self):
endRange = self.symbolTable [self.endRangeSymbol]
result = "SubTemplate from %s to %s\n" % (str (self.startRange), str (endRange))
return result
class HTMLTemplate (Template):
"""A specialised form of a template that knows how to output HTML
"""
def __init__ (self, commands, macros, symbols, doctype = None, minimizeBooleanAtts = 0):
self.minimizeBooleanAtts = minimizeBooleanAtts
Template.__init__ (self, commands, macros, symbols, doctype = doctype)
def expand (self, context, outputFile, outputEncoding="ISO-8859-1",interpreter=None):
""" This method will write to the outputFile, using the encoding specified,
the expanded version of this template. The context passed in is used to resolve
all expressions with the template.
"""
# This method must wrap outputFile if required by the encoding, and write out
# any template pre-amble (DTD, Encoding, etc)
encodingFile = codecs.lookup (outputEncoding)[3](outputFile, 'replace')
self.expandInline (context, encodingFile, interpreter)
def expandInline (self, context, outputFile, interpreter=None):
""" Ensure we use the HTMLTemplateInterpreter"""
if (interpreter is None):
ourInterpreter = HTMLTemplateInterpreter(minimizeBooleanAtts = self.minimizeBooleanAtts)
ourInterpreter.initialise (context, outputFile)
else:
ourInterpreter = interpreter
Template.expandInline (self, context, outputFile, ourInterpreter)
class XMLTemplate (Template):
"""A specialised form of a template that knows how to output XML
"""
def __init__ (self, commands, macros, symbols, doctype = None):
Template.__init__ (self, commands, macros, symbols)
self.doctype = doctype
def expand (self, context, outputFile, outputEncoding="iso-8859-1", docType=None, suppressXMLDeclaration=0,interpreter=None):
""" This method will write to the outputFile, using the encoding specified,
the expanded version of this template. The context passed in is used to resolve
all expressions with the template.
"""
# This method must wrap outputFile if required by the encoding, and write out
# any template pre-amble (DTD, Encoding, etc)
# Write out the XML prolog
encodingFile = codecs.lookup (outputEncoding)[3](outputFile, 'replace')
if (not suppressXMLDeclaration):
if (outputEncoding.lower() != "utf-8"):
encodingFile.write ('<?xml version="1.0" encoding="%s"?>\n' % outputEncoding.lower())
else:
encodingFile.write ('<?xml version="1.0"?>\n')
if not docType and self.doctype:
docType = self.doctype
if docType:
encodingFile.write (docType)
encodingFile.write ('\n')
self.expandInline (context, encodingFile, interpreter)
class TemplateCompiler:
def __init__ (self):
""" Initialise a template compiler.
"""
self.commandList = []
self.tagStack = []
self.symbolLocationTable = {}
self.macroMap = {}
self.endTagSymbol = 1
self.commandHandler = {}
self.commandHandler [TAL_DEFINE] = self.compileCmdDefine
self.commandHandler [TAL_CONDITION] = self.compileCmdCondition
self.commandHandler [TAL_REPEAT] = self.compileCmdRepeat
self.commandHandler [TAL_CONTENT] = self.compileCmdContent
self.commandHandler [TAL_REPLACE] = self.compileCmdReplace
self.commandHandler [TAL_ATTRIBUTES] = self.compileCmdAttributes
self.commandHandler [TAL_OMITTAG] = self.compileCmdOmitTag
# Metal commands
self.commandHandler [METAL_USE_MACRO] = self.compileMetalUseMacro
self.commandHandler [METAL_DEFINE_SLOT] = self.compileMetalDefineSlot
self.commandHandler [METAL_FILL_SLOT] = self.compileMetalFillSlot
self.commandHandler [METAL_DEFINE_MACRO] = self.compileMetalDefineMacro
# Default namespaces
self.setTALPrefix ('tal')
self.tal_namespace_prefix_stack = []
self.metal_namespace_prefix_stack = []
self.tal_namespace_prefix_stack.append ('tal')
self.setMETALPrefix ('metal')
self.metal_namespace_prefix_stack.append ('metal')
self.log = logging.getLogger ("simpleTAL.TemplateCompiler")
def setTALPrefix (self, prefix):
self.tal_namespace_prefix = prefix
self.tal_namespace_omittag = '%s:omit-tag' % self.tal_namespace_prefix
self.tal_attribute_map = {}
self.tal_attribute_map ['%s:attributes'%prefix] = TAL_ATTRIBUTES
self.tal_attribute_map ['%s:content'%prefix]= TAL_CONTENT
self.tal_attribute_map ['%s:define'%prefix] = TAL_DEFINE
self.tal_attribute_map ['%s:replace'%prefix] = TAL_REPLACE
self.tal_attribute_map ['%s:omit-tag'%prefix] = TAL_OMITTAG
self.tal_attribute_map ['%s:condition'%prefix] = TAL_CONDITION
self.tal_attribute_map ['%s:repeat'%prefix] = TAL_REPEAT
def setMETALPrefix (self, prefix):
self.metal_namespace_prefix = prefix
self.metal_attribute_map = {}
self.metal_attribute_map ['%s:define-macro'%prefix] = METAL_DEFINE_MACRO
self.metal_attribute_map ['%s:use-macro'%prefix] = METAL_USE_MACRO
self.metal_attribute_map ['%s:define-slot'%prefix] = METAL_DEFINE_SLOT
self.metal_attribute_map ['%s:fill-slot'%prefix] = METAL_FILL_SLOT
def popTALNamespace (self):
newPrefix = self.tal_namespace_prefix_stack.pop()
self.setTALPrefix (newPrefix)
def popMETALNamespace (self):
newPrefix = self.metal_namespace_prefix_stack.pop()
self.setMETALPrefix (newPrefix)
def tagAsText (self, (tag,atts), singletonFlag=0):
""" This returns a tag as text.
"""
result = ["<"]
result.append (tag)
for attName, attValue in atts:
result.append (' ')
result.append (attName)
result.append ('="')
result.append (cgi.escape (attValue, quote=1))
result.append ('"')
if (singletonFlag):
result.append (" />")
else:
result.append (">")
return "".join (result)
def getTemplate (self):
template = Template (self.commandList, self.macroMap, self.symbolLocationTable)
return template
def addCommand (self, command):
if (command[0] == TAL_OUTPUT and (len (self.commandList) > 0) and self.commandList[-1][0] == TAL_OUTPUT):
# We can combine output commands
self.commandList[-1] = (TAL_OUTPUT, self.commandList[-1][1] + command[1])
else:
self.commandList.append (command)
def addTag (self, tag, tagProperties={}):
""" Used to add a tag to the stack. Various properties can be passed in the dictionary
as being information required by the tag.
Currently supported properties are:
'command' - The (command,args) tuple associated with this command
'originalAtts' - The original attributes that include any metal/tal attributes
'endTagSymbol' - The symbol associated with the end tag for this element
'popFunctionList' - A list of functions to execute when this tag is popped
'singletonTag' - A boolean to indicate that this is a singleton tag
"""
# Add the tag to the tagStack (list of tuples (tag, properties, useMacroLocation))
self.log.debug ("Adding tag %s to stack" % tag[0])
command = tagProperties.get ('command',None)
originalAtts = tagProperties.get ('originalAtts', None)
singletonTag = tagProperties.get ('singletonTag', 0)
if (command is not None):
if (command[0] == METAL_USE_MACRO):
self.tagStack.append ((tag, tagProperties, len (self.commandList)+1))
else:
self.tagStack.append ((tag, tagProperties, None))
else:
self.tagStack.append ((tag, tagProperties, None))
if (command is not None):
# All tags that have a TAL attribute on them start with a 'start scope'
self.addCommand((TAL_START_SCOPE, (originalAtts, tag[1])))
# Now we add the TAL command
self.addCommand(command)
else:
# It's just a straight output, so create an output command and append it
self.addCommand((TAL_OUTPUT, self.tagAsText (tag, singletonTag)))
def popTag (self, tag, omitTagFlag=0):
""" omitTagFlag is used to control whether the end tag should be included in the
output or not. In HTML 4.01 there are several tags which should never have
end tags, this flag allows the template compiler to specify that these
should not be output.
"""
while (len (self.tagStack) > 0):
oldTag, tagProperties, useMacroLocation = self.tagStack.pop()
endTagSymbol = tagProperties.get ('endTagSymbol', None)
popCommandList = tagProperties.get ('popFunctionList', [])
singletonTag = tagProperties.get ('singletonTag', 0)
for func in popCommandList:
apply (func, ())
self.log.debug ("Popped tag %s off stack" % oldTag[0])
if (oldTag[0] == tag[0]):
# We've found the right tag, now check to see if we have any TAL commands on it
if (endTagSymbol is not None):
# We have a command (it's a TAL tag)
# Note where the end tag symbol should point (i.e. the next command)
self.symbolLocationTable [endTagSymbol] = len (self.commandList)
# We need a "close scope and tag" command
self.addCommand((TAL_ENDTAG_ENDSCOPE, (tag[0], omitTagFlag, singletonTag)))
return
elif (omitTagFlag == 0 and singletonTag == 0):
# We are popping off an un-interesting tag, just add the close as text
self.addCommand((TAL_OUTPUT, '</' + tag[0] + '>'))
return
else:
# We are suppressing the output of this tag, so just return
return
else:
# We have a different tag, which means something like <br> which never closes is in
# between us and the real tag.
# If the tag that we did pop off has a command though it means un-balanced TAL tags!
if (endTagSymbol is not None):
# ERROR
msg = "TAL/METAL Elements must be balanced - found close tag %s expecting %s" % (tag[0], oldTag[0])
self.log.error (msg)
raise TemplateParseException (self.tagAsText(oldTag), msg)
self.log.error ("Close tag %s found with no corresponding open tag." % tag[0])
raise TemplateParseException ("</%s>" % tag[0], "Close tag encountered with no corresponding open tag.")
def parseStartTag (self, tag, attributes, singletonElement=0):
# Note down the tag we are handling, it will be used for error handling during
# compilation
self.currentStartTag = (tag, attributes)
# Look for tal/metal attributes
foundTALAtts = []
foundMETALAtts = []
foundCommandsArgs = {}
cleanAttributes = []
originalAttributes = {}
tagProperties = {}
popTagFuncList = []
TALElementNameSpace = 0
prefixToAdd = ""
tagProperties ['singletonTag'] = singletonElement
# Determine whether this element is in either the METAL or TAL namespace
if (tag.find (':') > 0):
# We have a namespace involved, so let's look to see if it's one of ours
namespace = tag[0:tag.find (':')]
if (namespace == self.metal_namespace_prefix):
TALElementNameSpace = 1
prefixToAdd = self.metal_namespace_prefix +":"
elif (namespace == self.tal_namespace_prefix):
TALElementNameSpace = 1
prefixToAdd = self.tal_namespace_prefix +":"
if (TALElementNameSpace):
# We should treat this as an implicit omit-tag
foundTALAtts.append (TAL_OMITTAG)
# Will go to default, i.e. yes
foundCommandsArgs [TAL_OMITTAG] = ""
for att, value in attributes:
originalAttributes [att] = value
if (TALElementNameSpace and not att.find (':') > 0):
# This means that the attribute name does not have a namespace, so use the prefix for this tag.
commandAttName = prefixToAdd + att
else:
commandAttName = att
self.log.debug ("Command name is now %s" % commandAttName)
if (att[0:5] == "xmlns"):
# We have a namespace declaration.
prefix = att[6:]
if (value == METAL_NAME_URI):
# It's a METAL namespace declaration
if (len (prefix) > 0):
self.metal_namespace_prefix_stack.append (self.metal_namespace_prefix)
self.setMETALPrefix (prefix)
# We want this function called when the scope ends
popTagFuncList.append (self.popMETALNamespace)
else:
# We don't allow METAL/TAL to be declared as a default
msg = "Can not use METAL name space by default, a prefix must be provided."
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
elif (value == TAL_NAME_URI):
# TAL this time
if (len (prefix) > 0):
self.tal_namespace_prefix_stack.append (self.tal_namespace_prefix)
self.setTALPrefix (prefix)
# We want this function called when the scope ends
popTagFuncList.append (self.popTALNamespace)
else:
# We don't allow METAL/TAL to be declared as a default
msg = "Can not use TAL name space by default, a prefix must be provided."
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
else:
# It's nothing special, just an ordinary namespace declaration
cleanAttributes.append ((att, value))
elif (self.tal_attribute_map.has_key (commandAttName)):
# It's a TAL attribute
cmnd = self.tal_attribute_map [commandAttName]
if (cmnd == TAL_OMITTAG and TALElementNameSpace):
self.log.warn ("Supressing omit-tag command present on TAL or METAL element")
else:
foundCommandsArgs [cmnd] = value
foundTALAtts.append (cmnd)
elif (self.metal_attribute_map.has_key (commandAttName)):
# It's a METAL attribute
cmnd = self.metal_attribute_map [commandAttName]
foundCommandsArgs [cmnd] = value
foundMETALAtts.append (cmnd)
else:
cleanAttributes.append ((att, value))
tagProperties ['popFunctionList'] = popTagFuncList
# This might be just content
if ((len (foundTALAtts) + len (foundMETALAtts)) == 0):
# Just content, add it to the various stacks
self.addTag ((tag, cleanAttributes), tagProperties)
return
# Create a symbol for the end of the tag - we don't know what the offset is yet
self.endTagSymbol += 1
tagProperties ['endTagSymbol'] = self.endTagSymbol
# Sort the METAL commands
foundMETALAtts.sort()
# Sort the tags by priority
foundTALAtts.sort()
# We handle the METAL before the TAL
allCommands = foundMETALAtts + foundTALAtts
firstTag = 1
for talAtt in allCommands:
# Parse and create a command for each
cmnd = self.commandHandler [talAtt](foundCommandsArgs[talAtt])
if (cmnd is not None):
if (firstTag):
# The first one needs to add the tag
firstTag = 0
tagProperties ['originalAtts'] = originalAttributes
tagProperties ['command'] = cmnd
self.addTag ((tag, cleanAttributes), tagProperties)
else:
# All others just append
self.addCommand(cmnd)
if (firstTag):
tagProperties ['originalAtts'] = originalAttributes
tagProperties ['command'] = (TAL_STARTTAG, (tag, singletonElement))
self.addTag ((tag, cleanAttributes), tagProperties)
else:
# Add the start tag command in as a child of the last TAL command
self.addCommand((TAL_STARTTAG, (tag,singletonElement)))
def parseEndTag (self, tag):
""" Just pop the tag and related commands off the stack. """
self.popTag ((tag,None))
def parseData (self, data):
# Just add it as an output
self.addCommand((TAL_OUTPUT, data))
def compileCmdDefine (self, argument):
# Compile a define command, resulting argument is:
# [(isLocalFlag (Y/n), variableName, variablePath),...]
# Break up the list of defines first
commandArgs = []
# We only want to match semi-colons that are not escaped
argumentSplitter = re.compile ('(?<!;);(?!;)')
for defineStmt in argumentSplitter.split (argument):
# remove any leading space and un-escape any semi-colons
defineStmt = defineStmt.lstrip().replace (';;', ';')
# Break each defineStmt into pieces "[local|global] varName expression"
stmtBits = defineStmt.split (' ')
isLocal = 1
if (len (stmtBits) < 2):
# Error, badly formed define command
msg = "Badly formed define command '%s'. Define commands must be of the form: '[local|global] varName expression[;[local|global] varName expression]'" % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Assume to start with that >2 elements means a local|global flag
if (len (stmtBits) > 2):
if (stmtBits[0] == 'global'):
isLocal = 0
varName = stmtBits[1]
expression = ' '.join (stmtBits[2:])
elif (stmtBits[0] == 'local'):
varName = stmtBits[1]
expression = ' '.join (stmtBits[2:])
else:
# Must be a space in the expression that caused the >3 thing
varName = stmtBits[0]
expression = ' '.join (stmtBits[1:])
else:
# Only two bits
varName = stmtBits[0]
expression = ' '.join (stmtBits[1:])
commandArgs.append ((isLocal, varName, expression))
return (TAL_DEFINE, commandArgs)
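# Illustrative example of the define grammar handled above (paths are
# hypothetical): 'global user request/user; title string:Hello'
# -> [(0, 'user', 'request/user'), (1, 'title', 'string:Hello')]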
def compileCmdCondition (self, argument):
# Compile a condition command, resulting argument is:
# path, endTagSymbol
# Sanity check
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! condition commands must be of the form: 'path'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
return (TAL_CONDITION, (argument, self.endTagSymbol))
def compileCmdRepeat (self, argument):
# Compile a repeat command, resulting argument is:
# (varname, expression, endTagSymbol)
attProps = argument.split (' ')
if (len (attProps) < 2):
# Error, badly formed repeat command
msg = "Badly formed repeat command '%s'. Repeat commands must be of the form: 'localVariable path'" % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
varName = attProps [0]
expression = " ".join (attProps[1:])
return (TAL_REPEAT, (varName, expression, self.endTagSymbol))
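# Illustrative example (hypothetical path): 'item articles/list'
# -> (TAL_REPEAT, ('item', 'articles/list', endTagSymbol))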
def compileCmdContent (self, argument, replaceFlag=0):
# Compile a content command, resulting argument is
# (replaceFlag, structureFlag, expression, endTagSymbol)
# Sanity check
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! content/replace commands must be of the form: 'path'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
structureFlag = 0
attProps = argument.split (' ')
if (len(attProps) > 1):
if (attProps[0] == "structure"):
structureFlag = 1
express = " ".join (attProps[1:])
elif (attProps[0] == "text"):
structureFlag = 0
express = " ".join (attProps[1:])
else:
# It's not a type selection after all - assume it's part of the path
express = argument
else:
express = argument
return (TAL_CONTENT, (replaceFlag, structureFlag, express, self.endTagSymbol))
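# Illustrative examples of the type-prefix handling above (hypothetical paths):
# 'structure page/body' -> structureFlag 1, expression 'page/body'
# 'text page/title' -> structureFlag 0, expression 'page/title'
# 'some/path' -> expression 'some/path' (no type prefix given)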
def compileCmdReplace (self, argument):
return self.compileCmdContent (argument, replaceFlag=1)
def compileCmdAttributes (self, argument):
# Compile tal:attributes into attribute command
# Argument: [(attributeName, expression)]
# Break up the list of attribute settings first
commandArgs = []
# We only want to match semi-colons that are not escaped
argumentSplitter = re.compile ('(?<!;);(?!;)')
for attributeStmt in argumentSplitter.split (argument):
# remove any leading space and un-escape any semi-colons
attributeStmt = attributeStmt.lstrip().replace (';;', ';')
# Break each attributeStmt into name and expression
stmtBits = attributeStmt.split (' ')
if (len (stmtBits) < 2):
# Error, badly formed attributes command
msg = "Badly formed attributes command '%s'. Attributes commands must be of the form: 'name expression[;name expression]'" % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
attName = stmtBits[0]
attExpr = " ".join (stmtBits[1:])
commandArgs.append ((attName, attExpr))
return (TAL_ATTRIBUTES, commandArgs)
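# Illustrative example of the escaping handled above (hypothetical paths):
# 'href link/url;;fragment; class cssClass'
# -> [('href', 'link/url;fragment'), ('class', 'cssClass')]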
def compileCmdOmitTag (self, argument):
# Compile a condition command, resulting argument is:
# path
# If no argument is given then set the path to default
if (len (argument) == 0):
expression = "default"
else:
expression = argument
return (TAL_OMITTAG, expression)
# METAL compilation commands go here
def compileMetalUseMacro (self, argument):
# Sanity check
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! use-macro commands must be of the form: 'use-macro: path'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
cmnd = (METAL_USE_MACRO, (argument, {}, self.endTagSymbol))
self.log.debug ("Returning METAL_USE_MACRO: %s" % str (cmnd))
return cmnd
def compileMetalDefineMacro (self, argument):
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! define-macro commands must be of the form: 'define-macro: name'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Check that the name of the macro is valid
if (METAL_NAME_REGEX.match (argument).end() != len (argument)):
msg = "Macro name %s is invalid." % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
if (self.macroMap.has_key (argument)):
msg = "Macro name %s is already defined!" % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# The macro starts at the next command.
macro = SubTemplate (len (self.commandList), self.endTagSymbol)
self.macroMap [argument] = macro
return None
def compileMetalFillSlot (self, argument):
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! fill-slot commands must be of the form: 'fill-slot: name'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Check that the name of the macro is valid
if (METAL_NAME_REGEX.match (argument).end() != len (argument)):
msg = "Slot name %s is invalid." % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Determine what use-macro statement this belongs to by working through the list backwards
ourMacroLocation = None
location = len (self.tagStack) - 1
while (ourMacroLocation is None):
macroLocation = self.tagStack[location][2]
if (macroLocation is not None):
ourMacroLocation = macroLocation
else:
location -= 1
if (location < 0):
msg = "metal:fill-slot must be used inside a metal:use-macro call"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Get the use-macro command we are going to adjust
cmnd, args = self.commandList [ourMacroLocation]
self.log.debug ("Use macro argument: %s" % str (args))
macroName, slotMap, endSymbol = args
# Check that the name of the slot is valid
if (METAL_NAME_REGEX.match (argument).end() != len (argument)):
msg = "Slot name %s is invalid." % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
if (slotMap.has_key (argument)):
msg = "Slot %s has already been filled!" % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# The slot starts at the next command.
slot = SubTemplate (len (self.commandList), self.endTagSymbol)
slotMap [argument] = slot
# Update the command
self.commandList [ourMacroLocation] = (cmnd, (macroName, slotMap, endSymbol))
return None
def compileMetalDefineSlot (self, argument):
if (len (argument) == 0):
# No argument passed
msg = "No argument passed! define-slot commands must be of the form: 'name'"
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
# Check that the name of the slot is valid
if (METAL_NAME_REGEX.match (argument).end() != len (argument)):
msg = "Slot name %s is invalid." % argument
self.log.error (msg)
raise TemplateParseException (self.tagAsText (self.currentStartTag), msg)
return (METAL_DEFINE_SLOT, (argument, self.endTagSymbol))
class TemplateParseException (Exception):
def __init__ (self, location, errorDescription):
self.location = location
self.errorDescription = errorDescription
def __str__ (self):
return "[" + self.location + "] " + self.errorDescription
class HTMLTemplateCompiler (TemplateCompiler, FixedHTMLParser.HTMLParser):
def __init__ (self):
TemplateCompiler.__init__ (self)
FixedHTMLParser.HTMLParser.__init__ (self)
self.log = logging.getLogger ("simpleTAL.HTMLTemplateCompiler")
def parseTemplate (self, file, encoding="iso-8859-1", minimizeBooleanAtts = 0):
encodedFile = codecs.lookup (encoding)[2](file, 'replace')
self.encoding = encoding
self.minimizeBooleanAtts = minimizeBooleanAtts
self.feed (encodedFile.read())
self.close()
def tagAsText (self, (tag,atts), singletonFlag=0):
""" This returns a tag as text.
"""
result = ["<"]
result.append (tag)
upperTag = tag.upper()
for attName, attValue in atts:
if (self.minimizeBooleanAtts and HTML_BOOLEAN_ATTS.has_key ('%s:%s' % (upperTag, attName.upper()))):
# We should output a minimised boolean value
result.append (' ')
result.append (attName)
else:
result.append (' ')
result.append (attName)
result.append ('="')
result.append (cgi.escape (attValue, quote=1))
result.append ('"')
if (singletonFlag):
result.append (" />")
else:
result.append (">")
return "".join (result)
def handle_startendtag (self, tag, attributes):
self.handle_starttag (tag, attributes)
if not (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())):
self.handle_endtag(tag)
def handle_starttag (self, tag, attributes):
self.log.debug ("Recieved Start Tag: " + tag + " Attributes: " + str (attributes))
atts = []
for att, attValue in attributes:
# We need to spot empty tal:omit-tags
if (attValue is None):
if (att == self.tal_namespace_omittag):
atts.append ((att, u""))
else:
atts.append ((att, att))
else:
# Expand any SGML entity references or char references
goodAttValue = []
last = 0
match = ENTITY_REF_REGEX.search (attValue)
while (match):
goodAttValue.append (attValue[last:match.start()])
ref = attValue[match.start():match.end()]
if (ref.startswith ('&#')):
# A char reference
if (ref[2] in ['x', 'X']):
# Hex
refValue = int (ref[3:-1], 16)
else:
refValue = int (ref[2:-1])
goodAttValue.append (unichr (refValue))
else:
# A named reference.
goodAttValue.append (unichr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref[1:-1], 65533)))
last = match.end()
match = ENTITY_REF_REGEX.search (attValue, last)
goodAttValue.append (attValue [last:])
atts.append ((att, u"".join (goodAttValue)))
if (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())):
# This should have no end tag, so we just do the start and suppress the end
self.parseStartTag (tag, atts)
self.log.debug ("End tag forbidden, generating close tag with no output.")
self.popTag ((tag, None), omitTagFlag=1)
else:
self.parseStartTag (tag, atts)
def handle_endtag (self, tag):
self.log.debug ("Recieved End Tag: " + tag)
if (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())):
self.log.warn ("HTML 4.01 forbids end tags for the %s element" % tag)
else:
# Normal end tag
self.popTag ((tag, None))
def handle_data (self, data):
self.parseData (cgi.escape (data))
# These two methods are required so that we expand all character and entity references prior to parsing the template.
def handle_charref (self, ref):
self.log.debug ("Got Ref: %s", ref)
self.parseData (unichr (int (ref)))
def handle_entityref (self, ref):
self.log.debug ("Got Ref: %s", ref)
# Use handle_data so that <&> are re-encoded as required.
self.handle_data( unichr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref, 65533)))
# Handle document type declarations
def handle_decl (self, data):
self.parseData (u'<!%s>' % data)
# Pass comments through un-affected.
def handle_comment (self, data):
self.parseData (u'<!--%s-->' % data)
def handle_pi (self, data):
self.log.debug ("Recieved processing instruction.")
self.parseData (u'<?%s>' % data)
def report_unbalanced (self, tag):
self.log.warn ("End tag %s present with no corresponding open tag.")
def getTemplate (self):
template = HTMLTemplate (self.commandList, self.macroMap, self.symbolLocationTable, minimizeBooleanAtts = self.minimizeBooleanAtts)
return template
class XMLTemplateCompiler (TemplateCompiler, xml.sax.handler.ContentHandler, xml.sax.handler.DTDHandler, LexicalHandler):
def __init__ (self):
TemplateCompiler.__init__ (self)
xml.sax.handler.ContentHandler.__init__ (self)
self.doctype = None
self.log = logging.getLogger ("simpleTAL.XMLTemplateCompiler")
self.singletonElement = 0
def parseTemplate (self, file):
self.ourParser = xml.sax.make_parser()
self.log.debug ("Setting features of parser")
try:
self.ourParser.setFeature (xml.sax.handler.feature_external_ges, 0)
except:
pass
if use_lexical_handler:
self.ourParser.setProperty(xml.sax.handler.property_lexical_handler, self)
self.ourParser.setContentHandler (self)
self.ourParser.setDTDHandler (self)
self.ourParser.parse (file)
def parseDOM (self, dom):
if (not use_dom2sax):
self.log.critical ("PyXML is not available, DOM can not be parsed.")
self.ourParser = xml.dom.ext.Dom2Sax.Dom2SaxParser()
self.log.debug ("Setting features of parser")
if use_lexical_handler:
self.ourParser.setProperty(xml.sax.handler.property_lexical_handler, self)
self.ourParser.setContentHandler (self)
self.ourParser.setDTDHandler (self)
self.ourParser.parse (dom)
def startDTD(self, name, public_id, system_id):
self.log.debug ("Recieved DOCTYPE: " + name + " public_id: " + public_id + " system_id: " + system_id)
if public_id:
self.doctype = '<!DOCTYPE %s PUBLIC "%s" "%s">' % (name, public_id, system_id,)
else:
self.doctype = '<!DOCTYPE %s SYSTEM "%s">' % (name, system_id,)
def startElement (self, tag, attributes):
self.log.debug ("Recieved Real Start Tag: " + tag + " Attributes: " + str (attributes))
try:
xmlText = self.ourParser.getProperty (xml.sax.handler.property_xml_string)
if (SINGLETON_XML_REGEX.match (xmlText)):
# This is a singleton!
self.singletonElement=1
except xml.sax.SAXException, e:
# Parser doesn't support this property
pass
# Convert attributes into a list of tuples
atts = []
for att in attributes.getNames():
self.log.debug ("Attribute name %s has value %s" % (att, attributes[att]))
atts.append ((att, attributes [att]))
self.parseStartTag (tag, atts, singletonElement=self.singletonElement)
def endElement (self, tag):
self.log.debug ("Recieved Real End Tag: " + tag)
self.parseEndTag (tag)
self.singletonElement = 0
def characters (self, data):
#self.log.debug ("Received Real Data: " + data)
# Escape any data we receive - we don't want any: <&> in there.
self.parseData (cgi.escape (data))
def processingInstruction (self, target, data):
self.log.debug ("Recieved processing instruction.")
self.parseData (u'<?%s %s?>' % (target, data))
def comment (self, data):
# This is only called if your XML parser supports the LexicalHandler interface.
self.parseData (u'<!--%s-->' % data)
def getTemplate (self):
template = XMLTemplate (self.commandList, self.macroMap, self.symbolLocationTable, self.doctype)
return template
def compileHTMLTemplate (template, inputEncoding="ISO-8859-1", minimizeBooleanAtts = 0):
""" Reads the templateFile and produces a compiled template.
To use the resulting template object call:
template.expand (context, outputFile)
"""
if (isinstance (template, types.StringType) or isinstance (template, types.UnicodeType)):
# It's a string!
templateFile = StringIO.StringIO (template)
else:
templateFile = template
compiler = HTMLTemplateCompiler()
compiler.parseTemplate (templateFile, inputEncoding, minimizeBooleanAtts)
return compiler.getTemplate()
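# Hedged usage sketch (illustrative; the markup and context value are
# assumptions, and simpleTALES is the companion module providing Context):
# context = simpleTALES.Context()
# context.addGlobal ("title", "Hello")
# template = compileHTMLTemplate ('<h1 tal:content="title"></h1>')
# template.expand (context, outputFile)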
def compileXMLTemplate (template):
""" Reads the templateFile and produces a compiled template.
To use the resulting template object call:
template.expand (context, outputFile)
"""
if (isinstance (template, types.StringType)):
# It's a string!
templateFile = StringIO.StringIO (template)
else:
templateFile = template
compiler = XMLTemplateCompiler()
compiler.parseTemplate (templateFile)
return compiler.getTemplate()
def compileDOMTemplate (template):
""" Traverses the DOM and produces a compiled template.
To use the resulting template object call:
template.expand (context, outputFile)
"""
compiler = XMLTemplateCompiler ()
compiler.parseDOM (template)
return compiler.getTemplate()
|
mzdaniel/oh-mainline
|
refs/heads/master
|
vendor/packages/south/south/migration/migrators.py
|
15
|
from copy import copy
from cStringIO import StringIO
import datetime
import inspect
import sys
import traceback
from django.core.management import call_command
from django.core.management.commands import loaddata
from django.db import models
import south.db
from south import exceptions
from south.db import DEFAULT_DB_ALIAS
from south.models import MigrationHistory
from south.signals import ran_migration
class Migrator(object):
def __init__(self, verbosity=0, interactive=False):
self.verbosity = int(verbosity)
self.interactive = bool(interactive)
@staticmethod
def title(target):
raise NotImplementedError()
def print_title(self, target):
if self.verbosity:
print self.title(target)
@staticmethod
def status(target):
raise NotImplementedError()
def print_status(self, migration):
status = self.status(migration)
if self.verbosity and status:
print status
@staticmethod
def orm(migration):
raise NotImplementedError()
def backwards(self, migration):
return self._wrap_direction(migration.backwards(), migration.prev_orm())
def direction(self, migration):
raise NotImplementedError()
@staticmethod
def _wrap_direction(direction, orm):
args = inspect.getargspec(direction)
if len(args[0]) == 1:
# Old migration, no ORM should be passed in
return direction
return (lambda: direction(orm))
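# Illustrative sketch of the two signatures distinguished above:
# def forwards(self): ... # old-style migration, called without an ORM
# def forwards(self, orm): ... # frozen-ORM migration, wrapped to receive orm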
@staticmethod
def record(migration, database):
raise NotImplementedError()
def run_migration_error(self, migration, extra_info=''):
return (
' ! Error found during real run of migration! Aborting.\n'
'\n'
' ! Since you have a database that does not support running\n'
' ! schema-altering statements in transactions, we have had \n'
' ! to leave it in an interim state between migrations.\n'
'%s\n'
' ! The South developers regret this has happened, and would\n'
' ! like to gently persuade you to consider a slightly\n'
' ! easier-to-deal-with DBMS.\n'
' ! NOTE: The error which caused the migration to fail is further up.'
) % extra_info
def run_migration(self, migration):
migration_function = self.direction(migration)
south.db.db.start_transaction()
try:
migration_function()
south.db.db.execute_deferred_sql()
except:
south.db.db.rollback_transaction()
if not south.db.db.has_ddl_transactions:
print self.run_migration_error(migration)
raise
else:
south.db.db.commit_transaction()
def run(self, migration):
# Get the correct ORM.
south.db.db.current_orm = self.orm(migration)
# If the database doesn't support running DDL inside a transaction
# *cough*MySQL*cough* then do a dry run first.
if not south.db.db.has_ddl_transactions:
dry_run = DryRunMigrator(migrator=self, ignore_fail=False)
dry_run.run_migration(migration)
return self.run_migration(migration)
def done_migrate(self, migration, database):
south.db.db.start_transaction()
try:
# Record us as having done this
self.record(migration, database)
except:
south.db.db.rollback_transaction()
raise
else:
south.db.db.commit_transaction()
def send_ran_migration(self, migration):
ran_migration.send(None,
app=migration.app_label(),
migration=migration,
method=self.__class__.__name__.lower())
def migrate(self, migration, database):
"""
Runs the specified migration forwards/backwards, in order.
"""
app = migration.migrations._migrations
migration_name = migration.name()
self.print_status(migration)
result = self.run(migration)
self.done_migrate(migration, database)
self.send_ran_migration(migration)
return result
def migrate_many(self, target, migrations, database):
raise NotImplementedError()
class MigratorWrapper(object):
def __init__(self, migrator, *args, **kwargs):
self._migrator = copy(migrator)
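# Graft this wrapper's own methods onto the copied migrator, so that calls
# the migrator makes internally are routed through the wrapper's overrides.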
attributes = dict([(k, getattr(self, k))
for k in self.__class__.__dict__.iterkeys()
if not k.startswith('__')])
self._migrator.__dict__.update(attributes)
def __getattr__(self, name):
return getattr(self._migrator, name)
class DryRunMigrator(MigratorWrapper):
def __init__(self, ignore_fail=True, *args, **kwargs):
super(DryRunMigrator, self).__init__(*args, **kwargs)
self._ignore_fail = ignore_fail
def _run_migration(self, migration):
if migration.no_dry_run():
if self.verbosity:
print " - Migration '%s' is marked for no-dry-run." % migration
return
south.db.db.dry_run = True
if self._ignore_fail:
south.db.db.debug, old_debug = False, south.db.db.debug
pending_creates = south.db.db.get_pending_creates()
south.db.db.start_transaction()
migration_function = self.direction(migration)
try:
try:
migration_function()
south.db.db.execute_deferred_sql()
except:
raise exceptions.FailedDryRun(migration, sys.exc_info())
finally:
south.db.db.rollback_transactions_dry_run()
if self._ignore_fail:
south.db.db.debug = old_debug
south.db.db.clear_run_data(pending_creates)
south.db.db.dry_run = False
def run_migration(self, migration):
try:
self._run_migration(migration)
except exceptions.FailedDryRun:
if self._ignore_fail:
return False
raise
def done_migrate(self, *args, **kwargs):
pass
def send_ran_migration(self, *args, **kwargs):
pass
class FakeMigrator(MigratorWrapper):
def run(self, migration):
if self.verbosity:
print ' (faked)'
def send_ran_migration(self, *args, **kwargs):
pass
class LoadInitialDataMigrator(MigratorWrapper):
def load_initial_data(self, target, db='default'):
if target is None or target != target.migrations[-1]:
return
# Load initial data, if we ended up at target
if self.verbosity:
print " - Loading initial data for %s." % target.app_label()
# Override Django's get_apps call temporarily to only load from the
# current app
old_get_apps = models.get_apps
new_get_apps = lambda: [models.get_app(target.app_label())]
models.get_apps = new_get_apps
loaddata.get_apps = new_get_apps
try:
call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
finally:
models.get_apps = old_get_apps
loaddata.get_apps = old_get_apps
def migrate_many(self, target, migrations, database):
migrator = self._migrator
result = migrator.__class__.migrate_many(migrator, target, migrations, database)
if result:
self.load_initial_data(target, db=database)
return True
class Forwards(Migrator):
"""
Runs the specified migration forwards, in order.
"""
torun = 'forwards'
@staticmethod
def title(target):
if target is not None:
return " - Migrating forwards to %s." % target.name()
else:
assert False, "You cannot migrate forwards to zero."
@staticmethod
def status(migration):
return ' > %s' % migration
@staticmethod
def orm(migration):
return migration.orm()
def forwards(self, migration):
return self._wrap_direction(migration.forwards(), migration.orm())
direction = forwards
@staticmethod
def record(migration, database):
# Record us as having done this
record = MigrationHistory.for_migration(migration, database)
record.applied = datetime.datetime.utcnow()
if database != DEFAULT_DB_ALIAS:
record.save(using=database)
else:
# Django 1.1 and below always go down this branch.
record.save()
def format_backwards(self, migration):
if migration.no_dry_run():
return " (migration cannot be dry-run; cannot discover commands)"
old_debug, old_dry_run = south.db.db.debug, south.db.db.dry_run
south.db.db.debug = south.db.db.dry_run = True
stdout = sys.stdout
sys.stdout = StringIO()
try:
try:
self.backwards(migration)()
return sys.stdout.getvalue()
except:
raise
finally:
south.db.db.debug, south.db.db.dry_run = old_debug, old_dry_run
sys.stdout = stdout
def run_migration_error(self, migration, extra_info=''):
extra_info = ('\n'
'! You *might* be able to recover with:'
'%s'
'%s' %
(self.format_backwards(migration), extra_info))
return super(Forwards, self).run_migration_error(migration, extra_info)
def migrate_many(self, target, migrations, database):
try:
for migration in migrations:
result = self.migrate(migration, database)
if result is False: # The migrations errored, but nicely.
return False
finally:
# Call any pending post_syncdb signals
south.db.db.send_pending_create_signals(verbosity=self.verbosity,
interactive=self.interactive)
return True
class Backwards(Migrator):
"""
Runs the specified migration backwards, in order.
"""
torun = 'backwards'
@staticmethod
def title(target):
if target is None:
return " - Migrating backwards to zero state."
else:
return " - Migrating backwards to just after %s." % target.name()
@staticmethod
def status(migration):
return ' < %s' % migration
@staticmethod
def orm(migration):
return migration.prev_orm()
direction = Migrator.backwards
@staticmethod
def record(migration, database):
# Record us as having not done this
record = MigrationHistory.for_migration(migration, database)
if record.id is not None:
if database != DEFAULT_DB_ALIAS:
record.delete(using=database)
else:
# Django 1.1 always goes down here
record.delete()
def migrate_many(self, target, migrations, database):
for migration in migrations:
self.migrate(migration, database)
return True
|
harterj/moose
|
refs/heads/devel
|
modules/navier_stokes/test/tests/ins/mms/pspg/generate_forcing_functions.py
|
21
|
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
# Note that this script requires sympy to be installed in your python environment
import sys, os
split_path = list(filter(None, os.getcwd().split('/')))
s = '/'
ns_root = ''
for sub in split_path:
ns_root = s.join((ns_root, sub))
if sub == 'navier_stokes':
break
ns_python = s.join((ns_root, 'python'))
sys.path.append(ns_python)
from ins_calc_routines import *
from sympy import *
import sympy as sp
import numpy as np
x, y = var('x y')
u = 0.4*sin(0.5*pi*x) + 0.4*sin(pi*y) + 0.7*sin(0.2*pi*x*y) + 0.5
v = 0.6*sin(0.8*pi*x) + 0.3*sin(0.3*pi*y) + 0.2*sin(0.3*pi*x*y) + 0.3
p = 0.5*sin(0.5*pi*x) + 1.0*sin(0.3*pi*y) + 0.5*sin(0.2*pi*x*y) + 0.5
vxx = diff(u, x)
px = diff(p, x)
uvec = sp.Matrix([u, v])
volume_source = {
'vel_x' : prep_moose_input(L_momentum_laplace_no_turbulence(uvec, p, x, y)[0]),
'vel_y' : prep_moose_input(L_momentum_laplace_no_turbulence(uvec, p, x, y)[1]),
'p' : prep_moose_input(L_pressure(uvec, x, y))}
solution_dict = {'vel_x' : u, 'vel_y' : v, 'p' : p, 'vxx' : vxx, 'px' : px}
h_list = ['4', '8', '16', '32']
h_array = np.array([.25, .125, .0625, .03125])
for key, value in solution_dict.items():
print("The solution function for %s is %s" % (key, value))
for key, value in volume_source.items():
print("The forcing function for %s is %s" % (key, value))
|
sdgdsffdsfff/redis-ctl
|
refs/heads/master
|
utils.py
|
2
|
import redistrib.command
import file_ipc
def masters_detail(host, port):
node_details = file_ipc.read_details()['nodes']
result = []
masters, myself = redistrib.command.list_masters(host, port)
for n in masters:
r = {'host': n.host, 'port': n.port}
try:
r['slots_count'] = len(node_details[
'%s:%d' % (n.host, n.port)]['slots'])
except KeyError:
pass
result.append(r)
return result, myself
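# Illustrative shape of the return value (host/port/count are assumptions):
# ([{'host': '10.0.0.1', 'port': 7000, 'slots_count': 5461}, ...], myself)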
|
bartholomewbischoff/Sp-Sk
|
refs/heads/master
|
node_modules/closure-util/.deps/library/87f29481f1e57fbba986e2304ac80d4173f94c9c/closure/bin/build/depswriter.py
|
76
|
#!/usr/bin/env python
#
# Copyright 2009 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates out a Closure deps.js file given a list of JavaScript sources.
Paths can be specified as arguments or (more commonly) specifying trees
with the flags (call with --help for descriptions).
Usage: depswriter.py [path/to/js1.js [path/to/js2.js] ...]
"""
import json
import logging
import optparse
import os
import posixpath
import shlex
import sys
import source
import treescan
__author__ = 'nnaze@google.com (Nathan Naze)'
def MakeDepsFile(source_map):
"""Make a generated deps file.
Args:
source_map: A dict map of the source path to source.Source object.
Returns:
str, A generated deps file source.
"""
# Write in path alphabetical order
paths = sorted(source_map.keys())
lines = []
for path in paths:
js_source = source_map[path]
# We don't need to add entries that don't provide anything.
if js_source.provides:
lines.append(_GetDepsLine(path, js_source))
return ''.join(lines)
def _GetDepsLine(path, js_source):
"""Get a deps.js file string for a source."""
provides = _ToJsSrc(sorted(js_source.provides))
requires = _ToJsSrc(sorted(js_source.requires))
module = 'true' if js_source.is_goog_module else 'false'
return 'goog.addDependency(\'%s\', %s, %s, %s);\n' % (
path, provides, requires, module)
def _ToJsSrc(arr):
"""Convert a python arr to a js source string."""
return json.dumps(arr).replace('"', '\'')
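# Illustrative sketch: any object exposing .provides, .requires and
# .is_goog_module (mirroring source.Source) works here; the path and
# namespaces below are assumptions.
# MakeDepsFile({'example.js': src}), with src providing ['goog.example'] and
# requiring ['goog.array'], yields:
# goog.addDependency('example.js', ['goog.example'], ['goog.array'], false);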
def _GetOptionsParser():
"""Get the options parser."""
parser = optparse.OptionParser(__doc__)
parser.add_option('--output_file',
dest='output_file',
action='store',
help=('If specified, write output to this path instead of '
'writing to standard output.'))
parser.add_option('--root',
dest='roots',
default=[],
action='append',
help='A root directory to scan for JS source files. '
'Paths of JS files in generated deps file will be '
'relative to this path. This flag may be specified '
'multiple times.')
parser.add_option('--root_with_prefix',
dest='roots_with_prefix',
default=[],
action='append',
help='A root directory to scan for JS source files, plus '
'a prefix (if either contains a space, surround with '
'quotes). Paths in generated deps file will be relative '
'to the root, but preceded by the prefix. This flag '
'may be specified multiple times.')
parser.add_option('--path_with_depspath',
dest='paths_with_depspath',
default=[],
action='append',
help='A path to a source file and an alternate path to '
'the file in the generated deps file (if either contains '
'a space, surround with quotes). This flag may be '
'specified multiple times.')
return parser
def _NormalizePathSeparators(path):
"""Replaces OS-specific path separators with POSIX-style slashes.
Args:
path: str, A file path.
Returns:
str, The path with any OS-specific path separators (such as backslash on
Windows) replaced with URL-compatible forward slashes. A no-op on systems
that use POSIX paths.
"""
return path.replace(os.sep, posixpath.sep)
def _GetRelativePathToSourceDict(root, prefix=''):
"""Scans a top root directory for .js sources.
Args:
root: str, Root directory.
prefix: str, Prefix for returned paths.
Returns:
dict, A map of relative paths (with prefix, if given), to source.Source
objects.
"""
# Remember and restore the cwd when we're done. We work from the root so
# that paths are relative from the root.
start_wd = os.getcwd()
os.chdir(root)
path_to_source = {}
for path in treescan.ScanTreeForJsFiles('.'):
prefixed_path = _NormalizePathSeparators(os.path.join(prefix, path))
path_to_source[prefixed_path] = source.Source(source.GetFileContents(path))
os.chdir(start_wd)
return path_to_source
def _GetPair(s):
"""Return a string as a shell-parsed tuple. Two values expected."""
try:
# shlex uses '\' as an escape character, so they must be escaped.
s = s.replace('\\', '\\\\')
first, second = shlex.split(s)
return (first, second)
except:
raise Exception('Unable to parse input line as a pair: %s' % s)
def main():
"""CLI frontend to MakeDepsFile."""
logging.basicConfig(format=(sys.argv[0] + ': %(message)s'),
level=logging.INFO)
options, args = _GetOptionsParser().parse_args()
path_to_source = {}
# Roots without prefixes
for root in options.roots:
path_to_source.update(_GetRelativePathToSourceDict(root))
# Roots with prefixes
for root_and_prefix in options.roots_with_prefix:
root, prefix = _GetPair(root_and_prefix)
path_to_source.update(_GetRelativePathToSourceDict(root, prefix=prefix))
# Source paths
for path in args:
path_to_source[path] = source.Source(source.GetFileContents(path))
# Source paths with alternate deps paths
for path_with_depspath in options.paths_with_depspath:
srcpath, depspath = _GetPair(path_with_depspath)
path_to_source[depspath] = source.Source(source.GetFileContents(srcpath))
# Make our output pipe.
if options.output_file:
out = open(options.output_file, 'w')
else:
out = sys.stdout
out.write(('// This file was autogenerated by %s.\n' %
os.path.basename(__file__)))
out.write('// Please do not edit.\n')
out.write(MakeDepsFile(path_to_source))
if __name__ == '__main__':
main()
|
stewnorriss/LibCloud
|
refs/heads/master
|
libcloud/test/storage/test_google_storage.py
|
28
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from libcloud.test.storage.test_s3 import S3Tests, S3MockHttp
from libcloud.test.file_fixtures import StorageFileFixtures
from libcloud.test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
def _test2_test_get_object(self, method, url, body, headers):
# test_get_object
# Google uses a different HTTP header prefix for metadata
body = self.fixtures.load('list_containers.xml')
headers = {'content-type': 'application/zip',
'etag': '"e31208wqsdoj329jd"',
'x-goog-meta-rabbits': 'monkeys',
'content-length': 12345,
'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
}
return (httplib.OK,
body,
headers,
httplib.responses[httplib.OK])
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
def test_token(self):
# Not supported on Google Storage
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
DOV-Vlaanderen/pydov
|
refs/heads/master
|
tests/test_search_sondering.py
|
1
|
"""Module grouping tests for the sondering search module."""
import datetime
import pandas as pd
from owslib.fes import PropertyIsEqualTo
from pydov.search.sondering import SonderingSearch
from pydov.types.sondering import Sondering
from tests.abstract import AbstractTestSearch
location_md_metadata = 'tests/data/types/sondering/md_metadata.xml'
location_fc_featurecatalogue = \
'tests/data/types/sondering/fc_featurecatalogue.xml'
location_wfs_describefeaturetype = \
'tests/data/types/sondering/wfsdescribefeaturetype.xml'
location_wfs_getfeature = 'tests/data/types/sondering/wfsgetfeature.xml'
location_wfs_feature = 'tests/data/types/sondering/feature.xml'
location_dov_xml = 'tests/data/types/sondering/sondering.xml'
location_xsd_base = 'tests/data/types/sondering/xsd_*.xml'
class TestSonderingSearch(AbstractTestSearch):
search_instance = SonderingSearch()
datatype_class = Sondering
valid_query_single = PropertyIsEqualTo(propertyname='sondeernummer',
literal='GEO-61/3075-S1')
inexistent_field = 'onbestaand'
wfs_field = 'sondeernummer'
xml_field = 'gw_meting'
valid_returnfields = (
'pkey_sondering', 'sondeernummer', 'diepte_sondering_tot',
'datum_aanvang')
valid_returnfields_subtype = (
'pkey_sondering', 'sondeernummer', 'lengte', 'qc', 'Qt')
valid_returnfields_extra = ('pkey_sondering', 'conus')
df_default_columns = [
'pkey_sondering', 'sondeernummer', 'x', 'y', 'mv_mtaw',
'start_sondering_mtaw', 'diepte_sondering_van',
'diepte_sondering_tot', 'datum_aanvang', 'uitvoerder',
'sondeermethode', 'apparaat', 'datum_gw_meting',
'diepte_gw_m', 'lengte', 'diepte', 'qc', 'Qt', 'fs', 'u', 'i']
def test_search_date(self, mp_wfs, mp_get_schema,
mp_remote_describefeaturetype, mp_remote_md,
mp_remote_fc, mp_remote_wfs_feature, mp_dov_xml):
"""Test the search method with only the query parameter.
Test whether the result is correct.
Parameters
----------
mp_wfs : pytest.fixture
Monkeypatch the call to the remote GetCapabilities request.
mp_get_schema : pytest.fixture
Monkeypatch the call to a remote OWSLib schema.
mp_remote_describefeaturetype : pytest.fixture
Monkeypatch the call to a remote DescribeFeatureType.
mp_remote_md : pytest.fixture
Monkeypatch the call to get the remote metadata.
mp_remote_fc : pytest.fixture
Monkeypatch the call to get the remote feature catalogue.
mp_remote_wfs_feature : pytest.fixture
Monkeypatch the call to get WFS features.
mp_dov_xml : pytest.fixture
Monkeypatch the call to get the remote XML data.
"""
df = self.search_instance.search(
query=self.valid_query_single)
# specific test for the Zulu time wfs 1.1.0 issue
assert df.datum_aanvang.unique()[0] == datetime.date(2002, 12, 17)
assert pd.Timestamp(
df.datum_gw_meting.unique()[0]).to_pydatetime() == \
datetime.datetime(2002, 12, 17, 14, 30, 0, 0)
def test_search_nan(self, mp_wfs, mp_get_schema,
mp_remote_describefeaturetype, mp_remote_md,
mp_remote_fc, mp_remote_wfs_feature, mp_dov_xml):
"""Test the search method with only the query parameter.
Test whether the result is correct.
Parameters
----------
mp_wfs : pytest.fixture
Monkeypatch the call to the remote GetCapabilities request.
mp_get_schema : pytest.fixture
Monkeypatch the call to a remote OWSLib schema.
mp_remote_describefeaturetype : pytest.fixture
Monkeypatch the call to a remote DescribeFeatureType.
mp_remote_md : pytest.fixture
Monkeypatch the call to get the remote metadata.
mp_remote_fc : pytest.fixture
Monkeypatch the call to get the remote feature catalogue.
mp_remote_wfs_feature : pytest.fixture
Monkeypatch the call to get WFS features.
mp_dov_xml : pytest.fixture
Monkeypatch the call to get the remote XML data.
"""
df = self.search_instance.search(
query=self.valid_query_single)
assert df.Qt.hasnans
def test_search_xmlresolving(self, mp_get_schema,
mp_remote_describefeaturetype,
mp_remote_wfs_feature, mp_dov_xml):
"""Test the search method with return fields from XML but not from a
subtype.
Test whether the output dataframe contains the resolved XML data.
Parameters
----------
mp_get_schema : pytest.fixture
Monkeypatch the call to a remote OWSLib schema.
mp_remote_describefeaturetype : pytest.fixture
Monkeypatch the call to a remote DescribeFeatureType.
mp_remote_wfs_feature : pytest.fixture
Monkeypatch the call to get WFS features.
mp_dov_xml : pytest.fixture
Monkeypatch the call to get the remote XML data.
"""
df = self.search_instance.search(
query=self.valid_query_single,
return_fields=('pkey_sondering', 'sondeernummer', 'diepte_gw_m'))
assert df.diepte_gw_m[0] == 3.60
|
matplo/rootutils
|
refs/heads/master
|
python/2.7/THnSparseWrapper.py
|
1
|
'''
Created on Jan 8, 2015
Wrapper class around a ROOT THnSparse that adds cut-and-project functionality in one step,
making the handling of THnSparses much more convenient for users.
@author: markus
'''
from copy import copy,deepcopy
from numpy import array as nparray
class AxisFormat(object):
'''
Definition of the axis format of a THnSparse
'''
def __init__(self, formatname):
'''
Constructor
'''
self._axes = {}
self.__formatname = ""
def GetAxes(self):
'''
Get the list of axes defined
'''
return self._axes
def FindAxis(self, axisname):
'''
Find axis by axis name. Returns the dimension of the axis.
'''
result = -1
if axisname in self._axes.keys():
result = self._axes[axisname]
return result
def _Deepcopy(self, other, memo):
'''
Performing deep copy
'''
self._axes = deepcopy(other.GetAxes(), memo)
def _Copy(self, other):
'''
Performing shallow copy
'''
self._axes = copy(other.GetAxes())
class THnSparseCut(object):
'''
Cut class used in the THnSparse wrapper
'''
def __init__(self, axisname, minv, maxv):
'''
Constructor
'''
self.__axisname = axisname
self.__minimum = minv
self.__maximum = maxv
def GetCutname(self):
'''
Get axis name
'''
return self.__axisname
def GetMinimum(self):
'''
Get the minimum of the range
'''
return self.__minimum
def GetMaximum(self):
'''
Get the maximum of the range
'''
return self.__maximum
def SetMinimum(self, minv):
'''
Set the minimum of the range
'''
self.__minimum = minv
def SetMaximum(self, maxv):
'''
Set the maximum of the range
'''
self.__maximum = maxv
class THnSparseWrapper(object):
'''
Wrapper class around THnSparse applying cuts on axes and performing projections
'''
def __init__(self, rootthnsparse):
'''
Constructor
'''
self._rootthnsparse = rootthnsparse
self._axisdefinition = None
self._cutlist = []
def __deepcopy__(self, memo):
'''
Deep copy constructor
'''
result = THnSparseWrapper(deepcopy(self._rootthnsparse))
result.CopyCuts(self._cutlist, True)
return result
def __copy__(self):
'''
Shallow copy constructor
'''
result = THnSparseWrapper(copy(self._rootthnsparse))
result.CopyCuts(self._cutlist, False)
return result
def CopyCuts(self, reference, isDeep):
'''
Copy cuts into this object from a reference object
'''
for cut in reference.GetListOfCuts():
newcut = None
if isDeep:
newcut = deepcopy(cut)
else:
newcut = copy(cut)
self._cutlist.append(newcut)
def GetListOfCuts(self):
'''
Access list of cuts
'''
return self._cutlist
def GetHistogram(self):
'''
Access to underlying root histogram
'''
return self._rootthnsparse
def Add(self, otherwrapper):
self._rootthnsparse.Add(otherwrapper.GetHistogram())
def Scale(self, scalefactor):
self._rootthnsparse.Scale(scalefactor)
def ApplyCut(self, axisname, minv, maxv):
'''
Apply cut on a given variable, defined by its axis name
minv and maxv define the range. If either of them is None, the range is
open on one side.
'''
if not self._axisdefinition or self._axisdefinition.FindAxis(axisname) < 0:
return
existing = self.__FindCut(axisname)
if not existing:
self._cutlist.append(THnSparseCut(axisname, minv, maxv))
else:
existing.SetMinimum(minv)
existing.SetMaximum(maxv)
def RemoveCut(self, axisname):
'''
Remove cut again from the list
'''
for entry in self._cutlist:
if entry.GetCutname() == axisname:
self._cutlist.remove(entry)
def ResetAxis(self, axisname):
'''
Reset axis range
'''
if not self._axisdefinition or self._axisdefinition.FindAxis(axisname) < 0:
return
myaxis = self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis(axisname))
myaxis.SetRange(0, myaxis.GetNbins()+1)
def Projection1D(self, histname, axisname):
'''
Make projection, applying cuts defined before, and releasing the cuts afterwards.
Projects to 1D with the axisname as dimension
'''
if not self._axisdefinition or self._axisdefinition.FindAxis(axisname) < 0:
return None
self._PrepareProjection()
result = self._rootthnsparse.Projection(self._axisdefinition.FindAxis(axisname))
result.SetName(histname)
self._CleanupProjection()
return result
def Projection2D(self, histname, axisdictionary):
'''
Make projection, applying cuts defined before, and releasing the cuts afterwards.
Projects to 2D with the content in the axis dictionary as dimensions
The dictionary maps axis name -> target dimension, starting at 0
'''
if not self._axisdefinition:
return None
hasfound = True
for axisname in axisdictionary.keys():
if self._axisdefinition.FindAxis(axisname) < 0:
hasfound = False
break
if not hasfound:
return None
self._PrepareProjection()
xdim = None
ydim = None
for k,v in axisdictionary.iteritems():
if v == 1:
ydim = self._axisdefinition.FindAxis(k)
else:
xdim = self._axisdefinition.FindAxis(k)
result = self._rootthnsparse.Projection(ydim, xdim)
result.SetName(histname)
self._CleanupProjection()
return result
def ProjectionND(self, histname, axisdictionary):
'''
Make projection, applying cuts defined before, and releasing the cuts afterwards.
Projects to N dimensions, using the axes named in the axis dictionary.
The dictionary maps axis name -> target dimension, starting at 0
'''
if not self._axisdefinition:
return None
hasfound = True
for axisname in axisdictionary.keys():
if self._axisdefinition.FindAxis(axisname) < 0:
hasfound = False
break
if not hasfound:
return None
self._PrepareProjection()
axismap = {}
for k,v in axisdictionary.iteritems():
axismap[v] = k
axislist = []
for mydim in sorted(axismap.keys()):
axislist.append(self._axisdefinition.FindAxis(axismap[mydim]))
result = self._rootthnsparse.Projection(len(axislist), nparray(axislist))
result.SetName(histname)
self._CleanupProjection()
return result
def _PrepareProjection(self):
'''
Apply all requested cuts before the projection
'''
for entry in self._cutlist:
myaxis = self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis(entry.GetCutname()))
minv = 0 if not entry.GetMinimum() else myaxis.FindBin(entry.GetMinimum())
maxv = myaxis.GetNbins()+1 if not entry.GetMaximum() else myaxis.FindBin(entry.GetMaximum())
myaxis.SetRange(minv, maxv)
def _CleanupProjection (self):
'''
Reset all possible axis cuts
Does not remove a cut again from the list, but only releases the THnSparse
'''
for entry in self._cutlist:
self.ResetAxis(entry.GetCutname())
def __FindCut(self, cutname):
'''
Find cut in list by the axis name
'''
if not len(self._cutlist):
return None
result = None
for entry in self._cutlist:
if entry.GetCutname() == cutname:
result = entry
break
return result
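# Hedged usage sketch: assumes a concrete subclass (or caller) that fills in
# _axisdefinition with an AxisFormat mapping e.g. 'pt' -> 0, plus a real ROOT
# THnSparse instance; all names below are illustrative.
# wrapper = THnSparseWrapper(my_thnsparse)
# wrapper.ApplyCut('pt', 10., 20.) # restrict the 'pt' axis to [10, 20]
# hist = wrapper.Projection1D('hPt', 'pt')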
|
laslabs/odoo
|
refs/heads/9.0
|
setup/win32/win32_service.py
|
362
|
# -*- coding: utf-8 -*-
import servicemanager
import win32api
import win32process
import win32service
import win32serviceutil
import subprocess
import sys
from os.path import dirname, join, split
execfile(join(dirname(__file__), '..', 'server', 'openerp', 'release.py'))
class OdooService(win32serviceutil.ServiceFramework):
_svc_name_ = nt_service_name
_svc_display_name_ = "%s %s" % (nt_service_name, serie)
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.odooprocess = None # Reference to the server's process
def SvcStop(self):
# Before we do anything, tell the SCM we are starting the stop process.
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
# Stop the running Odoo: say it's a normal exit
win32api.TerminateProcess(int(self.odooprocess._handle), 0)
servicemanager.LogInfoMsg("Odoo stopped correctly")
def SvcDoRun(self):
# We start Odoo as an independent process, but we keep its handle
service_dir = dirname(sys.argv[0])
server_dir = split(service_dir)[0]
server_path = join(server_dir, 'server', 'openerp-server.exe')
self.odooprocess = subprocess.Popen(
[server_path], cwd=server_dir, creationflags=win32process.CREATE_NO_WINDOW
)
servicemanager.LogInfoMsg('Odoo up and running')
# exit with same exit code as Odoo process
sys.exit(self.odooprocess.wait())
def option_handler(opts):
# configure the service to auto restart on failures...
subprocess.call([
'sc', 'failure', nt_service_name, 'reset=', '0', 'actions=', 'restart/0/restart/0/restart/0'
])
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(OdooService, customOptionHandler=option_handler)
|
godfreyy/scrapy
|
refs/heads/master
|
tests/test_webclient.py
|
112
|
"""
from twisted.internet import defer
Tests borrowed from the twisted.web.client tests.
"""
import os
from six.moves.urllib.parse import urlparse
from twisted.trial import unittest
from twisted.web import server, static, error, util
from twisted.internet import reactor, defer
from twisted.test.proto_helpers import StringTransport
from twisted.python.filepath import FilePath
from twisted.protocols.policies import WrappingFactory
from scrapy.core.downloader import webclient as client
from scrapy.http import Request, Headers
def getPage(url, contextFactory=None, *args, **kwargs):
"""Adapted version of twisted.web.client.getPage"""
def _clientfactory(*args, **kwargs):
timeout = kwargs.pop('timeout', 0)
f = client.ScrapyHTTPClientFactory(Request(*args, **kwargs), timeout=timeout)
f.deferred.addCallback(lambda r: r.body)
return f
from twisted.web.client import _makeGetterFactory
return _makeGetterFactory(url, _clientfactory,
contextFactory=contextFactory, *args, **kwargs).deferred
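# Sketch of intended use, mirroring twisted.web.client.getPage (the URL is an
# assumption):
# d = getPage('http://127.0.0.1:8080/file')
# d.addCallback(lambda body: ...)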
class ParseUrlTestCase(unittest.TestCase):
"""Test URL parsing facility and defaults values."""
def _parse(self, url):
f = client.ScrapyHTTPClientFactory(Request(url))
return (f.scheme, f.netloc, f.host, f.port, f.path)
def testParse(self):
lip = '127.0.0.1'
tests = (
("http://127.0.0.1?c=v&c2=v2#fragment", ('http', lip, lip, 80, '/?c=v&c2=v2')),
("http://127.0.0.1/?c=v&c2=v2#fragment", ('http', lip, lip, 80, '/?c=v&c2=v2')),
("http://127.0.0.1/foo?c=v&c2=v2#frag", ('http', lip, lip, 80, '/foo?c=v&c2=v2')),
("http://127.0.0.1:100?c=v&c2=v2#fragment", ('http', lip+':100', lip, 100, '/?c=v&c2=v2')),
("http://127.0.0.1:100/?c=v&c2=v2#frag", ('http', lip+':100', lip, 100, '/?c=v&c2=v2')),
("http://127.0.0.1:100/foo?c=v&c2=v2#frag", ('http', lip+':100', lip, 100, '/foo?c=v&c2=v2')),
("http://127.0.0.1", ('http', lip, lip, 80, '/')),
("http://127.0.0.1/", ('http', lip, lip, 80, '/')),
("http://127.0.0.1/foo", ('http', lip, lip, 80, '/foo')),
("http://127.0.0.1?param=value", ('http', lip, lip, 80, '/?param=value')),
("http://127.0.0.1/?param=value", ('http', lip, lip, 80, '/?param=value')),
("http://127.0.0.1:12345/foo", ('http', lip+':12345', lip, 12345, '/foo')),
("http://spam:12345/foo", ('http', 'spam:12345', 'spam', 12345, '/foo')),
("http://spam.test.org/foo", ('http', 'spam.test.org', 'spam.test.org', 80, '/foo')),
("https://127.0.0.1/foo", ('https', lip, lip, 443, '/foo')),
("https://127.0.0.1/?param=value", ('https', lip, lip, 443, '/?param=value')),
("https://127.0.0.1:12345/", ('https', lip+':12345', lip, 12345, '/')),
("http://scrapytest.org/foo ", ('http', 'scrapytest.org', 'scrapytest.org', 80, '/foo')),
("http://egg:7890 ", ('http', 'egg:7890', 'egg', 7890, '/')),
)
for url, test in tests:
self.assertEquals(client._parse(url), test, url)
def test_externalUnicodeInterference(self):
"""
L{client._parse} should return C{str} for the scheme, host, and path
elements of its return tuple, even when passed an URL which has
previously been passed to L{urlparse} as a C{unicode} string.
"""
badInput = u'http://example.com/path'
goodInput = badInput.encode('ascii')
urlparse(badInput)
scheme, netloc, host, port, path = self._parse(goodInput)
self.assertTrue(isinstance(scheme, str))
self.assertTrue(isinstance(netloc, str))
self.assertTrue(isinstance(host, str))
self.assertTrue(isinstance(path, str))
self.assertTrue(isinstance(port, int))
class ScrapyHTTPPageGetterTests(unittest.TestCase):
def test_earlyHeaders(self):
# basic test stolen from twisted HTTPageGetter
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
body="some data",
headers={
'Host': 'example.net',
'User-Agent': 'fooble',
'Cookie': 'blah blah',
'Content-Length': '12981',
'Useful': 'value'}))
self._test(factory,
"GET /bar HTTP/1.0\r\n"
"Content-Length: 9\r\n"
"Useful: value\r\n"
"Connection: close\r\n"
"User-Agent: fooble\r\n"
"Host: example.net\r\n"
"Cookie: blah blah\r\n"
"\r\n"
"some data")
# test minimal sent headers
factory = client.ScrapyHTTPClientFactory(Request('http://foo/bar'))
self._test(factory,
"GET /bar HTTP/1.0\r\n"
"Host: foo\r\n"
"\r\n")
# test a simple POST with body and content-type
factory = client.ScrapyHTTPClientFactory(Request(
method='POST',
url='http://foo/bar',
body='name=value',
headers={'Content-Type': 'application/x-www-form-urlencoded'}))
self._test(factory,
"POST /bar HTTP/1.0\r\n"
"Host: foo\r\n"
"Connection: close\r\n"
"Content-Type: application/x-www-form-urlencoded\r\n"
"Content-Length: 10\r\n"
"\r\n"
"name=value")
# test a POST method with no body provided
factory = client.ScrapyHTTPClientFactory(Request(
method='POST',
url='http://foo/bar'
))
self._test(factory,
"POST /bar HTTP/1.0\r\n"
"Host: foo\r\n"
"Content-Length: 0\r\n"
"\r\n")
# test with single and multivalued headers
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
headers={
'X-Meta-Single': 'single',
'X-Meta-Multivalued': ['value1', 'value2'],
}))
self._test(factory,
"GET /bar HTTP/1.0\r\n"
"Host: foo\r\n"
"X-Meta-Multivalued: value1\r\n"
"X-Meta-Multivalued: value2\r\n"
"X-Meta-Single: single\r\n"
"\r\n")
# same test with single and multivalued headers but using Headers class
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
headers=Headers({
'X-Meta-Single': 'single',
'X-Meta-Multivalued': ['value1', 'value2'],
})))
self._test(factory,
"GET /bar HTTP/1.0\r\n"
"Host: foo\r\n"
"X-Meta-Multivalued: value1\r\n"
"X-Meta-Multivalued: value2\r\n"
"X-Meta-Single: single\r\n"
"\r\n")
def _test(self, factory, testvalue):
transport = StringTransport()
protocol = client.ScrapyHTTPPageGetter()
protocol.factory = factory
protocol.makeConnection(transport)
self.assertEqual(
set(transport.value().splitlines()),
set(testvalue.splitlines()))
return testvalue
def test_non_standard_line_endings(self):
# regression test for: http://dev.scrapy.org/ticket/258
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar'))
protocol = client.ScrapyHTTPPageGetter()
protocol.factory = factory
protocol.headers = Headers()
protocol.dataReceived("HTTP/1.0 200 OK\n")
protocol.dataReceived("Hello: World\n")
protocol.dataReceived("Foo: Bar\n")
protocol.dataReceived("\n")
self.assertEqual(protocol.headers,
Headers({'Hello': ['World'], 'Foo': ['Bar']}))
from twisted.web.test.test_webclient import ForeverTakingResource, \
ErrorResource, NoLengthResource, HostHeaderResource, \
PayloadResource, BrokenDownloadResource
class WebClientTestCase(unittest.TestCase):
def _listen(self, site):
return reactor.listenTCP(0, site, interface="127.0.0.1")
def setUp(self):
name = self.mktemp()
os.mkdir(name)
FilePath(name).child("file").setContent("0123456789")
r = static.File(name)
r.putChild("redirect", util.Redirect("/file"))
r.putChild("wait", ForeverTakingResource())
r.putChild("error", ErrorResource())
r.putChild("nolength", NoLengthResource())
r.putChild("host", HostHeaderResource())
r.putChild("payload", PayloadResource())
r.putChild("broken", BrokenDownloadResource())
self.site = server.Site(r, timeout=None)
self.wrapper = WrappingFactory(self.site)
self.port = self._listen(self.wrapper)
self.portno = self.port.getHost().port
def tearDown(self):
return self.port.stopListening()
def getURL(self, path):
return "http://127.0.0.1:%d/%s" % (self.portno, path)
def testPayload(self):
s = "0123456789" * 10
return getPage(self.getURL("payload"), body=s).addCallback(self.assertEquals, s)
def testHostHeader(self):
# if we pass Host header explicitly, it should be used, otherwise
# it should extract from url
return defer.gatherResults([
getPage(self.getURL("host")).addCallback(self.assertEquals, "127.0.0.1:%d" % self.portno),
getPage(self.getURL("host"), headers={"Host": "www.example.com"}).addCallback(self.assertEquals, "www.example.com")])
def test_getPage(self):
"""
L{client.getPage} returns a L{Deferred} which is called back with
the body of the response if the default method B{GET} is used.
"""
d = getPage(self.getURL("file"))
d.addCallback(self.assertEquals, "0123456789")
return d
def test_getPageHead(self):
"""
L{client.getPage} returns a L{Deferred} which is called back with
the empty string if the method is C{HEAD} and there is a successful
response code.
"""
def _getPage(method):
return getPage(self.getURL("file"), method=method)
return defer.gatherResults([
_getPage("head").addCallback(self.assertEqual, ""),
_getPage("HEAD").addCallback(self.assertEqual, "")])
def test_timeoutNotTriggering(self):
"""
When a non-zero timeout is passed to L{getPage} and the page is
retrieved before the timeout period elapses, the L{Deferred} is
called back with the contents of the page.
"""
d = getPage(self.getURL("host"), timeout=100)
d.addCallback(self.assertEquals, "127.0.0.1:%d" % self.portno)
return d
def test_timeoutTriggering(self):
"""
When a non-zero timeout is passed to L{getPage} and that many
        seconds elapse before the server responds to the request, the
        L{Deferred} is errbacked with a L{defer.TimeoutError}.
"""
finished = self.assertFailure(
getPage(self.getURL("wait"), timeout=0.000001),
defer.TimeoutError)
def cleanup(passthrough):
# Clean up the server which is hanging around not doing
# anything.
connected = self.wrapper.protocols.keys()
# There might be nothing here if the server managed to already see
# that the connection was lost.
if connected:
connected[0].transport.loseConnection()
return passthrough
finished.addBoth(cleanup)
return finished
def testNotFound(self):
return getPage(self.getURL('notsuchfile')).addCallback(self._cbNoSuchFile)
def _cbNoSuchFile(self, pageData):
self.assert_('404 - No Such Resource' in pageData)
def testFactoryInfo(self):
url = self.getURL('file')
scheme, netloc, host, port, path = client._parse(url)
factory = client.ScrapyHTTPClientFactory(Request(url))
reactor.connectTCP(host, port, factory)
return factory.deferred.addCallback(self._cbFactoryInfo, factory)
def _cbFactoryInfo(self, ignoredResult, factory):
self.assertEquals(factory.status, '200')
self.assert_(factory.version.startswith('HTTP/'))
self.assertEquals(factory.message, 'OK')
self.assertEquals(factory.response_headers['content-length'], '10')
def testRedirect(self):
return getPage(self.getURL("redirect")).addCallback(self._cbRedirect)
def _cbRedirect(self, pageData):
self.assertEquals(pageData,
'\n<html>\n <head>\n <meta http-equiv="refresh" content="0;URL=/file">\n'
' </head>\n <body bgcolor="#FFFFFF" text="#000000">\n '
'<a href="/file">click here</a>\n </body>\n</html>\n')
|
Victor-Haefner/polyvr
|
refs/heads/master
|
extras/python/udpsend.py
|
2
|
import socket
msg = "Hello, World!"
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(msg, ('141.3.150.20', 6060))
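# Note: this is Python 2 style; under Python 3 sendto() requires bytes, so
# the call would become (a minimal sketch):
# sock.sendto(msg.encode('utf-8'), ('141.3.150.20', 6060))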
|
FactoryBoy/factory_boy
|
refs/heads/master
|
tests/test_regression.py
|
1
|
# Copyright: See the LICENSE file.
"""Regression tests related to issues found with the project"""
import datetime
import typing as T
import unittest
import factory
# Example objects
# ===============
class Author(T.NamedTuple):
fullname: str
pseudonym: T.Optional[str] = None
class Book(T.NamedTuple):
title: str
author: Author
class PublishedBook(T.NamedTuple):
book: Book
published_on: datetime.date
countries: T.List[str]
class FakerRegressionTests(unittest.TestCase):
def test_locale_issue(self):
"""Regression test for `KeyError: 'locale'`
See #785 #786 #787 #788 #790 #796.
"""
class AuthorFactory(factory.Factory):
class Meta:
model = Author
class Params:
unknown = factory.Trait(
fullname="",
)
fullname = factory.Faker("name")
public_author = AuthorFactory(unknown=False)
self.assertIsNone(public_author.pseudonym)
unknown_author = AuthorFactory(unknown=True)
self.assertEqual("", unknown_author.fullname)
|
citrix-openstack-build/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/instances/tables.py
|
5
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import timesince # noqa
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import tables
from horizon.utils import filters
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
import tables as project_tables
class AdminEditInstance(project_tables.EditInstance):
url = "horizon:admin:instances:update"
class MigrateInstance(tables.BatchAction):
name = "migrate"
action_present = _("Migrate")
action_past = _("Scheduled migration (pending confirmation) of")
data_type_singular = _("Instance")
data_type_plural = _("Instances")
classes = ("btn-migrate", "btn-danger")
def allowed(self, request, instance):
return ((instance.status in project_tables.ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not project_tables.is_deleting(instance))
def action(self, request, obj_id):
api.nova.server_migrate(request, obj_id)
class AdminUpdateRow(project_tables.UpdateRow):
def get_data(self, request, instance_id):
instance = super(AdminUpdateRow, self).get_data(request, instance_id)
tenant = api.keystone.tenant_get(request,
instance.tenant_id,
admin=True)
instance.tenant_name = getattr(tenant, "name", None)
return instance
class AdminInstanceFilterAction(tables.FilterAction):
def filter(self, table, instances, filter_string):
""" Naive case-insensitive search. """
q = filter_string.lower()
return [instance for instance in instances
if q in instance.name.lower()]
class AdminInstancesTable(tables.DataTable):
TASK_STATUS_CHOICES = (
(None, True),
("none", True)
)
STATUS_CHOICES = (
("active", True),
("shutoff", True),
("suspended", True),
("paused", True),
("error", False),
)
tenant = tables.Column("tenant_name", verbose_name=_("Project"))
# NOTE(gabriel): Commenting out the user column because all we have
# is an ID, and correlating that at production scale using our current
# techniques isn't practical. It can be added back in when we have names
# returned in a practical manner by the API.
#user = tables.Column("user_id", verbose_name=_("User"))
host = tables.Column("OS-EXT-SRV-ATTR:host",
verbose_name=_("Host"),
classes=('nowrap-col',))
name = tables.Column("name",
link=("horizon:admin:instances:detail"),
verbose_name=_("Name"))
image_name = tables.Column("image_name",
verbose_name=_("Image Name"))
ip = tables.Column(project_tables.get_ips,
verbose_name=_("IP Address"),
attrs={'data-type': "ip"})
size = tables.Column(project_tables.get_size,
verbose_name=_("Size"),
classes=('nowrap-col',),
attrs={'data-type': 'size'})
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES,
display_choices=
project_tables.STATUS_DISPLAY_CHOICES)
task = tables.Column("OS-EXT-STS:task_state",
verbose_name=_("Task"),
filters=(title, filters.replace_underscores),
status=True,
status_choices=TASK_STATUS_CHOICES,
display_choices=project_tables.TASK_DISPLAY_CHOICES)
state = tables.Column(project_tables.get_power_state,
filters=(title, filters.replace_underscores),
verbose_name=_("Power State"))
created = tables.Column("created",
verbose_name=_("Uptime"),
filters=(filters.parse_isotime, timesince))
class Meta:
name = "instances"
verbose_name = _("Instances")
status_columns = ["status", "task"]
table_actions = (project_tables.TerminateInstance,
AdminInstanceFilterAction)
row_class = AdminUpdateRow
row_actions = (project_tables.ConfirmResize,
project_tables.RevertResize,
AdminEditInstance,
project_tables.ConsoleLink,
project_tables.LogLink,
project_tables.CreateSnapshot,
project_tables.TogglePause,
project_tables.ToggleSuspend,
MigrateInstance,
project_tables.SoftRebootInstance,
project_tables.RebootInstance,
project_tables.TerminateInstance)
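# Consumption sketch (hypothetical view, not part of this module): horizon
# wires a DataTable to a panel roughly like this.
#
# from horizon import tables as horizon_tables
#
# class AdminIndexView(horizon_tables.DataTableView):
#     table_class = AdminInstancesTable
#     template_name = 'admin/instances/index.html'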
|
StevenVanAcker/mitmproxy
|
refs/heads/master
|
test/mitmproxy/data/addonscripts/load_error.py
|
10
|
def load(_):
raise ValueError()
|
Jackeriss/Typora-Blog
|
refs/heads/master
|
app/util/handler_util.py
|
1
|
import functools
import orjson
import os
import asyncio
import tornado
import aiotask_context
from schema import Schema, SchemaError
from app.util import error_util
from app.util.config_util import config
from app.config import constant
class BasicHandler(tornado.web.RequestHandler):
def __init__(self, application, request, **kwargs):
super(BasicHandler, self).__init__(application, request, **kwargs)
self._basic_info = None
self._all_arguments = None
self._json_arguments = None
def initialize(self, **kwargs):
self.__dict__.update(kwargs)
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", config.server["allow_origin"])
self.set_header("Access-Control-Allow-Credentials", "true")
self.set_header("Access-Control-Max-Age", 3600)
method = self.get_header("Access-Control-Request-Method")
if method:
self.set_header("Access-Control-Allow-Methods", method)
headers = self.get_header("Access-Control-Request-Headers")
if headers:
self.set_header("Access-Control-Allow-Headers", headers)
def get_header(self, name, default=None):
return self.request.headers.get(name, default)
@property
def content_type(self):
return self.get_header("Content-Type", "")
@property
def host(self):
return self.get_header("Host", "")
@property
def basic_info_str(self):
""" 客户端可将常用信息以 JSON 形式放在这个 header 中 """
return self.get_header("Basic-Info", "{}")
@property
def basic_info(self):
if self._basic_info is None:
self._basic_info = orjson.loads(self.basic_info_str)
return self._basic_info
@property
def request_body(self):
return self.request.body.decode()
@property
def request_query(self):
return self.request.query
@property
def request_headers(self):
return dict(self.request.headers)
    def get_param(self, name):
        # get_query_arguments() returns a list, which has no .decode();
        # get_query_argument() already returns the decoded last value.
        return self.get_query_argument(name)
@property
def json_arguments(self):
if self._json_arguments is None:
content_type = self.content_type
if content_type and content_type.find(r"application/json") >= 0:
try:
json_dict = orjson.loads(self.request.body)
if isinstance(json_dict, dict):
self._json_arguments = json_dict
else:
raise error_util.ResponseError(
error_util.ERROR_CODE.JSON_PARAMS_NOT_IN_DICT
)
except orjson.JSONDecodeError:
raise error_util.ResponseError(
error_util.ERROR_CODE.JSON_PARAMS_FORMAT_ERROR
)
self._json_arguments = self._json_arguments if self._json_arguments else {}
return self._json_arguments
@property
def all_arguments(self):
if self._all_arguments is None:
self._all_arguments = {
key: self.get_argument(key) for key in self.request.arguments.keys()
}
self._all_arguments.update(self.json_arguments)
return self._all_arguments
def validate_argument(self, schema, error=None, data=None, ignore_extra_keys=True):
data = data if data else self.all_arguments
try:
return Schema(
schema, error=error, ignore_extra_keys=ignore_extra_keys
).validate(data)
        except SchemaError:
            raise error_util.ResponseError(error_util.ERROR_CODE.SCHEMA_ERROR)
def error(self, code, message=None, status=None):
status = status or error_util.ERROR_MAP.get(code, {"status": 500})["status"]
message = (
message
or error_util.ERROR_MAP.get(code, {"message": "UNKNOWN ERROR"})["message"]
)
self.set_status(status)
response = {"code": code.value, "message": message, "body": None}
return self.finish(orjson.dumps(response))
def success(self, data, status=constant.HTTPCode.OK):
self.set_status(status)
response = {
"code": error_util.ERROR_CODE.SUCCESS.value,
"message": error_util.ERROR_MAP.get(error_util.ERROR_CODE.SUCCESS)[
"message"
],
"body": data,
}
return self.finish(orjson.dumps(response))
def page(self, template, **kwargs):
return self.render(os.path.join("dist", template) if config.env != "dev" else template, **kwargs)
def post_page(self, post, **kwargs):
return self.render(os.path.join("post", f"{post}.html"), **kwargs)
def write_error(self, status_code, **kwargs):
self.error(status_code)
def render_error(self, status_code):
page_params = {"status_code": status_code.value}
return self.page("error.html", **page_params)
class PageNotFoundHandler(BasicHandler):
""" 404 页面 handler """
def get(self):
self.render_error(constant.HTTPCode.NOT_FOUND)
def post(self):
return self.error(error_util.ERROR_CODE.NOT_FOUND)
class StaticHandler(tornado.web.StaticFileHandler, BasicHandler):
""" 静态文件 handler """
def set_context(func, *args, **kwargs):
""" request handler 的装饰器,给每一个 request 设置协程上下文 """
@functools.wraps(func)
async def _async_wrapper(handler, *args, **kwargs):
aiotask_context.set("handler", handler)
await func(handler, *args, **kwargs)
@functools.wraps(func)
def _sync_wrapper(handler, *args, **kwargs):
aiotask_context.set("handler", handler)
func(handler, *args, **kwargs)
if asyncio.iscoroutinefunction(func):
return _async_wrapper
return _sync_wrapper
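# Usage sketch (ExampleHandler is hypothetical): decorating a handler
# method with set_context lets any coroutine in the request's call chain
# fetch the current handler from the task context.
#
# class ExampleHandler(BasicHandler):
#     @set_context
#     async def get(self):
#         handler = aiotask_context.get("handler")  # same object as self
#         self.success({"path": handler.request.path})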
|
foreni-packages/golismero
|
refs/heads/master
|
thirdparty_libs/geopy/distance.py
|
51
|
from math import atan, tan, sin, cos, pi, sqrt, atan2, acos, asin
from geopy.units import radians
from geopy import units, util
from geopy.point import Point
# Average great-circle radius in kilometers, from Wikipedia.
# Using a sphere with this radius results in an error of up to about 0.5%.
EARTH_RADIUS = 6372.795
# From http://www.movable-type.co.uk/scripts/LatLongVincenty.html:
# The most accurate and widely used globally-applicable model for the earth
# ellipsoid is WGS-84, used in this script. Other ellipsoids offering a
# better fit to the local geoid include Airy (1830) in the UK, International
# 1924 in much of Europe, Clarke (1880) in Africa, and GRS-67 in South
# America. America (NAD83) and Australia (GDA) use GRS-80, functionally
# equivalent to the WGS-84 ellipsoid.
ELLIPSOIDS = {
# model major (km) minor (km) flattening
'WGS-84': (6378.137, 6356.7523142, 1 / 298.257223563),
'GRS-80': (6378.137, 6356.7523141, 1 / 298.257222101),
'Airy (1830)': (6377.563396, 6356.256909, 1 / 299.3249646),
'Intl 1924': (6378.388, 6356.911946, 1 / 297.0),
'Clarke (1880)': (6378.249145, 6356.51486955, 1 / 293.465),
'GRS-67': (6378.1600, 6356.774719, 1 / 298.25)
}
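# Sanity sketch: flattening relates the semi-axes as f = (major - minor) /
# major; e.g. for WGS-84, (6378.137 - 6356.7523142) / 6378.137 is roughly
# 1 / 298.257223563, matching the third column above.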
class Distance(object):
def __init__(self, *args, **kwargs):
kilometers = kwargs.pop('kilometers', 0)
if len(args) == 1:
# if we only get one argument we assume
# it's a known distance instead of
# calculating it first
kilometers += args[0]
elif len(args) > 1:
for a, b in util.pairwise(args):
kilometers += self.measure(a, b)
kilometers += units.kilometers(**kwargs)
self.__kilometers = kilometers
def __add__(self, other):
if isinstance(other, Distance):
return self.__class__(self.kilometers + other.kilometers)
else:
raise TypeError(
"Distance instance must be added with Distance instance."
)
def __neg__(self):
return self.__class__(-self.kilometers)
def __sub__(self, other):
return self + -other
def __mul__(self, other):
return self.__class__(self.kilometers * other)
def __div__(self, other):
if isinstance(other, Distance):
return self.kilometers / other.kilometers
else:
return self.__class__(self.kilometers / other)
def __abs__(self):
return self.__class__(abs(self.kilometers))
def __nonzero__(self):
return bool(self.kilometers)
def measure(self, a, b):
raise NotImplementedError
def __repr__(self):
return 'Distance(%s)' % self.kilometers
def __str__(self):
return '%s km' % self.__kilometers
def __cmp__(self, other):
if isinstance(other, Distance):
return cmp(self.kilometers, other.kilometers)
else:
return cmp(self.kilometers, other)
@property
def kilometers(self):
return self.__kilometers
@property
def km(self):
return self.kilometers
@property
def meters(self):
return units.meters(kilometers=self.kilometers)
@property
def m(self):
return self.meters
@property
def miles(self):
return units.miles(kilometers=self.kilometers)
@property
def mi(self):
return self.miles
@property
def feet(self):
return units.feet(kilometers=self.kilometers)
@property
def ft(self):
return self.feet
@property
def nautical(self):
return units.nautical(kilometers=self.kilometers)
@property
def nm(self):
return self.nautical
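# A minimal unit-conversion sketch, assuming the geopy.units helpers behave
# as their names suggest:
#
# d = Distance(kilometers=1.609344)
# d.miles  # ~1.0 (1.609344 km is exactly one statute mile)
# d.m      # 1609.344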
class GreatCircleDistance(Distance):
"""
Use spherical geometry to calculate the surface distance between two
geodesic points. This formula can be written many different ways,
including just the use of the spherical law of cosines or the haversine
formula.
The class attribute `RADIUS` indicates which radius of the earth to use,
in kilometers. The default is to use the module constant `EARTH_RADIUS`,
which uses the average great-circle radius.
"""
RADIUS = EARTH_RADIUS
def measure(self, a, b):
a, b = Point(a), Point(b)
lat1, lng1 = radians(degrees=a.latitude), radians(degrees=a.longitude)
lat2, lng2 = radians(degrees=b.latitude), radians(degrees=b.longitude)
sin_lat1, cos_lat1 = sin(lat1), cos(lat1)
sin_lat2, cos_lat2 = sin(lat2), cos(lat2)
delta_lng = lng2 - lng1
cos_delta_lng, sin_delta_lng = cos(delta_lng), sin(delta_lng)
        # Spherical law of cosines, with min() correcting floating point
        # rounding errors on very-near and exact points. (Unused below:
        # the atan2 form is accurate for all distances.)
        central_angle = acos(
            min(1.0, sin_lat1 * sin_lat2 +
                cos_lat1 * cos_lat2 * cos_delta_lng))
# From http://en.wikipedia.org/wiki/Great_circle_distance:
# Historically, the use of this formula was simplified by the
# availability of tables for the haversine function. Although this
# formula is accurate for most distances, it too suffers from
# rounding errors for the special (and somewhat unusual) case of
# antipodal points (on opposite ends of the sphere). A more
# complicated formula that is accurate for all distances is: (below)
d = atan2(sqrt((cos_lat2 * sin_delta_lng) ** 2 +
(cos_lat1 * sin_lat2 -
sin_lat1 * cos_lat2 * cos_delta_lng) ** 2),
sin_lat1 * sin_lat2 + cos_lat1 * cos_lat2 * cos_delta_lng)
return self.RADIUS * d
def destination(self, point, bearing, distance=None):
point = Point(point)
lat1 = units.radians(degrees=point.latitude)
lng1 = units.radians(degrees=point.longitude)
bearing = units.radians(degrees=bearing)
if distance is None:
distance = self
if isinstance(distance, Distance):
distance = distance.kilometers
d_div_r = float(distance) / self.RADIUS
lat2 = asin(
sin(lat1) * cos(d_div_r) +
cos(lat1) * sin(d_div_r) * cos(bearing)
)
lng2 = lng1 + atan2(
sin(bearing) * sin(d_div_r) * cos(lat1),
cos(d_div_r) - sin(lat1) * sin(lat2)
)
return Point(units.degrees(radians=lat2), units.degrees(radians=lng2))
class VincentyDistance(Distance):
"""
Calculate the geodesic distance between two points using the formula
devised by Thaddeus Vincenty, with an accurate ellipsoidal model of the
earth.
The class attribute `ELLIPSOID` indicates which ellipsoidal model of the
earth to use. If it is a string, it is looked up in the `ELLIPSOIDS`
dictionary to obtain the major and minor semiaxes and the flattening.
Otherwise, it should be a tuple with those values. The most globally
accurate model is WGS-84. See the comments above the `ELLIPSOIDS`
dictionary for more information.
"""
ELLIPSOID = 'WGS-84'
def measure(self, a, b):
a, b = Point(a), Point(b)
lat1, lng1 = radians(degrees=a.latitude), radians(degrees=a.longitude)
lat2, lng2 = radians(degrees=b.latitude), radians(degrees=b.longitude)
if isinstance(self.ELLIPSOID, basestring):
major, minor, f = ELLIPSOIDS[self.ELLIPSOID]
else:
major, minor, f = self.ELLIPSOID
delta_lng = lng2 - lng1
reduced_lat1 = atan((1 - f) * tan(lat1))
reduced_lat2 = atan((1 - f) * tan(lat2))
sin_reduced1, cos_reduced1 = sin(reduced_lat1), cos(reduced_lat1)
sin_reduced2, cos_reduced2 = sin(reduced_lat2), cos(reduced_lat2)
lambda_lng = delta_lng
lambda_prime = 2 * pi
iter_limit = 20
while abs(lambda_lng - lambda_prime) > 10e-12 and iter_limit > 0:
sin_lambda_lng, cos_lambda_lng = sin(lambda_lng), cos(lambda_lng)
sin_sigma = sqrt(
(cos_reduced2 * sin_lambda_lng) ** 2 +
(cos_reduced1 * sin_reduced2 -
sin_reduced1 * cos_reduced2 * cos_lambda_lng) ** 2
)
if sin_sigma == 0:
return 0 # Coincident points
cos_sigma = (
sin_reduced1 * sin_reduced2 +
cos_reduced1 * cos_reduced2 * cos_lambda_lng
)
sigma = atan2(sin_sigma, cos_sigma)
sin_alpha = (
cos_reduced1 * cos_reduced2 * sin_lambda_lng / sin_sigma
)
cos_sq_alpha = 1 - sin_alpha ** 2
if cos_sq_alpha != 0:
cos2_sigma_m = cos_sigma - 2 * (
sin_reduced1 * sin_reduced2 / cos_sq_alpha
)
else:
cos2_sigma_m = 0.0 # Equatorial line
C = f / 16. * cos_sq_alpha * (4 + f * (4 - 3 * cos_sq_alpha))
lambda_prime = lambda_lng
lambda_lng = (
delta_lng + (1 - C) * f * sin_alpha * (
sigma + C * sin_sigma * (
cos2_sigma_m + C * cos_sigma * (
-1 + 2 * cos2_sigma_m ** 2
)
)
)
)
iter_limit -= 1
if iter_limit == 0:
raise ValueError("Vincenty formula failed to converge!")
u_sq = cos_sq_alpha * (major ** 2 - minor ** 2) / minor ** 2
A = 1 + u_sq / 16384. * (
4096 + u_sq * (-768 + u_sq * (320 - 175 * u_sq))
)
B = u_sq / 1024. * (256 + u_sq * (-128 + u_sq * (74 - 47 * u_sq)))
delta_sigma = (
B * sin_sigma * (
cos2_sigma_m + B / 4. * (
cos_sigma * (
-1 + 2 * cos2_sigma_m ** 2
) - B / 6. * cos2_sigma_m * (
-3 + 4 * sin_sigma ** 2
) * (
-3 + 4 * cos2_sigma_m ** 2
)
)
)
)
s = minor * A * (sigma - delta_sigma)
return s
def destination(self, point, bearing, distance=None):
point = Point(point)
lat1 = units.radians(degrees=point.latitude)
lng1 = units.radians(degrees=point.longitude)
bearing = units.radians(degrees=bearing)
if distance is None:
distance = self
if isinstance(distance, Distance):
distance = distance.kilometers
ellipsoid = self.ELLIPSOID
if isinstance(ellipsoid, basestring):
ellipsoid = ELLIPSOIDS[ellipsoid]
major, minor, f = ellipsoid
tan_reduced1 = (1 - f) * tan(lat1)
cos_reduced1 = 1 / sqrt(1 + tan_reduced1 ** 2)
sin_reduced1 = tan_reduced1 * cos_reduced1
sin_bearing, cos_bearing = sin(bearing), cos(bearing)
sigma1 = atan2(tan_reduced1, cos_bearing)
sin_alpha = cos_reduced1 * sin_bearing
cos_sq_alpha = 1 - sin_alpha ** 2
u_sq = cos_sq_alpha * (major ** 2 - minor ** 2) / minor ** 2
A = 1 + u_sq / 16384. * (
4096 + u_sq * (-768 + u_sq * (320 - 175 * u_sq))
)
B = u_sq / 1024. * (256 + u_sq * (-128 + u_sq * (74 - 47 * u_sq)))
sigma = distance / (minor * A)
sigma_prime = 2 * pi
while abs(sigma - sigma_prime) > 10e-12:
cos2_sigma_m = cos(2 * sigma1 + sigma)
sin_sigma, cos_sigma = sin(sigma), cos(sigma)
            # Note: cos2_sigma_m is squared in the first factor, matching
            # measure() above and Vincenty's published formula.
            delta_sigma = B * sin_sigma * (
                cos2_sigma_m + B / 4. * (
                    cos_sigma * (
                        -1 + 2 * cos2_sigma_m ** 2
                    ) - B / 6. * cos2_sigma_m * (
                        -3 + 4 * sin_sigma ** 2) * (
                        -3 + 4 * cos2_sigma_m ** 2
                    )
                )
            )
sigma_prime = sigma
sigma = distance / (minor * A) + delta_sigma
sin_sigma, cos_sigma = sin(sigma), cos(sigma)
lat2 = atan2(
sin_reduced1 * cos_sigma + cos_reduced1 * sin_sigma * cos_bearing,
(1 - f) * sqrt(
sin_alpha ** 2 + (
sin_reduced1 * sin_sigma -
cos_reduced1 * cos_sigma * cos_bearing
) ** 2
)
)
lambda_lng = atan2(
sin_sigma * sin_bearing,
cos_reduced1 * cos_sigma - sin_reduced1 * sin_sigma * cos_bearing
)
C = f / 16. * cos_sq_alpha * (4 + f * (4 - 3 * cos_sq_alpha))
delta_lng = (
lambda_lng - (1 - C) * f * sin_alpha * (
sigma + C * sin_sigma * (
cos2_sigma_m + C * cos_sigma * (
-1 + 2 * cos2_sigma_m ** 2
)
)
)
)
final_bearing = atan2(
sin_alpha,
cos_reduced1 * cos_sigma * cos_bearing - sin_reduced1 * sin_sigma
)
lng2 = lng1 + delta_lng
return Point(units.degrees(radians=lat2), units.degrees(radians=lng2))
# Set the default distance formula to the most generally accurate.
distance = VincentyDistance
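# Usage sketch (coordinates illustrative; expected values approximate):
#
# newport_ri = (41.49008, -71.312796)
# cleveland_oh = (41.499498, -81.695391)
# GreatCircleDistance(newport_ri, cleveland_oh).miles  # ~537.15
# VincentyDistance(newport_ri, cleveland_oh).miles     # ~536.21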
|
Yas3r/Empire
|
refs/heads/master
|
lib/modules/management/enable_rdp.py
|
22
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Enable-RDP',
'Author': ['@harmj0y'],
'Description': ("Enables RDP on the remote machine and adds a firewall exception."),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : False,
'MinPSVersion' : '2',
'Comments': [ ]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# command to enable RDP
script = "reg add \"HKLM\\SYSTEM\\CurrentControlSet\\Control\\Terminal Server\" /v fDenyTSConnections /t REG_DWORD /d 0 /f;"
# command to add the firewall exception only if the enable runs successfully
script += " if($?) {$null = netsh firewall set service type = remotedesktop mod = enable;"
# command to disable NLA
script += "$null = reg add \"HKLM\\SYSTEM\\CurrentControlSet\\Control\\Terminal Server\\WinStations\\RDP-Tcp\" /v UserAuthentication /t REG_DWORD /d 0 /f }"
return script
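# For reference, generate() returns the three fragments above joined into a
# single PowerShell one-liner: enable RDP in the registry, then (only if that
# succeeded) add the firewall exception and disable Network Level
# Authentication.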
|
wuhengzhi/chromium-crosswalk
|
refs/heads/master
|
chrome/test/ispy/common/ispy_utils.py
|
88
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Internal utilities for managing I-Spy test results in Google Cloud Storage.
See the ispy.ispy_api module for the external API.
"""
import collections
import itertools
import json
import os
import sys
import image_tools
_INVALID_EXPECTATION_CHARS = ['/', '\\', ' ', '"', '\'']
def IsValidExpectationName(expectation_name):
return not any(c in _INVALID_EXPECTATION_CHARS for c in expectation_name)
def GetExpectationPath(expectation, file_name=''):
"""Get the path to a test file in the given test run and expectation.
Args:
expectation: name of the expectation.
file_name: name of the file.
Returns:
the path as a string relative to the bucket.
"""
return 'expectations/%s/%s' % (expectation, file_name)
def GetFailurePath(test_run, expectation, file_name=''):
"""Get the path to a failure file in the given test run and test.
Args:
test_run: name of the test run.
expectation: name of the expectation.
file_name: name of the file.
Returns:
the path as a string relative to the bucket.
"""
return GetTestRunPath(test_run, '%s/%s' % (expectation, file_name))
def GetTestRunPath(test_run, file_name=''):
"""Get the path to a the given test run.
Args:
test_run: name of the test run.
file_name: name of the file.
Returns:
the path as a string relative to the bucket.
"""
return 'failures/%s/%s' % (test_run, file_name)
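# Path layout sketch (names are illustrative):
#   GetExpectationPath('mytest', 'mask.png')
#     -> 'expectations/mytest/mask.png'
#   GetFailurePath('run1', 'mytest', 'diff.png')
#     -> 'failures/run1/mytest/diff.png'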
class ISpyUtils(object):
"""Utility functions for working with an I-Spy google storage bucket."""
def __init__(self, cloud_bucket):
"""Initialize with a cloud bucket instance to supply GS functionality.
Args:
cloud_bucket: An object implementing the cloud_bucket.BaseCloudBucket
interface.
"""
self.cloud_bucket = cloud_bucket
def UploadImage(self, full_path, image):
"""Uploads an image to a location in GS.
Args:
full_path: the path to the file in GS including the file extension.
image: a RGB PIL.Image to be uploaded.
"""
self.cloud_bucket.UploadFile(
full_path, image_tools.EncodePNG(image), 'image/png')
def DownloadImage(self, full_path):
"""Downloads an image from a location in GS.
Args:
full_path: the path to the file in GS including the file extension.
Returns:
The downloaded RGB PIL.Image.
Raises:
cloud_bucket.NotFoundError: if the path to the image is not valid.
"""
return image_tools.DecodePNG(self.cloud_bucket.DownloadFile(full_path))
def UpdateImage(self, full_path, image):
"""Updates an existing image in GS, preserving permissions and metadata.
Args:
full_path: the path to the file in GS including the file extension.
image: a RGB PIL.Image.
"""
self.cloud_bucket.UpdateFile(full_path, image_tools.EncodePNG(image))
def GenerateExpectation(self, expectation, images):
"""Creates and uploads an expectation to GS from a set of images and name.
This method generates a mask from the uploaded images, then
uploads the mask and first of the images to GS as a expectation.
Args:
expectation: name for this expectation, any existing expectation with the
name will be replaced.
images: a list of RGB encoded PIL.Images
Raises:
ValueError: if the expectation name is invalid.
"""
if not IsValidExpectationName(expectation):
raise ValueError("Expectation name contains an illegal character: %s." %
str(_INVALID_EXPECTATION_CHARS))
mask = image_tools.InflateMask(image_tools.CreateMask(images), 7)
self.UploadImage(
GetExpectationPath(expectation, 'expected.png'), images[0])
self.UploadImage(GetExpectationPath(expectation, 'mask.png'), mask)
def PerformComparison(self, test_run, expectation, actual):
"""Runs an image comparison, and uploads discrepancies to GS.
Args:
test_run: the name of the test_run.
expectation: the name of the expectation to use for comparison.
actual: an RGB-encoded PIL.Image that is the actual result.
Raises:
cloud_bucket.NotFoundError: if the given expectation is not found.
ValueError: if the expectation name is invalid.
"""
if not IsValidExpectationName(expectation):
raise ValueError("Expectation name contains an illegal character: %s." %
str(_INVALID_EXPECTATION_CHARS))
expectation_tuple = self.GetExpectation(expectation)
if not image_tools.SameImage(
actual, expectation_tuple.expected, mask=expectation_tuple.mask):
self.UploadImage(
GetFailurePath(test_run, expectation, 'actual.png'), actual)
diff, diff_pxls = image_tools.VisualizeImageDifferences(
expectation_tuple.expected, actual, mask=expectation_tuple.mask)
self.UploadImage(GetFailurePath(test_run, expectation, 'diff.png'), diff)
self.cloud_bucket.UploadFile(
GetFailurePath(test_run, expectation, 'info.txt'),
json.dumps({
'different_pixels': diff_pxls,
'fraction_different':
diff_pxls / float(actual.size[0] * actual.size[1])}),
'application/json')
def GetExpectation(self, expectation):
"""Returns the given expectation from GS.
Args:
expectation: the name of the expectation to get.
Returns:
A named tuple: 'Expectation', containing two images: expected and mask.
Raises:
cloud_bucket.NotFoundError: if the test is not found in GS.
"""
Expectation = collections.namedtuple('Expectation', ['expected', 'mask'])
return Expectation(self.DownloadImage(GetExpectationPath(expectation,
'expected.png')),
self.DownloadImage(GetExpectationPath(expectation,
'mask.png')))
def ExpectationExists(self, expectation):
"""Returns whether the given expectation exists in GS.
Args:
expectation: the name of the expectation to check.
Returns:
A boolean indicating whether the test exists.
"""
expected_image_exists = self.cloud_bucket.FileExists(
GetExpectationPath(expectation, 'expected.png'))
mask_image_exists = self.cloud_bucket.FileExists(
GetExpectationPath(expectation, 'mask.png'))
return expected_image_exists and mask_image_exists
def FailureExists(self, test_run, expectation):
"""Returns whether a failure for the expectation exists for the given run.
Args:
test_run: the name of the test_run.
expectation: the name of the expectation that failed.
Returns:
A boolean indicating whether the failure exists.
"""
actual_image_exists = self.cloud_bucket.FileExists(
GetFailurePath(test_run, expectation, 'actual.png'))
test_exists = self.ExpectationExists(expectation)
info_exists = self.cloud_bucket.FileExists(
GetFailurePath(test_run, expectation, 'info.txt'))
return test_exists and actual_image_exists and info_exists
def RemoveExpectation(self, expectation):
"""Removes an expectation and all associated failures with that test.
Args:
expectation: the name of the expectation to remove.
"""
test_paths = self.cloud_bucket.GetAllPaths(
GetExpectationPath(expectation))
for path in test_paths:
self.cloud_bucket.RemoveFile(path)
  def GenerateExpectationPinkOut(self, expectation, images, pink_out, rgb):
    """Uploads an ispy-test to GS with the pink_out workaround.
    Args:
      expectation: the name of the expectation to be uploaded.
      images: a list of RGB PIL.Images.
      pink_out: an image whose pixels of the given color are masked out.
      rgb: a list representing the RGB values of the color to mask out.
    Raises:
      ValueError: if the expectation name is invalid.
    """
if not IsValidExpectationName(expectation):
raise ValueError("Expectation name contains an illegal character: %s." %
str(_INVALID_EXPECTATION_CHARS))
# convert the pink_out into a mask
black = (0, 0, 0, 255)
white = (255, 255, 255, 255)
pink_out.putdata(
[black if px == (rgb[0], rgb[1], rgb[2], 255) else white
for px in pink_out.getdata()])
    mask = image_tools.InflateMask(image_tools.CreateMask(images), 7)
combined_mask = image_tools.AddMasks([mask, pink_out])
self.UploadImage(GetExpectationPath(expectation, 'expected.png'), images[0])
self.UploadImage(GetExpectationPath(expectation, 'mask.png'), combined_mask)
def RemoveFailure(self, test_run, expectation):
"""Removes a failure from GS.
Args:
test_run: the name of the test_run.
      expectation: the expectation on which the failure to be removed occurred.
"""
failure_paths = self.cloud_bucket.GetAllPaths(
GetFailurePath(test_run, expectation))
for path in failure_paths:
self.cloud_bucket.RemoveFile(path)
def GetFailure(self, test_run, expectation):
"""Returns a given test failure's expected, diff, and actual images.
Args:
test_run: the name of the test_run.
expectation: the name of the expectation the result corresponds to.
Returns:
A named tuple: Failure containing three images: expected, diff, and
actual.
Raises:
cloud_bucket.NotFoundError: if the result is not found in GS.
"""
expected = self.DownloadImage(
GetExpectationPath(expectation, 'expected.png'))
actual = self.DownloadImage(
GetFailurePath(test_run, expectation, 'actual.png'))
diff = self.DownloadImage(
GetFailurePath(test_run, expectation, 'diff.png'))
info = json.loads(self.cloud_bucket.DownloadFile(
GetFailurePath(test_run, expectation, 'info.txt')))
Failure = collections.namedtuple(
'Failure', ['expected', 'diff', 'actual', 'info'])
return Failure(expected, diff, actual, info)
def GetAllPaths(self, prefix, max_keys=None, marker=None, delimiter=None):
"""Gets urls to all files in GS whose path starts with a given prefix.
Args:
prefix: the prefix to filter files in GS by.
      max_keys: Integer. Specifies the maximum number of objects returned.
      marker: String. Only objects whose full path starts lexicographically
        after marker (exclusively) will be returned.
      delimiter: String. Turns on directory mode; specifies characters
        to be used as directory separators.
Returns:
a list containing urls to all objects that started with
the prefix.
"""
return self.cloud_bucket.GetAllPaths(
prefix, max_keys=max_keys, marker=marker, delimiter=delimiter)
|
dmilith/SublimeText3-dmilith
|
refs/heads/master
|
Packages/pygments/all/pygments/lexers/_php_builtins.py
|
48
|
# -*- coding: utf-8 -*-
"""
pygments.lexers._php_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    This file loads the function names and their modules from the
    PHP website and generates itself.
    Do not alter the MODULES dict by hand!
    WARNING: the generation transfers quite a lot of data over your
    internet connection. Don't run that at home; use
    a server ;-)
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
MODULES = {'.NET': ('dotnet_load',),
'APC': ('apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
'apc_bin_loadfile',
'apc_cache_info',
'apc_cas',
'apc_clear_cache',
'apc_compile_file',
'apc_dec',
'apc_define_constants',
'apc_delete_file',
'apc_delete',
'apc_exists',
'apc_fetch',
'apc_inc',
'apc_load_constants',
'apc_sma_info',
'apc_store'),
'APD': ('apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
'apd_croak',
'apd_dump_function_table',
'apd_dump_persistent_resources',
'apd_dump_regular_resources',
'apd_echo',
'apd_get_active_symbols',
'apd_set_pprof_trace',
'apd_set_session_trace_socket',
'apd_set_session_trace',
'apd_set_session',
'override_function',
'rename_function'),
'Aliases and deprecated Mysqli': ('mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
'mysqli_get_cache_stats',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_send_long_data',
'mysqli_slave_query'),
'Apache': ('apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
'apache_lookup_uri',
'apache_note',
'apache_request_headers',
'apache_reset_timeout',
'apache_response_headers',
'apache_setenv',
'getallheaders',
'virtual'),
'Array': ('array_change_key_case',
'array_chunk',
'array_column',
'array_combine',
'array_count_values',
'array_diff_assoc',
'array_diff_key',
'array_diff_uassoc',
'array_diff_ukey',
'array_diff',
'array_fill_keys',
'array_fill',
'array_filter',
'array_flip',
'array_intersect_assoc',
'array_intersect_key',
'array_intersect_uassoc',
'array_intersect_ukey',
'array_intersect',
'array_key_exists',
'array_keys',
'array_map',
'array_merge_recursive',
'array_merge',
'array_multisort',
'array_pad',
'array_pop',
'array_product',
'array_push',
'array_rand',
'array_reduce',
'array_replace_recursive',
'array_replace',
'array_reverse',
'array_search',
'array_shift',
'array_slice',
'array_splice',
'array_sum',
'array_udiff_assoc',
'array_udiff_uassoc',
'array_udiff',
'array_uintersect_assoc',
'array_uintersect_uassoc',
'array_uintersect',
'array_unique',
'array_unshift',
'array_values',
'array_walk_recursive',
'array_walk',
'array',
'arsort',
'asort',
'compact',
'count',
'current',
'each',
'end',
'extract',
'in_array',
'key_exists',
'key',
'krsort',
'ksort',
'list',
'natcasesort',
'natsort',
'next',
'pos',
'prev',
'range',
'reset',
'rsort',
'shuffle',
'sizeof',
'sort',
'uasort',
'uksort',
'usort'),
'BBCode': ('bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
'bbcode_set_flags'),
'BC Math': ('bcadd',
'bccomp',
'bcdiv',
'bcmod',
'bcmul',
'bcpow',
'bcpowmod',
'bcscale',
'bcsqrt',
'bcsub'),
'Blenc': ('blenc_encrypt',),
'Bzip2': ('bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
'bzerror',
'bzerrstr',
'bzflush',
'bzopen',
'bzread',
'bzwrite'),
'COM': ('com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
'com_get',
'com_invoke',
'com_isenum',
'com_load_typelib',
'com_load',
'com_message_pump',
'com_print_typeinfo',
'com_propget',
'com_propput',
'com_propset',
'com_release',
'com_set',
'variant_abs',
'variant_add',
'variant_and',
'variant_cast',
'variant_cat',
'variant_cmp',
'variant_date_from_timestamp',
'variant_date_to_timestamp',
'variant_div',
'variant_eqv',
'variant_fix',
'variant_get_type',
'variant_idiv',
'variant_imp',
'variant_int',
'variant_mod',
'variant_mul',
'variant_neg',
'variant_not',
'variant_or',
'variant_pow',
'variant_round',
'variant_set_type',
'variant_set',
'variant_sub',
'variant_xor'),
'CUBRID': ('cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
'cubrid_col_size',
'cubrid_column_names',
'cubrid_column_types',
'cubrid_commit',
'cubrid_connect_with_url',
'cubrid_connect',
'cubrid_current_oid',
'cubrid_disconnect',
'cubrid_drop',
'cubrid_error_code_facility',
'cubrid_error_code',
'cubrid_error_msg',
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
'cubrid_get_autocommit',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
'cubrid_get_query_timeout',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
'cubrid_is_instance',
'cubrid_lob_close',
'cubrid_lob_export',
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
'cubrid_lob2_bind',
'cubrid_lob2_close',
'cubrid_lob2_export',
'cubrid_lob2_import',
'cubrid_lob2_new',
'cubrid_lob2_read',
'cubrid_lob2_seek64',
'cubrid_lob2_seek',
'cubrid_lob2_size64',
'cubrid_lob2_size',
'cubrid_lob2_tell64',
'cubrid_lob2_tell',
'cubrid_lob2_write',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
'cubrid_next_result',
'cubrid_num_cols',
'cubrid_num_rows',
'cubrid_pconnect_with_url',
'cubrid_pconnect',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
'cubrid_schema',
'cubrid_seq_drop',
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
'cubrid_set_autocommit',
'cubrid_set_db_parameter',
'cubrid_set_drop',
'cubrid_set_query_timeout',
'cubrid_version'),
'Cairo': ('cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
'cairo_font_options_equal',
'cairo_font_options_get_antialias',
'cairo_font_options_get_hint_metrics',
'cairo_font_options_get_hint_style',
'cairo_font_options_get_subpixel_order',
'cairo_font_options_hash',
'cairo_font_options_merge',
'cairo_font_options_set_antialias',
'cairo_font_options_set_hint_metrics',
'cairo_font_options_set_hint_style',
'cairo_font_options_set_subpixel_order',
'cairo_font_options_status',
'cairo_format_stride_for_width',
'cairo_image_surface_create_for_data',
'cairo_image_surface_create_from_png',
'cairo_image_surface_create',
'cairo_image_surface_get_data',
'cairo_image_surface_get_format',
'cairo_image_surface_get_height',
'cairo_image_surface_get_stride',
'cairo_image_surface_get_width',
'cairo_matrix_create_scale',
'cairo_matrix_create_translate',
'cairo_matrix_invert',
'cairo_matrix_multiply',
'cairo_matrix_rotate',
'cairo_matrix_transform_distance',
'cairo_matrix_transform_point',
'cairo_matrix_translate',
'cairo_pattern_add_color_stop_rgb',
'cairo_pattern_add_color_stop_rgba',
'cairo_pattern_create_for_surface',
'cairo_pattern_create_linear',
'cairo_pattern_create_radial',
'cairo_pattern_create_rgb',
'cairo_pattern_create_rgba',
'cairo_pattern_get_color_stop_count',
'cairo_pattern_get_color_stop_rgba',
'cairo_pattern_get_extend',
'cairo_pattern_get_filter',
'cairo_pattern_get_linear_points',
'cairo_pattern_get_matrix',
'cairo_pattern_get_radial_circles',
'cairo_pattern_get_rgba',
'cairo_pattern_get_surface',
'cairo_pattern_get_type',
'cairo_pattern_set_extend',
'cairo_pattern_set_filter',
'cairo_pattern_set_matrix',
'cairo_pattern_status',
'cairo_pdf_surface_create',
'cairo_pdf_surface_set_size',
'cairo_ps_get_levels',
'cairo_ps_level_to_string',
'cairo_ps_surface_create',
'cairo_ps_surface_dsc_begin_page_setup',
'cairo_ps_surface_dsc_begin_setup',
'cairo_ps_surface_dsc_comment',
'cairo_ps_surface_get_eps',
'cairo_ps_surface_restrict_to_level',
'cairo_ps_surface_set_eps',
'cairo_ps_surface_set_size',
'cairo_scaled_font_create',
'cairo_scaled_font_extents',
'cairo_scaled_font_get_ctm',
'cairo_scaled_font_get_font_face',
'cairo_scaled_font_get_font_matrix',
'cairo_scaled_font_get_font_options',
'cairo_scaled_font_get_scale_matrix',
'cairo_scaled_font_get_type',
'cairo_scaled_font_glyph_extents',
'cairo_scaled_font_status',
'cairo_scaled_font_text_extents',
'cairo_surface_copy_page',
'cairo_surface_create_similar',
'cairo_surface_finish',
'cairo_surface_flush',
'cairo_surface_get_content',
'cairo_surface_get_device_offset',
'cairo_surface_get_font_options',
'cairo_surface_get_type',
'cairo_surface_mark_dirty_rectangle',
'cairo_surface_mark_dirty',
'cairo_surface_set_device_offset',
'cairo_surface_set_fallback_resolution',
'cairo_surface_show_page',
'cairo_surface_status',
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
'cairo_svg_version_to_string'),
'Calendar': ('cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
'easter_date',
'easter_days',
'FrenchToJD',
'GregorianToJD',
'JDDayOfWeek',
'JDMonthName',
'JDToFrench',
'JDToGregorian',
'jdtojewish',
'JDToJulian',
'jdtounix',
'JewishToJD',
'JulianToJD',
'unixtojd'),
'Classes/Object': ('__autoload',
'call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
'get_called_class',
'get_class_methods',
'get_class_vars',
'get_class',
'get_declared_classes',
'get_declared_interfaces',
'get_declared_traits',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
'property_exists',
'trait_exists'),
'Classkit': ('classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
'classkit_method_rename'),
'Crack': ('crack_check',
'crack_closedict',
'crack_getlastmessage',
'crack_opendict'),
'Ctype': ('ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
'ctype_punct',
'ctype_space',
'ctype_upper',
'ctype_xdigit'),
'Cyrus': ('cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
'cyrus_unbind'),
'DB++': ('dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
'dbplus_curr',
'dbplus_errcode',
'dbplus_errno',
'dbplus_find',
'dbplus_first',
'dbplus_flush',
'dbplus_freealllocks',
'dbplus_freelock',
'dbplus_freerlocks',
'dbplus_getlock',
'dbplus_getunique',
'dbplus_info',
'dbplus_last',
'dbplus_lockrel',
'dbplus_next',
'dbplus_open',
'dbplus_prev',
'dbplus_rchperm',
'dbplus_rcreate',
'dbplus_rcrtexact',
'dbplus_rcrtlike',
'dbplus_resolve',
'dbplus_restorepos',
'dbplus_rkeys',
'dbplus_ropen',
'dbplus_rquery',
'dbplus_rrename',
'dbplus_rsecindex',
'dbplus_runlink',
'dbplus_rzap',
'dbplus_savepos',
'dbplus_setindex',
'dbplus_setindexbynumber',
'dbplus_sql',
'dbplus_tcl',
'dbplus_tremove',
'dbplus_undo',
'dbplus_undoprepare',
'dbplus_unlockrel',
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
'dbplus_xunlockrel'),
'DBA': ('dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
'dba_firstkey',
'dba_handlers',
'dba_insert',
'dba_key_split',
'dba_list',
'dba_nextkey',
'dba_open',
'dba_optimize',
'dba_popen',
'dba_replace',
'dba_sync'),
'DOM': ('dom_import_simplexml',),
'Date/Time': ('checkdate',
'date_add',
'date_create_from_format',
'date_create_immutable_from_format',
'date_create_immutable',
'date_create',
'date_date_set',
'date_default_timezone_get',
'date_default_timezone_set',
'date_diff',
'date_format',
'date_get_last_errors',
'date_interval_create_from_date_string',
'date_interval_format',
'date_isodate_set',
'date_modify',
'date_offset_get',
'date_parse_from_format',
'date_parse',
'date_sub',
'date_sun_info',
'date_sunrise',
'date_sunset',
'date_time_set',
'date_timestamp_get',
'date_timestamp_set',
'date_timezone_get',
'date_timezone_set',
'date',
'getdate',
'gettimeofday',
'gmdate',
'gmmktime',
'gmstrftime',
'idate',
'localtime',
'microtime',
'mktime',
'strftime',
'strptime',
'strtotime',
'time',
'timezone_abbreviations_list',
'timezone_identifiers_list',
'timezone_location_get',
'timezone_name_from_abbr',
'timezone_name_get',
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
'timezone_version_get'),
'Direct IO': ('dio_close',
'dio_fcntl',
'dio_open',
'dio_read',
'dio_seek',
'dio_stat',
'dio_tcsetattr',
'dio_truncate',
'dio_write'),
'Directory': ('chdir',
'chroot',
'closedir',
'dir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
'scandir'),
'Eio': ('eio_busy',
'eio_cancel',
'eio_chmod',
'eio_chown',
'eio_close',
'eio_custom',
'eio_dup2',
'eio_event_loop',
'eio_fallocate',
'eio_fchmod',
'eio_fchown',
'eio_fdatasync',
'eio_fstat',
'eio_fstatvfs',
'eio_fsync',
'eio_ftruncate',
'eio_futime',
'eio_get_event_stream',
'eio_get_last_error',
'eio_grp_add',
'eio_grp_cancel',
'eio_grp_limit',
'eio_grp',
'eio_init',
'eio_link',
'eio_lstat',
'eio_mkdir',
'eio_mknod',
'eio_nop',
'eio_npending',
'eio_nready',
'eio_nreqs',
'eio_nthreads',
'eio_open',
'eio_poll',
'eio_read',
'eio_readahead',
'eio_readdir',
'eio_readlink',
'eio_realpath',
'eio_rename',
'eio_rmdir',
'eio_seek',
'eio_sendfile',
'eio_set_max_idle',
'eio_set_max_parallel',
'eio_set_max_poll_reqs',
'eio_set_max_poll_time',
'eio_set_min_parallel',
'eio_stat',
'eio_statvfs',
'eio_symlink',
'eio_sync_file_range',
'eio_sync',
'eio_syncfs',
'eio_truncate',
'eio_unlink',
'eio_utime',
'eio_write'),
'Enchant': ('enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
'enchant_broker_get_error',
'enchant_broker_init',
'enchant_broker_list_dicts',
'enchant_broker_request_dict',
'enchant_broker_request_pwl_dict',
'enchant_broker_set_ordering',
'enchant_dict_add_to_personal',
'enchant_dict_add_to_session',
'enchant_dict_check',
'enchant_dict_describe',
'enchant_dict_get_error',
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
'enchant_dict_suggest'),
'Error Handling': ('debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
'error_reporting',
'restore_error_handler',
'restore_exception_handler',
'set_error_handler',
'set_exception_handler',
'trigger_error',
'user_error'),
'Exif': ('exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
'read_exif_data'),
'Expect': ('expect_expectl', 'expect_popen'),
'FAM': ('fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
'fam_monitor_file',
'fam_next_event',
'fam_open',
'fam_pending',
'fam_resume_monitor',
'fam_suspend_monitor'),
'FDF': ('fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
'fdf_enum_values',
'fdf_errno',
'fdf_error',
'fdf_get_ap',
'fdf_get_attachment',
'fdf_get_encoding',
'fdf_get_file',
'fdf_get_flags',
'fdf_get_opt',
'fdf_get_status',
'fdf_get_value',
'fdf_get_version',
'fdf_header',
'fdf_next_field_name',
'fdf_open_string',
'fdf_open',
'fdf_remove_item',
'fdf_save_string',
'fdf_save',
'fdf_set_ap',
'fdf_set_encoding',
'fdf_set_file',
'fdf_set_flags',
'fdf_set_javascript_action',
'fdf_set_on_import_javascript',
'fdf_set_opt',
'fdf_set_status',
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
'fdf_set_version'),
'FPM': ('fastcgi_finish_request',),
'FTP': ('ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
'ftp_close',
'ftp_connect',
'ftp_delete',
'ftp_exec',
'ftp_fget',
'ftp_fput',
'ftp_get_option',
'ftp_get',
'ftp_login',
'ftp_mdtm',
'ftp_mkdir',
'ftp_nb_continue',
'ftp_nb_fget',
'ftp_nb_fput',
'ftp_nb_get',
'ftp_nb_put',
'ftp_nlist',
'ftp_pasv',
'ftp_put',
'ftp_pwd',
'ftp_quit',
'ftp_raw',
'ftp_rawlist',
'ftp_rename',
'ftp_rmdir',
'ftp_set_option',
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
'ftp_systype'),
'Fann': ('fann_cascadetrain_on_data',
'fann_cascadetrain_on_file',
'fann_clear_scaling_params',
'fann_copy',
'fann_create_from_file',
'fann_create_shortcut_array',
'fann_create_shortcut',
'fann_create_sparse_array',
'fann_create_sparse',
'fann_create_standard_array',
'fann_create_standard',
'fann_create_train_from_callback',
'fann_create_train',
'fann_descale_input',
'fann_descale_output',
'fann_descale_train',
'fann_destroy_train',
'fann_destroy',
'fann_duplicate_train_data',
'fann_get_activation_function',
'fann_get_activation_steepness',
'fann_get_bias_array',
'fann_get_bit_fail_limit',
'fann_get_bit_fail',
'fann_get_cascade_activation_functions_count',
'fann_get_cascade_activation_functions',
'fann_get_cascade_activation_steepnesses_count',
'fann_get_cascade_activation_steepnesses',
'fann_get_cascade_candidate_change_fraction',
'fann_get_cascade_candidate_limit',
'fann_get_cascade_candidate_stagnation_epochs',
'fann_get_cascade_max_cand_epochs',
'fann_get_cascade_max_out_epochs',
'fann_get_cascade_min_cand_epochs',
'fann_get_cascade_min_out_epochs',
'fann_get_cascade_num_candidate_groups',
'fann_get_cascade_num_candidates',
'fann_get_cascade_output_change_fraction',
'fann_get_cascade_output_stagnation_epochs',
'fann_get_cascade_weight_multiplier',
'fann_get_connection_array',
'fann_get_connection_rate',
'fann_get_errno',
'fann_get_errstr',
'fann_get_layer_array',
'fann_get_learning_momentum',
'fann_get_learning_rate',
'fann_get_MSE',
'fann_get_network_type',
'fann_get_num_input',
'fann_get_num_layers',
'fann_get_num_output',
'fann_get_quickprop_decay',
'fann_get_quickprop_mu',
'fann_get_rprop_decrease_factor',
'fann_get_rprop_delta_max',
'fann_get_rprop_delta_min',
'fann_get_rprop_delta_zero',
'fann_get_rprop_increase_factor',
'fann_get_sarprop_step_error_shift',
'fann_get_sarprop_step_error_threshold_factor',
'fann_get_sarprop_temperature',
'fann_get_sarprop_weight_decay_shift',
'fann_get_total_connections',
'fann_get_total_neurons',
'fann_get_train_error_function',
'fann_get_train_stop_function',
'fann_get_training_algorithm',
'fann_init_weights',
'fann_length_train_data',
'fann_merge_train_data',
'fann_num_input_train_data',
'fann_num_output_train_data',
'fann_print_error',
'fann_randomize_weights',
'fann_read_train_from_file',
'fann_reset_errno',
'fann_reset_errstr',
'fann_reset_MSE',
'fann_run',
'fann_save_train',
'fann_save',
'fann_scale_input_train_data',
'fann_scale_input',
'fann_scale_output_train_data',
'fann_scale_output',
'fann_scale_train_data',
'fann_scale_train',
'fann_set_activation_function_hidden',
'fann_set_activation_function_layer',
'fann_set_activation_function_output',
'fann_set_activation_function',
'fann_set_activation_steepness_hidden',
'fann_set_activation_steepness_layer',
'fann_set_activation_steepness_output',
'fann_set_activation_steepness',
'fann_set_bit_fail_limit',
'fann_set_callback',
'fann_set_cascade_activation_functions',
'fann_set_cascade_activation_steepnesses',
'fann_set_cascade_candidate_change_fraction',
'fann_set_cascade_candidate_limit',
'fann_set_cascade_candidate_stagnation_epochs',
'fann_set_cascade_max_cand_epochs',
'fann_set_cascade_max_out_epochs',
'fann_set_cascade_min_cand_epochs',
'fann_set_cascade_min_out_epochs',
'fann_set_cascade_num_candidate_groups',
'fann_set_cascade_output_change_fraction',
'fann_set_cascade_output_stagnation_epochs',
'fann_set_cascade_weight_multiplier',
'fann_set_error_log',
'fann_set_input_scaling_params',
'fann_set_learning_momentum',
'fann_set_learning_rate',
'fann_set_output_scaling_params',
'fann_set_quickprop_decay',
'fann_set_quickprop_mu',
'fann_set_rprop_decrease_factor',
'fann_set_rprop_delta_max',
'fann_set_rprop_delta_min',
'fann_set_rprop_delta_zero',
'fann_set_rprop_increase_factor',
'fann_set_sarprop_step_error_shift',
'fann_set_sarprop_step_error_threshold_factor',
'fann_set_sarprop_temperature',
'fann_set_sarprop_weight_decay_shift',
'fann_set_scaling_params',
'fann_set_train_error_function',
'fann_set_train_stop_function',
'fann_set_training_algorithm',
'fann_set_weight_array',
'fann_set_weight',
'fann_shuffle_train_data',
'fann_subset_train_data',
'fann_test_data',
'fann_test',
'fann_train_epoch',
'fann_train_on_data',
'fann_train_on_file',
'fann_train'),
'Fileinfo': ('finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
'mime_content_type'),
'Filesystem': ('basename',
'chgrp',
'chmod',
'chown',
'clearstatcache',
'copy',
'dirname',
'disk_free_space',
'disk_total_space',
'diskfreespace',
'fclose',
'feof',
'fflush',
'fgetc',
'fgetcsv',
'fgets',
'fgetss',
'file_exists',
'file_get_contents',
'file_put_contents',
'file',
'fileatime',
'filectime',
'filegroup',
'fileinode',
'filemtime',
'fileowner',
'fileperms',
'filesize',
'filetype',
'flock',
'fnmatch',
'fopen',
'fpassthru',
'fputcsv',
'fputs',
'fread',
'fscanf',
'fseek',
'fstat',
'ftell',
'ftruncate',
'fwrite',
'glob',
'is_dir',
'is_executable',
'is_file',
'is_link',
'is_readable',
'is_uploaded_file',
'is_writable',
'is_writeable',
'lchgrp',
'lchown',
'link',
'linkinfo',
'lstat',
'mkdir',
'move_uploaded_file',
'parse_ini_file',
'parse_ini_string',
'pathinfo',
'pclose',
'popen',
'readfile',
'readlink',
'realpath_cache_get',
'realpath_cache_size',
'realpath',
'rename',
'rewind',
'rmdir',
'set_file_buffer',
'stat',
'symlink',
'tempnam',
'tmpfile',
'touch',
'umask',
'unlink'),
'Filter': ('filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
'filter_var'),
'Firebird/InterBase': ('ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
'ibase_blob_cancel',
'ibase_blob_close',
'ibase_blob_create',
'ibase_blob_echo',
'ibase_blob_get',
'ibase_blob_import',
'ibase_blob_info',
'ibase_blob_open',
'ibase_close',
'ibase_commit_ret',
'ibase_commit',
'ibase_connect',
'ibase_db_info',
'ibase_delete_user',
'ibase_drop_db',
'ibase_errcode',
'ibase_errmsg',
'ibase_execute',
'ibase_fetch_assoc',
'ibase_fetch_object',
'ibase_fetch_row',
'ibase_field_info',
'ibase_free_event_handler',
'ibase_free_query',
'ibase_free_result',
'ibase_gen_id',
'ibase_maintain_db',
'ibase_modify_user',
'ibase_name_result',
'ibase_num_fields',
'ibase_num_params',
'ibase_param_info',
'ibase_pconnect',
'ibase_prepare',
'ibase_query',
'ibase_restore',
'ibase_rollback_ret',
'ibase_rollback',
'ibase_server_info',
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_trans',
'ibase_wait_event'),
'FriBiDi': ('fribidi_log2vis',),
'FrontBase': ('fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
'fbsql_clob_size',
'fbsql_close',
'fbsql_commit',
'fbsql_connect',
'fbsql_create_blob',
'fbsql_create_clob',
'fbsql_create_db',
'fbsql_data_seek',
'fbsql_database_password',
'fbsql_database',
'fbsql_db_query',
'fbsql_db_status',
'fbsql_drop_db',
'fbsql_errno',
'fbsql_error',
'fbsql_fetch_array',
'fbsql_fetch_assoc',
'fbsql_fetch_field',
'fbsql_fetch_lengths',
'fbsql_fetch_object',
'fbsql_fetch_row',
'fbsql_field_flags',
'fbsql_field_len',
'fbsql_field_name',
'fbsql_field_seek',
'fbsql_field_table',
'fbsql_field_type',
'fbsql_free_result',
'fbsql_get_autostart_info',
'fbsql_hostname',
'fbsql_insert_id',
'fbsql_list_dbs',
'fbsql_list_fields',
'fbsql_list_tables',
'fbsql_next_result',
'fbsql_num_fields',
'fbsql_num_rows',
'fbsql_password',
'fbsql_pconnect',
'fbsql_query',
'fbsql_read_blob',
'fbsql_read_clob',
'fbsql_result',
'fbsql_rollback',
'fbsql_rows_fetched',
'fbsql_select_db',
'fbsql_set_characterset',
'fbsql_set_lob_mode',
'fbsql_set_password',
'fbsql_set_transaction',
'fbsql_start_db',
'fbsql_stop_db',
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
'fbsql_warnings'),
'Function handling': ('call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
'forward_static_call',
'func_get_arg',
'func_get_args',
'func_num_args',
'function_exists',
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
'unregister_tick_function'),
'GD and Image': ('gd_info',
'getimagesize',
'getimagesizefromstring',
'image_type_to_extension',
'image_type_to_mime_type',
'image2wbmp',
'imageaffine',
'imageaffinematrixconcat',
'imageaffinematrixget',
'imagealphablending',
'imageantialias',
'imagearc',
'imagechar',
'imagecharup',
'imagecolorallocate',
'imagecolorallocatealpha',
'imagecolorat',
'imagecolorclosest',
'imagecolorclosestalpha',
'imagecolorclosesthwb',
'imagecolordeallocate',
'imagecolorexact',
'imagecolorexactalpha',
'imagecolormatch',
'imagecolorresolve',
'imagecolorresolvealpha',
'imagecolorset',
'imagecolorsforindex',
'imagecolorstotal',
'imagecolortransparent',
'imageconvolution',
'imagecopy',
'imagecopymerge',
'imagecopymergegray',
'imagecopyresampled',
'imagecopyresized',
'imagecreate',
'imagecreatefromgd2',
'imagecreatefromgd2part',
'imagecreatefromgd',
'imagecreatefromgif',
'imagecreatefromjpeg',
'imagecreatefrompng',
'imagecreatefromstring',
'imagecreatefromwbmp',
'imagecreatefromwebp',
'imagecreatefromxbm',
'imagecreatefromxpm',
'imagecreatetruecolor',
'imagecrop',
'imagecropauto',
'imagedashedline',
'imagedestroy',
'imageellipse',
'imagefill',
'imagefilledarc',
'imagefilledellipse',
'imagefilledpolygon',
'imagefilledrectangle',
'imagefilltoborder',
'imagefilter',
'imageflip',
'imagefontheight',
'imagefontwidth',
'imageftbbox',
'imagefttext',
'imagegammacorrect',
'imagegd2',
'imagegd',
'imagegif',
'imagegrabscreen',
'imagegrabwindow',
'imageinterlace',
'imageistruecolor',
'imagejpeg',
'imagelayereffect',
'imageline',
'imageloadfont',
'imagepalettecopy',
'imagepalettetotruecolor',
'imagepng',
'imagepolygon',
'imagepsbbox',
'imagepsencodefont',
'imagepsextendfont',
'imagepsfreefont',
'imagepsloadfont',
'imagepsslantfont',
'imagepstext',
'imagerectangle',
'imagerotate',
'imagesavealpha',
'imagescale',
'imagesetbrush',
'imagesetinterpolation',
'imagesetpixel',
'imagesetstyle',
'imagesetthickness',
'imagesettile',
'imagestring',
'imagestringup',
'imagesx',
'imagesy',
'imagetruecolortopalette',
'imagettfbbox',
'imagettftext',
'imagetypes',
'imagewbmp',
'imagewebp',
'imagexbm',
'iptcembed',
'iptcparse',
'jpeg2wbmp',
'png2wbmp'),
'GMP': ('gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
'gmp_cmp',
'gmp_com',
'gmp_div_q',
'gmp_div_qr',
'gmp_div_r',
'gmp_div',
'gmp_divexact',
'gmp_fact',
'gmp_gcd',
'gmp_gcdext',
'gmp_hamdist',
'gmp_init',
'gmp_intval',
'gmp_invert',
'gmp_jacobi',
'gmp_legendre',
'gmp_mod',
'gmp_mul',
'gmp_neg',
'gmp_nextprime',
'gmp_or',
'gmp_perfect_square',
'gmp_popcount',
'gmp_pow',
'gmp_powm',
'gmp_prob_prime',
'gmp_random',
'gmp_scan0',
'gmp_scan1',
'gmp_setbit',
'gmp_sign',
'gmp_sqrt',
'gmp_sqrtrem',
'gmp_strval',
'gmp_sub',
'gmp_testbit',
'gmp_xor'),
'GeoIP': ('geoip_asnum_by_name',
'geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
'geoip_database_info',
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
'geoip_domain_by_name',
'geoip_id_by_name',
'geoip_isp_by_name',
'geoip_netspeedcell_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_setup_custom_directory',
'geoip_time_zone_by_country_and_region'),
'Gettext': ('bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
'dgettext',
'dngettext',
'gettext',
'ngettext',
'textdomain'),
'GnuPG': ('gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
'gnupg_clearencryptkeys',
'gnupg_clearsignkeys',
'gnupg_decrypt',
'gnupg_decryptverify',
'gnupg_encrypt',
'gnupg_encryptsign',
'gnupg_export',
'gnupg_geterror',
'gnupg_getprotocol',
'gnupg_import',
'gnupg_init',
'gnupg_keyinfo',
'gnupg_setarmor',
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
'gnupg_verify'),
'Gopher': ('gopher_parsedir',),
'Grapheme': ('grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
'grapheme_strpos',
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
'grapheme_substr'),
'Gupnp': ('gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
'gupnp_context_new',
'gupnp_context_set_subscription_timeout',
'gupnp_context_timeout_add',
'gupnp_context_unhost_path',
'gupnp_control_point_browse_start',
'gupnp_control_point_browse_stop',
'gupnp_control_point_callback_set',
'gupnp_control_point_new',
'gupnp_device_action_callback_set',
'gupnp_device_info_get_service',
'gupnp_device_info_get',
'gupnp_root_device_get_available',
'gupnp_root_device_get_relative_location',
'gupnp_root_device_new',
'gupnp_root_device_set_available',
'gupnp_root_device_start',
'gupnp_root_device_stop',
'gupnp_service_action_get',
'gupnp_service_action_return_error',
'gupnp_service_action_return',
'gupnp_service_action_set',
'gupnp_service_freeze_notify',
'gupnp_service_info_get_introspection',
'gupnp_service_info_get',
'gupnp_service_introspection_get_state_variable',
'gupnp_service_notify',
'gupnp_service_proxy_action_get',
'gupnp_service_proxy_action_set',
'gupnp_service_proxy_add_notify',
'gupnp_service_proxy_callback_set',
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
'gupnp_service_thaw_notify'),
'HTTP': ('http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
'http_inflate',
'http_build_cookie',
'http_date',
'http_get_request_body_stream',
'http_get_request_body',
'http_get_request_headers',
'http_match_etag',
'http_match_modified',
'http_match_request_header',
'http_support',
'http_negotiate_charset',
'http_negotiate_content_type',
'http_negotiate_language',
'ob_deflatehandler',
'ob_etaghandler',
'ob_inflatehandler',
'http_parse_cookie',
'http_parse_headers',
'http_parse_message',
'http_parse_params',
'http_persistent_handles_clean',
'http_persistent_handles_count',
'http_persistent_handles_ident',
'http_get',
'http_head',
'http_post_data',
'http_post_fields',
'http_put_data',
'http_put_file',
'http_put_stream',
'http_request_body_encode',
'http_request_method_exists',
'http_request_method_name',
'http_request_method_register',
'http_request_method_unregister',
'http_request',
'http_redirect',
'http_send_content_disposition',
'http_send_content_type',
'http_send_data',
'http_send_file',
'http_send_last_modified',
'http_send_status',
'http_send_stream',
'http_throttle',
'http_build_str',
'http_build_url'),
'Hash': ('hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
'hash_pbkdf2',
'hash_update_file',
'hash_update_stream',
'hash_update',
'hash'),
'Hyperwave': ('hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
'hw_Close',
'hw_Connect',
'hw_connection_info',
'hw_cp',
'hw_Deleteobject',
'hw_DocByAnchor',
'hw_DocByAnchorObj',
'hw_Document_Attributes',
'hw_Document_BodyTag',
'hw_Document_Content',
'hw_Document_SetContent',
'hw_Document_Size',
'hw_dummy',
'hw_EditText',
'hw_Error',
'hw_ErrorMsg',
'hw_Free_Document',
'hw_GetAnchors',
'hw_GetAnchorsObj',
'hw_GetAndLock',
'hw_GetChildColl',
'hw_GetChildCollObj',
'hw_GetChildDocColl',
'hw_GetChildDocCollObj',
'hw_GetObject',
'hw_GetObjectByQuery',
'hw_GetObjectByQueryColl',
'hw_GetObjectByQueryCollObj',
'hw_GetObjectByQueryObj',
'hw_GetParents',
'hw_GetParentsObj',
'hw_getrellink',
'hw_GetRemote',
'hw_getremotechildren',
'hw_GetSrcByDestObj',
'hw_GetText',
'hw_getusername',
'hw_Identify',
'hw_InCollections',
'hw_Info',
'hw_InsColl',
'hw_InsDoc',
'hw_insertanchors',
'hw_InsertDocument',
'hw_InsertObject',
'hw_mapid',
'hw_Modifyobject',
'hw_mv',
'hw_New_Document',
'hw_objrec2array',
'hw_Output_Document',
'hw_pConnect',
'hw_PipeDocument',
'hw_Root',
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
'hw_Who'),
'Hyperwave API': ('hwapi_attribute_new',
'hwapi_content_new',
'hwapi_hgcsp',
'hwapi_object_new'),
'IBM DB2': ('db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
'db2_column_privileges',
'db2_columns',
'db2_commit',
'db2_conn_error',
'db2_conn_errormsg',
'db2_connect',
'db2_cursor_type',
'db2_escape_string',
'db2_exec',
'db2_execute',
'db2_fetch_array',
'db2_fetch_assoc',
'db2_fetch_both',
'db2_fetch_object',
'db2_fetch_row',
'db2_field_display_size',
'db2_field_name',
'db2_field_num',
'db2_field_precision',
'db2_field_scale',
'db2_field_type',
'db2_field_width',
'db2_foreign_keys',
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
'db2_last_insert_id',
'db2_lob_read',
'db2_next_result',
'db2_num_fields',
'db2_num_rows',
'db2_pclose',
'db2_pconnect',
'db2_prepare',
'db2_primary_keys',
'db2_procedure_columns',
'db2_procedures',
'db2_result',
'db2_rollback',
'db2_server_info',
'db2_set_option',
'db2_special_columns',
'db2_statistics',
'db2_stmt_error',
'db2_stmt_errormsg',
'db2_table_privileges',
'db2_tables'),
'ID3': ('id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
'id3_get_genre_name',
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
'id3_set_tag'),
'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
'IIS': ('iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
'iis_get_server_by_path',
'iis_get_server_rights',
'iis_get_service_state',
'iis_remove_server',
'iis_set_app_settings',
'iis_set_dir_security',
'iis_set_script_map',
'iis_set_server_rights',
'iis_start_server',
'iis_start_service',
'iis_stop_server',
'iis_stop_service'),
'IMAP': ('imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
'imap_binary',
'imap_body',
'imap_bodystruct',
'imap_check',
'imap_clearflag_full',
'imap_close',
'imap_create',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
'imap_errors',
'imap_expunge',
'imap_fetch_overview',
'imap_fetchbody',
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
'imap_fetchtext',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
'imap_getacl',
'imap_getmailboxes',
'imap_getsubscribed',
'imap_header',
'imap_headerinfo',
'imap_headers',
'imap_last_error',
'imap_list',
'imap_listmailbox',
'imap_listscan',
'imap_listsubscribed',
'imap_lsub',
'imap_mail_compose',
'imap_mail_copy',
'imap_mail_move',
'imap_mail',
'imap_mailboxmsginfo',
'imap_mime_header_decode',
'imap_msgno',
'imap_num_msg',
'imap_num_recent',
'imap_open',
'imap_ping',
'imap_qprint',
'imap_rename',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
'imap_scan',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
'imap_setacl',
'imap_setflag_full',
'imap_sort',
'imap_status',
'imap_subscribe',
'imap_thread',
'imap_timeout',
'imap_uid',
'imap_undelete',
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
'imap_utf8'),
'Informix': ('ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
'ifx_connect',
'ifx_copy_blob',
'ifx_create_blob',
'ifx_create_char',
'ifx_do',
'ifx_error',
'ifx_errormsg',
'ifx_fetch_row',
'ifx_fieldproperties',
'ifx_fieldtypes',
'ifx_free_blob',
'ifx_free_char',
'ifx_free_result',
'ifx_get_blob',
'ifx_get_char',
'ifx_getsqlca',
'ifx_htmltbl_result',
'ifx_nullformat',
'ifx_num_fields',
'ifx_num_rows',
'ifx_pconnect',
'ifx_prepare',
'ifx_query',
'ifx_textasvarchar',
'ifx_update_blob',
'ifx_update_char',
'ifxus_close_slob',
'ifxus_create_slob',
'ifxus_free_slob',
'ifxus_open_slob',
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
'ifxus_write_slob'),
'Ingres': ('ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
'ingres_commit',
'ingres_connect',
'ingres_cursor',
'ingres_errno',
'ingres_error',
'ingres_errsqlstate',
'ingres_escape_string',
'ingres_execute',
'ingres_fetch_array',
'ingres_fetch_assoc',
'ingres_fetch_object',
'ingres_fetch_proc_return',
'ingres_fetch_row',
'ingres_field_length',
'ingres_field_name',
'ingres_field_nullable',
'ingres_field_precision',
'ingres_field_scale',
'ingres_field_type',
'ingres_free_result',
'ingres_next_error',
'ingres_num_fields',
'ingres_num_rows',
'ingres_pconnect',
'ingres_prepare',
'ingres_query',
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
'ingres_unbuffered_query'),
'Inotify': ('inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
'inotify_rm_watch'),
'JSON': ('json_decode',
'json_encode',
'json_last_error_msg',
'json_last_error'),
'Java': ('java_last_exception_clear', 'java_last_exception_get'),
'Judy': ('judy_type', 'judy_version'),
'KADM5': ('kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
'kadm5_flush',
'kadm5_get_policies',
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
'kadm5_modify_principal'),
'LDAP': ('ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
'ldap_control_paged_result_response',
'ldap_control_paged_result',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
'ldap_err2str',
'ldap_errno',
'ldap_error',
'ldap_explode_dn',
'ldap_first_attribute',
'ldap_first_entry',
'ldap_first_reference',
'ldap_free_result',
'ldap_get_attributes',
'ldap_get_dn',
'ldap_get_entries',
'ldap_get_option',
'ldap_get_values_len',
'ldap_get_values',
'ldap_list',
'ldap_mod_add',
'ldap_mod_del',
'ldap_mod_replace',
'ldap_modify',
'ldap_next_attribute',
'ldap_next_entry',
'ldap_next_reference',
'ldap_parse_reference',
'ldap_parse_result',
'ldap_read',
'ldap_rename',
'ldap_sasl_bind',
'ldap_search',
'ldap_set_option',
'ldap_set_rebind_proc',
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
'ldap_unbind'),
'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
'Libevent': ('event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
'event_base_loopexit',
'event_base_new',
'event_base_priority_init',
'event_base_set',
'event_buffer_base_set',
'event_buffer_disable',
'event_buffer_enable',
'event_buffer_fd_set',
'event_buffer_free',
'event_buffer_new',
'event_buffer_priority_set',
'event_buffer_read',
'event_buffer_set_callback',
'event_buffer_timeout_set',
'event_buffer_watermark_set',
'event_buffer_write',
'event_del',
'event_free',
'event_new',
'event_set'),
'Lotus Notes': ('notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
'notes_drop_db',
'notes_find_note',
'notes_header_info',
'notes_list_msgs',
'notes_mark_read',
'notes_mark_unread',
'notes_nav_create',
'notes_search',
'notes_unread',
'notes_version'),
'MCVE': ('m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
'm_deletetrans',
'm_destroyconn',
'm_destroyengine',
'm_getcell',
'm_getcellbynum',
'm_getcommadelimited',
'm_getheader',
'm_initconn',
'm_initengine',
'm_iscommadelimited',
'm_maxconntimeout',
'm_monitor',
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
'm_responsekeys',
'm_responseparam',
'm_returnstatus',
'm_setblocking',
'm_setdropfile',
'm_setip',
'm_setssl_cafile',
'm_setssl_files',
'm_setssl',
'm_settimeout',
'm_sslcert_gen_hash',
'm_transactionssent',
'm_transinqueue',
'm_transkeyval',
'm_transnew',
'm_transsend',
'm_uwait',
'm_validateidentifier',
'm_verifyconnection',
'm_verifysslcert'),
'Mail': ('ezmlm_hash', 'mail'),
'Mailparse': ('mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
'mailparse_msg_extract_whole_part_file',
'mailparse_msg_free',
'mailparse_msg_get_part_data',
'mailparse_msg_get_part',
'mailparse_msg_get_structure',
'mailparse_msg_parse_file',
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
'mailparse_uudecode_all'),
'Math': ('abs',
'acos',
'acosh',
'asin',
'asinh',
'atan2',
'atan',
'atanh',
'base_convert',
'bindec',
'ceil',
'cos',
'cosh',
'decbin',
'dechex',
'decoct',
'deg2rad',
'exp',
'expm1',
'floor',
'fmod',
'getrandmax',
'hexdec',
'hypot',
'is_finite',
'is_infinite',
'is_nan',
'lcg_value',
'log10',
'log1p',
'log',
'max',
'min',
'mt_getrandmax',
'mt_rand',
'mt_srand',
'octdec',
'pi',
'pow',
'rad2deg',
'rand',
'round',
'sin',
'sinh',
'sqrt',
'srand',
'tan',
'tanh'),
'MaxDB': ('maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
'maxdb_change_user',
'maxdb_character_set_name',
'maxdb_client_encoding',
'maxdb_close_long_data',
'maxdb_close',
'maxdb_commit',
'maxdb_connect_errno',
'maxdb_connect_error',
'maxdb_connect',
'maxdb_data_seek',
'maxdb_debug',
'maxdb_disable_reads_from_master',
'maxdb_disable_rpl_parse',
'maxdb_dump_debug_info',
'maxdb_embedded_connect',
'maxdb_enable_reads_from_master',
'maxdb_enable_rpl_parse',
'maxdb_errno',
'maxdb_error',
'maxdb_escape_string',
'maxdb_execute',
'maxdb_fetch_array',
'maxdb_fetch_assoc',
'maxdb_fetch_field_direct',
'maxdb_fetch_field',
'maxdb_fetch_fields',
'maxdb_fetch_lengths',
'maxdb_fetch_object',
'maxdb_fetch_row',
'maxdb_fetch',
'maxdb_field_count',
'maxdb_field_seek',
'maxdb_field_tell',
'maxdb_free_result',
'maxdb_get_client_info',
'maxdb_get_client_version',
'maxdb_get_host_info',
'maxdb_get_metadata',
'maxdb_get_proto_info',
'maxdb_get_server_info',
'maxdb_get_server_version',
'maxdb_info',
'maxdb_init',
'maxdb_insert_id',
'maxdb_kill',
'maxdb_master_query',
'maxdb_more_results',
'maxdb_multi_query',
'maxdb_next_result',
'maxdb_num_fields',
'maxdb_num_rows',
'maxdb_options',
'maxdb_param_count',
'maxdb_ping',
'maxdb_prepare',
'maxdb_query',
'maxdb_real_connect',
'maxdb_real_escape_string',
'maxdb_real_query',
'maxdb_report',
'maxdb_rollback',
'maxdb_rpl_parse_enabled',
'maxdb_rpl_probe',
'maxdb_rpl_query_type',
'maxdb_select_db',
'maxdb_send_long_data',
'maxdb_send_query',
'maxdb_server_end',
'maxdb_server_init',
'maxdb_set_opt',
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
'maxdb_stmt_affected_rows',
'maxdb_stmt_bind_param',
'maxdb_stmt_bind_result',
'maxdb_stmt_close_long_data',
'maxdb_stmt_close',
'maxdb_stmt_data_seek',
'maxdb_stmt_errno',
'maxdb_stmt_error',
'maxdb_stmt_execute',
'maxdb_stmt_fetch',
'maxdb_stmt_free_result',
'maxdb_stmt_init',
'maxdb_stmt_num_rows',
'maxdb_stmt_param_count',
'maxdb_stmt_prepare',
'maxdb_stmt_reset',
'maxdb_stmt_result_metadata',
'maxdb_stmt_send_long_data',
'maxdb_stmt_sqlstate',
'maxdb_stmt_store_result',
'maxdb_store_result',
'maxdb_thread_id',
'maxdb_thread_safe',
'maxdb_use_result',
'maxdb_warning_count'),
'Mcrypt': ('mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
'mcrypt_ecb',
'mcrypt_enc_get_algorithms_name',
'mcrypt_enc_get_block_size',
'mcrypt_enc_get_iv_size',
'mcrypt_enc_get_key_size',
'mcrypt_enc_get_modes_name',
'mcrypt_enc_get_supported_key_sizes',
'mcrypt_enc_is_block_algorithm_mode',
'mcrypt_enc_is_block_algorithm',
'mcrypt_enc_is_block_mode',
'mcrypt_enc_self_test',
'mcrypt_encrypt',
'mcrypt_generic_deinit',
'mcrypt_generic_end',
'mcrypt_generic_init',
'mcrypt_generic',
'mcrypt_get_block_size',
'mcrypt_get_cipher_name',
'mcrypt_get_iv_size',
'mcrypt_get_key_size',
'mcrypt_list_algorithms',
'mcrypt_list_modes',
'mcrypt_module_close',
'mcrypt_module_get_algo_block_size',
'mcrypt_module_get_algo_key_size',
'mcrypt_module_get_supported_key_sizes',
'mcrypt_module_is_block_algorithm_mode',
'mcrypt_module_is_block_algorithm',
'mcrypt_module_is_block_mode',
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
'mdecrypt_generic'),
'Memcache': ('memcache_debug',),
'Mhash': ('mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
'mhash'),
'Ming': ('ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
'ming_useswfversion'),
'Misc.': ('connection_aborted',
'connection_status',
'connection_timeout',
'constant',
'define',
'defined',
'die',
'eval',
'exit',
'get_browser',
'__halt_compiler',
'highlight_file',
'highlight_string',
'ignore_user_abort',
'pack',
'php_check_syntax',
'php_strip_whitespace',
'show_source',
'sleep',
'sys_getloadavg',
'time_nanosleep',
'time_sleep_until',
'uniqid',
'unpack',
'usleep'),
'Mongo': ('bson_decode', 'bson_encode'),
'Msession': ('msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
'msession_disconnect',
'msession_find',
'msession_get_array',
'msession_get_data',
'msession_get',
'msession_inc',
'msession_list',
'msession_listvar',
'msession_lock',
'msession_plugin',
'msession_randstr',
'msession_set_array',
'msession_set_data',
'msession_set',
'msession_timeout',
'msession_uniq',
'msession_unlock'),
'Mssql': ('mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
'mssql_execute',
'mssql_fetch_array',
'mssql_fetch_assoc',
'mssql_fetch_batch',
'mssql_fetch_field',
'mssql_fetch_object',
'mssql_fetch_row',
'mssql_field_length',
'mssql_field_name',
'mssql_field_seek',
'mssql_field_type',
'mssql_free_result',
'mssql_free_statement',
'mssql_get_last_message',
'mssql_guid_string',
'mssql_init',
'mssql_min_error_severity',
'mssql_min_message_severity',
'mssql_next_result',
'mssql_num_fields',
'mssql_num_rows',
'mssql_pconnect',
'mssql_query',
'mssql_result',
'mssql_rows_affected',
'mssql_select_db'),
'Multibyte String': ('mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
'mb_convert_variables',
'mb_decode_mimeheader',
'mb_decode_numericentity',
'mb_detect_encoding',
'mb_detect_order',
'mb_encode_mimeheader',
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
'mb_ereg_replace_callback',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
'mb_ereg_search_init',
'mb_ereg_search_pos',
'mb_ereg_search_regs',
'mb_ereg_search_setpos',
'mb_ereg_search',
'mb_ereg',
'mb_eregi_replace',
'mb_eregi',
'mb_get_info',
'mb_http_input',
'mb_http_output',
'mb_internal_encoding',
'mb_language',
'mb_list_encodings',
'mb_output_handler',
'mb_parse_str',
'mb_preferred_mime_name',
'mb_regex_encoding',
'mb_regex_set_options',
'mb_send_mail',
'mb_split',
'mb_strcut',
'mb_strimwidth',
'mb_stripos',
'mb_stristr',
'mb_strlen',
'mb_strpos',
'mb_strrchr',
'mb_strrichr',
'mb_strripos',
'mb_strrpos',
'mb_strstr',
'mb_strtolower',
'mb_strtoupper',
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
'mb_substr'),
'MySQL': ('mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
'mysql_create_db',
'mysql_data_seek',
'mysql_db_name',
'mysql_db_query',
'mysql_drop_db',
'mysql_errno',
'mysql_error',
'mysql_escape_string',
'mysql_fetch_array',
'mysql_fetch_assoc',
'mysql_fetch_field',
'mysql_fetch_lengths',
'mysql_fetch_object',
'mysql_fetch_row',
'mysql_field_flags',
'mysql_field_len',
'mysql_field_name',
'mysql_field_seek',
'mysql_field_table',
'mysql_field_type',
'mysql_free_result',
'mysql_get_client_info',
'mysql_get_host_info',
'mysql_get_proto_info',
'mysql_get_server_info',
'mysql_info',
'mysql_insert_id',
'mysql_list_dbs',
'mysql_list_fields',
'mysql_list_processes',
'mysql_list_tables',
'mysql_num_fields',
'mysql_num_rows',
'mysql_pconnect',
'mysql_ping',
'mysql_query',
'mysql_real_escape_string',
'mysql_result',
'mysql_select_db',
'mysql_set_charset',
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
'mysql_unbuffered_query'),
'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
'mysqlnd_ms_fabric_select_global',
'mysqlnd_ms_fabric_select_shard',
'mysqlnd_ms_get_last_gtid',
'mysqlnd_ms_get_last_used_connection',
'mysqlnd_ms_get_stats',
'mysqlnd_ms_match_wild',
'mysqlnd_ms_query_is_select',
'mysqlnd_ms_set_qos',
'mysqlnd_ms_set_user_pick_server'),
'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
'mysqlnd_uh_set_connection_proxy',
'mysqlnd_uh_set_statement_proxy'),
'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
'Ncurses': ('ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
'ncurses_addstr',
'ncurses_assume_default_colors',
'ncurses_attroff',
'ncurses_attron',
'ncurses_attrset',
'ncurses_baudrate',
'ncurses_beep',
'ncurses_bkgd',
'ncurses_bkgdset',
'ncurses_border',
'ncurses_bottom_panel',
'ncurses_can_change_color',
'ncurses_cbreak',
'ncurses_clear',
'ncurses_clrtobot',
'ncurses_clrtoeol',
'ncurses_color_content',
'ncurses_color_set',
'ncurses_curs_set',
'ncurses_def_prog_mode',
'ncurses_def_shell_mode',
'ncurses_define_key',
'ncurses_del_panel',
'ncurses_delay_output',
'ncurses_delch',
'ncurses_deleteln',
'ncurses_delwin',
'ncurses_doupdate',
'ncurses_echo',
'ncurses_echochar',
'ncurses_end',
'ncurses_erase',
'ncurses_erasechar',
'ncurses_filter',
'ncurses_flash',
'ncurses_flushinp',
'ncurses_getch',
'ncurses_getmaxyx',
'ncurses_getmouse',
'ncurses_getyx',
'ncurses_halfdelay',
'ncurses_has_colors',
'ncurses_has_ic',
'ncurses_has_il',
'ncurses_has_key',
'ncurses_hide_panel',
'ncurses_hline',
'ncurses_inch',
'ncurses_init_color',
'ncurses_init_pair',
'ncurses_init',
'ncurses_insch',
'ncurses_insdelln',
'ncurses_insertln',
'ncurses_insstr',
'ncurses_instr',
'ncurses_isendwin',
'ncurses_keyok',
'ncurses_keypad',
'ncurses_killchar',
'ncurses_longname',
'ncurses_meta',
'ncurses_mouse_trafo',
'ncurses_mouseinterval',
'ncurses_mousemask',
'ncurses_move_panel',
'ncurses_move',
'ncurses_mvaddch',
'ncurses_mvaddchnstr',
'ncurses_mvaddchstr',
'ncurses_mvaddnstr',
'ncurses_mvaddstr',
'ncurses_mvcur',
'ncurses_mvdelch',
'ncurses_mvgetch',
'ncurses_mvhline',
'ncurses_mvinch',
'ncurses_mvvline',
'ncurses_mvwaddstr',
'ncurses_napms',
'ncurses_new_panel',
'ncurses_newpad',
'ncurses_newwin',
'ncurses_nl',
'ncurses_nocbreak',
'ncurses_noecho',
'ncurses_nonl',
'ncurses_noqiflush',
'ncurses_noraw',
'ncurses_pair_content',
'ncurses_panel_above',
'ncurses_panel_below',
'ncurses_panel_window',
'ncurses_pnoutrefresh',
'ncurses_prefresh',
'ncurses_putp',
'ncurses_qiflush',
'ncurses_raw',
'ncurses_refresh',
'ncurses_replace_panel',
'ncurses_reset_prog_mode',
'ncurses_reset_shell_mode',
'ncurses_resetty',
'ncurses_savetty',
'ncurses_scr_dump',
'ncurses_scr_init',
'ncurses_scr_restore',
'ncurses_scr_set',
'ncurses_scrl',
'ncurses_show_panel',
'ncurses_slk_attr',
'ncurses_slk_attroff',
'ncurses_slk_attron',
'ncurses_slk_attrset',
'ncurses_slk_clear',
'ncurses_slk_color',
'ncurses_slk_init',
'ncurses_slk_noutrefresh',
'ncurses_slk_refresh',
'ncurses_slk_restore',
'ncurses_slk_set',
'ncurses_slk_touch',
'ncurses_standend',
'ncurses_standout',
'ncurses_start_color',
'ncurses_termattrs',
'ncurses_termname',
'ncurses_timeout',
'ncurses_top_panel',
'ncurses_typeahead',
'ncurses_ungetch',
'ncurses_ungetmouse',
'ncurses_update_panels',
'ncurses_use_default_colors',
'ncurses_use_env',
'ncurses_use_extended_names',
'ncurses_vidattr',
'ncurses_vline',
'ncurses_waddch',
'ncurses_waddstr',
'ncurses_wattroff',
'ncurses_wattron',
'ncurses_wattrset',
'ncurses_wborder',
'ncurses_wclear',
'ncurses_wcolor_set',
'ncurses_werase',
'ncurses_wgetch',
'ncurses_whline',
'ncurses_wmouse_trafo',
'ncurses_wmove',
'ncurses_wnoutrefresh',
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
'ncurses_wvline'),
'Network': ('checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
'dns_get_mx',
'dns_get_record',
'fsockopen',
'gethostbyaddr',
'gethostbyname',
'gethostbynamel',
'gethostname',
'getmxrr',
'getprotobyname',
'getprotobynumber',
'getservbyname',
'getservbyport',
'header_register_callback',
'header_remove',
'header',
'headers_list',
'headers_sent',
'http_response_code',
'inet_ntop',
'inet_pton',
'ip2long',
'long2ip',
'openlog',
'pfsockopen',
'setcookie',
'setrawcookie',
'socket_get_status',
'socket_set_blocking',
'socket_set_timeout',
'syslog'),
'Newt': ('newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
'newt_checkbox_get_value',
'newt_checkbox_set_flags',
'newt_checkbox_set_value',
'newt_checkbox_tree_add_item',
'newt_checkbox_tree_find_item',
'newt_checkbox_tree_get_current',
'newt_checkbox_tree_get_entry_value',
'newt_checkbox_tree_get_multi_selection',
'newt_checkbox_tree_get_selection',
'newt_checkbox_tree_multi',
'newt_checkbox_tree_set_current',
'newt_checkbox_tree_set_entry_value',
'newt_checkbox_tree_set_entry',
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
'newt_clear_key_buffer',
'newt_cls',
'newt_compact_button',
'newt_component_add_callback',
'newt_component_takes_focus',
'newt_create_grid',
'newt_cursor_off',
'newt_cursor_on',
'newt_delay',
'newt_draw_form',
'newt_draw_root_text',
'newt_entry_get_value',
'newt_entry_set_filter',
'newt_entry_set_flags',
'newt_entry_set',
'newt_entry',
'newt_finished',
'newt_form_add_component',
'newt_form_add_components',
'newt_form_add_hot_key',
'newt_form_destroy',
'newt_form_get_current',
'newt_form_run',
'newt_form_set_background',
'newt_form_set_height',
'newt_form_set_size',
'newt_form_set_timer',
'newt_form_set_width',
'newt_form_watch_fd',
'newt_form',
'newt_get_screen_size',
'newt_grid_add_components_to_form',
'newt_grid_basic_window',
'newt_grid_free',
'newt_grid_get_size',
'newt_grid_h_close_stacked',
'newt_grid_h_stacked',
'newt_grid_place',
'newt_grid_set_field',
'newt_grid_simple_window',
'newt_grid_v_close_stacked',
'newt_grid_v_stacked',
'newt_grid_wrapped_window_at',
'newt_grid_wrapped_window',
'newt_init',
'newt_label_set_text',
'newt_label',
'newt_listbox_append_entry',
'newt_listbox_clear_selection',
'newt_listbox_clear',
'newt_listbox_delete_entry',
'newt_listbox_get_current',
'newt_listbox_get_selection',
'newt_listbox_insert_entry',
'newt_listbox_item_count',
'newt_listbox_select_item',
'newt_listbox_set_current_by_key',
'newt_listbox_set_current',
'newt_listbox_set_data',
'newt_listbox_set_entry',
'newt_listbox_set_width',
'newt_listbox',
'newt_listitem_get_data',
'newt_listitem_set',
'newt_listitem',
'newt_open_window',
'newt_pop_help_line',
'newt_pop_window',
'newt_push_help_line',
'newt_radio_get_current',
'newt_radiobutton',
'newt_redraw_help_line',
'newt_reflow_text',
'newt_refresh',
'newt_resize_screen',
'newt_resume',
'newt_run_form',
'newt_scale_set',
'newt_scale',
'newt_scrollbar_set',
'newt_set_help_callback',
'newt_set_suspend_callback',
'newt_suspend',
'newt_textbox_get_num_lines',
'newt_textbox_reflowed',
'newt_textbox_set_height',
'newt_textbox_set_text',
'newt_textbox',
'newt_vertical_scrollbar',
'newt_wait_for_key',
'newt_win_choice',
'newt_win_entries',
'newt_win_menu',
'newt_win_message',
'newt_win_messagev',
'newt_win_ternary'),
'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
'OCI8': ('oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_client_version',
'oci_close',
'oci_commit',
'oci_connect',
'oci_define_by_name',
'oci_error',
'oci_execute',
'oci_fetch_all',
'oci_fetch_array',
'oci_fetch_assoc',
'oci_fetch_object',
'oci_fetch_row',
'oci_fetch',
'oci_field_is_null',
'oci_field_name',
'oci_field_precision',
'oci_field_scale',
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
'oci_free_descriptor',
'oci_free_statement',
'oci_get_implicit_resultset',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
'oci_new_collection',
'oci_new_connect',
'oci_new_cursor',
'oci_new_descriptor',
'oci_num_fields',
'oci_num_rows',
'oci_parse',
'oci_password_change',
'oci_pconnect',
'oci_result',
'oci_rollback',
'oci_server_version',
'oci_set_action',
'oci_set_client_identifier',
'oci_set_client_info',
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
'oci_statement_type'),
'ODBC': ('odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
'odbc_columnprivileges',
'odbc_columns',
'odbc_commit',
'odbc_connect',
'odbc_cursor',
'odbc_data_source',
'odbc_do',
'odbc_error',
'odbc_errormsg',
'odbc_exec',
'odbc_execute',
'odbc_fetch_array',
'odbc_fetch_into',
'odbc_fetch_object',
'odbc_fetch_row',
'odbc_field_len',
'odbc_field_name',
'odbc_field_num',
'odbc_field_precision',
'odbc_field_scale',
'odbc_field_type',
'odbc_foreignkeys',
'odbc_free_result',
'odbc_gettypeinfo',
'odbc_longreadlen',
'odbc_next_result',
'odbc_num_fields',
'odbc_num_rows',
'odbc_pconnect',
'odbc_prepare',
'odbc_primarykeys',
'odbc_procedurecolumns',
'odbc_procedures',
'odbc_result_all',
'odbc_result',
'odbc_rollback',
'odbc_setoption',
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
'odbc_tables'),
'OPcache': ('opcache_compile_file',
'opcache_get_configuration',
'opcache_get_status',
'opcache_invalidate',
'opcache_reset'),
'Object Aggregation': ('aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp',
'aggregate_methods',
'aggregate_properties_by_list',
'aggregate_properties_by_regexp',
'aggregate_properties',
'aggregate',
'aggregation_info',
'deaggregate'),
'OpenAL': ('openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
'openal_buffer_loadwav',
'openal_context_create',
'openal_context_current',
'openal_context_destroy',
'openal_context_process',
'openal_context_suspend',
'openal_device_close',
'openal_device_open',
'openal_listener_get',
'openal_listener_set',
'openal_source_create',
'openal_source_destroy',
'openal_source_get',
'openal_source_pause',
'openal_source_play',
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
'openal_stream'),
'OpenSSL': ('openssl_cipher_iv_length',
'openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
'openssl_csr_new',
'openssl_csr_sign',
'openssl_decrypt',
'openssl_dh_compute_key',
'openssl_digest',
'openssl_encrypt',
'openssl_error_string',
'openssl_free_key',
'openssl_get_cipher_methods',
'openssl_get_md_methods',
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
'openssl_pbkdf2',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
'openssl_pkcs7_decrypt',
'openssl_pkcs7_encrypt',
'openssl_pkcs7_sign',
'openssl_pkcs7_verify',
'openssl_pkey_export_to_file',
'openssl_pkey_export',
'openssl_pkey_free',
'openssl_pkey_get_details',
'openssl_pkey_get_private',
'openssl_pkey_get_public',
'openssl_pkey_new',
'openssl_private_decrypt',
'openssl_private_encrypt',
'openssl_public_decrypt',
'openssl_public_encrypt',
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
'openssl_spki_export_challenge',
'openssl_spki_export',
'openssl_spki_new',
'openssl_spki_verify',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
'openssl_x509_export_to_file',
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
'openssl_x509_read'),
'Output Control': ('flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
'ob_flush',
'ob_get_clean',
'ob_get_contents',
'ob_get_flush',
'ob_get_length',
'ob_get_level',
'ob_get_status',
'ob_gzhandler',
'ob_implicit_flush',
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
'output_reset_rewrite_vars'),
'Ovrimos SQL': ('ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
'ovrimos_exec',
'ovrimos_execute',
'ovrimos_fetch_into',
'ovrimos_fetch_row',
'ovrimos_field_len',
'ovrimos_field_name',
'ovrimos_field_num',
'ovrimos_field_type',
'ovrimos_free_result',
'ovrimos_longreadlen',
'ovrimos_num_fields',
'ovrimos_num_rows',
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
'ovrimos_rollback'),
'PCNTL': ('pcntl_alarm',
'pcntl_errno',
'pcntl_exec',
'pcntl_fork',
'pcntl_get_last_error',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
'pcntl_signal',
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
'pcntl_strerror',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
'pcntl_wifexited',
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
'pcntl_wtermsig'),
'PCRE': ('preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
'preg_match',
'preg_quote',
'preg_replace_callback',
'preg_replace',
'preg_split'),
'PDF': ('PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
'PDF_add_locallink',
'PDF_add_nameddest',
'PDF_add_note',
'PDF_add_outline',
'PDF_add_pdflink',
'PDF_add_table_cell',
'PDF_add_textflow',
'PDF_add_thumbnail',
'PDF_add_weblink',
'PDF_arc',
'PDF_arcn',
'PDF_attach_file',
'PDF_begin_document',
'PDF_begin_font',
'PDF_begin_glyph',
'PDF_begin_item',
'PDF_begin_layer',
'PDF_begin_page_ext',
'PDF_begin_page',
'PDF_begin_pattern',
'PDF_begin_template_ext',
'PDF_begin_template',
'PDF_circle',
'PDF_clip',
'PDF_close_image',
'PDF_close_pdi_page',
'PDF_close_pdi',
'PDF_close',
'PDF_closepath_fill_stroke',
'PDF_closepath_stroke',
'PDF_closepath',
'PDF_concat',
'PDF_continue_text',
'PDF_create_3dview',
'PDF_create_action',
'PDF_create_annotation',
'PDF_create_bookmark',
'PDF_create_field',
'PDF_create_fieldgroup',
'PDF_create_gstate',
'PDF_create_pvf',
'PDF_create_textflow',
'PDF_curveto',
'PDF_define_layer',
'PDF_delete_pvf',
'PDF_delete_table',
'PDF_delete_textflow',
'PDF_delete',
'PDF_encoding_set_char',
'PDF_end_document',
'PDF_end_font',
'PDF_end_glyph',
'PDF_end_item',
'PDF_end_layer',
'PDF_end_page_ext',
'PDF_end_page',
'PDF_end_pattern',
'PDF_end_template',
'PDF_endpath',
'PDF_fill_imageblock',
'PDF_fill_pdfblock',
'PDF_fill_stroke',
'PDF_fill_textblock',
'PDF_fill',
'PDF_findfont',
'PDF_fit_image',
'PDF_fit_pdi_page',
'PDF_fit_table',
'PDF_fit_textflow',
'PDF_fit_textline',
'PDF_get_apiname',
'PDF_get_buffer',
'PDF_get_errmsg',
'PDF_get_errnum',
'PDF_get_font',
'PDF_get_fontname',
'PDF_get_fontsize',
'PDF_get_image_height',
'PDF_get_image_width',
'PDF_get_majorversion',
'PDF_get_minorversion',
'PDF_get_parameter',
'PDF_get_pdi_parameter',
'PDF_get_pdi_value',
'PDF_get_value',
'PDF_info_font',
'PDF_info_matchbox',
'PDF_info_table',
'PDF_info_textflow',
'PDF_info_textline',
'PDF_initgraphics',
'PDF_lineto',
'PDF_load_3ddata',
'PDF_load_font',
'PDF_load_iccprofile',
'PDF_load_image',
'PDF_makespotcolor',
'PDF_moveto',
'PDF_new',
'PDF_open_ccitt',
'PDF_open_file',
'PDF_open_gif',
'PDF_open_image_file',
'PDF_open_image',
'PDF_open_jpeg',
'PDF_open_memory_image',
'PDF_open_pdi_document',
'PDF_open_pdi_page',
'PDF_open_pdi',
'PDF_open_tiff',
'PDF_pcos_get_number',
'PDF_pcos_get_stream',
'PDF_pcos_get_string',
'PDF_place_image',
'PDF_place_pdi_page',
'PDF_process_pdi',
'PDF_rect',
'PDF_restore',
'PDF_resume_page',
'PDF_rotate',
'PDF_save',
'PDF_scale',
'PDF_set_border_color',
'PDF_set_border_dash',
'PDF_set_border_style',
'PDF_set_char_spacing',
'PDF_set_duration',
'PDF_set_gstate',
'PDF_set_horiz_scaling',
'PDF_set_info_author',
'PDF_set_info_creator',
'PDF_set_info_keywords',
'PDF_set_info_subject',
'PDF_set_info_title',
'PDF_set_info',
'PDF_set_layer_dependency',
'PDF_set_leading',
'PDF_set_parameter',
'PDF_set_text_matrix',
'PDF_set_text_pos',
'PDF_set_text_rendering',
'PDF_set_text_rise',
'PDF_set_value',
'PDF_set_word_spacing',
'PDF_setcolor',
'PDF_setdash',
'PDF_setdashpattern',
'PDF_setflat',
'PDF_setfont',
'PDF_setgray_fill',
'PDF_setgray_stroke',
'PDF_setgray',
'PDF_setlinecap',
'PDF_setlinejoin',
'PDF_setlinewidth',
'PDF_setmatrix',
'PDF_setmiterlimit',
'PDF_setpolydash',
'PDF_setrgbcolor_fill',
'PDF_setrgbcolor_stroke',
'PDF_setrgbcolor',
'PDF_shading_pattern',
'PDF_shading',
'PDF_shfill',
'PDF_show_boxed',
'PDF_show_xy',
'PDF_show',
'PDF_skew',
'PDF_stringwidth',
'PDF_stroke',
'PDF_suspend_page',
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
'PDF_utf8_to_utf16'),
'PHP Options/Info': ('assert_options',
'assert',
'cli_get_process_title',
'cli_set_process_title',
'dl',
'extension_loaded',
'gc_collect_cycles',
'gc_disable',
'gc_enable',
'gc_enabled',
'get_cfg_var',
'get_current_user',
'get_defined_constants',
'get_extension_funcs',
'get_include_path',
'get_included_files',
'get_loaded_extensions',
'get_magic_quotes_gpc',
'get_magic_quotes_runtime',
'get_required_files',
'getenv',
'getlastmod',
'getmygid',
'getmyinode',
'getmypid',
'getmyuid',
'getopt',
'getrusage',
'ini_alter',
'ini_get_all',
'ini_get',
'ini_restore',
'ini_set',
'magic_quotes_runtime',
'memory_get_peak_usage',
'memory_get_usage',
'php_ini_loaded_file',
'php_ini_scanned_files',
'php_logo_guid',
'php_sapi_name',
'php_uname',
'phpcredits',
'phpinfo',
'phpversion',
'putenv',
'restore_include_path',
'set_include_path',
'set_magic_quotes_runtime',
'set_time_limit',
'sys_get_temp_dir',
'version_compare',
'zend_logo_guid',
'zend_thread_id',
'zend_version'),
'POSIX': ('posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
'posix_getcwd',
'posix_getegid',
'posix_geteuid',
'posix_getgid',
'posix_getgrgid',
'posix_getgrnam',
'posix_getgroups',
'posix_getlogin',
'posix_getpgid',
'posix_getpgrp',
'posix_getpid',
'posix_getppid',
'posix_getpwnam',
'posix_getpwuid',
'posix_getrlimit',
'posix_getsid',
'posix_getuid',
'posix_initgroups',
'posix_isatty',
'posix_kill',
'posix_mkfifo',
'posix_mknod',
'posix_setegid',
'posix_seteuid',
'posix_setgid',
'posix_setpgid',
'posix_setsid',
'posix_setuid',
'posix_strerror',
'posix_times',
'posix_ttyname',
'posix_uname'),
'POSIX Regex': ('ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
'sql_regcase'),
'PS': ('ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
'ps_add_pdflink',
'ps_add_weblink',
'ps_arc',
'ps_arcn',
'ps_begin_page',
'ps_begin_pattern',
'ps_begin_template',
'ps_circle',
'ps_clip',
'ps_close_image',
'ps_close',
'ps_closepath_stroke',
'ps_closepath',
'ps_continue_text',
'ps_curveto',
'ps_delete',
'ps_end_page',
'ps_end_pattern',
'ps_end_template',
'ps_fill_stroke',
'ps_fill',
'ps_findfont',
'ps_get_buffer',
'ps_get_parameter',
'ps_get_value',
'ps_hyphenate',
'ps_include_file',
'ps_lineto',
'ps_makespotcolor',
'ps_moveto',
'ps_new',
'ps_open_file',
'ps_open_image_file',
'ps_open_image',
'ps_open_memory_image',
'ps_place_image',
'ps_rect',
'ps_restore',
'ps_rotate',
'ps_save',
'ps_scale',
'ps_set_border_color',
'ps_set_border_dash',
'ps_set_border_style',
'ps_set_info',
'ps_set_parameter',
'ps_set_text_pos',
'ps_set_value',
'ps_setcolor',
'ps_setdash',
'ps_setflat',
'ps_setfont',
'ps_setgray',
'ps_setlinecap',
'ps_setlinejoin',
'ps_setlinewidth',
'ps_setmiterlimit',
'ps_setoverprintmode',
'ps_setpolydash',
'ps_shading_pattern',
'ps_shading',
'ps_shfill',
'ps_show_boxed',
'ps_show_xy2',
'ps_show_xy',
'ps_show2',
'ps_show',
'ps_string_geometry',
'ps_stringwidth',
'ps_stroke',
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
'ps_translate'),
'Paradox': ('px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
'px_delete',
'px_get_field',
'px_get_info',
'px_get_parameter',
'px_get_record',
'px_get_schema',
'px_get_value',
'px_insert_record',
'px_new',
'px_numfields',
'px_numrecords',
'px_open_fp',
'px_put_record',
'px_retrieve_record',
'px_set_blob_file',
'px_set_parameter',
'px_set_tablename',
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
'px_update_record'),
'Parsekit': ('parsekit_compile_file',
'parsekit_compile_string',
'parsekit_func_arginfo'),
'Password Hashing': ('password_get_info',
'password_hash',
'password_needs_rehash',
'password_verify'),
'PostgreSQL': ('pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
'pg_connect',
'pg_connection_busy',
'pg_connection_reset',
'pg_connection_status',
'pg_convert',
'pg_copy_from',
'pg_copy_to',
'pg_dbname',
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
'pg_escape_identifier',
'pg_escape_literal',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
'pg_fetch_all',
'pg_fetch_array',
'pg_fetch_assoc',
'pg_fetch_object',
'pg_fetch_result',
'pg_fetch_row',
'pg_field_is_null',
'pg_field_name',
'pg_field_num',
'pg_field_prtlen',
'pg_field_size',
'pg_field_table',
'pg_field_type_oid',
'pg_field_type',
'pg_free_result',
'pg_get_notify',
'pg_get_pid',
'pg_get_result',
'pg_host',
'pg_insert',
'pg_last_error',
'pg_last_notice',
'pg_last_oid',
'pg_lo_close',
'pg_lo_create',
'pg_lo_export',
'pg_lo_import',
'pg_lo_open',
'pg_lo_read_all',
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
'pg_lo_truncate',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
'pg_num_fields',
'pg_num_rows',
'pg_options',
'pg_parameter_status',
'pg_pconnect',
'pg_ping',
'pg_port',
'pg_prepare',
'pg_put_line',
'pg_query_params',
'pg_query',
'pg_result_error_field',
'pg_result_error',
'pg_result_seek',
'pg_result_status',
'pg_select',
'pg_send_execute',
'pg_send_prepare',
'pg_send_query_params',
'pg_send_query',
'pg_set_client_encoding',
'pg_set_error_verbosity',
'pg_trace',
'pg_transaction_status',
'pg_tty',
'pg_unescape_bytea',
'pg_untrace',
'pg_update',
'pg_version'),
'Printer': ('printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
'printer_create_font',
'printer_create_pen',
'printer_delete_brush',
'printer_delete_dc',
'printer_delete_font',
'printer_delete_pen',
'printer_draw_bmp',
'printer_draw_chord',
'printer_draw_elipse',
'printer_draw_line',
'printer_draw_pie',
'printer_draw_rectangle',
'printer_draw_roundrect',
'printer_draw_text',
'printer_end_doc',
'printer_end_page',
'printer_get_option',
'printer_list',
'printer_logical_fontheight',
'printer_open',
'printer_select_brush',
'printer_select_font',
'printer_select_pen',
'printer_set_option',
'printer_start_doc',
'printer_start_page',
'printer_write'),
'Proctitle': ('setproctitle', 'setthreadtitle'),
'Program execution': ('escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
'proc_close',
'proc_get_status',
'proc_nice',
'proc_open',
'proc_terminate',
'shell_exec',
'system'),
'Pspell': ('pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
'pspell_config_create',
'pspell_config_data_dir',
'pspell_config_dict_dir',
'pspell_config_ignore',
'pspell_config_mode',
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
'pspell_config_save_repl',
'pspell_new_config',
'pspell_new_personal',
'pspell_new',
'pspell_save_wordlist',
'pspell_store_replacement',
'pspell_suggest'),
'RPM Reader': ('rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
'rpm_version'),
'RRD': ('rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
'rrd_graph',
'rrd_info',
'rrd_last',
'rrd_lastupdate',
'rrd_restore',
'rrd_tune',
'rrd_update',
'rrd_version',
'rrd_xport',
'rrdc_disconnect'),
'Radius': ('radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
'radius_config',
'radius_create_request',
'radius_cvt_addr',
'radius_cvt_int',
'radius_cvt_string',
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
'radius_get_tagged_attr_data',
'radius_get_tagged_attr_tag',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
'radius_put_int',
'radius_put_string',
'radius_put_vendor_addr',
'radius_put_vendor_attr',
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
'radius_salt_encrypt_attr',
'radius_send_request',
'radius_server_secret',
'radius_strerror'),
'Rar': ('rar_wrapper_cache_stats',),
'Readline': ('readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
'readline_clear_history',
'readline_completion_function',
'readline_info',
'readline_list_history',
'readline_on_new_line',
'readline_read_history',
'readline_redisplay',
'readline_write_history',
'readline'),
'Recode': ('recode_file', 'recode_string', 'recode'),
'SNMP': ('snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
'snmp_set_oid_numeric_print',
'snmp_set_oid_output_format',
'snmp_set_quick_print',
'snmp_set_valueretrieval',
'snmp2_get',
'snmp2_getnext',
'snmp2_real_walk',
'snmp2_set',
'snmp2_walk',
'snmp3_get',
'snmp3_getnext',
'snmp3_real_walk',
'snmp3_set',
'snmp3_walk',
'snmpget',
'snmpgetnext',
'snmprealwalk',
'snmpset',
'snmpwalk',
'snmpwalkoid'),
'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
'SPL': ('class_implements',
'class_parents',
'class_uses',
'iterator_apply',
'iterator_count',
'iterator_to_array',
'spl_autoload_call',
'spl_autoload_extensions',
'spl_autoload_functions',
'spl_autoload_register',
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
'spl_object_hash'),
'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
'SQLSRV': ('sqlsrv_begin_transaction',
'sqlsrv_cancel',
'sqlsrv_client_info',
'sqlsrv_close',
'sqlsrv_commit',
'sqlsrv_configure',
'sqlsrv_connect',
'sqlsrv_errors',
'sqlsrv_execute',
'sqlsrv_fetch_array',
'sqlsrv_fetch_object',
'sqlsrv_fetch',
'sqlsrv_field_metadata',
'sqlsrv_free_stmt',
'sqlsrv_get_config',
'sqlsrv_get_field',
'sqlsrv_has_rows',
'sqlsrv_next_result',
'sqlsrv_num_fields',
'sqlsrv_num_rows',
'sqlsrv_prepare',
'sqlsrv_query',
'sqlsrv_rollback',
'sqlsrv_rows_affected',
'sqlsrv_send_stream_data',
'sqlsrv_server_info'),
'SQLite': ('sqlite_array_query',
'sqlite_busy_timeout',
'sqlite_changes',
'sqlite_close',
'sqlite_column',
'sqlite_create_aggregate',
'sqlite_create_function',
'sqlite_current',
'sqlite_error_string',
'sqlite_escape_string',
'sqlite_exec',
'sqlite_factory',
'sqlite_fetch_all',
'sqlite_fetch_array',
'sqlite_fetch_column_types',
'sqlite_fetch_object',
'sqlite_fetch_single',
'sqlite_fetch_string',
'sqlite_field_name',
'sqlite_has_more',
'sqlite_has_prev',
'sqlite_key',
'sqlite_last_error',
'sqlite_last_insert_rowid',
'sqlite_libencoding',
'sqlite_libversion',
'sqlite_next',
'sqlite_num_fields',
'sqlite_num_rows',
'sqlite_open',
'sqlite_popen',
'sqlite_prev',
'sqlite_query',
'sqlite_rewind',
'sqlite_seek',
'sqlite_single_query',
'sqlite_udf_decode_binary',
'sqlite_udf_encode_binary',
'sqlite_unbuffered_query',
'sqlite_valid'),
'SSH2': ('ssh2_auth_agent',
'ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
'ssh2_connect',
'ssh2_exec',
'ssh2_fetch_stream',
'ssh2_fingerprint',
'ssh2_methods_negotiated',
'ssh2_publickey_add',
'ssh2_publickey_init',
'ssh2_publickey_list',
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
'ssh2_sftp_chmod',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
'ssh2_sftp_realpath',
'ssh2_sftp_rename',
'ssh2_sftp_rmdir',
'ssh2_sftp_stat',
'ssh2_sftp_symlink',
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
'ssh2_tunnel'),
'SVN': ('svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
'svn_cat',
'svn_checkout',
'svn_cleanup',
'svn_client_version',
'svn_commit',
'svn_delete',
'svn_diff',
'svn_export',
'svn_fs_abort_txn',
'svn_fs_apply_text',
'svn_fs_begin_txn2',
'svn_fs_change_node_prop',
'svn_fs_check_path',
'svn_fs_contents_changed',
'svn_fs_copy',
'svn_fs_delete',
'svn_fs_dir_entries',
'svn_fs_file_contents',
'svn_fs_file_length',
'svn_fs_is_dir',
'svn_fs_is_file',
'svn_fs_make_dir',
'svn_fs_make_file',
'svn_fs_node_created_rev',
'svn_fs_node_prop',
'svn_fs_props_changed',
'svn_fs_revision_prop',
'svn_fs_revision_root',
'svn_fs_txn_root',
'svn_fs_youngest_rev',
'svn_import',
'svn_log',
'svn_ls',
'svn_mkdir',
'svn_repos_create',
'svn_repos_fs_begin_txn_for_commit',
'svn_repos_fs_commit_txn',
'svn_repos_fs',
'svn_repos_hotcopy',
'svn_repos_open',
'svn_repos_recover',
'svn_revert',
'svn_status',
'svn_update'),
'SWF': ('swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
'swf_actionplay',
'swf_actionprevframe',
'swf_actionsettarget',
'swf_actionstop',
'swf_actiontogglequality',
'swf_actionwaitforframe',
'swf_addbuttonrecord',
'swf_addcolor',
'swf_closefile',
'swf_definebitmap',
'swf_definefont',
'swf_defineline',
'swf_definepoly',
'swf_definerect',
'swf_definetext',
'swf_endbutton',
'swf_enddoaction',
'swf_endshape',
'swf_endsymbol',
'swf_fontsize',
'swf_fontslant',
'swf_fonttracking',
'swf_getbitmapinfo',
'swf_getfontinfo',
'swf_getframe',
'swf_labelframe',
'swf_lookat',
'swf_modifyobject',
'swf_mulcolor',
'swf_nextid',
'swf_oncondition',
'swf_openfile',
'swf_ortho2',
'swf_ortho',
'swf_perspective',
'swf_placeobject',
'swf_polarview',
'swf_popmatrix',
'swf_posround',
'swf_pushmatrix',
'swf_removeobject',
'swf_rotate',
'swf_scale',
'swf_setfont',
'swf_setframe',
'swf_shapearc',
'swf_shapecurveto3',
'swf_shapecurveto',
'swf_shapefillbitmapclip',
'swf_shapefillbitmaptile',
'swf_shapefilloff',
'swf_shapefillsolid',
'swf_shapelinesolid',
'swf_shapelineto',
'swf_shapemoveto',
'swf_showframe',
'swf_startbutton',
'swf_startdoaction',
'swf_startshape',
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
'swf_viewport'),
'Semaphore': ('ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
'msg_remove_queue',
'msg_send',
'msg_set_queue',
'msg_stat_queue',
'sem_acquire',
'sem_get',
'sem_release',
'sem_remove',
'shm_attach',
'shm_detach',
'shm_get_var',
'shm_has_var',
'shm_put_var',
'shm_remove_var',
'shm_remove'),
'Session': ('session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
'session_destroy',
'session_encode',
'session_get_cookie_params',
'session_id',
'session_is_registered',
'session_module_name',
'session_name',
'session_regenerate_id',
'session_register_shutdown',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
'session_status',
'session_unregister',
'session_unset',
'session_write_close'),
'Session PgSQL': ('session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
'session_pgsql_status'),
'Shared Memory': ('shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
'shmop_write'),
'SimpleXML': ('simplexml_import_dom',
'simplexml_load_file',
'simplexml_load_string'),
'Socket': ('socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
'socket_cmsg_space',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
'socket_create',
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
'socket_import_stream',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
'socket_recvmsg',
'socket_select',
'socket_send',
'socket_sendmsg',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
'socket_write'),
'Solr': ('solr_get_version',),
'Statistic': ('stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
'stats_cdf_chisquare',
'stats_cdf_exponential',
'stats_cdf_f',
'stats_cdf_gamma',
'stats_cdf_laplace',
'stats_cdf_logistic',
'stats_cdf_negative_binomial',
'stats_cdf_noncentral_chisquare',
'stats_cdf_noncentral_f',
'stats_cdf_poisson',
'stats_cdf_t',
'stats_cdf_uniform',
'stats_cdf_weibull',
'stats_covariance',
'stats_den_uniform',
'stats_dens_beta',
'stats_dens_cauchy',
'stats_dens_chisquare',
'stats_dens_exponential',
'stats_dens_f',
'stats_dens_gamma',
'stats_dens_laplace',
'stats_dens_logistic',
'stats_dens_negative_binomial',
'stats_dens_normal',
'stats_dens_pmf_binomial',
'stats_dens_pmf_hypergeometric',
'stats_dens_pmf_poisson',
'stats_dens_t',
'stats_dens_weibull',
'stats_harmonic_mean',
'stats_kurtosis',
'stats_rand_gen_beta',
'stats_rand_gen_chisquare',
'stats_rand_gen_exponential',
'stats_rand_gen_f',
'stats_rand_gen_funiform',
'stats_rand_gen_gamma',
'stats_rand_gen_ibinomial_negative',
'stats_rand_gen_ibinomial',
'stats_rand_gen_int',
'stats_rand_gen_ipoisson',
'stats_rand_gen_iuniform',
'stats_rand_gen_noncenral_chisquare',
'stats_rand_gen_noncentral_f',
'stats_rand_gen_noncentral_t',
'stats_rand_gen_normal',
'stats_rand_gen_t',
'stats_rand_get_seeds',
'stats_rand_phrase_to_seeds',
'stats_rand_ranf',
'stats_rand_setall',
'stats_skew',
'stats_standard_deviation',
'stats_stat_binomial_coef',
'stats_stat_correlation',
'stats_stat_gennch',
'stats_stat_independent_t',
'stats_stat_innerproduct',
'stats_stat_noncentral_t',
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
'stats_variance'),
'Stomp': ('stomp_connect_error', 'stomp_version'),
'Stream': ('set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
'stream_bucket_prepend',
'stream_context_create',
'stream_context_get_default',
'stream_context_get_options',
'stream_context_get_params',
'stream_context_set_default',
'stream_context_set_option',
'stream_context_set_params',
'stream_copy_to_stream',
'stream_encoding',
'stream_filter_append',
'stream_filter_prepend',
'stream_filter_register',
'stream_filter_remove',
'stream_get_contents',
'stream_get_filters',
'stream_get_line',
'stream_get_meta_data',
'stream_get_transports',
'stream_get_wrappers',
'stream_is_local',
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
'stream_select',
'stream_set_blocking',
'stream_set_chunk_size',
'stream_set_read_buffer',
'stream_set_timeout',
'stream_set_write_buffer',
'stream_socket_accept',
'stream_socket_client',
'stream_socket_enable_crypto',
'stream_socket_get_name',
'stream_socket_pair',
'stream_socket_recvfrom',
'stream_socket_sendto',
'stream_socket_server',
'stream_socket_shutdown',
'stream_supports_lock',
'stream_wrapper_register',
'stream_wrapper_restore',
'stream_wrapper_unregister'),
'String': ('addcslashes',
'addslashes',
'bin2hex',
'chop',
'chr',
'chunk_split',
'convert_cyr_string',
'convert_uudecode',
'convert_uuencode',
'count_chars',
'crc32',
'crypt',
'echo',
'explode',
'fprintf',
'get_html_translation_table',
'hebrev',
'hebrevc',
'hex2bin',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
'htmlspecialchars',
'implode',
'join',
'lcfirst',
'levenshtein',
'localeconv',
'ltrim',
'md5_file',
'md5',
'metaphone',
'money_format',
'nl_langinfo',
'nl2br',
'number_format',
'ord',
'parse_str',
'print',
'printf',
'quoted_printable_decode',
'quoted_printable_encode',
'quotemeta',
'rtrim',
'setlocale',
'sha1_file',
'sha1',
'similar_text',
'soundex',
'sprintf',
'sscanf',
'str_getcsv',
'str_ireplace',
'str_pad',
'str_repeat',
'str_replace',
'str_rot13',
'str_shuffle',
'str_split',
'str_word_count',
'strcasecmp',
'strchr',
'strcmp',
'strcoll',
'strcspn',
'strip_tags',
'stripcslashes',
'stripos',
'stripslashes',
'stristr',
'strlen',
'strnatcasecmp',
'strnatcmp',
'strncasecmp',
'strncmp',
'strpbrk',
'strpos',
'strrchr',
'strrev',
'strripos',
'strrpos',
'strspn',
'strstr',
'strtok',
'strtolower',
'strtoupper',
'strtr',
'substr_compare',
'substr_count',
'substr_replace',
'substr',
'trim',
'ucfirst',
'ucwords',
'vfprintf',
'vprintf',
'vsprintf',
'wordwrap'),
'Sybase': ('sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
'sybase_deadlock_retry_count',
'sybase_fetch_array',
'sybase_fetch_assoc',
'sybase_fetch_field',
'sybase_fetch_object',
'sybase_fetch_row',
'sybase_field_seek',
'sybase_free_result',
'sybase_get_last_message',
'sybase_min_client_severity',
'sybase_min_error_severity',
'sybase_min_message_severity',
'sybase_min_server_severity',
'sybase_num_fields',
'sybase_num_rows',
'sybase_pconnect',
'sybase_query',
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
'sybase_unbuffered_query'),
'TCP': ('tcpwrap_check',),
'Taint': ('is_tainted', 'taint', 'untaint'),
'Tidy': ('ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
'tidy_warning_count'),
'Tokenizer': ('token_get_all', 'token_name'),
'Trader': ('trader_acos',
'trader_ad',
'trader_add',
'trader_adosc',
'trader_adx',
'trader_adxr',
'trader_apo',
'trader_aroon',
'trader_aroonosc',
'trader_asin',
'trader_atan',
'trader_atr',
'trader_avgprice',
'trader_bbands',
'trader_beta',
'trader_bop',
'trader_cci',
'trader_cdl2crows',
'trader_cdl3blackcrows',
'trader_cdl3inside',
'trader_cdl3linestrike',
'trader_cdl3outside',
'trader_cdl3starsinsouth',
'trader_cdl3whitesoldiers',
'trader_cdlabandonedbaby',
'trader_cdladvanceblock',
'trader_cdlbelthold',
'trader_cdlbreakaway',
'trader_cdlclosingmarubozu',
'trader_cdlconcealbabyswall',
'trader_cdlcounterattack',
'trader_cdldarkcloudcover',
'trader_cdldoji',
'trader_cdldojistar',
'trader_cdldragonflydoji',
'trader_cdlengulfing',
'trader_cdleveningdojistar',
'trader_cdleveningstar',
'trader_cdlgapsidesidewhite',
'trader_cdlgravestonedoji',
'trader_cdlhammer',
'trader_cdlhangingman',
'trader_cdlharami',
'trader_cdlharamicross',
'trader_cdlhighwave',
'trader_cdlhikkake',
'trader_cdlhikkakemod',
'trader_cdlhomingpigeon',
'trader_cdlidentical3crows',
'trader_cdlinneck',
'trader_cdlinvertedhammer',
'trader_cdlkicking',
'trader_cdlkickingbylength',
'trader_cdlladderbottom',
'trader_cdllongleggeddoji',
'trader_cdllongline',
'trader_cdlmarubozu',
'trader_cdlmatchinglow',
'trader_cdlmathold',
'trader_cdlmorningdojistar',
'trader_cdlmorningstar',
'trader_cdlonneck',
'trader_cdlpiercing',
'trader_cdlrickshawman',
'trader_cdlrisefall3methods',
'trader_cdlseparatinglines',
'trader_cdlshootingstar',
'trader_cdlshortline',
'trader_cdlspinningtop',
'trader_cdlstalledpattern',
'trader_cdlsticksandwich',
'trader_cdltakuri',
'trader_cdltasukigap',
'trader_cdlthrusting',
'trader_cdltristar',
'trader_cdlunique3river',
'trader_cdlupsidegap2crows',
'trader_cdlxsidegap3methods',
'trader_ceil',
'trader_cmo',
'trader_correl',
'trader_cos',
'trader_cosh',
'trader_dema',
'trader_div',
'trader_dx',
'trader_ema',
'trader_errno',
'trader_exp',
'trader_floor',
'trader_get_compat',
'trader_get_unstable_period',
'trader_ht_dcperiod',
'trader_ht_dcphase',
'trader_ht_phasor',
'trader_ht_sine',
'trader_ht_trendline',
'trader_ht_trendmode',
'trader_kama',
'trader_linearreg_angle',
'trader_linearreg_intercept',
'trader_linearreg_slope',
'trader_linearreg',
'trader_ln',
'trader_log10',
'trader_ma',
'trader_macd',
'trader_macdext',
'trader_macdfix',
'trader_mama',
'trader_mavp',
'trader_max',
'trader_maxindex',
'trader_medprice',
'trader_mfi',
'trader_midpoint',
'trader_midprice',
'trader_min',
'trader_minindex',
'trader_minmax',
'trader_minmaxindex',
'trader_minus_di',
'trader_minus_dm',
'trader_mom',
'trader_mult',
'trader_natr',
'trader_obv',
'trader_plus_di',
'trader_plus_dm',
'trader_ppo',
'trader_roc',
'trader_rocp',
'trader_rocr100',
'trader_rocr',
'trader_rsi',
'trader_sar',
'trader_sarext',
'trader_set_compat',
'trader_set_unstable_period',
'trader_sin',
'trader_sinh',
'trader_sma',
'trader_sqrt',
'trader_stddev',
'trader_stoch',
'trader_stochf',
'trader_stochrsi',
'trader_sub',
'trader_sum',
'trader_t3',
'trader_tan',
'trader_tanh',
'trader_tema',
'trader_trange',
'trader_trima',
'trader_trix',
'trader_tsf',
'trader_typprice',
'trader_ultosc',
'trader_var',
'trader_wclprice',
'trader_willr',
'trader_wma'),
'URL': ('base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
'http_build_query',
'parse_url',
'rawurldecode',
'rawurlencode',
'urldecode',
'urlencode'),
'Uopz': ('uopz_backup',
'uopz_compose',
'uopz_copy',
'uopz_delete',
'uopz_extend',
'uopz_flags',
'uopz_function',
'uopz_implement',
'uopz_overload',
'uopz_redefine',
'uopz_rename',
'uopz_restore',
'uopz_undefine'),
'Variable handling': ('boolval',
'debug_zval_dump',
'doubleval',
'empty',
'floatval',
'get_defined_vars',
'get_resource_type',
'gettype',
'import_request_variables',
'intval',
'is_array',
'is_bool',
'is_callable',
'is_double',
'is_float',
'is_int',
'is_integer',
'is_long',
'is_null',
'is_numeric',
'is_object',
'is_real',
'is_resource',
'is_scalar',
'is_string',
'isset',
'print_r',
'serialize',
'settype',
'strval',
'unserialize',
'unset',
'var_dump',
'var_export'),
'W32api': ('w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
'w32api_set_call_method'),
'WDDX': ('wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
'wddx_serialize_vars'),
'WinCache': ('wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
'wincache_ocache_meminfo',
'wincache_refresh_if_changed',
'wincache_rplist_fileinfo',
'wincache_rplist_meminfo',
'wincache_scache_info',
'wincache_scache_meminfo',
'wincache_ucache_add',
'wincache_ucache_cas',
'wincache_ucache_clear',
'wincache_ucache_dec',
'wincache_ucache_delete',
'wincache_ucache_exists',
'wincache_ucache_get',
'wincache_ucache_inc',
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
'wincache_unlock'),
'XML Parser': ('utf8_decode',
'utf8_encode',
'xml_error_string',
'xml_get_current_byte_index',
'xml_get_current_column_number',
'xml_get_current_line_number',
'xml_get_error_code',
'xml_parse_into_struct',
'xml_parse',
'xml_parser_create_ns',
'xml_parser_create',
'xml_parser_free',
'xml_parser_get_option',
'xml_parser_set_option',
'xml_set_character_data_handler',
'xml_set_default_handler',
'xml_set_element_handler',
'xml_set_end_namespace_decl_handler',
'xml_set_external_entity_ref_handler',
'xml_set_notation_decl_handler',
'xml_set_object',
'xml_set_processing_instruction_handler',
'xml_set_start_namespace_decl_handler',
'xml_set_unparsed_entity_decl_handler'),
'XML-RPC': ('xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
'xmlrpc_get_type',
'xmlrpc_is_fault',
'xmlrpc_parse_method_descriptions',
'xmlrpc_server_add_introspection_data',
'xmlrpc_server_call_method',
'xmlrpc_server_create',
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
'xmlrpc_set_type'),
'XSLT (PHP 4)': ('xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
'xslt_errno',
'xslt_error',
'xslt_free',
'xslt_getopt',
'xslt_process',
'xslt_set_base',
'xslt_set_encoding',
'xslt_set_error_handler',
'xslt_set_log',
'xslt_set_object',
'xslt_set_sax_handler',
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
'xslt_setopt'),
'Xhprof': ('xhprof_disable',
'xhprof_enable',
'xhprof_sample_disable',
'xhprof_sample_enable'),
'YAZ': ('yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
'yaz_connect',
'yaz_database',
'yaz_element',
'yaz_errno',
'yaz_error',
'yaz_es_result',
'yaz_es',
'yaz_get_option',
'yaz_hits',
'yaz_itemorder',
'yaz_present',
'yaz_range',
'yaz_record',
'yaz_scan_result',
'yaz_scan',
'yaz_schema',
'yaz_search',
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
'yaz_wait'),
'YP/NIS': ('yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
'yp_first',
'yp_get_default_domain',
'yp_master',
'yp_match',
'yp_next',
'yp_order'),
'Yaml': ('yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
'yaml_parse'),
'Zip': ('zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
'zip_entry_filesize',
'zip_entry_name',
'zip_entry_open',
'zip_entry_read',
'zip_open',
'zip_read'),
'Zlib': ('gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
'gzencode',
'gzeof',
'gzfile',
'gzgetc',
'gzgets',
'gzgetss',
'gzinflate',
'gzopen',
'gzpassthru',
'gzputs',
'gzread',
'gzrewind',
'gzseek',
'gztell',
'gzuncompress',
'gzwrite',
'readgzfile',
'zlib_decode',
'zlib_encode',
'zlib_get_coding_type'),
'bcompiler': ('bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
'bcompiler_write_class',
'bcompiler_write_constant',
'bcompiler_write_exe_footer',
'bcompiler_write_file',
'bcompiler_write_footer',
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
'bcompiler_write_included_filename'),
'cURL': ('curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
'curl_escape',
'curl_exec',
'curl_file_create',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
'curl_multi_close',
'curl_multi_exec',
'curl_multi_getcontent',
'curl_multi_info_read',
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
'curl_multi_setopt',
'curl_multi_strerror',
'curl_pause',
'curl_reset',
'curl_setopt_array',
'curl_setopt',
'curl_share_close',
'curl_share_init',
'curl_share_setopt',
'curl_strerror',
'curl_unescape',
'curl_version'),
'chdb': ('chdb_create',),
'dBase': ('dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
'dbase_get_header_info',
'dbase_get_record_with_names',
'dbase_get_record',
'dbase_numfields',
'dbase_numrecords',
'dbase_open',
'dbase_pack',
'dbase_replace_record'),
'dbx': ('dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row',
'dbx_query',
'dbx_sort'),
'filePro': ('filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
'filepro'),
'iconv': ('iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
'iconv_set_encoding',
'iconv_strlen',
'iconv_strpos',
'iconv_strrpos',
'iconv_substr',
'iconv',
'ob_iconv_handler'),
'inclued': ('inclued_get_data',),
'intl': ('intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
'intl_is_failure'),
'libxml': ('libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_external_entity_loader',
'libxml_set_streams_context',
'libxml_use_internal_errors'),
'mSQL': ('msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
'msql_createdb',
'msql_data_seek',
'msql_db_query',
'msql_dbname',
'msql_drop_db',
'msql_error',
'msql_fetch_array',
'msql_fetch_field',
'msql_fetch_object',
'msql_fetch_row',
'msql_field_flags',
'msql_field_len',
'msql_field_name',
'msql_field_seek',
'msql_field_table',
'msql_field_type',
'msql_fieldflags',
'msql_fieldlen',
'msql_fieldname',
'msql_fieldtable',
'msql_fieldtype',
'msql_free_result',
'msql_list_dbs',
'msql_list_fields',
'msql_list_tables',
'msql_num_fields',
'msql_num_rows',
'msql_numfields',
'msql_numrows',
'msql_pconnect',
'msql_query',
'msql_regcase',
'msql_result',
'msql_select_db',
'msql_tablename',
'msql'),
'mnoGoSearch': ('udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
'udm_cat_list',
'udm_cat_path',
'udm_check_charset',
'udm_check_stored',
'udm_clear_search_limits',
'udm_close_stored',
'udm_crc32',
'udm_errno',
'udm_error',
'udm_find',
'udm_free_agent',
'udm_free_ispell_data',
'udm_free_res',
'udm_get_doc_count',
'udm_get_res_field',
'udm_get_res_param',
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
'udm_set_agent_param'),
'mqseries': ('mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
'mqseries_conn',
'mqseries_connx',
'mqseries_disc',
'mqseries_get',
'mqseries_inq',
'mqseries_open',
'mqseries_put1',
'mqseries_put',
'mqseries_set',
'mqseries_strerror'),
'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_available_handlers',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
'mysqlnd_qc_get_normalized_query_trace_log',
'mysqlnd_qc_get_query_trace_log',
'mysqlnd_qc_set_cache_condition',
'mysqlnd_qc_set_is_select',
'mysqlnd_qc_set_storage_handler',
'mysqlnd_qc_set_user_handlers'),
'qtdom': ('qdom_error', 'qdom_tree'),
'runkit': ('runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
'runkit_constant_remove',
'runkit_function_add',
'runkit_function_copy',
'runkit_function_redefine',
'runkit_function_remove',
'runkit_function_rename',
'runkit_import',
'runkit_lint_file',
'runkit_lint',
'runkit_method_add',
'runkit_method_copy',
'runkit_method_redefine',
'runkit_method_remove',
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
'runkit_superglobals'),
'ssdeep': ('ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
'ssdeep_fuzzy_hash'),
'vpopmail': ('vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
'vpopmail_add_user',
'vpopmail_alias_add',
'vpopmail_alias_del_domain',
'vpopmail_alias_del',
'vpopmail_alias_get_all',
'vpopmail_alias_get',
'vpopmail_auth_user',
'vpopmail_del_domain_ex',
'vpopmail_del_domain',
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
'vpopmail_set_user_quota'),
'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
'win32service': ('win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
'win32_pause_service',
'win32_query_service_status',
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
'win32_stop_service'),
'xattr': ('xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
'xattr_supported'),
'xdiff': ('xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
'xdiff_file_diff',
'xdiff_file_merge3',
'xdiff_file_patch_binary',
'xdiff_file_patch',
'xdiff_file_rabdiff',
'xdiff_string_bdiff_size',
'xdiff_string_bdiff',
'xdiff_string_bpatch',
'xdiff_string_diff_binary',
'xdiff_string_diff',
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
'xdiff_string_rabdiff')}
if __name__ == '__main__': # pragma: no cover
import glob
import os
import pprint
import re
import shutil
import tarfile
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
PHP_REFERENCE_GLOB = 'ref.*'
PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
PHP_MODULE_RE = r'<title>(.*?) Functions</title>'
def get_php_functions():
function_re = re.compile(PHP_FUNCTION_RE)
module_re = re.compile(PHP_MODULE_RE)
modules = {}
for file in get_php_references():
module = ''
for line in open(file):
if not module:
search = module_re.search(line)
if search:
module = search.group(1)
modules[module] = []
elif 'href="function.' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '->' not in fn and '::' not in fn and fn not in modules[module]:
modules[module].append(fn)
if module:
# These are dummy manual pages, not actual functions
if module == 'PHP Options/Info':
modules[module].remove('main')
if module == 'Filesystem':
modules[module].remove('delete')
if not modules[module]:
del modules[module]
return modules
def get_php_references():
download = urlretrieve(PHP_MANUAL_URL)
tar = tarfile.open(download[0])
tar.extractall()
tar.close()
for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
yield file
os.remove(download[0])
def regenerate(filename, modules):
with open(filename) as fp:
content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
with open(filename, 'w') as fp:
fp.write(header)
fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
fp.write(footer)
def run():
print('>> Downloading Function Index')
modules = get_php_functions()
total = sum(len(v) for v in modules.values())
print('%d functions found' % total)
regenerate(__file__, modules)
shutil.rmtree(PHP_MANUAL_DIR)
run()
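# A small usage sketch (not part of the generated module): since MODULES maps
# module names to function-name tuples, a one-time reverse index answers
# "which module defines this function?" in O(1):
#
#     FUNCTION_TO_MODULE = {
#         fn: module
#         for module, functions in MODULES.items()
#         for fn in functions
#     }
#     assert FUNCTION_TO_MODULE['curl_init'] == 'cURL'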
|
SambaDemon/python_vantiv
|
refs/heads/master
|
vantiv/request/model/merchant.py
|
2
|
from ..schemas import Schema, fields
from ..utilities import frozen
class MerchantSchema(Schema):
Name = fields.String()
AmexMid = fields.String()
DiscoverConveyedMid = fields.String()
URL = fields.String()
CustomerServiceNumber = fields.String()
HardCodedBillingDescriptor = fields.String()
MaxTransactionAmount = fields.String()
PurchaseCurrency = fields.String()
CategoryCode = fields.String()
BankRoutingNumber = fields.String()
BankAccountNumber = fields.String()
PSPMerchantID = fields.String()
Disable = fields.String()
CreateCredentials = fields.String()
SettlementCurrency = fields.String()
FraudEnabled = fields.String()
class Merchant(object):
__schema__ = MerchantSchema()
Name = None
AmexMid = None
DiscoverConveyedMid = None
URL = None
CustomerServiceNumber = None
HardCodedBillingDescriptor = None
MaxTransactionAmount = None
PurchaseCurrency = None
CategoryCode = None
BankRoutingNumber = None
BankAccountNumber = None
PSPMerchantID = None
Disable = None
CreateCredentials = None
SettlementCurrency = None
FraudEnabled = None
__setattr__ = frozen(object.__setattr__)
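# A minimal usage sketch (illustrative values; assumes `frozen` merely blocks
# assignment to attribute names not declared on the class, and that the
# schema exposes a marshmallow-style dump()):
#
#     merchant = Merchant()
#     merchant.Name = 'Example Merchant LLC'
#     merchant.PurchaseCurrency = 'USD'
#     payload = Merchant.__schema__.dump(merchant)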
|
AutorestCI/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-servicebus/azure/mgmt/servicebus/models/sb_subscription.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class SBSubscription(Resource):
"""Description of subscription resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:ivar message_count: Number of messages.
:vartype message_count: long
:ivar created_at: Exact time the subscription was created.
:vartype created_at: datetime
:ivar accessed_at: Last time there was a receive request to this
subscription.
:vartype accessed_at: datetime
:ivar updated_at: The exact time the subscription was updated.
:vartype updated_at: datetime
:ivar count_details: Message count details
:vartype count_details: ~azure.mgmt.servicebus.models.MessageCountDetails
:param lock_duration: ISO 8601 lock duration timespan for the
subscription. The default value is 1 minute.
:type lock_duration: timedelta
:param requires_session: Value indicating if a subscription supports the
concept of sessions.
:type requires_session: bool
:param default_message_time_to_live: ISO 8601 default message timespan to
live value. This is the duration after which the message expires, starting
from when the message is sent to Service Bus. This is the default value
used when TimeToLive is not set on a message itself.
:type default_message_time_to_live: timedelta
:param dead_lettering_on_message_expiration: Value that indicates whether
a subscription has dead letter support when a message expires.
:type dead_lettering_on_message_expiration: bool
:param duplicate_detection_history_time_window: ISO 8601 timeSpan
structure that defines the duration of the duplicate detection history.
The default value is 10 minutes.
:type duplicate_detection_history_time_window: timedelta
:param max_delivery_count: Number of maximum deliveries.
:type max_delivery_count: int
:param status: Enumerates the possible values for the status of a
messaging entity. Possible values include: 'Active', 'Disabled',
'Restoring', 'SendDisabled', 'ReceiveDisabled', 'Creating', 'Deleting',
'Renaming', 'Unknown'
:type status: str or ~azure.mgmt.servicebus.models.EntityStatus
:param enable_batched_operations: Value that indicates whether server-side
batched operations are enabled.
:type enable_batched_operations: bool
:param auto_delete_on_idle: ISO 8601 timeSpan idle interval after which
the subscription is automatically deleted. The minimum duration is 5 minutes.
:type auto_delete_on_idle: timedelta
:param forward_to: Queue/Topic name to forward the messages
:type forward_to: str
:param forward_dead_lettered_messages_to: Queue/Topic name to forward the
Dead Letter message
:type forward_dead_lettered_messages_to: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'message_count': {'readonly': True},
'created_at': {'readonly': True},
'accessed_at': {'readonly': True},
'updated_at': {'readonly': True},
'count_details': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'message_count': {'key': 'properties.messageCount', 'type': 'long'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'accessed_at': {'key': 'properties.accessedAt', 'type': 'iso-8601'},
'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'},
'count_details': {'key': 'properties.countDetails', 'type': 'MessageCountDetails'},
'lock_duration': {'key': 'properties.lockDuration', 'type': 'duration'},
'requires_session': {'key': 'properties.requiresSession', 'type': 'bool'},
'default_message_time_to_live': {'key': 'properties.defaultMessageTimeToLive', 'type': 'duration'},
'dead_lettering_on_message_expiration': {'key': 'properties.deadLetteringOnMessageExpiration', 'type': 'bool'},
'duplicate_detection_history_time_window': {'key': 'properties.duplicateDetectionHistoryTimeWindow', 'type': 'duration'},
'max_delivery_count': {'key': 'properties.maxDeliveryCount', 'type': 'int'},
'status': {'key': 'properties.status', 'type': 'EntityStatus'},
'enable_batched_operations': {'key': 'properties.enableBatchedOperations', 'type': 'bool'},
'auto_delete_on_idle': {'key': 'properties.autoDeleteOnIdle', 'type': 'duration'},
'forward_to': {'key': 'properties.forwardTo', 'type': 'str'},
'forward_dead_lettered_messages_to': {'key': 'properties.forwardDeadLetteredMessagesTo', 'type': 'str'},
}
def __init__(self, lock_duration=None, requires_session=None, default_message_time_to_live=None, dead_lettering_on_message_expiration=None, duplicate_detection_history_time_window=None, max_delivery_count=None, status=None, enable_batched_operations=None, auto_delete_on_idle=None, forward_to=None, forward_dead_lettered_messages_to=None):
super(SBSubscription, self).__init__()
self.message_count = None
self.created_at = None
self.accessed_at = None
self.updated_at = None
self.count_details = None
self.lock_duration = lock_duration
self.requires_session = requires_session
self.default_message_time_to_live = default_message_time_to_live
self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration
self.duplicate_detection_history_time_window = duplicate_detection_history_time_window
self.max_delivery_count = max_delivery_count
self.status = status
self.enable_batched_operations = enable_batched_operations
self.auto_delete_on_idle = auto_delete_on_idle
self.forward_to = forward_to
self.forward_dead_lettered_messages_to = forward_dead_lettered_messages_to
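# A minimal construction sketch (values are illustrative; read-only fields
# such as message_count and created_at are populated by the service, never by
# the caller):
#
#     from datetime import timedelta
#
#     subscription = SBSubscription(
#         lock_duration=timedelta(minutes=1),
#         default_message_time_to_live=timedelta(days=14),
#         dead_lettering_on_message_expiration=True,
#         max_delivery_count=10,
#     )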
|
HuaweiSwitch/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/lenovo/cnos_save.py
|
59
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to save running config to start up config to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_save
author: "Dave Kasberg (@dkasberg)"
short_description: Save the running configuration as the startup configuration on devices running Lenovo CNOS
description:
- This module allows you to copy the running configuration of a switch over its startup configuration.
It is recommended to use this module shortly after any major configuration changes so they persist after
a switch restart. This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in the local directory from which the playbook is run.
For more information about this module from Lenovo and customizing its usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_save.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options: {}
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_save. These are written in the main.yml file of the tasks directory.
---
- name: Test Save
cnos_save:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['username'] }}"
password: "{{ hostvars[inventory_hostname]['password'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}"
outputfile: "./results/test_save_{{ inventory_hostname }}_output.txt"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "Switch Running Config is Saved to Startup Config"
'''
import sys
import paramiko
import time
import argparse
import socket
import array
import json
import re
try:
from ansible.module_utils import cnos
HAS_LIB = True
except ImportError:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
outputfile=dict(required=True),
host=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True),
deviceType=dict(required=True),),
supports_check_mode=False)
username = module.params['username']
password = module.params['password']
enablePassword = module.params['enablePassword']
cliCommand = "save memory \n"
outputfile = module.params['outputfile']
hostIP = module.params['host']
deviceType = module.params['deviceType']
output = ""
# Create instance of SSHClient object
remote_conn_pre = paramiko.SSHClient()
# Automatically add untrusted hosts (make sure okay for security policy in your environment)
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection with the switch
remote_conn_pre.connect(hostIP, username=username, password=password)
time.sleep(2)
# Use invoke_shell to establish an 'interactive session'
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(2)
# Enable and enter configure terminal then send command
output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
# Make terminal length = 0
output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
# cnos.debugOutput(cliCommand)
# Send the CLi command
output = output + cnos.waitForDeviceResponse(cliCommand, "#", 2, remote_conn)
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="Switch Running Config is Saved to Startup Config ")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
|
moylop260/odoo-dev
|
refs/heads/master
|
addons/project_mrp/__openerp__.py
|
31
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Create Tasks on SO',
'version': '1.0',
'category': 'Project Management',
'description': """
Automatically creates project tasks from procurement lines.
===========================================================
This module will automatically create a new task for each procurement order line
(e.g. for sale order lines), if the corresponding product meets the following
characteristics:
* Product Type = Service
* Procurement Method (Order fulfillment) = MTO (Make to Order)
* Supply/Procurement Method = Manufacture
If on top of that a project is specified on the product form (in the Procurement
tab), then the new task will be created in that specific project. Otherwise, the
new task will not belong to any project, and may be added to a project manually
later.
When the project task is completed or cancelled, the corresponding procurement
is updated accordingly. For example, if this procurement corresponds to a sale
order line, the sale order line will be considered delivered when the task is
completed.
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'images': ['images/product.jpeg', 'images/task_from_SO.jpeg'],
'depends': ['project', 'procurement', 'sale', 'procurement_jit'],
'data': ['project_mrp_view.xml'], #'process/project_mrp_process.xml'
'demo': ['project_mrp_demo.xml'],
'test': ['test/project_task_procurement.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
graphite-server/graphite-web
|
refs/heads/master
|
webapp/graphite/__init__.py
|
161
|
# Two wrongs don't make a right, but three lefts do.
|
Intel-Corporation/tensorflow
|
refs/heads/master
|
tensorflow/contrib/summary/summary.py
|
23
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Summary API v2.
The operations in this package are safe to use with eager execution turned on or
off. This package has a more flexible API that allows summaries to be written directly
from ops to places other than event log files, rather than propagating protos
from `tf.summary.merge_all` to `tf.summary.FileWriter`.
To use with eager execution enabled, write your code as follows:
```python
global_step = tf.train.get_or_create_global_step()
summary_writer = tf.contrib.summary.create_file_writer(
train_dir, flush_millis=10000)
with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():
# model code goes here
# and in it call
tf.contrib.summary.scalar("loss", my_loss)
# In this case every call to tf.contrib.summary.scalar will generate a record
# ...
```
To use it with graph execution, write your code as follows:
```python
global_step = tf.train.get_or_create_global_step()
summary_writer = tf.contrib.summary.create_file_writer(
train_dir, flush_millis=10000)
with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():
# model definition code goes here
# and in it call
tf.contrib.summary.scalar("loss", my_loss)
# In this case every call to tf.contrib.summary.scalar will generate an op,
# note the need to run tf.contrib.summary.all_summary_ops() to make sure these
# ops get executed.
# ...
train_op = ....
with tf.Session(...) as sess:
tf.global_variables_initializer().run()
tf.contrib.summary.initialize(graph=tf.get_default_graph())
# ...
while not_done_training:
sess.run([train_op, tf.contrib.summary.all_summary_ops()])
# ...
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.ops.summary_ops_v2 import all_summary_ops
from tensorflow.python.ops.summary_ops_v2 import always_record_summaries
from tensorflow.python.ops.summary_ops_v2 import audio
from tensorflow.python.ops.summary_ops_v2 import create_db_writer
from tensorflow.python.ops.summary_ops_v2 import create_file_writer
from tensorflow.python.ops.summary_ops_v2 import create_summary_file_writer
from tensorflow.python.ops.summary_ops_v2 import eval_dir
from tensorflow.python.ops.summary_ops_v2 import flush
from tensorflow.python.ops.summary_ops_v2 import generic
from tensorflow.python.ops.summary_ops_v2 import graph
from tensorflow.python.ops.summary_ops_v2 import histogram
from tensorflow.python.ops.summary_ops_v2 import image
from tensorflow.python.ops.summary_ops_v2 import import_event
from tensorflow.python.ops.summary_ops_v2 import initialize
from tensorflow.python.ops.summary_ops_v2 import never_record_summaries
from tensorflow.python.ops.summary_ops_v2 import record_summaries_every_n_global_steps
from tensorflow.python.ops.summary_ops_v2 import scalar
from tensorflow.python.ops.summary_ops_v2 import should_record_summaries
from tensorflow.python.ops.summary_ops_v2 import summary_writer_initializer_op
from tensorflow.python.ops.summary_ops_v2 import SummaryWriter
|
stuntman723/rap-analyzer
|
refs/heads/master
|
rap_analyzer/lib/python2.7/site-packages/dj_static.py
|
13
|
# -*- coding: utf-8 -*-
import static
from django.conf import settings
from django.core.handlers.wsgi import WSGIHandler
from django.contrib.staticfiles.handlers import StaticFilesHandler as DebugHandler
try:
from urllib.parse import urlparse
except ImportError: # Python 2
from urlparse import urlparse
from django.contrib.staticfiles import utils
try:
from django.core.handlers.wsgi import get_path_info
except ImportError: # django < 1.7
try:
from django.core.handlers.base import get_path_info
except ImportError: # django < 1.5
import sys
py3 = sys.version_info[0] == 3
def get_path_info(environ):
"""
Returns the HTTP request's PATH_INFO as a unicode string.
"""
path_info = environ.get('PATH_INFO', str('/'))
# Under Python 3, strings in environ are decoded with ISO-8859-1;
# re-encode to recover the original bytestring provided by the web server.
if py3:
path_info = path_info.encode('iso-8859-1')
# It'd be better to implement URI-to-IRI decoding, see #19508.
return path_info.decode('utf-8')
class Cling(WSGIHandler):
"""WSGI middleware that intercepts calls to the static files
directory, as defined by the STATIC_URL setting, and serves those files.
"""
def __init__(self, application, base_dir=None, ignore_debug=False):
self.application = application
self.ignore_debug = ignore_debug
if not base_dir:
base_dir = self.get_base_dir()
self.base_url = urlparse(self.get_base_url())
self.cling = static.Cling(base_dir)
try:
self.debug_cling = DebugHandler(application, base_dir=base_dir)
except TypeError:
self.debug_cling = DebugHandler(application)
super(Cling, self).__init__()
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
utils.check_settings()
return settings.STATIC_URL
@property
def debug(self):
return settings.DEBUG
def _transpose_environ(self, environ):
"""Translates a given environ to static.Cling's expectations."""
environ['PATH_INFO'] = environ['PATH_INFO'][len(self.base_url[2]) - 1:]
return environ
def _should_handle(self, path):
"""Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def __call__(self, environ, start_response):
# Hand non-static requests to Django
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
# Serve static requests from static.Cling
if not self.debug or self.ignore_debug:
environ = self._transpose_environ(environ)
return self.cling(environ, start_response)
# Serve static requests in debug mode from StaticFilesHandler
else:
return self.debug_cling(environ, start_response)
class MediaCling(Cling):
def __init__(self, application, base_dir=None):
super(MediaCling, self).__init__(application, base_dir=base_dir)
# override callable attribute with method
self.debug_cling = self._debug_cling
def _debug_cling(self, environ, start_response):
environ = self._transpose_environ(environ)
return self.cling(environ, start_response)
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
|
inclement/kivy
|
refs/heads/master
|
examples/frameworks/twisted/echo_client_app.py
|
13
|
# install_twisted_reactor must be called before importing the reactor
from __future__ import unicode_literals
from kivy.support import install_twisted_reactor
install_twisted_reactor()
# A Simple Client that sends messages to the Echo Server
from twisted.internet import reactor, protocol
class EchoClient(protocol.Protocol):
def connectionMade(self):
self.factory.app.on_connection(self.transport)
def dataReceived(self, data):
self.factory.app.print_message(data.decode('utf-8'))
class EchoClientFactory(protocol.ClientFactory):
protocol = EchoClient
def __init__(self, app):
self.app = app
def startedConnecting(self, connector):
self.app.print_message('Started to connect.')
def clientConnectionLost(self, connector, reason):
self.app.print_message('Lost connection.')
def clientConnectionFailed(self, connector, reason):
self.app.print_message('Connection failed.')
from kivy.app import App
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
# A simple kivy App, with a textbox to enter messages, and
# a large label to display all the messages received from
# the server
class TwistedClientApp(App):
connection = None
textbox = None
label = None
def build(self):
root = self.setup_gui()
self.connect_to_server()
return root
def setup_gui(self):
self.textbox = TextInput(size_hint_y=.1, multiline=False)
self.textbox.bind(on_text_validate=self.send_message)
self.label = Label(text='connecting...\n')
layout = BoxLayout(orientation='vertical')
layout.add_widget(self.label)
layout.add_widget(self.textbox)
return layout
def connect_to_server(self):
reactor.connectTCP('localhost', 8000, EchoClientFactory(self))
def on_connection(self, connection):
self.print_message("Connected successfully!")
self.connection = connection
def send_message(self, *args):
msg = self.textbox.text
if msg and self.connection:
self.connection.write(msg.encode('utf-8'))
self.textbox.text = ""
def print_message(self, msg):
self.label.text += "{}\n".format(msg)
if __name__ == '__main__':
TwistedClientApp().run()
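# To exercise this client you need an echo server listening on localhost:8000.
# A minimal Twisted server sketch (run it as a separate process):
#
#     from twisted.internet import protocol, reactor
#
#     class Echo(protocol.Protocol):
#         def dataReceived(self, data):
#             self.transport.write(data)
#
#     factory = protocol.ServerFactory()
#     factory.protocol = Echo
#     reactor.listenTCP(8000, factory)
#     reactor.run()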
|
MattDevo/edk2
|
refs/heads/master
|
AppPkg/Applications/Python/Python-2.7.2/Lib/wave.py
|
53
|
"""Stuff to parse WAVE files.
Usage.
Reading WAVE files:
f = wave.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' for linear samples)
getcompname() -- returns human-readable version of
compression type ('not compressed' for linear samples)
getparams() -- returns a tuple consisting of all of the
above in the above order
getmarkers() -- returns None (for compatibility with the
aifc module)
getmark(id) -- raises an error since the mark does not
exist (for compatibility with the aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell() and the position given to setpos()
are compatible and have nothing to do with the actual position in the
file.
The close() method is called automatically when the class instance
is destroyed.
Writing WAVE files:
f = wave.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)
-- set all parameters at once
tell() -- return current position in output file
writeframesraw(data)
-- write audio frames without patching up the
file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, except possibly the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes('') or
close() to patch up the sizes in the header.
The close() method is called automatically when the class instance
is destroyed.
"""
import __builtin__
__all__ = ["open", "openfp", "Error"]
class Error(Exception):
pass
WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'l'
# Determine endian-ness
import struct
if struct.pack("h", 1) == "\000\001":
big_endian = 1
else:
big_endian = 0
from chunk import Chunk
class Wave_read:
"""Variables used in this class:
These variables are available to the user though appropriate
methods of this class:
_file -- the open file with methods read(), close(), and seek()
set through the __init__() method
_nchannels -- the number of audio channels
available through the getnchannels() method
_nframes -- the number of audio frames
available through the getnframes() method
_sampwidth -- the number of bytes per audio sample
available through the getsampwidth() method
_framerate -- the sampling frequency
available through the getframerate() method
_comptype -- the AIFF-C compression type ('NONE' if AIFF)
available through the getcomptype() method
_compname -- the human-readable AIFF-C compression type
available through the getcomptype() method
_soundpos -- the position in the audio stream
available through the tell() method, set through the
setpos() method
These variables are used internally only:
_fmt_chunk_read -- 1 iff the FMT chunk has been read
_data_seek_needed -- 1 iff positioned correctly in audio
file for readframes()
_data_chunk -- instantiation of a chunk class for the DATA chunk
_framesize -- size of one frame in the file
"""
def initfp(self, file):
self._convert = None
self._soundpos = 0
self._file = Chunk(file, bigendian = 0)
if self._file.getname() != 'RIFF':
raise Error, 'file does not start with RIFF id'
if self._file.read(4) != 'WAVE':
raise Error, 'not a WAVE file'
self._fmt_chunk_read = 0
self._data_chunk = None
while 1:
self._data_seek_needed = 1
try:
chunk = Chunk(self._file, bigendian = 0)
except EOFError:
break
chunkname = chunk.getname()
if chunkname == 'fmt ':
self._read_fmt_chunk(chunk)
self._fmt_chunk_read = 1
elif chunkname == 'data':
if not self._fmt_chunk_read:
raise Error, 'data chunk before fmt chunk'
self._data_chunk = chunk
self._nframes = chunk.chunksize // self._framesize
self._data_seek_needed = 0
break
chunk.skip()
if not self._fmt_chunk_read or not self._data_chunk:
raise Error, 'fmt chunk and/or data chunk missing'
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, basestring):
f = __builtin__.open(f, 'rb')
self._i_opened_the_file = f
# else, assume it is an open file object already
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def __del__(self):
self.close()
#
# User visible methods.
#
def getfp(self):
return self._file
def rewind(self):
self._data_seek_needed = 1
self._soundpos = 0
def close(self):
if self._i_opened_the_file:
self._i_opened_the_file.close()
self._i_opened_the_file = None
self._file = None
def tell(self):
return self._soundpos
def getnchannels(self):
return self._nchannels
def getnframes(self):
return self._nframes
def getsampwidth(self):
return self._sampwidth
def getframerate(self):
return self._framerate
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def getparams(self):
return self.getnchannels(), self.getsampwidth(), \
self.getframerate(), self.getnframes(), \
self.getcomptype(), self.getcompname()
def getmarkers(self):
return None
def getmark(self, id):
raise Error, 'no marks'
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error, 'position not in range'
self._soundpos = pos
self._data_seek_needed = 1
def readframes(self, nframes):
if self._data_seek_needed:
self._data_chunk.seek(0, 0)
pos = self._soundpos * self._framesize
if pos:
self._data_chunk.seek(pos, 0)
self._data_seek_needed = 0
if nframes == 0:
return ''
if self._sampwidth > 1 and big_endian:
# unfortunately the fromfile() method does not take
# something that only looks like a file object, so
# we have to reach into the innards of the chunk object
import array
chunk = self._data_chunk
data = array.array(_array_fmts[self._sampwidth])
nitems = nframes * self._nchannels
if nitems * self._sampwidth > chunk.chunksize - chunk.size_read:
nitems = (chunk.chunksize - chunk.size_read) // self._sampwidth
data.fromfile(chunk.file.file, nitems)
# "tell" data chunk how much was read
chunk.size_read = chunk.size_read + nitems * self._sampwidth
# do the same for the outermost chunk
chunk = chunk.file
chunk.size_read = chunk.size_read + nitems * self._sampwidth
data.byteswap()
data = data.tostring()
else:
data = self._data_chunk.read(nframes * self._framesize)
if self._convert and data:
data = self._convert(data)
self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
return data
#
# Internal methods.
#
def _read_fmt_chunk(self, chunk):
wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack('<hhllh', chunk.read(14))
if wFormatTag == WAVE_FORMAT_PCM:
sampwidth = struct.unpack('<h', chunk.read(2))[0]
self._sampwidth = (sampwidth + 7) // 8
else:
raise Error, 'unknown format: %r' % (wFormatTag,)
self._framesize = self._nchannels * self._sampwidth
self._comptype = 'NONE'
self._compname = 'not compressed'
class Wave_write:
"""Variables used in this class:
These variables are user settable through appropriate methods
of this class:
_file -- the open file with methods write(), close(), tell(), seek()
set through the __init__() method
_comptype -- the AIFF-C compression type ('NONE' in AIFF)
set through the setcomptype() or setparams() method
_compname -- the human-readable AIFF-C compression type
set through the setcomptype() or setparams() method
_nchannels -- the number of audio channels
set through the setnchannels() or setparams() method
_sampwidth -- the number of bytes per audio sample
set through the setsampwidth() or setparams() method
_framerate -- the sampling frequency
set through the setframerate() or setparams() method
_nframes -- the number of audio frames written to the header
set through the setnframes() or setparams() method
These variables are used internally only:
_datalength -- the size of the audio samples written to the header
_nframeswritten -- the number of frames actually written
_datawritten -- the size of the audio samples actually written
"""
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, basestring):
f = __builtin__.open(f, 'wb')
self._i_opened_the_file = f
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def initfp(self, file):
self._file = file
self._convert = None
self._nchannels = 0
self._sampwidth = 0
self._framerate = 0
self._nframes = 0
self._nframeswritten = 0
self._datawritten = 0
self._datalength = 0
self._headerwritten = False
def __del__(self):
self.close()
#
# User visible methods.
#
def setnchannels(self, nchannels):
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
if nchannels < 1:
raise Error, 'bad # of channels'
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error, 'number of channels not set'
return self._nchannels
def setsampwidth(self, sampwidth):
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
if sampwidth < 1 or sampwidth > 4:
raise Error, 'bad sample width'
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error, 'sample width not set'
return self._sampwidth
def setframerate(self, framerate):
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
if framerate <= 0:
raise Error, 'bad frame rate'
self._framerate = framerate
def getframerate(self):
if not self._framerate:
raise Error, 'frame rate not set'
return self._framerate
def setnframes(self, nframes):
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
self._nframes = nframes
def getnframes(self):
return self._nframeswritten
def setcomptype(self, comptype, compname):
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
if comptype not in ('NONE',):
raise Error, 'unsupported compression type'
self._comptype = comptype
self._compname = compname
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def setparams(self, params):
nchannels, sampwidth, framerate, nframes, comptype, compname = params
if self._datawritten:
raise Error, 'cannot change parameters after starting to write'
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
self.setnframes(nframes)
self.setcomptype(comptype, compname)
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error, 'not all parameters set'
return self._nchannels, self._sampwidth, self._framerate, \
self._nframes, self._comptype, self._compname
def setmark(self, id, pos, name):
raise Error, 'setmark() not supported'
def getmark(self, id):
raise Error, 'no marks'
def getmarkers(self):
return None
def tell(self):
return self._nframeswritten
def writeframesraw(self, data):
self._ensure_header_written(len(data))
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
if self._sampwidth > 1 and big_endian:
import array
data = array.array(_array_fmts[self._sampwidth], data)
data.byteswap()
data.tofile(self._file)
self._datawritten = self._datawritten + len(data) * self._sampwidth
else:
self._file.write(data)
self._datawritten = self._datawritten + len(data)
self._nframeswritten = self._nframeswritten + nframes
def writeframes(self, data):
self.writeframesraw(data)
if self._datalength != self._datawritten:
self._patchheader()
def close(self):
if self._file:
self._ensure_header_written(0)
if self._datalength != self._datawritten:
self._patchheader()
self._file.flush()
self._file = None
if self._i_opened_the_file:
self._i_opened_the_file.close()
self._i_opened_the_file = None
#
# Internal methods.
#
def _ensure_header_written(self, datasize):
if not self._headerwritten:
if not self._nchannels:
raise Error, '# channels not specified'
if not self._sampwidth:
raise Error, 'sample width not specified'
if not self._framerate:
raise Error, 'sampling rate not specified'
self._write_header(datasize)
def _write_header(self, initlength):
assert not self._headerwritten
self._file.write('RIFF')
if not self._nframes:
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
self._form_length_pos = self._file.tell()
self._file.write(struct.pack('<l4s4slhhllhh4s',
36 + self._datalength, 'WAVE', 'fmt ', 16,
WAVE_FORMAT_PCM, self._nchannels, self._framerate,
self._nchannels * self._framerate * self._sampwidth,
self._nchannels * self._sampwidth,
self._sampwidth * 8, 'data'))
self._data_length_pos = self._file.tell()
self._file.write(struct.pack('<l', self._datalength))
self._headerwritten = True
def _patchheader(self):
assert self._headerwritten
if self._datawritten == self._datalength:
return
curpos = self._file.tell()
self._file.seek(self._form_length_pos, 0)
self._file.write(struct.pack('<l', 36 + self._datawritten))
self._file.seek(self._data_length_pos, 0)
self._file.write(struct.pack('<l', self._datawritten))
self._file.seek(curpos, 0)
self._datalength = self._datawritten
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
mode = f.mode
else:
mode = 'rb'
if mode in ('r', 'rb'):
return Wave_read(f)
elif mode in ('w', 'wb'):
return Wave_write(f)
else:
raise Error, "mode must be 'r', 'rb', 'w', or 'wb'"
openfp = open # B/W compatibility
|
kcompher/BuildingMachineLearningSystemsWithPython
|
refs/heads/master
|
ch07/figure1.py
|
4
|
# This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
import numpy as np
from sklearn.datasets import load_boston
import pylab as plt
from mpltools import style
style.use('ggplot')
boston = load_boston()
plt.scatter(boston.data[:, 5], boston.target)
plt.xlabel("RM")
plt.ylabel("House Price")
x = boston.data[:, 5]
x = np.array([[v] for v in x])
y = boston.target
slope, res, _, _ = np.linalg.lstsq(x, y)
plt.plot([0, boston.data[:, 5].max() + 1],
[0, slope * (boston.data[:, 5].max() + 1)], '-', lw=4)
plt.savefig('Figure1.png', dpi=150)
rmse = np.sqrt(res[0] / len(x))
print('Residual: {}'.format(rmse))
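# Note: the fit above is constrained through the origin because x has a
# single column. A sketch (same data assumed) of fitting an intercept as
# well, by appending a bias column of ones:
#
#     X = np.array([[v, 1.0] for v in boston.data[:, 5]])
#     (slope, intercept), res, _, _ = np.linalg.lstsq(X, y)
#     plt.plot(X[:, 0], slope * X[:, 0] + intercept, '-', lw=4)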
|
raags/ansible-modules-core
|
refs/heads/devel
|
test/unit/cloud/openstack/test_os_server.py
|
85
|
import mock
import pytest
import yaml
import inspect
import collections
from cloud.openstack import os_server
class AnsibleFail(Exception):
pass
class AnsibleExit(Exception):
pass
def params_from_doc(func):
'''This function extracts the docstring from the specified function,
parses it as a YAML document, and returns parameters for the os_server
module.'''
doc = inspect.getdoc(func)
    cfg = yaml.safe_load(doc)
for task in cfg:
for module, params in task.items():
for k, v in params.items():
if k in ['nics'] and type(v) == str:
params[k] = [v]
task[module] = collections.defaultdict(str,
params)
return cfg[0]['os_server']
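# For illustration (hypothetical docstring): a test method documented as
#
#     - os_server:
#         nics: net-id=1234
#
# yields a defaultdict(str) equivalent to {'nics': ['net-id=1234']}; string
# nics values are wrapped in a list, and missing keys read back as ''.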
class FakeCloud (object):
ports = [
{'name': 'port1', 'id': '1234'},
{'name': 'port2', 'id': '4321'},
]
networks = [
{'name': 'network1', 'id': '5678'},
{'name': 'network2', 'id': '8765'},
]
images = [
{'name': 'cirros', 'id': '1'},
{'name': 'fedora', 'id': '2'},
]
    flavors = [
        {'name': 'm1.small', 'id': '1', 'ram': 1024},
        {'name': 'm1.tiny', 'id': '2', 'ram': 512},
    ]
def _find(self, source, name):
for item in source:
if item['name'] == name or item['id'] == name:
return item
def get_image_id(self, name, exclude=None):
image = self._find(self.images, name)
if image:
return image['id']
def get_flavor(self, name):
return self._find(self.flavors, name)
def get_flavor_by_ram(self, ram, include=None):
for flavor in self.flavors:
if flavor['ram'] >= ram and (include is None or include in
flavor['name']):
return flavor
def get_port(self, name):
return self._find(self.ports, name)
def get_network(self, name):
return self._find(self.networks, name)
create_server = mock.MagicMock()
class TestNetworkArgs(object):
'''This class exercises the _network_args function of the
os_server module. For each test, we parse the YAML document
contained in the docstring to retrieve the module parameters for the
test.'''
def setup_method(self, method):
self.cloud = FakeCloud()
self.module = mock.MagicMock()
self.module.params = params_from_doc(method)
def test_nics_string_net_id(self):
'''
- os_server:
nics: net-id=1234
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['net-id'] == '1234')
def test_nics_string_net_id_list(self):
'''
- os_server:
nics: net-id=1234,net-id=4321
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['net-id'] == '1234')
assert(args[1]['net-id'] == '4321')
def test_nics_string_port_id(self):
'''
- os_server:
nics: port-id=1234
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['port-id'] == '1234')
def test_nics_string_net_name(self):
'''
- os_server:
nics: net-name=network1
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['net-id'] == '5678')
def test_nics_string_port_name(self):
'''
- os_server:
nics: port-name=port1
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['port-id'] == '1234')
def test_nics_structured_net_id(self):
'''
- os_server:
nics:
- net-id: '1234'
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['net-id'] == '1234')
def test_nics_structured_mixed(self):
'''
- os_server:
nics:
- net-id: '1234'
- port-name: port1
- 'net-name=network1,port-id=4321'
'''
args = os_server._network_args(self.module, self.cloud)
assert(args[0]['net-id'] == '1234')
assert(args[1]['port-id'] == '1234')
assert(args[2]['net-id'] == '5678')
assert(args[3]['port-id'] == '4321')
class TestCreateServer(object):
def setup_method(self, method):
self.cloud = FakeCloud()
self.module = mock.MagicMock()
self.module.params = params_from_doc(method)
self.module.fail_json.side_effect = AnsibleFail()
self.module.exit_json.side_effect = AnsibleExit()
self.meta = mock.MagicMock()
        self.meta.get_hostvars_from_server.return_value = {
'id': '1234'
}
os_server.meta = self.meta
def test_create_server(self):
'''
- os_server:
image: cirros
flavor: m1.tiny
nics:
- net-name: network1
'''
with pytest.raises(AnsibleExit):
os_server._create_server(self.module, self.cloud)
assert(self.cloud.create_server.call_count == 1)
assert(self.cloud.create_server.call_args[1]['image']
== self.cloud.get_image_id('cirros'))
assert(self.cloud.create_server.call_args[1]['flavor']
== self.cloud.get_flavor('m1.tiny')['id'])
assert(self.cloud.create_server.call_args[1]['nics'][0]['net-id']
== self.cloud.get_network('network1')['id'])
def test_create_server_bad_flavor(self):
'''
- os_server:
image: cirros
flavor: missing_flavor
nics:
- net-name: network1
'''
with pytest.raises(AnsibleFail):
os_server._create_server(self.module, self.cloud)
assert('missing_flavor' in
self.module.fail_json.call_args[1]['msg'])
def test_create_server_bad_nic(self):
'''
- os_server:
image: cirros
flavor: m1.tiny
nics:
- net-name: missing_network
'''
with pytest.raises(AnsibleFail):
os_server._create_server(self.module, self.cloud)
assert('missing_network' in
self.module.fail_json.call_args[1]['msg'])
|
plotly/python-api
|
refs/heads/master
|
packages/python/plotly/plotly/graph_objs/candlestick/__init__.py
|
2
|
import sys
if sys.version_info < (3, 7):
from ._decreasing import Decreasing
from ._hoverlabel import Hoverlabel
from ._increasing import Increasing
from ._line import Line
from ._stream import Stream
from . import decreasing
from . import hoverlabel
from . import increasing
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[".decreasing", ".hoverlabel", ".increasing"],
[
"._decreasing.Decreasing",
"._hoverlabel.Hoverlabel",
"._increasing.Increasing",
"._line.Line",
"._stream.Stream",
],
)
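# For context, a minimal sketch of the PEP 562 mechanism (module-level
# __getattr__, Python 3.7+) that relative_import builds on; the names below
# are illustrative, not plotly's actual implementation:
#
#     import importlib
#
#     _lazy = {"Line": "._line", "Stream": "._stream"}
#
#     def __getattr__(name):
#         if name in _lazy:
#             module = importlib.import_module(_lazy[name], __name__)
#             return getattr(module, name)
#         raise AttributeError(name)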
|
cforth/CryptoApp
|
refs/heads/master
|
ImgLook.py
|
1
|
import io
import os
import logging
import tkinter as tk
import tkinter.ttk as ttk
import tkinter.filedialog as filedialog
import tkinter.messagebox as tkmessagebox
import libs.CFCrypto as CFCrypto
import libs.CFCryptoX as CFCryptoX
from libs.CFCanvas import CFCanvas
from libs.Util import set_combobox_item
from libs.Util import IMG_EXT_LIST
logging.basicConfig(level=logging.INFO)
# Window class
class Window(ttk.Frame):
def __init__(self, master=None, **kwargs):
super().__init__(master, padding=2)
        # Select the encryption mode to use: ECB or CBC
self.cryptModeOption = tk.StringVar()
self.cryptModeCombobox = ttk.Combobox(self, width=10, textvariable=self.cryptModeOption)
self.cryptModeCombobox.grid(row=0, column=0, sticky=('w', 'e'))
self.cryptModeCombobox.state(('readonly',))
self.cryptModeCombobox.config(values=["ECB", "CBC"])
set_combobox_item(self.cryptModeCombobox, "ECB", True)
self.cryptoOptionCombobox = ttk.Combobox(self, state="readonly", values=["解密文件", "不需解密", "解密保名"], width=10)
self.cryptoOption = tk.StringVar()
self.cryptoOptionCombobox['textvariable'] = self.cryptoOption
self.cryptoOptionCombobox.grid(sticky=('w', 'e'), row=0, column=1)
self.passwordEntry = tk.Entry(self, show="*", width=40)
self.password = tk.StringVar()
self.passwordEntry['textvariable'] = self.password
self.passwordEntry.grid(sticky=('w', 'e'), row=0, column=2)
self.pageOptionCombobox = ttk.Combobox(self, state="readonly", values=["单页", "双页"], width=10)
self.pageOption = tk.StringVar()
self.pageOptionCombobox['textvariable'] = self.pageOption
self.pageOptionCombobox.grid(sticky=('w', 'e'), row=0, column=3)
self.orderOptionCombobox = ttk.Combobox(self, state="readonly", values=["左开", "右开"], width=10)
self.orderOption = tk.StringVar()
self.orderOptionCombobox['textvariable'] = self.orderOption
self.orderOptionCombobox.grid(sticky=('w', 'e'), row=0, column=4)
self.fileFromButton = ttk.Button(self, text="选择文件", width=10)
self.fileFromButton.grid(sticky=('w', 'e'), row=0, column=5)
self.fileFromButton['command'] = self.file_from_button_callback
self.refreshButton = ttk.Button(self, text="重新加载", width=10)
self.refreshButton.grid(sticky=('w', 'e'), row=0, column=6)
self.refreshButton['command'] = self.refresh_button_callback
self.imgCanvas = CFCanvas(500, 500, self)
self.imgCanvas.grid(sticky=('w', 'e', 'n', 's'), row=1, column=0, columnspan=7)
self.imgSizeNameLabel = tk.Label(self, text="调整大小", width=10)
self.imgSizeNameLabel.grid(sticky=('e',), row=2, column=0)
self.imgSizeScale = ttk.Scale(self, orient="horizontal", from_=1, to=100)
self.imgSizeScale.grid(sticky=('w', 'e'), row=2, column=1, columnspan=2)
self.imgSizeScale.bind('<ButtonRelease-1>', self.set_img_size)
self.imgSizeScale.bind('<B1-Motion>', self.set_img_size_info)
self.imgSizeInfoLabel = tk.Label(self, width=10)
self.imgSizeInfo = tk.StringVar()
self.imgSizeInfoLabel['textvariable'] = self.imgSizeInfo
self.imgSizeInfoLabel.grid(sticky=('w', 'e'), row=2, column=3)
self.prevImgButton = ttk.Button(self, text="<")
self.prevImgButton.grid(sticky=('w', 'n', 's'), row=2, column=4)
self.prevImgButton['command'] = self.prev_img_button_callback
self.nextImgButton = ttk.Button(self, text=">")
self.nextImgButton.grid(sticky=('w', 'n', 's'), row=2, column=5)
self.nextImgButton['command'] = self.next_img_button_callback
self.rotateImgButton = ttk.Button(self, text="旋转")
self.rotateImgButton.grid(sticky=('w',), row=2, column=6)
self.rotateImgButton['command'] = self.rotate_img_button_callback
self.imgInfoLabel = tk.Label(self, text="图片信息")
self.imgInfo = tk.StringVar()
self.imgInfoLabel['textvariable'] = self.imgInfo
self.imgInfoLabel.grid(sticky=('w',), row=3, column=1)
self.jumpPageNumberLabel = tk.Label(self, text="跳转页码:")
self.jumpPageNumberLabel.grid(sticky=('e',), row=3, column=4)
self.jumpPageNumberEntry = tk.Entry(self, width=10)
self.jumpPageNumber = tk.StringVar()
self.jumpPageNumberEntry['textvariable'] = self.jumpPageNumber
self.jumpPageNumberEntry.grid(sticky=('w', 'e'), row=3, column=5)
self.jumpPageNumberButton = ttk.Button(self, text="GO", width=10)
self.jumpPageNumberButton.grid(sticky=('w', 'e'), row=3, column=6)
self.jumpPageNumberButton['command'] = self.jump_page_callback
        # List of image paths, used for paging back and forth
self.img_list = []
        # Path of the currently displayed image
self.current_img_path = ""
        # Initialize the comboboxes with their default values
self.init_default_combobox_item()
        # Maximum image width
self.img_max_width = 1960
        # Default image width; the size slider is positioned to match below
self.zoom_width = self.img_max_width * 0.22
        # Counter-clockwise rotation angle to apply to the image
self.rotate_angle = 0
self.imgSizeScale.set(self.zoom_width * 100 / self.img_max_width)
        self.imgSizeInfo.set(str(int(self.zoom_width * 100 // self.img_max_width)) + "%")
        # Bind keyboard events
self.master.bind("<Key>", self.key_event)
self.jumpPageNumberEntry.bind("<Return>", self.jump_page_callback)
        # Re-center the image whenever the main window is resized
self.master.bind("<Configure>", self.img_center)
        # Bind the mouse wheel to image zooming
self.master.bind("<MouseWheel>", self.process_wheel)
self.master.columnconfigure(0, weight=1)
self.master.rowconfigure(0, weight=1)
self.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.E, tk.W))
self.columnconfigure(2, weight=1)
self.rowconfigure(1, weight=1)
        # Keep the initial keyword arguments; if any were given, open the image immediately
self.kwargs = dict(**kwargs)
if self.kwargs:
self.open_img(**self.kwargs)
    # Pick the crypto module for the selected encryption mode
def choose_crypt_mode(self):
if self.cryptModeOption.get() == "ECB":
return CFCrypto
elif self.cryptModeOption.get() == "CBC":
return CFCryptoX
    # Jump to the specified page number
def jump_page_callback(self, event=None):
try:
page_number = int(self.jumpPageNumber.get())
if 0 < page_number <= len(self.img_list):
self.current_img_path = self.img_list[page_number-1]
self.img_show()
self.set_img_info()
except Exception as e:
logging.error("Jump page number error!")
    # Mouse-wheel handler: zoom the image in or out
def process_wheel(self, event=None):
img_size_scale = self.imgSizeScale.get()
if event.delta > 0:
if img_size_scale * 1.2 <= 100:
self.imgSizeScale.set(img_size_scale * 1.2)
else:
self.imgSizeScale.set(100.0)
else:
if img_size_scale * 0.8 >= 5:
self.imgSizeScale.set(img_size_scale * 0.8)
else:
self.imgSizeScale.set(5.0)
self.set_img_size_info()
self.set_img_size()
    # Initialize the comboboxes with their default values
def init_default_combobox_item(self):
        # Default decryption option
set_combobox_item(self.cryptoOptionCombobox, "不需解密", True)
        # Default to single-page display
set_combobox_item(self.pageOptionCombobox, "单页", True)
        # Default reading order for double-page display
set_combobox_item(self.orderOptionCombobox, "左开", True)
    # From the current image's folder, collect every image into img_list for paging
def set_img_list(self):
crypto_algorithm = self.choose_crypt_mode()
img_dir_path = self.current_img_path[:self.current_img_path.rindex("/") + 1]
crypto_option = self.cryptoOption.get()
if crypto_option == "解密文件":
self.img_list = []
            # Temporary list of decrypted image names, used for sorting
decrypt_img_name_list = []
for img_name in os.listdir(img_dir_path):
try:
decrypt_img_name = crypto_algorithm.StringCrypto(self.password.get()).decrypt(img_name)
if os.path.splitext(decrypt_img_name.lower())[1][1:] in IMG_EXT_LIST:
decrypt_img_name_list.append(decrypt_img_name)
except Exception as e:
logging.error("Decrypt img name error!")
            # Sort the decrypted names, then re-encrypt them into img_list so paging follows that order
decrypt_img_name_list.sort()
for decrypt_img_name in decrypt_img_name_list:
img_name = crypto_algorithm.StringCrypto(self.password.get()).encrypt(decrypt_img_name)
self.img_list.append(os.path.join(img_dir_path, img_name))
elif crypto_option == "解密保名" or crypto_option == "不需解密":
self.img_list = [os.path.join(img_dir_path, img_name) for img_name in os.listdir(img_dir_path)
if os.path.splitext(img_name.lower())[1][1:] in IMG_EXT_LIST]
    # Decrypt a string
    def decrypt_string(self, text):
        crypto_algorithm = self.choose_crypt_mode()
        try:
            decrypt_str = crypto_algorithm.StringCrypto(self.password.get()).decrypt(text)
except Exception as e:
logging.error("Decrypt img name error!")
decrypt_str = ""
return decrypt_str
    # Update the image info label
def set_img_info(self):
page_option = self.pageOption.get()
crypto_option = self.cryptoOption.get()
if crypto_option == "解密文件":
img_name = self.decrypt_string(os.path.basename(self.current_img_path))
            logging.debug(img_name)
else:
img_name = os.path.basename(self.current_img_path)
if not self.img_list or self.current_img_path not in self.img_list:
self.imgInfo.set("")
elif page_option == "单页":
img_index = self.img_list.index(self.current_img_path)
index_str = str(img_index + 1) + "/" + str(len(self.img_list))
self.imgInfo.set(index_str + " : " + img_name)
elif page_option == "双页":
img_index = self.img_list.index(self.current_img_path)
index_str = str(img_index + 1) + "/" + str(len(self.img_list))
if img_index < len(self.img_list) - 1:
img_index_next = img_index + 1
index_str_next = str(img_index_next + 1) + "/" + str(len(self.img_list))
if crypto_option == "解密文件":
img_name_next = self.decrypt_string(os.path.basename(self.img_list[img_index_next]))
else:
img_name_next = os.path.basename(self.img_list[img_index_next])
order_option = self.orderOption.get()
if order_option == "左开":
self.imgInfo.set(index_str + ", " + index_str_next + " : " + img_name + " | " + img_name_next)
else:
self.imgInfo.set(index_str_next + ", " + index_str + " : " + img_name_next + " | " + img_name)
else:
self.imgInfo.set(index_str + " : " + img_name)
def key_event(self, event=None):
        # Right arrow key: next image
if event.keycode == 39:
self.next_img_button_callback()
        # Left arrow key: previous image
elif event.keycode == 37:
self.prev_img_button_callback()
    # Choose the image to display, record its path and rebuild the image list
def file_from_button_callback(self, event=None):
img_path = filedialog.askopenfilename()
if img_path:
self.current_img_path = img_path
self.set_img_list()
self.img_show()
self.set_img_info()
    # Open an image given its path, decryption options and password
def open_img(self, img_path="", password="", crypto_option="不需解密", crypto_mode="ECB", page_option="单页", order_option="左开"):
if img_path and os.path.isfile(img_path):
self.current_img_path = os.path.abspath(img_path).replace("\\", "/")
self.password.set(str(password))
if page_option in ["单页", "双页"]:
self.pageOption.set(page_option)
if order_option in ["左开", "右开"]:
self.orderOption.set(order_option)
if crypto_option in ["解密文件", "不需解密", "解密保名"]:
self.cryptoOption.set(crypto_option)
if crypto_mode in ["ECB", "CBC"]:
self.cryptModeOption.set(crypto_mode)
self.set_img_list()
self.img_show()
self.set_img_info()
    # Reload the image
def refresh_button_callback(self, event=None):
self.set_img_list()
self.img_show()
self.set_img_info()
    # Toggle whether the password entry shows or masks its content
def password_show_button_callback(self, event=None):
if self.passwordEntry["show"] == "*":
self.passwordEntry["show"] = ""
else:
self.passwordEntry["show"] = "*"
    # Page backwards through the images
def prev_img_button_callback(self, event=None):
page_option = self.pageOption.get()
self.rotate_angle = 0
if not self.img_list:
return
elif self.current_img_path not in self.img_list:
index = len(self.img_list)
else:
index = self.img_list.index(self.current_img_path)
if page_option == "单页":
if index == 0:
return
else:
self.current_img_path = self.img_list[index - 1]
elif page_option == "双页":
if index == 0:
return
elif index == 1:
self.current_img_path = self.img_list[index - 1]
else:
self.current_img_path = self.img_list[index - 2]
self.img_show()
self.set_img_info()
    # Page forwards through the images
def next_img_button_callback(self, event=None):
page_option = self.pageOption.get()
self.rotate_angle = 0
if not self.img_list:
return
elif self.current_img_path not in self.img_list:
index = -1
else:
index = self.img_list.index(self.current_img_path)
if page_option == "单页":
if index >= len(self.img_list) - 1:
return
else:
self.current_img_path = self.img_list[index + 1]
elif page_option == "双页":
if index >= len(self.img_list) - 2:
return
else:
self.current_img_path = self.img_list[index + 2]
self.img_show()
self.set_img_info()
    # Rotate the image counter-clockwise
def rotate_img_button_callback(self, event=None):
        # Rotate 90 degrees counter-clockwise
self.rotate_angle += 90
        # Wrap around past 360 degrees
self.rotate_angle %= 360
self.img_show()
def img_center(self, event=None):
if self.imgCanvas:
self.imgCanvas.img_center()
    # While the size slider is dragged, show the zoom percentage
def set_img_size_info(self, event=None):
self.zoom_width = int(self.imgSizeScale.get() * self.img_max_width / 100)
self.imgSizeInfo.set(str(self.zoom_width * 100 // self.img_max_width) + "%")
    # Resize the displayed image, preserving its aspect ratio
def set_img_size(self, event=None):
self.set_img_size_info()
self.img_show()
    # Show a static image
def default_img_show(self, img_path):
self.imgCanvas.default_img_show(img_path, self.rotate_angle, self.zoom_width)
    # Show two static images side by side
def default_double_img_show(self, img_path, next_img_path, order_option):
self.imgCanvas.default_double_img_show(img_path, next_img_path,
order_option, self.rotate_angle, self.zoom_width)
def default_gif_show(self, img_path):
self.imgCanvas.default_gif_show(img_path, self.rotate_angle, self.zoom_width)
    # Show an encrypted static image
def crypto_img_show(self, img_path):
crypto_algorithm = self.choose_crypt_mode()
img_file_like = io.BytesIO(crypto_algorithm.ByteCrypto(self.password.get()).decrypt(img_path))
self.imgCanvas.default_img_show(img_file_like, self.rotate_angle, self.zoom_width)
    # Show two encrypted static images side by side
def crypto_double_img_show(self, img_path, next_img_path, order_option):
crypto_algorithm = self.choose_crypt_mode()
img_file_like = io.BytesIO(crypto_algorithm.ByteCrypto(self.password.get()).decrypt(img_path))
next_img_file_like = io.BytesIO(crypto_algorithm.ByteCrypto(self.password.get()).decrypt(next_img_path))
self.imgCanvas.default_double_img_show(img_file_like, next_img_file_like, order_option,
self.rotate_angle, self.zoom_width)
    # Show an encrypted animated image (GIF)
def crypto_gif_show(self, img_path):
crypto_algorithm = self.choose_crypt_mode()
img_file_like = io.BytesIO(crypto_algorithm.ByteCrypto(self.password.get()).decrypt(img_path))
self.imgCanvas.default_gif_show(img_file_like, self.rotate_angle, self.zoom_width)
def cancel_img(self):
self.imgCanvas.cancel_img()
self.imgCanvas = None
    # Dispatch display according to image type and decryption option
def img_show(self, event=None):
crypto_algorithm = self.choose_crypt_mode()
page_option = self.pageOption.get()
self.imgCanvas.cancel_img()
crypto_option = self.cryptoOption.get()
        # Reading order for double-page display
order_option = self.orderOption.get()
        # Return immediately if the path does not exist
if not self.current_img_path or not os.path.exists(self.current_img_path):
return
img_name = os.path.basename(self.current_img_path)
if crypto_option == "解密文件":
try:
decrypt_img_name = crypto_algorithm.StringCrypto(self.password.get()).decrypt(img_name)
                # Return immediately if the image extension is unsupported
if os.path.splitext(decrypt_img_name.lower())[1][1:] not in IMG_EXT_LIST:
tkmessagebox.showerror("错误", "文件格式不支持")
return
if page_option == "单页":
if os.path.splitext(decrypt_img_name)[1] == ".gif":
self.crypto_gif_show(self.current_img_path)
else:
self.crypto_img_show(self.current_img_path)
elif page_option == "双页":
index = self.img_list.index(self.current_img_path)
                    # At the last page, show only the final two images in the list
if index == len(self.img_list) - 1:
next_img_path = self.current_img_path
self.current_img_path = self.img_list[index - 1]
else:
next_img_path = self.img_list[index + 1]
self.crypto_double_img_show(self.current_img_path, next_img_path, order_option)
except ValueError as e:
logging.error("Decrypt img error!")
tkmessagebox.showerror("错误", "图片解密失败")
elif crypto_option == "不需解密":
try:
                # Return immediately if the image extension is unsupported
if os.path.splitext(img_name.lower())[1][1:] not in IMG_EXT_LIST:
tkmessagebox.showerror("错误", "文件格式不支持")
return
if page_option == "单页":
if os.path.splitext(self.current_img_path)[1] == ".gif":
self.default_gif_show(self.current_img_path)
else:
self.default_img_show(self.current_img_path)
elif page_option == "双页":
index = self.img_list.index(self.current_img_path)
                    # At the last page, show only the final two images in the list
if index == len(self.img_list) - 1:
next_img_path = self.current_img_path
self.current_img_path = self.img_list[index - 1]
else:
next_img_path = self.img_list[index + 1]
self.default_double_img_show(self.current_img_path, next_img_path, order_option)
except OSError as e:
logging.error("Img format error!")
tkmessagebox.showerror("错误", "图片格式错误")
elif crypto_option == "解密保名":
try:
                # Return immediately if the image extension is unsupported
if os.path.splitext(img_name.lower())[1][1:] not in IMG_EXT_LIST:
tkmessagebox.showerror("错误", "文件格式不支持")
return
if page_option == "单页":
if os.path.splitext(self.current_img_path)[1] == ".gif":
self.crypto_gif_show(self.current_img_path)
else:
self.crypto_img_show(self.current_img_path)
elif page_option == "双页":
index = self.img_list.index(self.current_img_path)
                    # At the last page, show only the final two images in the list
if index == len(self.img_list) - 1:
next_img_path = self.current_img_path
self.current_img_path = self.img_list[index - 1]
else:
next_img_path = self.img_list[index + 1]
self.crypto_double_img_show(self.current_img_path, next_img_path, order_option)
except ValueError as e:
logging.error("Decrypt img error!")
tkmessagebox.showerror("错误", "图片解密失败")
# Open the image window directly with externally supplied arguments
def main_window(img_path="", password="", crypto_option="不需解密", crypto_mode="ECB", page_option="单页", order_option="左开"):
app = Window(master=None, img_path=img_path, password=password,
crypto_option=crypto_option, crypto_mode=crypto_mode, page_option=page_option, order_option=order_option)
app.master.title("图片查看器")
app.master.minsize(600, 600)
app.mainloop()
if __name__ == '__main__':
app = Window()
    # Set the window title:
app.master.title("图片查看器")
app.master.minsize(600, 600)
    # Main event loop:
app.mainloop()
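# Example (hypothetical path and password): open an encrypted image from
# another script. The option strings below are the literal values the module
# compares against, so they are kept verbatim:
#
#     import ImgLook
#     ImgLook.main_window(img_path="secret/cover.png", password="p@ss",
#                         crypto_option="解密文件", crypto_mode="CBC",
#                         page_option="双页", order_option="右开")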
|
xuegang/gpdb
|
refs/heads/master
|
src/test/tinc/tincrepo/mpp/gpdb/lib/models/nic_failure/__init__.py
|
9
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
import tinctest
from gppylib.commands.base import Command, REMOTE, WorkerPool
from gppylib.commands.unix import Ping
from mpp.models import MPPTestCase
class NICFailureTestCase(MPPTestCase):
"""
    Base class for a NIC failure test case
"""
def __init__(self, methodName):
self.TESTDB = 'testdb'
self.nic_to_address_map = dict() #(interface, hostname) -> ip address map
super(NICFailureTestCase, self).__init__(methodName)
def bring_down_nic(self, nics, hostname):
"""
Bring down nics based on the input nic names
"""
if nics is None:
return False
pool = WorkerPool()
try:
#get the ip address of the interface
for nic in nics:
cmd = Command(name='get the ip of the interface', cmdStr="/sbin/ifconfig %s | grep \'inet addr:\' | cut -d: -f2 | awk \'{ print $1}\'" % nic, ctxt=REMOTE, remoteHost=hostname)
cmd.run(validateAfter=True)
results = cmd.get_results()
if results.rc != 0:
raise Exception('Unable to map interface to ipaddress')
self.nic_to_address_map[(nic, hostname)] = results.stdout.split()[0].strip()
for nic in nics:
tinctest.logger.info("Bringing down %s:%s ..." % (hostname, nic))
cmd = Command(name='bring NIC down', cmdStr='sudo /sbin/ifdown %s' % nic, ctxt=REMOTE, remoteHost=hostname)
pool.addCommand(cmd)
pool.join()
for cmd in pool.getCompletedItems():
results = cmd.get_results()
if results.rc != 0:
return False
finally:
pool.haltWork()
pool.joinWorkers()
pool.join()
return True
def validate_nic_down(self):
"""
Ping validation on the nics.
"""
pool = WorkerPool()
try:
for nic, hostname in self.nic_to_address_map:
address = self.nic_to_address_map[(nic, hostname)]
cmd = Ping('ping validation', address, ctxt=REMOTE, remoteHost='localhost')
pool.addCommand(cmd)
pool.join()
for cmd in pool.getCompletedItems():
results = cmd.get_results()
if results.rc == 0:
return False
finally:
pool.haltWork()
pool.joinWorkers()
pool.join()
tinctest.logger.info("Successfully brought down nics ...")
return True
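    # For reference, a pure-Python alternative (assumption: Linux-only) to the
    # ifconfig/grep pipeline above, using the SIOCGIFADDR ioctl to resolve an
    # interface name to its IPv4 address:
    #
    #     import socket, struct, fcntl
    #
    #     def interface_ip(ifname):
    #         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    #         packed = struct.pack('256s', ifname[:15])
    #         addr = fcntl.ioctl(s.fileno(), 0x8915, packed)[20:24]  # SIOCGIFADDR
    #         return socket.inet_ntoa(addr)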
|
pwoodworth/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/staticfiles/views.py
|
71
|
"""
Views and functions for serving static files. These are only to be used during
development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import os
import posixpath
import re
import stat
import urllib
from email.Utils import parsedate_tz, mktime_tz
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404, HttpResponse, HttpResponseRedirect, HttpResponseNotModified
from django.template import loader, Template, Context, TemplateDoesNotExist
from django.utils.http import http_date
from django.contrib.staticfiles import finders, utils
def serve(request, path, document_root=None, show_indexes=False, insecure=False):
"""
Serve static files below a given point in the directory structure or
from locations inferred from the static files finders.
To use, put a URL pattern such as::
(r'^(?P<path>.*)$', 'django.contrib.staticfiles.views.serve')
in your URLconf.
If you provide the ``document_root`` parameter, the file won't be looked
up with the staticfiles finders, but in the given filesystem path, e.g.::
(r'^(?P<path>.*)$', 'django.contrib.staticfiles.views.serve', {'document_root' : '/path/to/my/files/'})
You may also set ``show_indexes`` to ``True`` if you'd like to serve a
basic index of the directory. This index view will use the
template hardcoded below, but if you'd like to override it, you can create
a template called ``static/directory_index.html``.
"""
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured("The view to serve static files can only "
"be used if the DEBUG setting is True or "
"the --insecure option of 'runserver' is "
"used")
if not document_root:
absolute_path = finders.find(path)
if not absolute_path:
raise Http404('"%s" could not be found' % path)
document_root, path = os.path.split(absolute_path)
# Clean up given path to only allow serving files below document_root.
path = posixpath.normpath(urllib.unquote(path))
path = path.lstrip('/')
newpath = ''
for part in path.split('/'):
if not part:
# Strip empty path components.
continue
drive, part = os.path.splitdrive(part)
head, part = os.path.split(part)
if part in (os.curdir, os.pardir):
# Strip '.' and '..' in path.
continue
newpath = os.path.join(newpath, part).replace('\\', '/')
if newpath and path != newpath:
return HttpResponseRedirect(newpath)
fullpath = os.path.join(document_root, newpath)
if os.path.isdir(fullpath):
if show_indexes:
return directory_index(newpath, fullpath)
raise Http404("Directory indexes are not allowed here.")
if not os.path.exists(fullpath):
raise Http404('"%s" does not exist' % fullpath)
# Respect the If-Modified-Since header.
statobj = os.stat(fullpath)
mimetype, encoding = mimetypes.guess_type(fullpath)
mimetype = mimetype or 'application/octet-stream'
if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
statobj[stat.ST_MTIME], statobj[stat.ST_SIZE]):
return HttpResponseNotModified(mimetype=mimetype)
    with open(fullpath, 'rb') as fp:
        contents = fp.read()
response = HttpResponse(contents, mimetype=mimetype)
response["Last-Modified"] = http_date(statobj[stat.ST_MTIME])
response["Content-Length"] = len(contents)
if encoding:
response["Content-Encoding"] = encoding
return response
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta http-equiv="Content-Language" content="en-us" />
<meta name="robots" content="NONE,NOARCHIVE" />
<title>Index of {{ directory }}</title>
</head>
<body>
<h1>Index of {{ directory }}</h1>
<ul>
{% ifnotequal directory "/" %}
<li><a href="../">../</a></li>
{% endifnotequal %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
def directory_index(path, fullpath):
try:
t = loader.select_template(['static/directory_index.html',
'static/directory_index'])
except TemplateDoesNotExist:
t = Template(DEFAULT_DIRECTORY_INDEX_TEMPLATE, name='Default directory index template')
files = []
for f in os.listdir(fullpath):
if not f.startswith('.'):
if os.path.isdir(os.path.join(fullpath, f)):
f += '/'
files.append(f)
c = Context({
'directory' : path + '/',
'file_list' : files,
})
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_date = parsedate_tz(matches.group(1))
if header_date is None:
raise ValueError
header_mtime = mktime_tz(header_date)
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if mtime > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
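# A quick illustration of the logic above (hypothetical header value): a date
# older than the file's mtime means the content has changed, so the full
# response should be served rather than a 304.
#
#     import time
#     was_modified_since("Sat, 29 Oct 1994 19:43:31 GMT; length=1024",
#                        mtime=time.time(), size=1024)   # -> True
#     was_modified_since(None)                           # -> True (no header)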
|
savanu/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/executormarionette.py
|
1
|
import os
import socket
import threading
import traceback
import urlparse
import uuid
errors = None
marionette = None
pytestrunner = None
here = os.path.join(os.path.split(__file__)[0])
from .base import (ExecutorException,
Protocol,
RefTestExecutor,
RefTestImplementation,
TestExecutor,
TestharnessExecutor,
WdspecExecutor,
WdspecRun,
WebDriverProtocol,
extra_timeout,
testharness_result_converter,
reftest_result_converter,
strip_server)
from ..testrunner import Stop
from ..webdriver_server import GeckoDriverServer
def do_delayed_imports():
global errors, marionette
# Marionette client used to be called marionette, recently it changed
# to marionette_driver for unfathomable reasons
try:
import marionette
from marionette import errors
except ImportError:
from marionette_driver import marionette, errors
class MarionetteProtocol(Protocol):
def __init__(self, executor, browser, capabilities=None, timeout_multiplier=1):
do_delayed_imports()
Protocol.__init__(self, executor, browser)
self.marionette = None
self.marionette_port = browser.marionette_port
self.capabilities = capabilities
self.timeout_multiplier = timeout_multiplier
self.timeout = None
self.runner_handle = None
def setup(self, runner):
"""Connect to browser via Marionette."""
Protocol.setup(self, runner)
self.logger.debug("Connecting to Marionette on port %i" % self.marionette_port)
startup_timeout = marionette.Marionette.DEFAULT_STARTUP_TIMEOUT * self.timeout_multiplier
self.marionette = marionette.Marionette(host='localhost',
port=self.marionette_port,
socket_timeout=None,
startup_timeout=None)
# XXX Move this timeout somewhere
self.logger.debug("Waiting for Marionette connection")
while True:
success = self.marionette.wait_for_port(startup_timeout)
#When running in a debugger wait indefinitely for firefox to start
if success or self.executor.debug_info is None:
break
session_started = False
if success:
try:
self.logger.debug("Starting Marionette session")
self.marionette.start_session(capabilities=self.capabilities)
except Exception as e:
self.logger.warning("Starting marionette session failed: %s" % e)
else:
self.logger.debug("Marionette session started")
session_started = True
if not success or not session_started:
self.logger.warning("Failed to connect to Marionette")
self.executor.runner.send_message("init_failed")
else:
try:
self.after_connect()
except Exception:
self.logger.warning("Post-connection steps failed")
self.logger.error(traceback.format_exc())
self.executor.runner.send_message("init_failed")
else:
self.executor.runner.send_message("init_succeeded")
def teardown(self):
try:
self.marionette._request_in_app_shutdown()
self.marionette.delete_session(send_request=False, reset_session_id=True)
except Exception:
# This is typically because the session never started
pass
if self.marionette is not None:
del self.marionette
@property
def is_alive(self):
"""Check if the Marionette connection is still active."""
try:
self.marionette.current_window_handle
except Exception:
return False
return True
def after_connect(self):
self.load_runner(self.executor.last_environment["protocol"])
def set_timeout(self, timeout):
"""Set the Marionette script timeout.
:param timeout: Script timeout in seconds
"""
self.marionette.timeout.script = timeout
self.timeout = timeout
def load_runner(self, protocol):
# Check if we previously had a test window open, and if we did make sure it's closed
self.marionette.execute_script("if (window.wrappedJSObject.win) {window.wrappedJSObject.win.close()}")
url = urlparse.urljoin(self.executor.server_url(protocol), "/testharness_runner.html")
self.logger.debug("Loading %s" % url)
self.runner_handle = self.marionette.current_window_handle
try:
self.marionette.navigate(url)
except Exception as e:
            self.logger.critical(
                "Loading initial page %s failed. Ensure that "
                "there are no other programs bound to this port and "
                "that your firewall rules or network setup do not "
                "prevent access.\n%s" % (url, traceback.format_exc(e)))
self.marionette.execute_script(
"document.title = '%s'" % threading.current_thread().name.replace("'", '"'))
def close_old_windows(self, protocol):
handles = self.marionette.window_handles
runner_handle = None
try:
handles.remove(self.runner_handle)
runner_handle = self.runner_handle
except ValueError:
# The runner window probably changed id but we can restore it
# This isn't supposed to happen, but marionette ids are not yet stable
# We assume that the first handle returned corresponds to the runner,
# but it hopefully doesn't matter too much if that assumption is
# wrong since we reload the runner in that tab anyway.
runner_handle = handles.pop(0)
for handle in handles:
self.marionette.switch_to_window(handle)
self.marionette.close()
self.marionette.switch_to_window(runner_handle)
if runner_handle != self.runner_handle:
self.load_runner(protocol)
def wait(self):
socket_timeout = self.marionette.client.sock.gettimeout()
if socket_timeout:
self.marionette.timeout.script = socket_timeout / 2
self.marionette.switch_to_window(self.runner_handle)
while True:
try:
self.marionette.execute_async_script("")
except errors.NoSuchWindowException:
# The window closed
break
except errors.ScriptTimeoutException:
self.logger.debug("Script timed out")
except (socket.timeout, IOError):
self.logger.debug("Socket closed")
break
except Exception as e:
self.logger.warning(traceback.format_exc(e))
break
def on_environment_change(self, old_environment, new_environment):
#Unset all the old prefs
for name in old_environment.get("prefs", {}).iterkeys():
value = self.executor.original_pref_values[name]
if value is None:
self.clear_user_pref(name)
else:
self.set_pref(name, value)
for name, value in new_environment.get("prefs", {}).iteritems():
self.executor.original_pref_values[name] = self.get_pref(name)
self.set_pref(name, value)
def set_pref(self, name, value):
if value.lower() not in ("true", "false"):
try:
int(value)
except ValueError:
value = "'%s'" % value
else:
value = value.lower()
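        # e.g. "true" -> true, "42" -> 42 (left numeric), "foo" -> 'foo', so
        # the value substitutes cleanly into the chrome-context script below.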
self.logger.info("Setting pref %s (%s)" % (name, value))
script = """
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
.getService(Components.interfaces.nsIPrefBranch);
let pref = '%s';
let type = prefInterface.getPrefType(pref);
let value = %s;
switch(type) {
case prefInterface.PREF_STRING:
prefInterface.setCharPref(pref, value);
break;
case prefInterface.PREF_BOOL:
prefInterface.setBoolPref(pref, value);
break;
case prefInterface.PREF_INT:
prefInterface.setIntPref(pref, value);
break;
}
""" % (name, value)
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
self.marionette.execute_script(script)
def clear_user_pref(self, name):
self.logger.info("Clearing pref %s" % (name))
script = """
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
.getService(Components.interfaces.nsIPrefBranch);
let pref = '%s';
prefInterface.clearUserPref(pref);
""" % name
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
self.marionette.execute_script(script)
def get_pref(self, name):
script = """
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
.getService(Components.interfaces.nsIPrefBranch);
let pref = '%s';
let type = prefInterface.getPrefType(pref);
switch(type) {
case prefInterface.PREF_STRING:
return prefInterface.getCharPref(pref);
case prefInterface.PREF_BOOL:
return prefInterface.getBoolPref(pref);
case prefInterface.PREF_INT:
return prefInterface.getIntPref(pref);
case prefInterface.PREF_INVALID:
return null;
}
""" % name
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
self.marionette.execute_script(script)
def clear_origin(self, url):
self.logger.info("Clearing origin %s" % (url))
script = """
let url = '%s';
let uri = Components.classes["@mozilla.org/network/io-service;1"]
.getService(Ci.nsIIOService)
.newURI(url);
let ssm = Components.classes["@mozilla.org/scriptsecuritymanager;1"]
.getService(Ci.nsIScriptSecurityManager);
let principal = ssm.createCodebasePrincipal(uri, {});
let qms = Components.classes["@mozilla.org/dom/quota-manager-service;1"]
.getService(Components.interfaces.nsIQuotaManagerService);
qms.clearStoragesForPrincipal(principal, "default", true);
""" % url
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
self.marionette.execute_script(script)
class ExecuteAsyncScriptRun(object):
def __init__(self, logger, func, protocol, url, timeout):
self.logger = logger
self.result = (None, None)
self.protocol = protocol
self.marionette = protocol.marionette
self.func = func
self.url = url
self.timeout = timeout
self.result_flag = threading.Event()
def run(self):
        index = self.url.rfind("/storage/")
if index != -1:
# Clear storage
self.protocol.clear_origin(self.url)
timeout = self.timeout
try:
if timeout is not None:
if timeout + extra_timeout != self.protocol.timeout:
self.protocol.set_timeout(timeout + extra_timeout)
else:
# We just want it to never time out, really, but marionette doesn't
# make that possible. It also seems to time out immediately if the
# timeout is set too high. This works at least.
self.protocol.set_timeout(2**28 - 1)
except IOError:
self.logger.error("Lost marionette connection before starting test")
return Stop
executor = threading.Thread(target = self._run)
executor.start()
if timeout is not None:
wait_timeout = timeout + 2 * extra_timeout
else:
wait_timeout = None
flag = self.result_flag.wait(wait_timeout)
if self.result == (None, None):
self.logger.debug("Timed out waiting for a result")
self.result = False, ("EXTERNAL-TIMEOUT", None)
elif self.result[1] is None:
# We didn't get any data back from the test, so check if the
# browser is still responsive
if self.protocol.is_alive:
self.result = False, ("ERROR", None)
else:
self.result = False, ("CRASH", None)
return self.result
def _run(self):
try:
self.result = True, self.func(self.marionette, self.url, self.timeout)
except errors.ScriptTimeoutException:
self.logger.debug("Got a marionette timeout")
self.result = False, ("EXTERNAL-TIMEOUT", None)
except (socket.timeout, IOError):
# This can happen on a crash
# Also, should check after the test if the firefox process is still running
# and otherwise ignore any other result and set it to crash
self.result = False, ("CRASH", None)
except Exception as e:
message = getattr(e, "message", "")
if message:
message += "\n"
message += traceback.format_exc(e)
self.result = False, ("ERROR", e)
finally:
self.result_flag.set()
class MarionetteTestharnessExecutor(TestharnessExecutor):
def __init__(self, browser, server_config, timeout_multiplier=1,
close_after_done=True, debug_info=None, capabilities=None,
**kwargs):
"""Marionette-based executor for testharness.js tests"""
TestharnessExecutor.__init__(self, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = MarionetteProtocol(self, browser, capabilities, timeout_multiplier)
self.script = open(os.path.join(here, "testharness_marionette.js")).read()
self.close_after_done = close_after_done
self.window_id = str(uuid.uuid4())
self.original_pref_values = {}
if marionette is None:
do_delayed_imports()
def is_alive(self):
return self.protocol.is_alive
def on_environment_change(self, new_environment):
self.protocol.on_environment_change(self.last_environment, new_environment)
if new_environment["protocol"] != self.last_environment["protocol"]:
self.protocol.load_runner(new_environment["protocol"])
def do_test(self, test):
timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
else None)
success, data = ExecuteAsyncScriptRun(self.logger,
self.do_testharness,
self.protocol,
self.test_url(test),
timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_testharness(self, marionette, url, timeout):
if self.close_after_done:
marionette.execute_script("if (window.wrappedJSObject.win) {window.wrappedJSObject.win.close()}")
self.protocol.close_old_windows(self.protocol)
if timeout is not None:
timeout_ms = str(timeout * 1000)
else:
timeout_ms = "null"
script = self.script % {"abs_url": url,
"url": strip_server(url),
"window_id": self.window_id,
"timeout_multiplier": self.timeout_multiplier,
"timeout": timeout_ms,
"explicit_timeout": timeout is None}
rv = marionette.execute_async_script(script, new_sandbox=False)
return rv
class MarionetteRefTestExecutor(RefTestExecutor):
def __init__(self, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, reftest_internal=False,
reftest_screenshot="unexpected",
group_metadata=None, capabilities=None, **kwargs):
"""Marionette-based executor for reftests"""
RefTestExecutor.__init__(self,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = MarionetteProtocol(self, browser, capabilities,
timeout_multiplier)
self.implementation = (InternalRefTestImplementation
if reftest_internal
else RefTestImplementation)(self)
self.implementation_kwargs = ({"screenshot": reftest_screenshot} if
reftest_internal else {})
self.close_after_done = close_after_done
self.has_window = False
self.original_pref_values = {}
self.group_metadata = group_metadata
with open(os.path.join(here, "reftest.js")) as f:
self.script = f.read()
with open(os.path.join(here, "reftest-wait_marionette.js")) as f:
self.wait_script = f.read()
def setup(self, runner):
super(self.__class__, self).setup(runner)
self.implementation.setup(**self.implementation_kwargs)
def teardown(self):
try:
self.implementation.teardown()
handle = self.protocol.marionette.window_handles[0]
self.protocol.marionette.switch_to_window(handle)
super(self.__class__, self).teardown()
except Exception as e:
# Ignore errors during teardown
self.logger.warning(traceback.format_exc(e))
def is_alive(self):
return self.protocol.is_alive
def on_environment_change(self, new_environment):
self.protocol.on_environment_change(self.last_environment, new_environment)
def do_test(self, test):
if not isinstance(self.implementation, InternalRefTestImplementation):
if self.close_after_done and self.has_window:
self.protocol.marionette.close()
self.protocol.marionette.switch_to_window(
self.protocol.marionette.window_handles[-1])
self.has_window = False
if not self.has_window:
self.protocol.marionette.execute_script(self.script)
self.protocol.marionette.switch_to_window(self.protocol.marionette.window_handles[-1])
self.has_window = True
result = self.implementation.run_test(test)
return self.convert_result(test, result)
def screenshot(self, test, viewport_size, dpi):
# https://github.com/w3c/wptrunner/issues/166
assert viewport_size is None
assert dpi is None
timeout = self.timeout_multiplier * test.timeout if self.debug_info is None else None
test_url = self.test_url(test)
return ExecuteAsyncScriptRun(self.logger,
self._screenshot,
self.protocol,
test_url,
timeout).run()
def _screenshot(self, marionette, url, timeout):
marionette.navigate(url)
marionette.execute_async_script(self.wait_script)
screenshot = marionette.screenshot(full=False)
# strip off the data:img/png, part of the url
if screenshot.startswith("data:image/png;base64,"):
screenshot = screenshot.split(",", 1)[1]
return screenshot
class InternalRefTestImplementation(object):
def __init__(self, executor):
self.timeout_multiplier = executor.timeout_multiplier
self.executor = executor
@property
def logger(self):
return self.executor.logger
def setup(self, screenshot="unexpected"):
data = {"screenshot": screenshot}
if self.executor.group_metadata is not None:
data["urlCount"] = {urlparse.urljoin(self.executor.server_url(key[0]), key[1]):value
for key, value in self.executor.group_metadata.get("url_count", {}).iteritems()
if value > 1}
self.executor.protocol.marionette.set_context(self.executor.protocol.marionette.CONTEXT_CHROME)
self.executor.protocol.marionette._send_message("reftest:setup", data)
def run_test(self, test):
viewport_size = test.viewport_size
dpi = test.dpi
references = self.get_references(test)
rv = self.executor.protocol.marionette._send_message("reftest:run",
{"test": self.executor.test_url(test),
"references": references,
"expected": test.expected(),
"timeout": test.timeout * 1000})["value"]
return rv
def get_references(self, node):
rv = []
for item, relation in node.references:
rv.append([self.executor.test_url(item), self.get_references(item), relation])
return rv
def teardown(self):
try:
self.executor.protocol.marionette._send_message("reftest:teardown", {})
self.executor.protocol.marionette.set_context(self.executor.protocol.marionette.CONTEXT_CONTENT)
except Exception as e:
# Ignore errors during teardown
            self.logger.warning(traceback.format_exc(e))
class GeckoDriverProtocol(WebDriverProtocol):
server_cls = GeckoDriverServer
class MarionetteWdspecExecutor(WdspecExecutor):
protocol_cls = GeckoDriverProtocol
|
vladmm/intellij-community
|
refs/heads/master
|
python/testData/testRunner/env/unit/another_file_for_pattern.py
|
79
|
__author__ = 'Ilya.Kazakevich'
from unittest import TestCase
class UTests(TestCase):
def testByPattern(self):
pass
def testByPattern2(self):
pass
|
tedsunnyday/SE-Server
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
|
316
|
# urllib3/response.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
import zlib
import io
from .exceptions import DecodeError
from .packages.six import string_types as basestring, binary_type
from .util import is_fp_closed
log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = binary_type()
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
return self._obj.decompress(data)
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
def _get_decoder(mode):
if mode == 'gzip':
return zlib.decompressobj(16 + zlib.MAX_WBITS)
return DeflateDecoder()
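# Why DeflateDecoder tries twice: some servers send raw DEFLATE streams with
# no zlib header, so the first decompress() raises zlib.error and the decoder
# retries with wbits=-MAX_WBITS. A standalone check (assumed usage):
#
#     import zlib
#     raw = zlib.compress(b"hello")[2:-4]  # strip zlib header and checksum
#     assert DeflateDecoder().decompress(raw) == b"hello"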
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed.
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, attempts to decode specific content-encoding's based on headers
(like 'gzip' and 'deflate') will be skipped and raw data will be used
instead.
:param original_response:
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
"""
CONTENT_DECODERS = ['gzip', 'deflate']
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(self, body='', headers=None, status=0, version=0, reason=None,
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
self.headers = headers or {}
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self._decoder = None
self._body = body if body and isinstance(body, basestring) else None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
self._pool = pool
self._connection = connection
if hasattr(body, 'read'):
self._fp = body
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get('location')
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
@property
def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`HTTPResponse.read` if bytes
        are encoded on the wire (e.g., compressed).
"""
return self._fp_bytes_read
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param cache_content:
If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
# Note: content-encoding value should be case-insensitive, per RFC 2616
# Section 3.5
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
if decode_content is None:
decode_content = self.decode_content
if self._fp is None:
return
flush_decoder = False
try:
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read()
flush_decoder = True
else:
cache_content = False
data = self._fp.read(amt)
if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
# already do. However, versions of python released before
# December 15, 2012 (http://bugs.python.org/issue16298) do not
# properly close the connection in all cases. There is no harm
# in redundantly calling close.
self._fp.close()
flush_decoder = True
self._fp_bytes_read += len(data)
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
if cache_content:
self._body = data
return data
finally:
if self._original_response and self._original_response.isclosed():
self.release_conn()
def stream(self, amt=2**16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
connection is closed.
:param amt:
How much of the content to read. The generator will return up to
            this much data per iteration, but may return less. This is particularly
likely when using compressed data. However, the empty string will
never be returned.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
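    # Example (assumed caller): consuming a response incrementally instead of
    # buffering it all in memory:
    #
    #     r = pool.urlopen('GET', '/big-file', preload_content=False)
    #     for chunk in r.stream(2**14):
    #         handle(chunk)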
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`httplib.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
# Normalize headers between different versions of Python
headers = {}
for k, v in r.getheaders():
# Python 3: Header keys are returned capitalised
k = k.lower()
has_value = headers.get(k)
if has_value: # Python 3: Repeating header keys are unmerged.
v = ', '.join([has_value, v])
headers[k] = v
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
return ResponseCls(body=r,
headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
strict=strict,
original_response=r,
**response_kw)
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
return self.headers
def getheader(self, name, default=None):
return self.headers.get(name, default)
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
@property
def closed(self):
if self._fp is None:
return True
elif hasattr(self._fp, 'closed'):
return self._fp.closed
elif hasattr(self._fp, 'isclosed'): # Python 2
return self._fp.isclosed()
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError("The file-like object this HTTPResponse is wrapped "
"around has no file descriptor")
def flush(self):
if self._fp is not None and hasattr(self._fp, 'flush'):
return self._fp.flush()
def readable(self):
return True
|
cloudpassage/cloudpassage-halo-python-sdk
|
refs/heads/master
|
cloudpassage/http_helper.py
|
1
|
"""HttpHelper class. Primary-level object, facilitates
GET / POST / PUT / DELETE requests against API.
"""
from .exceptions import CloudPassageValidation
from .utility import Utility as utility
# This is for Python 3 compatibility
try:
from urllib.parse import urlsplit
except ImportError:
from urlparse import urlsplit
class HttpHelper(object):
"""This class handles communication with the CloudPassage API.
When instantiating this class, pass in a :class:`cloudpassage.HaloSession`
object (referred to here as connection, as it defines connection parameters
for interacting with the API).
"""
def __init__(self, connection):
self.connection = connection
def get(self, endpoint, **kwargs):
"""This method performs a GET against Halo's API.
It will attempt to authenticate using the credentials (required
to instantiate the object) if the session has either:
1) Not been authenticated yet
2) OAuth Token has expired
This is a primary method, meaning it reaches out directly to the Halo
API, and should only be utilized by secondary methods with a more
specific purpose, like gathering events from /v1/events. If you're
using this method because the SDK doesn't provide a more specific
method, please reach out to toolbox@cloudpassage.com so we can get
an enhancement request in place for you.
Args:
            endpoint (str): URL path: everything between api.cloudpassage.com and
any parameters to be passed. Example: /v1/events
Keyword Args:
params (dict): This is a dictionary object,
represented like this: {"k1": "two,too"}
which goes into the URL looking like this: ?k1=two,too.
If you use a list as the value in a dictionary here, you'll get
two k/v pairs represented in the URL and the CloudPassage API
doesn't operate like that. Only the last instance of that
variable will be considered, and your results may be confusing.
So don't do it. Dictionaries should be {str:str}.
"""
params = kwargs["params"] if "params" in kwargs else None
response = self.connection.interact('get', endpoint, params)
return response.json()
def get_paginated(self, endpoint, key, max_pages, **kwargs):
"""This method returns a concatenated list of objects
from the Halo API.
It's really a wrapper for the get() method. Pass in the
        path as with the get() method, and a max_pages number.
        max_pages is expected to be an integer between 2 and 100.
Args:
endpoint (str): Path for initial query
key (str): The key in the response containing the objects of
interest. For instance, the /v1/events endpoint will have the
"events" key, which contains a list of dictionary objects
representing Halo events.
            max_pages (int): This is a number from 2-100. More than 100 pages
can take quite a while to return, so beyond that you should
consider using this SDK as a component in a multi-threaded
tool.
Keyword Args:
params (dict): This is a dictionary object,
represented like this: {"k1": "two,too"}
which goes into the URL looking like this: ?k1=two,too .
If you use a list as the value in a dictionary here, you'll get
two k/v pairs represented in the URL and the CloudPassage API
doesn't operate like that. Only the last instance of that
variable will be considered, and your results may be confusing.
So don't do it. Dictionaries should be {str:str}.
"""
max_pages_valid, pages_invalid_msg = utility.verify_pages(max_pages)
if not max_pages_valid:
raise CloudPassageValidation(pages_invalid_msg)
more_pages = False
response_accumulator = []
if "params" in kwargs and kwargs["params"] != {}:
initial_page = self.get(endpoint, params=kwargs["params"])
else:
initial_page = self.get(endpoint)
response, next_page = self.process_page(initial_page, key)
response_accumulator.extend(response)
pages_parsed = 1
if next_page is not None:
more_pages = True
while more_pages:
page = self.get(next_page)
response, next_page = self.process_page(page, key)
response_accumulator.extend(response)
pages_parsed += 1
if next_page is None:
more_pages = False
if pages_parsed >= max_pages:
more_pages = False
return response_accumulator
@classmethod
def get_next_page_path(cls, page):
next_page = None
if "pagination" in page:
if "next" in page["pagination"]:
nextpage = page["pagination"]["next"]
endpoint = "{}?{}".format(urlsplit(nextpage).path,
urlsplit(nextpage).query)
next_page = endpoint
return next_page
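    # Editor's sketch of the page shape this expects (illustrative values):
    #     {"events": [...],
    #      "pagination": {"next": "https://api.example.com/v1/events?page=2"}}
    # from which the path-plus-query "/v1/events?page=2" is returned.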
@classmethod
def process_page(cls, page, key):
"""Page goes in, list data comes out."""
response_accumulator = []
if key not in page:
fail_msg = ("Requested key %s not found in page"
% key)
raise CloudPassageValidation(fail_msg)
for k in page[key]:
response_accumulator.append(k)
next_page = cls.get_next_page_path(page)
return response_accumulator, next_page
def post(self, endpoint, reqbody):
"""This method performs a POST against Halo's API.
As with the GET method, it will attempt to (re)authenticate the session
if the key is expired or has not yet been retrieved.
Also like the GET method, it is not intended for direct use (though
we won't stop you). If you need something that the SDK doesn't already
provide, please reach out to toolbox@cloudpassage.com and let us get an
enhancement request submitted for you.
Args:
endpoint (str): path component of URL
reqbody (dict): Dictionary to be converted to JSON for insertion as
payload for request.
"""
return self.connection.interact("post", endpoint, None, reqbody).json()
def put(self, endpoint, reqbody):
"""This method performs a PUT against Halo's API.
As with the GET method, it will attempt to (re)authenticate the session
if the key is expired or has not yet been retrieved.
Also like the GET method, it is not intended for direct use (though
we won't stop you). If you need something that the SDK doesn't already
provide, please reach out to toolbox@cloudpassage.com and let us get an
enhancement request submitted for you.
Args:
endpoint (str): Path component of URL
reqbody (dict): Dictionary to be converted to JSON for insertion
as payload for request.
"""
response = self.connection.interact("put", endpoint, None, reqbody)
try:
return response.json()
except ValueError: # Sometimes we don't get json back...
return response.text
def delete(self, endpoint, **kwargs):
"""This method performs a Delete against Halo's API.
It will attempt to authenticate using the credentials (required
to instantiate the object) if the session has either:
1) Not been authenticated yet
2) OAuth Token has expired
This is a primary method, meaning it reaches out directly to the Halo
API, and should only be utilized by secondary methods with a more
specific purpose, like gathering events from /v1/events. If you're
using this method because the SDK doesn't provide a more specific
method, please reach out to toolbox@cloudpassage.com so we can get
an enhancement request in place for you.
Args:
endpoint (str): Path component of URL
"""
params = kwargs["params"] if "params" in kwargs else None
response = self.connection.interact('delete', endpoint, params)
try:
return response.json()
except ValueError: # Sometimes we don't get json back...
return response.text
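# --- Editor's sketch (illustrative only, not part of the SDK): driving the
# primary methods above. Assumes valid Halo API credentials; the key and
# secret below are placeholders.
#
#     import cloudpassage
#     session = cloudpassage.HaloSession("my_key_id", "my_secret_key")
#     http_helper = HttpHelper(session)
#     one_page = http_helper.get("/v1/events")
#     five_pages = http_helper.get_paginated("/v1/events", "events", 5)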
|
adityahase/frappe
|
refs/heads/develop
|
frappe/custom/doctype/package_publish_tool/__init__.py
|
12133432
| |
Jobava/bedrock
|
refs/heads/master
|
bedrock/mozorg/management/commands/__init__.py
|
12133432
| |
franosincic/edx-platform
|
refs/heads/master
|
lms/djangoapps/debug/__init__.py
|
12133432
| |
nju520/django
|
refs/heads/master
|
tests/middleware/__init__.py
|
12133432
| |
mbayon/TFG-MachineLearning
|
refs/heads/master
|
vbig/lib/python2.7/site-packages/numpy/core/tests/test_dtype.py
|
11
|
from __future__ import division, absolute_import, print_function
import sys
import numpy as np
from numpy.core.test_rational import rational
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_raises,
dec
)
def assert_dtype_equal(a, b):
assert_equal(a, b)
assert_equal(hash(a), hash(b),
"two equivalent types do not hash to the same value !")
def assert_dtype_not_equal(a, b):
assert_(a != b)
assert_(hash(a) != hash(b),
"two different types hash to the same value !")
class TestBuiltin(TestCase):
def test_run(self):
"""Only test hash runs at all."""
for t in [np.int, np.float, np.complex, np.int32, np.str, np.object,
np.unicode]:
dt = np.dtype(t)
hash(dt)
def test_dtype(self):
# Make sure equivalent byte order char hash the same (e.g. < and = on
# little endian)
for t in [np.int, np.float]:
dt = np.dtype(t)
dt2 = dt.newbyteorder("<")
dt3 = dt.newbyteorder(">")
if dt == dt2:
self.assertTrue(dt.byteorder != dt2.byteorder, "bogus test")
assert_dtype_equal(dt, dt2)
else:
self.assertTrue(dt.byteorder != dt3.byteorder, "bogus test")
assert_dtype_equal(dt, dt3)
def test_equivalent_dtype_hashing(self):
# Make sure equivalent dtypes with different type num hash equal
uintp = np.dtype(np.uintp)
if uintp.itemsize == 4:
left = uintp
right = np.dtype(np.uint32)
else:
left = uintp
right = np.dtype(np.ulonglong)
self.assertTrue(left == right)
self.assertTrue(hash(left) == hash(right))
def test_invalid_types(self):
# Make sure invalid type strings raise an error
assert_raises(TypeError, np.dtype, 'O3')
assert_raises(TypeError, np.dtype, 'O5')
assert_raises(TypeError, np.dtype, 'O7')
assert_raises(TypeError, np.dtype, 'b3')
assert_raises(TypeError, np.dtype, 'h4')
assert_raises(TypeError, np.dtype, 'I5')
assert_raises(TypeError, np.dtype, 'e3')
assert_raises(TypeError, np.dtype, 'f5')
if np.dtype('g').itemsize == 8 or np.dtype('g').itemsize == 16:
assert_raises(TypeError, np.dtype, 'g12')
elif np.dtype('g').itemsize == 12:
assert_raises(TypeError, np.dtype, 'g16')
if np.dtype('l').itemsize == 8:
assert_raises(TypeError, np.dtype, 'l4')
assert_raises(TypeError, np.dtype, 'L4')
else:
assert_raises(TypeError, np.dtype, 'l8')
assert_raises(TypeError, np.dtype, 'L8')
if np.dtype('q').itemsize == 8:
assert_raises(TypeError, np.dtype, 'q4')
assert_raises(TypeError, np.dtype, 'Q4')
else:
assert_raises(TypeError, np.dtype, 'q8')
assert_raises(TypeError, np.dtype, 'Q8')
def test_bad_param(self):
# Can't give a size that's too small
assert_raises(ValueError, np.dtype,
{'names':['f0', 'f1'],
'formats':['i4', 'i1'],
'offsets':[0, 4],
'itemsize':4})
# If alignment is enabled, the alignment (4) must divide the itemsize
assert_raises(ValueError, np.dtype,
{'names':['f0', 'f1'],
'formats':['i4', 'i1'],
'offsets':[0, 4],
'itemsize':9}, align=True)
# If alignment is enabled, the individual fields must be aligned
assert_raises(ValueError, np.dtype,
{'names':['f0', 'f1'],
'formats':['i1', 'f4'],
'offsets':[0, 2]}, align=True)
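    # Editor's sketch: for contrast with the rejected layouts above, a valid
    # aligned struct pads the itemsize so the alignment (4) divides it:
    #     np.dtype({'names': ['f0', 'f1'], 'formats': ['i4', 'i1'],
    #               'offsets': [0, 4], 'itemsize': 8}, align=True)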
class TestRecord(TestCase):
def test_equivalent_record(self):
"""Test whether equivalent record dtypes hash the same."""
a = np.dtype([('yo', np.int)])
b = np.dtype([('yo', np.int)])
assert_dtype_equal(a, b)
def test_different_names(self):
        # In theory, they may hash the same (collision)?
a = np.dtype([('yo', np.int)])
b = np.dtype([('ye', np.int)])
assert_dtype_not_equal(a, b)
def test_different_titles(self):
        # In theory, they may hash the same (collision)?
a = np.dtype({'names': ['r', 'b'],
'formats': ['u1', 'u1'],
'titles': ['Red pixel', 'Blue pixel']})
b = np.dtype({'names': ['r', 'b'],
'formats': ['u1', 'u1'],
'titles': ['RRed pixel', 'Blue pixel']})
assert_dtype_not_equal(a, b)
def test_mutate(self):
# Mutating a dtype should reset the cached hash value
a = np.dtype([('yo', np.int)])
b = np.dtype([('yo', np.int)])
c = np.dtype([('ye', np.int)])
assert_dtype_equal(a, b)
assert_dtype_not_equal(a, c)
a.names = ['ye']
assert_dtype_equal(a, c)
assert_dtype_not_equal(a, b)
state = b.__reduce__()[2]
a.__setstate__(state)
assert_dtype_equal(a, b)
assert_dtype_not_equal(a, c)
def test_not_lists(self):
"""Test if an appropriate exception is raised when passing bad values to
the dtype constructor.
"""
self.assertRaises(TypeError, np.dtype,
dict(names=set(['A', 'B']), formats=['f8', 'i4']))
self.assertRaises(TypeError, np.dtype,
dict(names=['A', 'B'], formats=set(['f8', 'i4'])))
def test_aligned_size(self):
# Check that structured dtypes get padded to an aligned size
dt = np.dtype('i4, i1', align=True)
assert_equal(dt.itemsize, 8)
dt = np.dtype([('f0', 'i4'), ('f1', 'i1')], align=True)
assert_equal(dt.itemsize, 8)
dt = np.dtype({'names':['f0', 'f1'],
'formats':['i4', 'u1'],
'offsets':[0, 4]}, align=True)
assert_equal(dt.itemsize, 8)
dt = np.dtype({'f0': ('i4', 0), 'f1':('u1', 4)}, align=True)
assert_equal(dt.itemsize, 8)
# Nesting should preserve that alignment
dt1 = np.dtype([('f0', 'i4'),
('f1', [('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')]),
('f2', 'i1')], align=True)
assert_equal(dt1.itemsize, 20)
dt2 = np.dtype({'names':['f0', 'f1', 'f2'],
'formats':['i4',
[('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')],
'i1'],
'offsets':[0, 4, 16]}, align=True)
assert_equal(dt2.itemsize, 20)
dt3 = np.dtype({'f0': ('i4', 0),
'f1': ([('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')], 4),
'f2': ('i1', 16)}, align=True)
assert_equal(dt3.itemsize, 20)
assert_equal(dt1, dt2)
assert_equal(dt2, dt3)
# Nesting should preserve packing
dt1 = np.dtype([('f0', 'i4'),
('f1', [('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')]),
('f2', 'i1')], align=False)
assert_equal(dt1.itemsize, 11)
dt2 = np.dtype({'names':['f0', 'f1', 'f2'],
'formats':['i4',
[('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')],
'i1'],
'offsets':[0, 4, 10]}, align=False)
assert_equal(dt2.itemsize, 11)
dt3 = np.dtype({'f0': ('i4', 0),
'f1': ([('f1', 'i1'), ('f2', 'i4'), ('f3', 'i1')], 4),
'f2': ('i1', 10)}, align=False)
assert_equal(dt3.itemsize, 11)
assert_equal(dt1, dt2)
assert_equal(dt2, dt3)
def test_union_struct(self):
# Should be able to create union dtypes
dt = np.dtype({'names':['f0', 'f1', 'f2'], 'formats':['<u4', '<u2', '<u2'],
'offsets':[0, 0, 2]}, align=True)
assert_equal(dt.itemsize, 4)
a = np.array([3], dtype='<u4').view(dt)
a['f1'] = 10
a['f2'] = 36
assert_equal(a['f0'], 10 + 36*256*256)
# Should be able to specify fields out of order
dt = np.dtype({'names':['f0', 'f1', 'f2'], 'formats':['<u4', '<u2', '<u2'],
'offsets':[4, 0, 2]}, align=True)
assert_equal(dt.itemsize, 8)
dt2 = np.dtype({'names':['f2', 'f0', 'f1'],
'formats':['<u2', '<u4', '<u2'],
'offsets':[2, 4, 0]}, align=True)
vals = [(0, 1, 2), (3, -1, 4)]
vals2 = [(2, 0, 1), (4, 3, -1)]
a = np.array(vals, dt)
b = np.array(vals2, dt2)
assert_equal(a.astype(dt2), b)
assert_equal(b.astype(dt), a)
assert_equal(a.view(dt2), b)
assert_equal(b.view(dt), a)
# Should not be able to overlap objects with other types
assert_raises(TypeError, np.dtype,
{'names':['f0', 'f1'],
'formats':['O', 'i1'],
'offsets':[0, 2]})
assert_raises(TypeError, np.dtype,
{'names':['f0', 'f1'],
'formats':['i4', 'O'],
'offsets':[0, 3]})
assert_raises(TypeError, np.dtype,
{'names':['f0', 'f1'],
'formats':[[('a', 'O')], 'i1'],
'offsets':[0, 2]})
assert_raises(TypeError, np.dtype,
{'names':['f0', 'f1'],
'formats':['i4', [('a', 'O')]],
'offsets':[0, 3]})
# Out of order should still be ok, however
dt = np.dtype({'names':['f0', 'f1'],
'formats':['i1', 'O'],
'offsets':[np.dtype('intp').itemsize, 0]})
def test_comma_datetime(self):
dt = np.dtype('M8[D],datetime64[Y],i8')
assert_equal(dt, np.dtype([('f0', 'M8[D]'),
('f1', 'datetime64[Y]'),
('f2', 'i8')]))
def test_from_dictproxy(self):
# Tests for PR #5920
dt = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'f4']})
assert_dtype_equal(dt, np.dtype(dt.fields))
dt2 = np.dtype((np.void, dt.fields))
assert_equal(dt2.fields, dt.fields)
def test_from_dict_with_zero_width_field(self):
# Regression test for #6430 / #2196
dt = np.dtype([('val1', np.float32, (0,)), ('val2', int)])
dt2 = np.dtype({'names': ['val1', 'val2'],
'formats': [(np.float32, (0,)), int]})
assert_dtype_equal(dt, dt2)
assert_equal(dt.fields['val1'][0].itemsize, 0)
assert_equal(dt.itemsize, dt.fields['val2'][0].itemsize)
def test_bool_commastring(self):
d = np.dtype('?,?,?') # raises?
assert_equal(len(d.names), 3)
for n in d.names:
assert_equal(d.fields[n][0], np.dtype('?'))
def test_nonint_offsets(self):
# gh-8059
def make_dtype(off):
return np.dtype({'names': ['A'], 'formats': ['i4'],
'offsets': [off]})
assert_raises(TypeError, make_dtype, 'ASD')
assert_raises(OverflowError, make_dtype, 2**70)
assert_raises(TypeError, make_dtype, 2.3)
assert_raises(ValueError, make_dtype, -10)
# no errors here:
dt = make_dtype(np.uint32(0))
np.zeros(1, dtype=dt)[0].item()
class TestSubarray(TestCase):
def test_single_subarray(self):
a = np.dtype((np.int, (2)))
b = np.dtype((np.int, (2,)))
assert_dtype_equal(a, b)
assert_equal(type(a.subdtype[1]), tuple)
assert_equal(type(b.subdtype[1]), tuple)
def test_equivalent_record(self):
"""Test whether equivalent subarray dtypes hash the same."""
a = np.dtype((np.int, (2, 3)))
b = np.dtype((np.int, (2, 3)))
assert_dtype_equal(a, b)
def test_nonequivalent_record(self):
"""Test whether different subarray dtypes hash differently."""
a = np.dtype((np.int, (2, 3)))
b = np.dtype((np.int, (3, 2)))
assert_dtype_not_equal(a, b)
a = np.dtype((np.int, (2, 3)))
b = np.dtype((np.int, (2, 2)))
assert_dtype_not_equal(a, b)
a = np.dtype((np.int, (1, 2, 3)))
b = np.dtype((np.int, (1, 2)))
assert_dtype_not_equal(a, b)
def test_shape_equal(self):
"""Test some data types that are equal"""
assert_dtype_equal(np.dtype('f8'), np.dtype(('f8', tuple())))
assert_dtype_equal(np.dtype('f8'), np.dtype(('f8', 1)))
assert_dtype_equal(np.dtype((np.int, 2)), np.dtype((np.int, (2,))))
assert_dtype_equal(np.dtype(('<f4', (3, 2))), np.dtype(('<f4', (3, 2))))
d = ([('a', 'f4', (1, 2)), ('b', 'f8', (3, 1))], (3, 2))
assert_dtype_equal(np.dtype(d), np.dtype(d))
def test_shape_simple(self):
"""Test some simple cases that shouldn't be equal"""
assert_dtype_not_equal(np.dtype('f8'), np.dtype(('f8', (1,))))
assert_dtype_not_equal(np.dtype(('f8', (1,))), np.dtype(('f8', (1, 1))))
assert_dtype_not_equal(np.dtype(('f4', (3, 2))), np.dtype(('f4', (2, 3))))
def test_shape_monster(self):
"""Test some more complicated cases that shouldn't be equal"""
assert_dtype_not_equal(
np.dtype(([('a', 'f4', (2, 1)), ('b', 'f8', (1, 3))], (2, 2))),
np.dtype(([('a', 'f4', (1, 2)), ('b', 'f8', (1, 3))], (2, 2))))
assert_dtype_not_equal(
np.dtype(([('a', 'f4', (2, 1)), ('b', 'f8', (1, 3))], (2, 2))),
np.dtype(([('a', 'f4', (2, 1)), ('b', 'i8', (1, 3))], (2, 2))))
assert_dtype_not_equal(
np.dtype(([('a', 'f4', (2, 1)), ('b', 'f8', (1, 3))], (2, 2))),
np.dtype(([('e', 'f8', (1, 3)), ('d', 'f4', (2, 1))], (2, 2))))
assert_dtype_not_equal(
np.dtype(([('a', [('a', 'i4', 6)], (2, 1)), ('b', 'f8', (1, 3))], (2, 2))),
np.dtype(([('a', [('a', 'u4', 6)], (2, 1)), ('b', 'f8', (1, 3))], (2, 2))))
def test_shape_sequence(self):
# Any sequence of integers should work as shape, but the result
# should be a tuple (immutable) of base type integers.
a = np.array([1, 2, 3], dtype=np.int16)
l = [1, 2, 3]
# Array gets converted
dt = np.dtype([('a', 'f4', a)])
assert_(isinstance(dt['a'].shape, tuple))
assert_(isinstance(dt['a'].shape[0], int))
# List gets converted
dt = np.dtype([('a', 'f4', l)])
assert_(isinstance(dt['a'].shape, tuple))
#
class IntLike(object):
def __index__(self):
return 3
def __int__(self):
# (a PyNumber_Check fails without __int__)
return 3
dt = np.dtype([('a', 'f4', IntLike())])
assert_(isinstance(dt['a'].shape, tuple))
assert_(isinstance(dt['a'].shape[0], int))
dt = np.dtype([('a', 'f4', (IntLike(),))])
assert_(isinstance(dt['a'].shape, tuple))
assert_(isinstance(dt['a'].shape[0], int))
def test_shape_matches_ndim(self):
dt = np.dtype([('a', 'f4', ())])
assert_equal(dt['a'].shape, ())
assert_equal(dt['a'].ndim, 0)
dt = np.dtype([('a', 'f4')])
assert_equal(dt['a'].shape, ())
assert_equal(dt['a'].ndim, 0)
dt = np.dtype([('a', 'f4', 4)])
assert_equal(dt['a'].shape, (4,))
assert_equal(dt['a'].ndim, 1)
dt = np.dtype([('a', 'f4', (1, 2, 3))])
assert_equal(dt['a'].shape, (1, 2, 3))
assert_equal(dt['a'].ndim, 3)
def test_shape_invalid(self):
# Check that the shape is valid.
max_int = np.iinfo(np.intc).max
max_intp = np.iinfo(np.intp).max
# Too large values (the datatype is part of this)
assert_raises(ValueError, np.dtype, [('a', 'f4', max_int // 4 + 1)])
assert_raises(ValueError, np.dtype, [('a', 'f4', max_int + 1)])
assert_raises(ValueError, np.dtype, [('a', 'f4', (max_int, 2))])
        # Takes a different code path (fails earlier):
assert_raises(ValueError, np.dtype, [('a', 'f4', max_intp + 1)])
# Negative values
assert_raises(ValueError, np.dtype, [('a', 'f4', -1)])
assert_raises(ValueError, np.dtype, [('a', 'f4', (-1, -1))])
def test_alignment(self):
#Check that subarrays are aligned
t1 = np.dtype('1i4', align=True)
t2 = np.dtype('2i4', align=True)
assert_equal(t1.alignment, t2.alignment)
class TestMonsterType(TestCase):
"""Test deeply nested subtypes."""
def test1(self):
simple1 = np.dtype({'names': ['r', 'b'], 'formats': ['u1', 'u1'],
'titles': ['Red pixel', 'Blue pixel']})
a = np.dtype([('yo', np.int), ('ye', simple1),
('yi', np.dtype((np.int, (3, 2))))])
b = np.dtype([('yo', np.int), ('ye', simple1),
('yi', np.dtype((np.int, (3, 2))))])
assert_dtype_equal(a, b)
c = np.dtype([('yo', np.int), ('ye', simple1),
('yi', np.dtype((a, (3, 2))))])
d = np.dtype([('yo', np.int), ('ye', simple1),
('yi', np.dtype((a, (3, 2))))])
assert_dtype_equal(c, d)
class TestMetadata(TestCase):
def test_no_metadata(self):
d = np.dtype(int)
self.assertEqual(d.metadata, None)
def test_metadata_takes_dict(self):
d = np.dtype(int, metadata={'datum': 1})
self.assertEqual(d.metadata, {'datum': 1})
def test_metadata_rejects_nondict(self):
self.assertRaises(TypeError, np.dtype, int, metadata='datum')
self.assertRaises(TypeError, np.dtype, int, metadata=1)
self.assertRaises(TypeError, np.dtype, int, metadata=None)
def test_nested_metadata(self):
d = np.dtype([('a', np.dtype(int, metadata={'datum': 1}))])
self.assertEqual(d['a'].metadata, {'datum': 1})
    def test_base_metadata_copied(self):
d = np.dtype((np.void, np.dtype('i4,i4', metadata={'datum': 1})))
assert_equal(d.metadata, {'datum': 1})
class TestString(TestCase):
def test_complex_dtype_str(self):
dt = np.dtype([('top', [('tiles', ('>f4', (64, 64)), (1,)),
('rtile', '>f4', (64, 36))], (3,)),
('bottom', [('bleft', ('>f4', (8, 64)), (1,)),
('bright', '>f4', (8, 36))])])
assert_equal(str(dt),
"[('top', [('tiles', ('>f4', (64, 64)), (1,)), "
"('rtile', '>f4', (64, 36))], (3,)), "
"('bottom', [('bleft', ('>f4', (8, 64)), (1,)), "
"('bright', '>f4', (8, 36))])]")
# If the sticky aligned flag is set to True, it makes the
# str() function use a dict representation with an 'aligned' flag
dt = np.dtype([('top', [('tiles', ('>f4', (64, 64)), (1,)),
('rtile', '>f4', (64, 36))],
(3,)),
('bottom', [('bleft', ('>f4', (8, 64)), (1,)),
('bright', '>f4', (8, 36))])],
align=True)
assert_equal(str(dt),
"{'names':['top','bottom'], "
"'formats':[([('tiles', ('>f4', (64, 64)), (1,)), "
"('rtile', '>f4', (64, 36))], (3,)),"
"[('bleft', ('>f4', (8, 64)), (1,)), "
"('bright', '>f4', (8, 36))]], "
"'offsets':[0,76800], "
"'itemsize':80000, "
"'aligned':True}")
assert_equal(np.dtype(eval(str(dt))), dt)
dt = np.dtype({'names': ['r', 'g', 'b'], 'formats': ['u1', 'u1', 'u1'],
'offsets': [0, 1, 2],
'titles': ['Red pixel', 'Green pixel', 'Blue pixel']})
assert_equal(str(dt),
"[(('Red pixel', 'r'), 'u1'), "
"(('Green pixel', 'g'), 'u1'), "
"(('Blue pixel', 'b'), 'u1')]")
dt = np.dtype({'names': ['rgba', 'r', 'g', 'b'],
'formats': ['<u4', 'u1', 'u1', 'u1'],
'offsets': [0, 0, 1, 2],
'titles': ['Color', 'Red pixel',
'Green pixel', 'Blue pixel']})
assert_equal(str(dt),
"{'names':['rgba','r','g','b'],"
" 'formats':['<u4','u1','u1','u1'],"
" 'offsets':[0,0,1,2],"
" 'titles':['Color','Red pixel',"
"'Green pixel','Blue pixel'],"
" 'itemsize':4}")
dt = np.dtype({'names': ['r', 'b'], 'formats': ['u1', 'u1'],
'offsets': [0, 2],
'titles': ['Red pixel', 'Blue pixel']})
assert_equal(str(dt),
"{'names':['r','b'],"
" 'formats':['u1','u1'],"
" 'offsets':[0,2],"
" 'titles':['Red pixel','Blue pixel'],"
" 'itemsize':3}")
dt = np.dtype([('a', '<m8[D]'), ('b', '<M8[us]')])
assert_equal(str(dt),
"[('a', '<m8[D]'), ('b', '<M8[us]')]")
def test_complex_dtype_repr(self):
dt = np.dtype([('top', [('tiles', ('>f4', (64, 64)), (1,)),
('rtile', '>f4', (64, 36))], (3,)),
('bottom', [('bleft', ('>f4', (8, 64)), (1,)),
('bright', '>f4', (8, 36))])])
assert_equal(repr(dt),
"dtype([('top', [('tiles', ('>f4', (64, 64)), (1,)), "
"('rtile', '>f4', (64, 36))], (3,)), "
"('bottom', [('bleft', ('>f4', (8, 64)), (1,)), "
"('bright', '>f4', (8, 36))])])")
dt = np.dtype({'names': ['r', 'g', 'b'], 'formats': ['u1', 'u1', 'u1'],
'offsets': [0, 1, 2],
'titles': ['Red pixel', 'Green pixel', 'Blue pixel']},
align=True)
assert_equal(repr(dt),
"dtype([(('Red pixel', 'r'), 'u1'), "
"(('Green pixel', 'g'), 'u1'), "
"(('Blue pixel', 'b'), 'u1')], align=True)")
dt = np.dtype({'names': ['rgba', 'r', 'g', 'b'],
'formats': ['<u4', 'u1', 'u1', 'u1'],
'offsets': [0, 0, 1, 2],
'titles': ['Color', 'Red pixel',
'Green pixel', 'Blue pixel']}, align=True)
assert_equal(repr(dt),
"dtype({'names':['rgba','r','g','b'],"
" 'formats':['<u4','u1','u1','u1'],"
" 'offsets':[0,0,1,2],"
" 'titles':['Color','Red pixel',"
"'Green pixel','Blue pixel'],"
" 'itemsize':4}, align=True)")
dt = np.dtype({'names': ['r', 'b'], 'formats': ['u1', 'u1'],
'offsets': [0, 2],
'titles': ['Red pixel', 'Blue pixel'],
'itemsize': 4})
assert_equal(repr(dt),
"dtype({'names':['r','b'], "
"'formats':['u1','u1'], "
"'offsets':[0,2], "
"'titles':['Red pixel','Blue pixel'], "
"'itemsize':4})")
dt = np.dtype([('a', '<M8[D]'), ('b', '<m8[us]')])
assert_equal(repr(dt),
"dtype([('a', '<M8[D]'), ('b', '<m8[us]')])")
@dec.skipif(sys.version_info[0] >= 3)
def test_dtype_str_with_long_in_shape(self):
# Pull request #376, should not error
np.dtype('(1L,)i4')
def test_base_dtype_with_object_type(self):
# Issue gh-2798, should not error.
np.array(['a'], dtype="O").astype(("O", [("name", "O")]))
def test_empty_string_to_object(self):
# Pull request #4722
np.array(["", ""]).astype(object)
class TestDtypeAttributeDeletion(TestCase):
def test_dtype_non_writable_attributes_deletion(self):
dt = np.dtype(np.double)
attr = ["subdtype", "descr", "str", "name", "base", "shape",
"isbuiltin", "isnative", "isalignedstruct", "fields",
"metadata", "hasobject"]
for s in attr:
assert_raises(AttributeError, delattr, dt, s)
def test_dtype_writable_attributes_deletion(self):
dt = np.dtype(np.double)
attr = ["names"]
for s in attr:
assert_raises(AttributeError, delattr, dt, s)
class TestDtypeAttributes(TestCase):
def test_descr_has_trailing_void(self):
# see gh-6359
dtype = np.dtype({
'names': ['A', 'B'],
'formats': ['f4', 'f4'],
'offsets': [0, 8],
'itemsize': 16})
new_dtype = np.dtype(dtype.descr)
assert_equal(new_dtype.itemsize, 16)
def test_name_builtin(self):
for t in np.typeDict.values():
name = t.__name__
if name.endswith('_'):
name = name[:-1]
assert_equal(np.dtype(t).name, name)
def test_name_dtype_subclass(self):
# Ticket #4357
class user_def_subcls(np.void):
pass
assert_equal(np.dtype(user_def_subcls).name, 'user_def_subcls')
def test_rational_dtype():
# test for bug gh-5719
a = np.array([1111], dtype=rational).astype
assert_raises(OverflowError, a, 'int8')
# test that dtype detection finds user-defined types
x = rational(1)
assert_equal(np.array([x,x]).dtype, np.dtype(rational))
def test_dtypes_are_true():
# test for gh-6294
assert bool(np.dtype('f8'))
assert bool(np.dtype('i8'))
assert bool(np.dtype([('a', 'i8'), ('b', 'f4')]))
if __name__ == "__main__":
run_module_suite()
|
eliksir/mailmojo-python-sdk
|
refs/heads/master
|
test/test_newsletter_send_test.py
|
1
|
# coding: utf-8
"""
MailMojo API
v1 of the MailMojo API # noqa: E501
OpenAPI spec version: 1.1.0
Contact: hjelp@mailmojo.no
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import mailmojo_sdk
from mailmojo_sdk.models.newsletter_send_test import NewsletterSendTest # noqa: E501
from mailmojo_sdk.rest import ApiException
class TestNewsletterSendTest(unittest.TestCase):
"""NewsletterSendTest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testNewsletterSendTest(self):
"""Test NewsletterSendTest"""
# FIXME: construct object with mandatory attributes with example values
# model = mailmojo_sdk.models.newsletter_send_test.NewsletterSendTest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
vitan/hue
|
refs/heads/master
|
desktop/core/ext-py/Paste-1.7.2/paste/evalexception/__init__.py
|
78
|
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
An exception handler for interactive debugging
"""
from paste.evalexception.middleware import EvalException
|
ralphwort/chef-repo
|
refs/heads/master
|
build/pyparsing/examples/searchparser.py
|
5
|
"""Search query parser
version 2006-03-09
This search query parser uses the excellent Pyparsing module
(http://pyparsing.sourceforge.net/) to parse search queries by users.
It handles:
* 'and', 'or' and implicit 'and' operators;
* parentheses;
* quoted strings;
* wildcards at the end of a search term (help*);
Requirements:
* Python
* Pyparsing
If you run this script, it will perform a number of tests. To use it as a
module, you should use inheritance on the SearchQueryParser class and overwrite
the Get... methods. The ParserTest class gives a very simple example of how this
could work.
-------------------------------------------------------------------------------
Copyright (c) 2006, Estrate, the Netherlands
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Estrate nor the names of its contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
CONTRIBUTORS:
- Steven Mooij
- Rudolph Froger
- Paul McGuire
TODO:
- add more docs
- ask someone to check my English texts
- add more kinds of wildcards ('*' at the beginning and '*' inside a word)?
"""
from pyparsing import Word, alphanums, Keyword, Group, Combine, Forward, Suppress, Optional, OneOrMore, oneOf
from sets import Set
class SearchQueryParser:
def __init__(self):
self._methods = {
'and': self.evaluateAnd,
'or': self.evaluateOr,
'not': self.evaluateNot,
'parenthesis': self.evaluateParenthesis,
'quotes': self.evaluateQuotes,
'word': self.evaluateWord,
'wordwildcard': self.evaluateWordWildcard,
}
self._parser = self.parser()
def parser(self):
"""
This function returns a parser.
The grammar should be like most full text search engines (Google, Tsearch, Lucene).
Grammar:
- a query consists of alphanumeric words, with an optional '*' wildcard
at the end of a word
- a sequence of words between quotes is a literal string
- words can be used together by using operators ('and' or 'or')
- words with operators can be grouped with parenthesis
- a word or group of words can be preceded by a 'not' operator
        - the 'and' operator takes precedence over the 'or' operator
- if an operator is missing, use an 'and' operator
"""
operatorOr = Forward()
operatorWord = Group(Combine(Word(alphanums) + Suppress('*'))).setResultsName('wordwildcard') | \
Group(Word(alphanums)).setResultsName('word')
operatorQuotesContent = Forward()
operatorQuotesContent << (
(operatorWord + operatorQuotesContent) | operatorWord
)
operatorQuotes = Group(
Suppress('"') + operatorQuotesContent + Suppress('"')
).setResultsName("quotes") | operatorWord
operatorParenthesis = Group(
(Suppress("(") + operatorOr + Suppress(")"))
).setResultsName("parenthesis") | operatorQuotes
operatorNot = Forward()
operatorNot << (Group(
Suppress(Keyword("not", caseless=True)) + operatorNot
).setResultsName("not") | operatorParenthesis)
operatorAnd = Forward()
operatorAnd << (Group(
operatorNot + Suppress(Keyword("and", caseless=True)) + operatorAnd
).setResultsName("and") | Group(
operatorNot + OneOrMore(~oneOf("and or") + operatorAnd)
).setResultsName("and") | operatorNot)
operatorOr << (Group(
operatorAnd + Suppress(Keyword("or", caseless=True)) + operatorOr
).setResultsName("or") | operatorAnd)
return operatorOr.parseString
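    # Editor's note: per the grammar above, adjacent terms get an implicit
    # 'and' which binds tighter than 'or', so 'help hulp or hilp' parses as
    # (help AND hulp) OR hilp.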
def evaluateAnd(self, argument):
return self.evaluate(argument[0]).intersection(self.evaluate(argument[1]))
def evaluateOr(self, argument):
return self.evaluate(argument[0]).union(self.evaluate(argument[1]))
def evaluateNot(self, argument):
return self.GetNot(self.evaluate(argument[0]))
def evaluateParenthesis(self, argument):
return self.evaluate(argument[0])
def evaluateQuotes(self, argument):
"""Evaluate quoted strings
        First it does an 'and' on the individual search terms, then it asks the
        function GetQuotes to return only the subset of IDs that contain the
        literal string.
"""
r = Set()
search_terms = []
for item in argument:
search_terms.append(item[0])
if len(r) == 0:
r = self.evaluate(item)
else:
r = r.intersection(self.evaluate(item))
return self.GetQuotes(' '.join(search_terms), r)
def evaluateWord(self, argument):
return self.GetWord(argument[0])
def evaluateWordWildcard(self, argument):
return self.GetWordWildcard(argument[0])
def evaluate(self, argument):
return self._methods[argument.getName()](argument)
def Parse(self, query):
#print self._parser(query)[0]
return self.evaluate(self._parser(query)[0])
def GetWord(self, word):
return Set()
def GetWordWildcard(self, word):
return Set()
def GetQuotes(self, search_string, tmp_result):
return Set()
def GetNot(self, not_set):
return Set().difference(not_set)
class ParserTest(SearchQueryParser):
"""Tests the parser with some search queries
    tests contains a dictionary with tests and expected results.
"""
tests = {
'help': Set([1, 2, 4, 5]),
'help or hulp': Set([1, 2, 3, 4, 5]),
'help and hulp': Set([2]),
'help hulp': Set([2]),
'help and hulp or hilp': Set([2, 3, 4]),
'help or hulp and hilp': Set([1, 2, 3, 4, 5]),
'help or hulp or hilp or halp': Set([1, 2, 3, 4, 5, 6]),
'(help or hulp) and (hilp or halp)': Set([3, 4, 5]),
'help and (hilp or halp)': Set([4, 5]),
'(help and (hilp or halp)) or hulp': Set([2, 3, 4, 5]),
'not help': Set([3, 6, 7, 8]),
'not hulp and halp': Set([5, 6]),
'not (help and halp)': Set([1, 2, 3, 4, 6, 7, 8]),
'"help me please"': Set([2]),
'"help me please" or hulp': Set([2, 3]),
'"help me please" or (hulp and halp)': Set([2]),
'help*': Set([1, 2, 4, 5, 8]),
'help or hulp*': Set([1, 2, 3, 4, 5]),
'help* and hulp': Set([2]),
'help and hulp* or hilp': Set([2, 3, 4]),
'help* or hulp or hilp or halp': Set([1, 2, 3, 4, 5, 6, 8]),
'(help or hulp*) and (hilp* or halp)': Set([3, 4, 5]),
'help* and (hilp* or halp*)': Set([4, 5]),
'(help and (hilp* or halp)) or hulp*': Set([2, 3, 4, 5]),
'not help* and halp': Set([6]),
'not (help* and helpe*)': Set([1, 2, 3, 4, 5, 6, 7]),
'"help* me please"': Set([2]),
'"help* me* please" or hulp*': Set([2, 3]),
'"help me please*" or (hulp and halp)': Set([2]),
'"help me please" not (hulp and halp)': Set([2]),
'"help me please" hulp': Set([2]),
'help and hilp and not holp': Set([4]),
'help hilp not holp': Set([4]),
'help hilp and not holp': Set([4]),
}
docs = {
1: 'help',
2: 'help me please hulp',
3: 'hulp hilp',
4: 'help hilp',
5: 'halp thinks he needs help',
6: 'he needs halp',
7: 'nothing',
8: 'helper',
}
index = {
'help': Set((1, 2, 4, 5)),
'me': Set((2,)),
'please': Set((2,)),
'hulp': Set((2, 3,)),
'hilp': Set((3, 4,)),
'halp': Set((5, 6,)),
'thinks': Set((5,)),
'he': Set((5, 6,)),
'needs': Set((5, 6,)),
'nothing': Set((7,)),
'helper': Set((8,)),
}
def GetWord(self, word):
if (word in self.index):
return self.index[word]
else:
return Set()
def GetWordWildcard(self, word):
result = Set()
for item in list(self.index.keys()):
if word == item[0:len(word)]:
result = result.union(self.index[item])
return result
def GetQuotes(self, search_string, tmp_result):
result = Set()
for item in tmp_result:
if self.docs[item].count(search_string):
result.add(item)
return result
def GetNot(self, not_set):
all = Set(list(self.docs.keys()))
return all.difference(not_set)
def Test(self):
all_ok = True
for item in list(self.tests.keys()):
print(item)
r = self.Parse(item)
e = self.tests[item]
print('Result: %s' % r)
print('Expect: %s' % e)
if e == r:
print('Test OK')
else:
all_ok = False
print('>>>>>>>>>>>>>>>>>>>>>>Test ERROR<<<<<<<<<<<<<<<<<<<<<')
print('')
return all_ok
if __name__=='__main__':
if ParserTest().Test():
print('All tests OK')
else:
print('One or more tests FAILED')
|
Cyberjusticelab/JusticeAI
|
refs/heads/master
|
src/ml_service/feature_extraction/post_processing/regex/regex_tagger_test.py
|
1
|
# -*- coding: utf-8 -*-
import unittest
import re
import os
from util.constant import Path
from feature_extraction.post_processing.regex.regex_tagger import TagPrecedents
class RegexTaggerTest(unittest.TestCase):
def setUp(self):
self.precedent_tagger = TagPrecedents()
self.precedent_tagger.regexes = {
"regex_facts": [("some_fact", [re.compile("fermentum", re.IGNORECASE)], 'BOOLEAN')],
"regex_outcomes": [("some_outcome", [re.compile("REJETTE")], 'BOOLEAN')]
}
def test_regex_model(self):
binary_directory = Path.binary_directory
Path.binary_directory = Path.test_mock_precedent_directory
self.precedent_tagger.precedents_directory_path = Path.test_mock_precedent_directory
self.precedent_tagger.tag_precedents(10)
binary_model_path = Path.binary_directory + r'precedent_vectors.bin'
self.assertTrue(os.path.isfile(binary_model_path))
os.remove(binary_model_path)
Path.binary_directory = binary_directory
def test_tag_precedents(self):
binary_directory = Path.binary_directory
self.precedent_tagger.precedents_directory_path = Path.test_mock_precedent_directory
Path.binary_directory = Path.test_mock_precedent_directory
facts_found = self.precedent_tagger.tag_precedents(2)
self.assertEqual(facts_found["1.txt"]["facts_vector"], [1])
binary_model_path = Path.binary_directory + r'precedent_vectors.bin'
os.remove(binary_model_path)
Path.binary_directory = binary_directory
def test_intent_indice(self):
binary_directory = Path.binary_directory
Path.binary_directory = Path.test_mock_precedent_directory
regex_list = self.precedent_tagger.get_intent_index()
self.assertEqual(regex_list["facts_vector"][0], (0, "some_fact", 'bool'))
Path.binary_directory = binary_directory
|
vnsofthe/odoo-dev
|
refs/heads/master
|
addons/mass_mailing/models/res_config.py
|
385
|
# -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class MassMailingConfiguration(osv.TransientModel):
_name = 'marketing.config.settings'
_inherit = 'marketing.config.settings'
_columns = {
'group_mass_mailing_campaign': fields.boolean(
'Manage Mass Mailing using Campaign',
implied_group='mass_mailing.group_mass_mailing_campaign',
help="""Manage mass mailign using Campaigns"""),
}
|
andfoy/margffoy-tuay-server
|
refs/heads/master
|
env/lib/python2.7/site-packages/future-0.14.3-py2.7.egg/future/moves/tkinter/font.py
|
118
|
from __future__ import absolute_import
from future.utils import PY3
if PY3:
from tkinter.font import *
else:
try:
from tkFont import *
except ImportError:
raise ImportError('The tkFont module is missing. Does your Py2 '
'installation include tkinter?')
|
ujfjhz/storm
|
refs/heads/master
|
dev-tools/release_notes.py
|
20
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Usage: release_notes.py <version> > RELEASE_NOTES.html
Depends on https://pypi.python.org/pypi/jira/, please use pip to install this module.
Generates release notes for a Storm release by generating an HTML doc containing some introductory information about the
release with links to the Storm docs followed by a list of issues resolved in the release. The script will fail if it finds
any unresolved issues still marked with the target release. You should run this script after either resolving all issues or
moving outstanding issues to a later release.
"""
from jira import JIRA
import itertools, sys
if len(sys.argv) < 2:
print >>sys.stderr, "Usage: release_notes.py <version>"
sys.exit(1)
version = sys.argv[1]
JIRA_BASE_URL = 'https://issues.apache.org/jira'
MAX_RESULTS = 100 # This is constrained for cloud instances so we need to fix this value
def get_issues(jira, query, **kwargs):
"""
Get all issues matching the JQL query from the JIRA instance. This handles expanding paginated results for you. Any additional keyword arguments are forwarded to the JIRA.search_issues call.
"""
results = []
startAt = 0
new_results = None
while new_results == None or len(new_results) == MAX_RESULTS:
new_results = jira.search_issues(query, startAt=startAt, maxResults=MAX_RESULTS, **kwargs)
results += new_results
startAt += len(new_results)
return results
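# Editor's sketch (the JQL string is illustrative only): get_issues drains
# every page of results, so callers never need to handle the MAX_RESULTS cap:
#     jira = JIRA(JIRA_BASE_URL)
#     bugs = get_issues(jira, 'project=STORM and issuetype=Bug')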
def issue_link(issue):
return "%s/browse/%s" % (JIRA_BASE_URL, issue.key)
if __name__ == "__main__":
apache = JIRA(JIRA_BASE_URL)
issues = get_issues(apache, 'project=STORM and fixVersion=%s' % version)
if not issues:
print >>sys.stderr, "Didn't find any issues for the target fix version"
sys.exit(1)
# Some resolutions, including a lack of resolution, indicate that the bug hasn't actually been addressed and we shouldn't even be able to create a release until they are fixed
UNRESOLVED_RESOLUTIONS = [None,
"Unresolved",
"Duplicate",
"Invalid",
"Not A Problem",
"Not A Bug",
"Won't Fix",
"Incomplete",
"Cannot Reproduce",
"Later",
"Works for Me",
"Workaround",
"Information Provided"
]
unresolved_issues = [issue for issue in issues if issue.fields.resolution in UNRESOLVED_RESOLUTIONS or issue.fields.resolution.name in UNRESOLVED_RESOLUTIONS]
if unresolved_issues:
print >>sys.stderr, "The release is not completed since unresolved issues or improperly resolved issues were found still tagged with this release as the fix version:"
for issue in unresolved_issues:
print >>sys.stderr, "Unresolved issue: %15s %20s %s" % (issue.key, issue.fields.resolution, issue_link(issue))
print >>sys.stderr
print >>sys.stderr, "Note that for some resolutions, you should simply remove the fix version as they have not been truly fixed in this release."
sys.exit(1)
# Get list of (issue type, [issues]) sorted by the issue ID type, with each subset of issues sorted by their key so they
# are in increasing order of bug #. To get a nice ordering of the issue types we customize the key used to sort by issue
# type a bit to ensure features and improvements end up first.
def issue_type_key(issue):
if issue.fields.issuetype.name == 'New Feature':
return -2
if issue.fields.issuetype.name == 'Improvement':
return -1
return issue.fields.issuetype.id
by_group = [(k,sorted(g, key=lambda issue: issue.id)) for k,g in itertools.groupby(sorted(issues, key=issue_type_key), lambda issue: issue.fields.issuetype.name)]
print "<!DOCTYPE html>"
print "<html lang=\"en\">"
print "<head>"
print "<meta charset=\"utf-8\">"
print "<title>Storm %(version)s Release Notes</title>" % { 'version': version }
print "</head>"
print "<body>"
print "<h1>Release Notes for Storm %s</h1>" % version
print """<p>JIRA issues addressed in the %(version)s release of Storm. Documentation for this
release is available at the <a href="http://storm.apache.org/">Apache Storm
project site</a>.</p>""" % { 'version': version }
for itype, issues in by_group:
print "<h2>%s</h2>" % itype
print "<ul>"
for issue in issues:
print '<li>[<a href="%(link)s">%(key)s</a>] - %(summary)s</li>' % {'key': issue.key, 'link': issue_link(issue), 'summary': issue.fields.summary}
print "</ul>"
print "</body>"
print "</html>"
|
maraujop/django-crispy-forms
|
refs/heads/main
|
crispy_forms/tests/test_dynamic_api.py
|
2
|
import pytest
from django import forms
from crispy_forms.bootstrap import AppendedText
from crispy_forms.exceptions import DynamicError
from crispy_forms.helper import FormHelper, FormHelpersException
from crispy_forms.layout import HTML, Div, Field, Fieldset, Layout, MultiField
from crispy_forms.tests.forms import SampleForm
from .conftest import only_uni_form
def test_wrap_all_fields():
helper = FormHelper()
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper.all().wrap(Field, css_class="test-class")
for field in layout.fields:
assert isinstance(field, Field)
assert field.attrs["class"] == "test-class"
assert layout[0][0] == "email"
assert layout[1][0] == "password1"
assert layout[2][0] == "password2"
def test_wrap_selected_fields():
helper = FormHelper()
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[1:3].wrap(Field, css_class="test-class")
assert not isinstance(layout.fields[0], Field)
assert isinstance(layout.fields[1], Field)
assert isinstance(layout.fields[2], Field)
helper[0].wrap(Fieldset, "legend", css_class="test-class")
assert isinstance(layout[0], Fieldset)
assert layout[0].legend == "legend"
assert layout[0][0] == "email"
def test_wrap_together_with_slices():
helper = FormHelper()
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[1:3].wrap_together(Field, css_class="test-class")
assert layout.fields[0] == "email"
assert isinstance(layout.fields[1], Field)
assert layout.fields[1][0] == "password1"
assert layout.fields[1][1] == "password2"
layout = Layout(
Div("email"),
"password1",
"password2",
)
helper.layout = layout
helper[0:3].wrap_together(Field, css_class="test-class")
assert isinstance(layout.fields[0], Field)
assert isinstance(layout.fields[0][0], Div)
assert layout.fields[0][0][0] == "email"
assert layout.fields[0][1] == "password1"
assert layout.fields[0][2] == "password2"
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[0].wrap_together(Field, css_class="test-class")
assert isinstance(layout.fields[0], Field)
assert layout.fields[1] == "password1"
assert layout.fields[2] == "password2"
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[0].wrap_together(Fieldset, "legend", css_class="test-class")
assert isinstance(layout.fields[0], Fieldset)
assert layout.fields[0].legend == "legend"
assert layout.fields[1] == "password1"
assert layout.fields[2] == "password2"
def test_wrap_together_partial_slices():
helper = FormHelper()
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[:2].wrap_together(Field, css_class="test-class")
assert isinstance(layout.fields[0], Field)
assert layout.fields[1] == "password2"
assert layout.fields[0][0] == "email"
assert layout.fields[0][1] == "password1"
helper = FormHelper()
layout = Layout("email", "password1", "password2")
helper.layout = layout
helper[1:].wrap_together(Field, css_class="test-class")
assert layout.fields[0] == "email"
assert isinstance(layout.fields[1], Field)
assert layout.fields[1][0] == "password1"
assert layout.fields[1][1] == "password2"
def test_update_attributes():
helper = FormHelper()
helper.layout = Layout("email", Field("password1"), "password2")
helper["password1"].update_attributes(readonly=True)
assert "readonly" in helper.layout[1].attrs
def test_update_attributes_and_wrap_once():
helper = FormHelper()
layout = Layout("email", Field("password1"), "password2")
helper.layout = layout
helper.filter(Field).update_attributes(readonly=True)
assert isinstance(layout[1], Field)
assert layout[1].attrs == {"readonly": True}
layout = Layout(
"email",
Div(Field("password1")),
"password2",
)
helper.layout = layout
helper.filter(Field, max_level=2).update_attributes(readonly=True)
assert isinstance(layout[1][0], Field)
assert layout[1][0].attrs == {"readonly": True}
layout = Layout(
"email",
Div(Field("password1")),
"password2",
)
helper.layout = layout
helper.filter(str, greedy=True).wrap_once(Field)
helper.filter(Field, greedy=True).update_attributes(readonly=True)
assert isinstance(layout[0], Field)
assert isinstance(layout[1][0], Field)
assert isinstance(layout[1][0][0], str)
assert isinstance(layout[2], Field)
assert layout[1][0].attrs == {"readonly": True}
assert layout[0].attrs == {"readonly": True}
assert layout[2].attrs == {"readonly": True}
def test_get_layout_objects():
layout_1 = Layout(Div())
assert layout_1.get_layout_objects(Div) == [[[0], "div"]]
layout_2 = Layout(Div(Div(Div("email")), Div("password1"), "password2"))
assert layout_2.get_layout_objects(Div) == [[[0], "div"]]
assert layout_2.get_layout_objects(Div, max_level=1) == [[[0], "div"], [[0, 0], "div"], [[0, 1], "div"]]
assert layout_2.get_layout_objects(Div, max_level=2) == [
[[0], "div"],
[[0, 0], "div"],
[[0, 0, 0], "div"],
[[0, 1], "div"],
]
layout_3 = Layout(
"email",
Div("password1"),
"password2",
)
assert layout_3.get_layout_objects(str, max_level=2) == [[[0], "email"], [[1, 0], "password1"], [[2], "password2"]]
layout_4 = Layout(
Div(
Div("field_name"),
"field_name2",
),
Div("password"),
"extra_field",
)
assert layout_4.get_layout_objects(Div) == [[[0], "div"], [[1], "div"]]
assert layout_4.get_layout_objects(Div, max_level=1) == [[[0], "div"], [[0, 0], "div"], [[1], "div"]]
def test_filter_and_wrap():
helper = FormHelper()
layout = Layout(
"email",
Div("password1"),
"password2",
)
helper.layout = layout
helper.filter(str).wrap(Field, css_class="test-class")
assert isinstance(layout.fields[0], Field)
assert isinstance(layout.fields[1], Div)
assert isinstance(layout.fields[2], Field)
assert layout[2][0] == "password2"
# Wrapping a div in a div
helper.filter(Div).wrap(Div, css_class="test-class")
assert isinstance(layout.fields[1], Div)
assert isinstance(layout.fields[1].fields[0], Div)
assert layout[1][0][0] == "password1"
def test_filter_and_wrap_side_effects():
helper = FormHelper()
layout = Layout(
Div(
"extra_field",
Div("password1"),
),
)
helper.layout = layout
with pytest.raises(DynamicError):
helper.filter(Div, max_level=2).wrap(Div, css_class="test-class")
def test_get_field_names():
layout_1 = Div("field_name")
assert layout_1.get_field_names() == [[[0], "field_name"]]
layout_2 = Div(Div("field_name"))
assert layout_2.get_field_names() == [[[0, 0], "field_name"]]
layout_3 = Div(Div("field_name"), "password")
assert layout_3.get_field_names() == [[[0, 0], "field_name"], [[1], "password"]]
layout_4 = Div(
Div(
Div("field_name"),
"field_name2",
),
Div("password"),
"extra_field",
)
assert layout_4.get_field_names() == [
[[0, 0, 0], "field_name"],
[[0, 1], "field_name2"],
[[1, 0], "password"],
[[2], "extra_field"],
]
layout_5 = Div(
Div(
"field_name",
"field_name2",
),
"extra_field",
)
assert layout_5.get_field_names() == [
[[0, 0], "field_name"],
[[0, 1], "field_name2"],
[[1], "extra_field"],
]
def test_layout_get_field_names():
layout_1 = Layout(Div("field_name"), "password")
assert layout_1.get_field_names() == [
[[0, 0], "field_name"],
[[1], "password"],
]
layout_2 = Layout(Div("field_name"), "password", Fieldset("legend", "extra_field"))
assert layout_2.get_field_names() == [
[[0, 0], "field_name"],
[[1], "password"],
[[2, 0], "extra_field"],
]
layout_3 = Layout(Div(Div(Div("email")), Div("password1"), "password2"))
assert layout_3.get_field_names() == [
[[0, 0, 0, 0], "email"],
[[0, 1, 0], "password1"],
[[0, 2], "password2"],
]
def test_filter_by_widget(advanced_layout):
form = SampleForm()
form.helper = FormHelper(form)
form.helper.layout = advanced_layout
assert form.helper.filter_by_widget(forms.PasswordInput).slice == [
[[0, 1, 0, 0], "password1"],
[[0, 4, 0], "password2"],
]
def test_exclude_by_widget(advanced_layout):
form = SampleForm()
form.helper = FormHelper(form)
form.helper.layout = advanced_layout
assert form.helper.exclude_by_widget(forms.PasswordInput).slice == [
[[0, 0, 0, 0], "email"],
[[0, 3, 0], "first_name"],
[[1], "last_name"],
]
def test_exclude_by_widget_and_wrap(advanced_layout):
form = SampleForm()
form.helper = FormHelper(form)
form.helper.layout = advanced_layout
form.helper.exclude_by_widget(forms.PasswordInput).wrap(Field, css_class="hero")
# Check wrapped fields
assert isinstance(form.helper.layout[0][0][0][0], Field)
assert isinstance(form.helper.layout[0][3][0], Field)
assert isinstance(form.helper.layout[1], Field)
# Check others stay the same
assert isinstance(form.helper.layout[0][3][1], HTML)
assert isinstance(form.helper.layout[0][1][0][0], str)
assert isinstance(form.helper.layout[0][4][0], str)
def test_all_without_layout():
form = SampleForm()
form.helper = FormHelper()
with pytest.raises(FormHelpersException):
form.helper.all().wrap(Div)
def test_filter_by_widget_without_form(advanced_layout):
form = SampleForm()
form.helper = FormHelper()
form.helper.layout = advanced_layout
with pytest.raises(FormHelpersException):
form.helper.filter_by_widget(forms.PasswordInput)
def test_formhelper__getitem__():
helper = FormHelper()
layout = Layout(
Div("email"),
"password1",
)
helper.layout = layout
helper["email"].wrap(Field, css_class="hero")
assert isinstance(layout[0][0], Field)
assert layout[0][0][0] == "email"
helper = FormHelper()
helper.layout = Layout("password1")
helper["password1"].wrap(AppendedText, "extra")
assert isinstance(helper.layout[0], AppendedText)
assert helper.layout[0][0] == "password1"
assert helper.layout[0].text == "extra"
def test_formhelper__setitem__():
helper = FormHelper()
layout = Layout("first_field", Div("email"))
helper.layout = layout
helper[0] = "replaced"
assert layout[0] == "replaced"
def test_formhelper__delitem__and__len__():
helper = FormHelper()
layout = Layout("first_field", Div("email"))
helper.layout = layout
del helper[0]
assert len(helper) == 1
def test__delitem__and__len__layout_object():
layout = Layout("first_field", Div("email"))
del layout[0]
assert len(layout) == 1
def test__getitem__layout_object():
layout = Layout(Div(Div(Div("email")), Div("password1"), "password2"))
assert isinstance(layout[0], Div)
assert isinstance(layout[0][0], Div)
assert isinstance(layout[0][0][0], Div)
assert isinstance(layout[0][1], Div)
assert isinstance(layout[0][1][0], str)
assert isinstance(layout[0][2], str)
def test__getattr__append_layout_object():
layout = Layout(Div("email"))
layout.append("password1")
assert isinstance(layout[0], Div)
assert isinstance(layout[0][0], str)
assert isinstance(layout[1], str)
def test__setitem__layout_object():
layout = Layout(Div("email"))
layout[0][0] = "password1"
assert isinstance(layout[0], Div)
assert layout[0][0] == "password1"
@only_uni_form
def test_filter():
helper = FormHelper()
helper.layout = Layout(
Div(
MultiField("field_name"),
"field_name2",
),
Div("password"),
"extra_field",
)
assert helper.filter(Div, MultiField).slice == [[[0], "div"], [[1], "div"]]
assert helper.filter(Div, MultiField, max_level=1).slice == [[[0], "div"], [[0, 0], "multifield"], [[1], "div"]]
assert helper.filter(MultiField, max_level=1).slice == [[[0, 0], "multifield"]]
|
tperol/ConvNetQuake
|
refs/heads/master
|
bin/preprocess/create_dataset_events.py
|
1
|
#!/usr/bin/env python
# -------------------------------------------------------------------
# File Name : create_dataset_events.py
# Creation Date : 05-12-2016
# Last Modified : Fri Jan 6 15:04:54 2017
# Author: Thibaut Perol <tperol@g.harvard.edu>
# -------------------------------------------------------------------
"""Creates tfrecords dataset of events trace and their cluster_ids.
This is done by loading a dir of .mseed and one catalog with the
time stamps of the events and their cluster_id
e.g.,
./bin/preprocess/create_dataset_events.py \
--stream_dir data/streams \
--catalog data/50_clusters/catalog_with_cluster_ids.csv\
--output_dir data/50_clusters/tfrecords
"""
import os
import numpy as np
from quakenet.data_pipeline import DataWriter
import tensorflow as tf
from obspy.core import read
from quakenet.data_io import load_catalog
from obspy.core.utcdatetime import UTCDateTime
from openquake.hazardlib.geo.geodetic import distance
import fnmatch
import json
flags = tf.flags
flags.DEFINE_string('stream_dir', None,
'path to the directory of streams to preprocess.')
flags.DEFINE_string(
'catalog', None, 'path to the events catalog to use as labels.')
flags.DEFINE_string('output_dir', None,
'path to the directory in which the tfrecords are saved')
flags.DEFINE_bool("plot", False,
"If we want the event traces to be plotted")
flags.DEFINE_float(
'window_size', 10, 'size of the window samples (in seconds)')
flags.DEFINE_float('v_mean', 5.0, 'mean velocity')
flags.DEFINE_boolean("save_mseed",False,
"save the windows in mseed format")
FLAGS = flags.FLAGS
def distance_to_station(lat, long, depth):
# station GPS coordinates
lat0 = 35.796570
long0 = -97.454860
depth0 = -0.333
# return distance of the event to the station
return distance(long, lat, depth, long0, lat0, depth0)
def preprocess_stream(stream):
stream = stream.detrend('constant')
return stream.normalize()
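# Note on the preprocessing above: per obspy semantics, detrend('constant')
# subtracts each trace's mean and normalize() scales each trace by its
# maximum absolute amplitude, so e.g. (hypothetical file name):
#
#   st = preprocess_stream(read("stream.mseed"))  # zero-mean traces, peak amplitude 1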
def filter_catalog(cat):
# Filter around Guthrie sequence
cat = cat[(cat.latitude > 35.7) & (cat.latitude < 36)
& (cat.longitude > -97.6) & (cat.longitude < -97.2)]
return cat
def get_travel_time(catalog):
"""Find the time between origin and propagation"""
v_mean = FLAGS.v_mean
coordinates = [(lat, lon, depth) for (lat, lon, depth) in zip(catalog.latitude,
catalog.longitude,
catalog.depth)]
distances_to_station = [distance_to_station(lat, lon, depth)
for (lat, lon, depth) in coordinates]
travel_time = [d / v_mean for d in distances_to_station]  # avoid shadowing the imported distance()
return travel_time
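# Illustrative sanity check (hypothetical numbers): an event 10 km from the
# station with the default v_mean of 5.0 km/s yields a travel time of
# 10 / 5.0 = 2.0 seconds, the offset later added to each catalog timestamp
# in main() before slicing the waveform window.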
def write_json(metadata,output_metadata):
with open(output_metadata, 'w') as outfile:
json.dump(metadata, outfile)
def main(_):
stream_files = [file for file in os.listdir(FLAGS.stream_dir) if
fnmatch.fnmatch(file, '*.mseed')]
print "List of streams to anlayze", stream_files
# Create dir to store tfrecords
if not os.path.exists(FLAGS.output_dir):
os.makedirs(FLAGS.output_dir)
# Dictionary of nb of events per tfrecords
metadata = {}
output_metadata = os.path.join(FLAGS.output_dir,"metadata.json")
# Load Catalog
print "+ Loading Catalog"
cat = load_catalog(FLAGS.catalog)
cat = filter_catalog(cat)
for stream_file in stream_files:
# Load stream
stream_path = os.path.join(FLAGS.stream_dir, stream_file)
print "+ Loading Stream {}".format(stream_file)
stream = read(stream_path)
print '+ Preprocessing stream'
stream = preprocess_stream(stream)
# Filter catalog according to the loaded stream
start_date = stream[0].stats.starttime
end_date = stream[-1].stats.endtime
print("-- Start Date={}, End Date={}".format(start_date, end_date))
filtered_catalog = cat[
((cat.utc_timestamp >= start_date)
& (cat.utc_timestamp < end_date))]
# Propagation time from source to station
travel_time = get_travel_time(filtered_catalog)
# Write event waveforms and cluster_id in .tfrecords
output_name = stream_file.split(".mseed")[0] + ".tfrecords"
output_path = os.path.join(FLAGS.output_dir, output_name)
writer = DataWriter(output_path)
print("+ Creating tfrecords for {} events".format(filtered_catalog.shape[0]))
# Loop over all events in the considered stream
for event_n in range(filtered_catalog.shape[0]):
event_time = filtered_catalog.utc_timestamp.values[event_n]
event_time += travel_time[event_n]
st_event = stream.slice(UTCDateTime(event_time),
UTCDateTime(event_time) + FLAGS.window_size).copy()
cluster_id = filtered_catalog.cluster_id.values[event_n]
n_traces = len(st_event)
# If there is no trace, skip this waveform
if n_traces == 0:
continue
n_samples = len(st_event[0].data)
n_pts = st_event[0].stats.sampling_rate * FLAGS.window_size + 1
if (len(st_event) == 3) and (n_pts == n_samples):
# Write tfrecords
writer.write(st_event, cluster_id)
# Save window and cluster_id
if FLAGS.save_mseed:
output_label = "label_{}_lat_{:.3f}_lon_{:.3f}.mseed".format(
cluster_id,
filtered_catalog.latitude.values[event_n],
filtered_catalog.longitude.values[event_n])
output_mseed_dir = os.path.join(FLAGS.output_dir,"mseed")
if not os.path.exists(output_mseed_dir):
os.makedirs(output_mseed_dir)
output_mseed = os.path.join(output_mseed_dir,output_label)
st_event.write(output_mseed,format="MSEED")
# Plot events
if FLAGS.plot:
trace = st_event[0]
viz_dir = os.path.join(
FLAGS.output_dir, "viz", stream_file.split(".mseed")[0])
if not os.path.exists(viz_dir):
os.makedirs(viz_dir)
trace.plot(outfile=os.path.join(viz_dir,
"event_{}.png".format(event_n)))
else:
print "Missing waveform for event:", UTCDateTime(event_time)
# Cleanup writer
print("Number of events written={}".format(writer._written))
writer.close()
# Write metadata
metadata[stream_file.split(".mseed")[0]] = writer._written
write_json(metadata, output_metadata)
if __name__ == "__main__":
tf.app.run()
|
leorochael/odoo
|
refs/heads/8.0
|
addons/hr_attendance/wizard/hr_attendance_error.py
|
377
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_attendance_error(osv.osv_memory):
_name = 'hr.attendance.error'
_description = 'Print Error Attendance Report'
_columns = {
'init_date': fields.date('Starting Date', required=True),
'end_date': fields.date('Ending Date', required=True),
'max_delay': fields.integer('Max. Delay (Min)', required=True)
}
_defaults = {
'init_date': lambda *a: time.strftime('%Y-%m-%d'),
'end_date': lambda *a: time.strftime('%Y-%m-%d'),
'max_delay': 120,
}
def print_report(self, cr, uid, ids, context=None):
emp_ids = []
data_error = self.read(cr, uid, ids, context=context)[0]
date_from = data_error['init_date']
date_to = data_error['end_date']
cr.execute("SELECT id FROM hr_attendance WHERE employee_id IN %s AND to_char(name,'YYYY-mm-dd')<=%s AND to_char(name,'YYYY-mm-dd')>=%s AND action IN %s ORDER BY name" ,(tuple(context['active_ids']), date_to, date_from, tuple(['sign_in','sign_out'])))
attendance_ids = [x[0] for x in cr.fetchall()]
if not attendance_ids:
raise osv.except_osv(_('No Data Available!'), _('No records are found for your selection!'))
attendance_records = self.pool.get('hr.attendance').browse(cr, uid, attendance_ids, context=context)
for rec in attendance_records:
if rec.employee_id.id not in emp_ids:
emp_ids.append(rec.employee_id.id)
data_error['emp_ids'] = emp_ids
datas = {
'ids': [],
'model': 'hr.employee',
'form': data_error
}
return self.pool['report'].get_action(
cr, uid, [], 'hr_attendance.report_attendanceerrors', data=datas, context=context
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Pythonify/awesome
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py
|
1785
|
#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings
Example::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
If no paths are provided, it takes its input from stdin.
"""
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import sys
from io import open
from chardet import __version__
from chardet.universaldetector import UniversalDetector
def description_of(lines, name='stdin'):
"""
Return a string describing the probable encoding of a file or
list of strings.
:param lines: The lines to get the encoding of.
:type lines: Iterable of bytes
:param name: Name of file or collection of lines
:type name: str
"""
u = UniversalDetector()
for line in lines:
u.feed(line)
u.close()
result = u.result
if result['encoding']:
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
else:
return '{0}: no result'.format(name)
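# A minimal usage sketch (file name is hypothetical; the reported encoding
# and confidence depend on the chardet version and file contents):
#
#   with open('somefile', 'rb') as f:
#       print(description_of(f, f.name))  # e.g. "somefile: ascii with confidence 1.0"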
def main(argv=None):
'''
Handles command line arguments and gets things started.
:param argv: List of arguments, as if specified on the command-line.
If None, ``sys.argv[1:]`` is used instead.
:type argv: list of str
'''
# Get command line arguments
parser = argparse.ArgumentParser(
description="Takes one or more file paths and reports their detected \
encodings",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve')
parser.add_argument('input',
help='File whose encoding we would like to determine.',
type=argparse.FileType('rb'), nargs='*',
default=[sys.stdin])
parser.add_argument('--version', action='version',
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
for f in args.input:
if f.isatty():
print("You are running chardetect interactively. Press " +
"CTRL-D twice at the start of a blank line to signal the " +
"end of your input. If you want help, run chardetect " +
"--help\n", file=sys.stderr)
print(description_of(f, f.name))
if __name__ == '__main__':
main()
|
aparo/django-nonrel
|
refs/heads/master
|
django/db/backends/creation.py
|
11
|
import sys
import time
from django.conf import settings
from django.core.management import call_command
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
"""
This class encapsulates all backend-specific differences that pertain to
database *creation*, such as the column types to use for particular Django
Fields, the SQL used to create and destroy tables, and the creation and
destruction of test databases.
"""
data_types = {}
def __init__(self, connection):
self.connection = connection
def _digest(self, *args):
"""
Generates a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
return '%x' % (abs(hash(args)) % 4294967296L) # 2**32
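# Illustrative output (the exact value depends on Python's hash() and is not
# stable across implementations): self._digest('tbl_a', 'tbl_b') might return
# a short hex string such as '3e1f9a2c', compact enough to embed in the
# generated constraint and index names below.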
def sql_create_model(self, model, style, known_models=set()):
"""
Returns the SQL required to create a single model, as a tuple of:
(list_of_sql, pending_references_dict)
"""
from django.db import models
opts = model._meta
if not opts.managed or opts.proxy:
return [], {}
final_output = []
table_output = []
pending_references = {}
qn = self.connection.ops.quote_name
for f in opts.local_fields:
col_type = f.db_type(connection=self.connection)
tablespace = f.db_tablespace or opts.db_tablespace
if col_type is None:
# Skip ManyToManyFields, because they're not represented as
# database columns in this table.
continue
# Make the definition (e.g. 'foo VARCHAR(30)') for this field.
field_output = [style.SQL_FIELD(qn(f.column)),
style.SQL_COLTYPE(col_type)]
if not f.null:
field_output.append(style.SQL_KEYWORD('NOT NULL'))
if f.primary_key:
field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
elif f.unique:
field_output.append(style.SQL_KEYWORD('UNIQUE'))
if tablespace and f.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
field_output.append(self.connection.ops.tablespace_sql(tablespace, inline=True))
if f.rel:
ref_output, pending = self.sql_for_inline_foreign_key_references(f, known_models, style)
if pending:
pending_references.setdefault(f.rel.to, []).append((model, f))
else:
field_output.extend(ref_output)
table_output.append(' '.join(field_output))
for field_constraints in opts.unique_together:
table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' % \
", ".join([style.SQL_FIELD(qn(opts.get_field(f).column)) for f in field_constraints]))
full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(qn(opts.db_table)) + ' (']
for i, line in enumerate(table_output): # Combine and add commas.
full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
full_statement.append(')')
if opts.db_tablespace:
full_statement.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
full_statement.append(';')
final_output.append('\n'.join(full_statement))
if opts.has_auto_field:
# Add any extra SQL needed to support auto-incrementing primary keys.
auto_column = opts.auto_field.db_column or opts.auto_field.name
autoinc_sql = self.connection.ops.autoinc_sql(opts.db_table, auto_column)
if autoinc_sql:
for stmt in autoinc_sql:
final_output.append(stmt)
return final_output, pending_references
def sql_for_inline_foreign_key_references(self, field, known_models, style):
"Return the SQL snippet defining the foreign key reference for a field"
qn = self.connection.ops.quote_name
if field.rel.to in known_models:
output = [style.SQL_KEYWORD('REFERENCES') + ' ' + \
style.SQL_TABLE(qn(field.rel.to._meta.db_table)) + ' (' + \
style.SQL_FIELD(qn(field.rel.to._meta.get_field(field.rel.field_name).column)) + ')' +
self.connection.ops.deferrable_sql()
]
pending = False
else:
# We haven't yet created the table to which this field
# is related, so save it for later.
output = []
pending = True
return output, pending
def sql_for_pending_references(self, model, style, pending_references):
"Returns any ALTER TABLE statements to add constraints after the fact."
from django.db.backends.util import truncate_name
if not model._meta.managed or model._meta.proxy:
return []
qn = self.connection.ops.quote_name
final_output = []
opts = model._meta
if model in pending_references:
for rel_class, f in pending_references[model]:
rel_opts = rel_class._meta
r_table = rel_opts.db_table
r_col = f.column
table = opts.db_table
col = opts.get_field(f.rel.field_name).column
# For MySQL, r_name must be unique in the first 64 characters.
# So we are careful with character usage here.
r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
(qn(r_table), qn(truncate_name(r_name, self.connection.ops.max_name_length())),
qn(r_col), qn(table), qn(col),
self.connection.ops.deferrable_sql()))
del pending_references[model]
return final_output
def sql_for_many_to_many(self, model, style):
"Return the CREATE TABLE statments for all the many-to-many tables defined on a model"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
PendingDeprecationWarning
)
output = []
for f in model._meta.local_many_to_many:
if model._meta.managed or f.rel.to._meta.managed:
output.extend(self.sql_for_many_to_many_field(model, f, style))
return output
def sql_for_many_to_many_field(self, model, f, style):
"Return the CREATE TABLE statements for a single m2m field"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
PendingDeprecationWarning
)
from django.db import models
from django.db.backends.util import truncate_name
output = []
if f.auto_created:
opts = model._meta
qn = self.connection.ops.quote_name
tablespace = f.db_tablespace or opts.db_tablespace
if tablespace:
sql = self.connection.ops.tablespace_sql(tablespace, inline=True)
if sql:
tablespace_sql = ' ' + sql
else:
tablespace_sql = ''
else:
tablespace_sql = ''
table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
style.SQL_TABLE(qn(f.m2m_db_table())) + ' (']
table_output.append(' %s %s %s%s,' %
(style.SQL_FIELD(qn('id')),
style.SQL_COLTYPE(models.AutoField(primary_key=True).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL PRIMARY KEY'),
tablespace_sql))
deferred = []
inline_output, deferred = self.sql_for_inline_many_to_many_references(model, f, style)
table_output.extend(inline_output)
table_output.append(' %s (%s, %s)%s' %
(style.SQL_KEYWORD('UNIQUE'),
style.SQL_FIELD(qn(f.m2m_column_name())),
style.SQL_FIELD(qn(f.m2m_reverse_name())),
tablespace_sql))
table_output.append(')')
if opts.db_tablespace:
# f.db_tablespace is only for indices, so ignore its value here.
table_output.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
table_output.append(';')
output.append('\n'.join(table_output))
for r_table, r_col, table, col in deferred:
r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' %
(qn(r_table),
qn(truncate_name(r_name, self.connection.ops.max_name_length())),
qn(r_col), qn(table), qn(col),
self.connection.ops.deferrable_sql()))
# Add any extra SQL needed to support auto-incrementing PKs
autoinc_sql = self.connection.ops.autoinc_sql(f.m2m_db_table(), 'id')
if autoinc_sql:
for stmt in autoinc_sql:
output.append(stmt)
return output
def sql_for_inline_many_to_many_references(self, model, field, style):
"Create the references to other tables required by a many-to-many table"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
PendingDeprecationWarning
)
from django.db import models
opts = model._meta
qn = self.connection.ops.quote_name
table_output = [
' %s %s %s %s (%s)%s,' %
(style.SQL_FIELD(qn(field.m2m_column_name())),
style.SQL_COLTYPE(models.ForeignKey(model).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL REFERENCES'),
style.SQL_TABLE(qn(opts.db_table)),
style.SQL_FIELD(qn(opts.pk.column)),
self.connection.ops.deferrable_sql()),
' %s %s %s %s (%s)%s,' %
(style.SQL_FIELD(qn(field.m2m_reverse_name())),
style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL REFERENCES'),
style.SQL_TABLE(qn(field.rel.to._meta.db_table)),
style.SQL_FIELD(qn(field.rel.to._meta.pk.column)),
self.connection.ops.deferrable_sql())
]
deferred = []
return table_output, deferred
def sql_indexes_for_model(self, model, style):
"Returns the CREATE INDEX SQL statements for a single model"
if not model._meta.managed or model._meta.proxy:
return []
output = []
for f in model._meta.local_fields:
output.extend(self.sql_indexes_for_field(model, f, style))
return output
def sql_indexes_for_field(self, model, f, style):
"Return the CREATE INDEX SQL statements for a single model field"
from django.db.backends.util import truncate_name
if f.db_index and not f.unique:
qn = self.connection.ops.quote_name
tablespace = f.db_tablespace or model._meta.db_tablespace
if tablespace:
sql = self.connection.ops.tablespace_sql(tablespace)
if sql:
tablespace_sql = ' ' + sql
else:
tablespace_sql = ''
else:
tablespace_sql = ''
i_name = '%s_%s' % (model._meta.db_table, self._digest(f.column))
output = [style.SQL_KEYWORD('CREATE INDEX') + ' ' +
style.SQL_TABLE(qn(truncate_name(i_name, self.connection.ops.max_name_length()))) + ' ' +
style.SQL_KEYWORD('ON') + ' ' +
style.SQL_TABLE(qn(model._meta.db_table)) + ' ' +
"(%s)" % style.SQL_FIELD(qn(f.column)) +
"%s;" % tablespace_sql]
else:
output = []
return output
def sql_destroy_model(self, model, references_to_delete, style):
"Return the DROP TABLE and restraint dropping statements for a single model"
if not model._meta.managed or model._meta.proxy:
return []
# Drop the table now
qn = self.connection.ops.quote_name
output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
style.SQL_TABLE(qn(model._meta.db_table)))]
if model in references_to_delete:
output.extend(self.sql_remove_table_constraints(model, references_to_delete, style))
if model._meta.has_auto_field:
ds = self.connection.ops.drop_sequence_sql(model._meta.db_table)
if ds:
output.append(ds)
return output
def sql_remove_table_constraints(self, model, references_to_delete, style):
from django.db.backends.util import truncate_name
if not model._meta.managed or model._meta.proxy:
return []
output = []
qn = self.connection.ops.quote_name
for rel_class, f in references_to_delete[model]:
table = rel_class._meta.db_table
col = f.column
r_table = model._meta.db_table
r_col = model._meta.get_field(f.rel.field_name).column
r_name = '%s_refs_%s_%s' % (col, r_col, self._digest(table, r_table))
output.append('%s %s %s %s;' % \
(style.SQL_KEYWORD('ALTER TABLE'),
style.SQL_TABLE(qn(table)),
style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()),
style.SQL_FIELD(qn(truncate_name(r_name, self.connection.ops.max_name_length())))))
del references_to_delete[model]
return output
def sql_destroy_many_to_many(self, model, f, style):
"Returns the DROP TABLE statements for a single m2m field"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
PendingDeprecationWarning
)
qn = self.connection.ops.quote_name
output = []
if f.auto_created:
output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
style.SQL_TABLE(qn(f.m2m_db_table()))))
ds = self.connection.ops.drop_sequence_sql("%s_%s" % (model._meta.db_table, f.column))
if ds:
output.append(ds)
return output
def create_test_db(self, verbosity=1, autoclobber=False):
"""
Creates a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
if verbosity >= 1:
print "Creating test database '%s'..." % self.connection.alias
test_database_name = self._create_test_db(verbosity, autoclobber)
self.connection.close()
self.connection.settings_dict["NAME"] = test_database_name
can_rollback = self._rollback_works()
self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback
call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)
if settings.CACHE_BACKEND.startswith('db://'):
from django.core.cache import parse_backend_uri
_, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
call_command('createcachetable', cache_name)
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database.
cursor = self.connection.cursor()
return test_database_name
def _create_test_db(self, verbosity, autoclobber):
"Internal implementation - creates the test db tables."
suffix = self.sql_table_creation_suffix()
if self.connection.settings_dict['TEST_NAME']:
test_database_name = self.connection.settings_dict['TEST_NAME']
else:
test_database_name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
qn = self.connection.ops.quote_name
# Create the test database and connect to it. We need to autocommit
# if the database supports it because PostgreSQL doesn't allow
# CREATE/DROP DATABASE statements within transactions.
cursor = self.connection.cursor()
self.set_autocommit()
try:
cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("Type 'yes' if you would like to try deleting the test database '%s', or 'no' to cancel: " % test_database_name)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database..."
cursor.execute("DROP DATABASE %s" % qn(test_database_name))
if verbosity >= 1:
print "Creating test database..."
cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
return test_database_name
def _rollback_works(self):
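# Probe transaction support: create a throwaway table, insert one row,
# roll back, and report True only if the rollback removed the row.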
cursor = self.connection.cursor()
cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
self.connection._commit()
cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
self.connection._rollback()
cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
count, = cursor.fetchone()
cursor.execute('DROP TABLE ROLLBACK_TEST')
self.connection._commit()
return count == 0
def destroy_test_db(self, old_database_name, verbosity=1):
"""
Destroy a test database, restoring the connection's settings to point
back at the original database. Does not return a value.
"""
if verbosity >= 1:
print "Destroying test database '%s'..." % self.connection.alias
self.connection.close()
test_database_name = self.connection.settings_dict['NAME']
self.connection.settings_dict['NAME'] = old_database_name
self._destroy_test_db(test_database_name, verbosity)
def _destroy_test_db(self, test_database_name, verbosity):
"Internal implementation - remove the test db tables."
# Remove the test database to clean up after
# ourselves. Connect to the previous database (not the test database)
# to do so, because it's not allowed to delete a database while being
# connected to it.
cursor = self.connection.cursor()
self.set_autocommit()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name))
self.connection.close()
def set_autocommit(self):
"Make sure a connection is in autocommit mode."
if hasattr(self.connection.connection, "autocommit"):
if callable(self.connection.connection.autocommit):
self.connection.connection.autocommit(True)
else:
self.connection.connection.autocommit = True
elif hasattr(self.connection.connection, "set_isolation_level"):
self.connection.connection.set_isolation_level(0)
def sql_table_creation_suffix(self):
"SQL to append to the end of the test table creation statements"
return ''
|
gxx/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/forms/localflavor/ie.py
|
89
|
from django.contrib.localflavor.ie.forms import IECountySelect
from utils import LocalFlavorTestCase
class IELocalFlavorTests(LocalFlavorTestCase):
def test_IECountySelect(self):
f = IECountySelect()
out = u'''<select name="counties">
<option value="antrim">Antrim</option>
<option value="armagh">Armagh</option>
<option value="carlow">Carlow</option>
<option value="cavan">Cavan</option>
<option value="clare">Clare</option>
<option value="cork">Cork</option>
<option value="derry">Derry</option>
<option value="donegal">Donegal</option>
<option value="down">Down</option>
<option value="dublin" selected="selected">Dublin</option>
<option value="fermanagh">Fermanagh</option>
<option value="galway">Galway</option>
<option value="kerry">Kerry</option>
<option value="kildare">Kildare</option>
<option value="kilkenny">Kilkenny</option>
<option value="laois">Laois</option>
<option value="leitrim">Leitrim</option>
<option value="limerick">Limerick</option>
<option value="longford">Longford</option>
<option value="louth">Louth</option>
<option value="mayo">Mayo</option>
<option value="meath">Meath</option>
<option value="monaghan">Monaghan</option>
<option value="offaly">Offaly</option>
<option value="roscommon">Roscommon</option>
<option value="sligo">Sligo</option>
<option value="tipperary">Tipperary</option>
<option value="tyrone">Tyrone</option>
<option value="waterford">Waterford</option>
<option value="westmeath">Westmeath</option>
<option value="wexford">Wexford</option>
<option value="wicklow">Wicklow</option>
</select>'''
self.assertEqual(f.render('counties', 'dublin'), out)
|
vriera/micropython
|
refs/heads/master
|
tests/basics/fun_annotations.py
|
118
|
def foo(x: int, y: list) -> dict:
return {x: y}
print(foo(1, [2, 3]))
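# Expected output (annotations are recorded but do not affect the call):
# {1: [2, 3]}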
|
sander76/home-assistant
|
refs/heads/dev
|
tests/components/automation/__init__.py
|
104
|
"""The tests for Automation."""
|
kirill-korolev/slack-banner
|
refs/heads/master
|
hello/migrations/0001_initial.py
|
42
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-27 21:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Greeting',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('when', models.DateTimeField(auto_now_add=True, verbose_name=b'date created')),
],
),
]
|
mcrowson/django
|
refs/heads/master
|
django/utils/checksums.py
|
310
|
"""
Common checksum routines.
"""
__all__ = ['luhn']
import warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
warnings.warn(
"django.utils.checksums will be removed in Django 1.10. The "
"luhn() function is now included in django-localflavor 1.1+.",
RemovedInDjango110Warning
)
LUHN_ODD_LOOKUP = (0, 2, 4, 6, 8, 1, 3, 5, 7, 9) # sum_of_digits(index * 2)
def luhn(candidate):
"""
Checks a candidate number for validity according to the Luhn
algorithm (used in validation of, for example, credit cards).
Both numeric and string candidates are accepted.
"""
if not isinstance(candidate, six.string_types):
candidate = str(candidate)
try:
evens = sum(int(c) for c in candidate[-1::-2])
odds = sum(LUHN_ODD_LOOKUP[int(c)] for c in candidate[-2::-2])
return ((evens + odds) % 10 == 0)
except ValueError: # Raised if an int conversion fails
return False
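# A minimal usage sketch (not part of the original module); '79927398713'
# is the classic valid Luhn example number:
#
#   luhn('79927398713')   # -> True
#   luhn('79927398714')   # -> False (last digit changed, checksum breaks)
#   luhn(79927398713)     # -> True (numeric input is converted to str first)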
|
joerideturck/gcloud-python-bigtable
|
refs/heads/master
|
gcloud_bigtable/happybase/batch.py
|
1
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Bigtable HappyBase batch module."""
import datetime
import six
from gcloud_bigtable._helpers import _microseconds_to_timestamp
from gcloud_bigtable.row import TimestampRange
_WAL_SENTINEL = object()
# Assumed granularity of timestamps in Cloud Bigtable.
_ONE_MILLISECOND = datetime.timedelta(microseconds=1000)
def _get_column_pairs(columns, require_qualifier=False):
"""Turns a list of column or column families in parsed pairs.
Turns a column family (``fam`` or ``fam:``) into a pair such
as ``['fam', None]`` and turns a column (``fam:col``) into
``['fam', 'col']``.
:type columns: list
:param columns: Iterable containing column names (as
strings). Each column name can be either
* an entire column family: ``fam`` or ``fam:``
* a single column: ``fam:col``
:type require_qualifier: bool
:param require_qualifier: Boolean indicating if the columns should
all have a qualifier or not.
:rtype: list
:returns: List of pairs, where the first element in each pair is the
column family and the second is the column qualifier
(or :data:`None`).
:raises: :class:`ValueError <exceptions.ValueError>` if any of the columns
are not of the expected format.
:class:`ValueError <exceptions.ValueError>` if
``require_qualifier`` is :data:`True` and one of the values is
for an entire column family
"""
column_pairs = []
for column in columns:
# Remove trailing colons (i.e. for standalone column family).
column = column.rstrip(':')
num_colons = column.count(':')
if num_colons == 0:
# column is a column family.
if require_qualifier:
raise ValueError('column does not contain a qualifier',
column)
else:
column_pairs.append([column, None])
elif num_colons == 1:
column_pairs.append(column.split(':'))
else:
raise ValueError('Column contains the : separator more than once')
return column_pairs
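# Illustrative behavior, following the docstring above:
#
#   _get_column_pairs(['cf1', 'cf2:', 'cf3:col'])
#   # -> [['cf1', None], ['cf2', None], ['cf3', 'col']]
#   _get_column_pairs(['cf1'], require_qualifier=True)  # raises ValueError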
class Batch(object):
"""Batch class for accumulating mutations.
:type table: :class:`Table <gcloud_bigtable.happybase.table.Table>`
:param table: The table where mutations will be applied.
:type timestamp: int
:param timestamp: (Optional) Timestamp (in milliseconds since the epoch)
that all mutations will be applied at.
:type batch_size: int
:param batch_size: (Optional) The maximum number of mutations to allow
to accumulate before committing them.
:type transaction: bool
:param transaction: Flag indicating if the mutations should be sent
transactionally or not. If ``transaction=True`` and
an error occurs while a :class:`Batch` is active,
then none of the accumulated mutations will be
committed. If ``batch_size`` is set, the mutation
can't be transactional.
:type wal: object
:param wal: Unused parameter (Boolean for using the HBase Write Ahead Log).
Provided for compatibility with HappyBase, but irrelevant for
Cloud Bigtable since it does not have a Write Ahead Log.
:raises: :class:`TypeError <exceptions.TypeError>` if ``batch_size``
is set and ``transaction=True``.
:class:`ValueError <exceptions.ValueError>` if ``batch_size``
is not positive.
:class:`ValueError <exceptions.ValueError>` if ``wal``
is used.
"""
def __init__(self, table, timestamp=None, batch_size=None,
transaction=False, wal=_WAL_SENTINEL):
if wal is not _WAL_SENTINEL:
raise ValueError('The wal argument cannot be used with '
'Cloud Bigtable.')
if batch_size is not None:
if transaction:
raise TypeError('When batch_size is set, a Batch cannot be '
'transactional')
if batch_size <= 0:
raise ValueError('batch_size must be positive')
self._table = table
self._batch_size = batch_size
# Timestamp is in milliseconds, convert to microseconds.
self._timestamp = self._delete_range = None
if timestamp is not None:
self._timestamp = _microseconds_to_timestamp(1000 * timestamp)
# For deletes, we get the very next timestamp (assuming timestamp
# granularity is milliseconds). This is because HappyBase users
# expect HBase deletes to go **up to** and **including** the
# timestamp while Cloud Bigtable Time Ranges **exclude** the
# final timestamp.
next_timestamp = self._timestamp + _ONE_MILLISECOND
self._delete_range = TimestampRange(end=next_timestamp)
self._transaction = transaction
# Internal state for tracking mutations.
self._row_map = {}
self._mutation_count = 0
def send(self):
"""Send / commit the batch of mutations to the server."""
for row in self._row_map.values():
# commit() does nothing if row hasn't accumulated any mutations.
row.commit()
self._row_map.clear()
self._mutation_count = 0
def _try_send(self):
"""Send / commit the batch if mutations have exceeded batch size."""
if self._batch_size and self._mutation_count >= self._batch_size:
self.send()
def _get_row(self, row_key):
"""Gets a row that will hold mutations.
If the row is not already cached on the current batch, a new row will
be created.
:type row_key: str
:param row_key: The row key for a row stored in the map.
:rtype: :class:`Row <gcloud_bigtable.row.Row>`
:returns: The newly created or stored row that will hold mutations.
"""
if row_key not in self._row_map:
table = self._table._low_level_table
self._row_map[row_key] = table.row(row_key)
return self._row_map[row_key]
def put(self, row, data, wal=_WAL_SENTINEL):
"""Insert data into a row in the table owned by this batch.
:type row: str
:param row: The row key where the mutation will be "put".
:type data: dict
:param data: Dictionary containing the data to be inserted. The keys
are columns names (of the form ``fam:col``) and the values
are strings (bytes) to be stored in those columns.
:type wal: object
:param wal: Unused parameter (to over-ride the default on the
instance). Provided for compatibility with HappyBase, but
irrelevant for Cloud Bigtable since it does not have a
Write Ahead Log.
:raises: :class:`ValueError <exceptions.ValueError>` if ``wal``
is used.
"""
if wal is not _WAL_SENTINEL:
raise ValueError('The wal argument cannot be used with '
'Cloud Bigtable.')
row_object = self._get_row(row)
# Make sure all the keys are valid before beginning
# to add mutations.
column_pairs = _get_column_pairs(six.iterkeys(data),
require_qualifier=True)
for column_family_id, column_qualifier in column_pairs:
value = data[column_family_id + ':' + column_qualifier]
row_object.set_cell(column_family_id, column_qualifier,
value, timestamp=self._timestamp)
self._mutation_count += len(data)
self._try_send()
def _delete_columns(self, columns, row_object):
"""Adds delete mutations for a list of columns and column families.
:type columns: list
:param columns: Iterable containing column names (as
strings). Each column name can be either
* an entire column family: ``fam`` or ``fam:``
* a single column: ``fam:col``
:type row_object: :class:`Row <gcloud_bigtable.row.Row>`
:param row_object: The row which will hold the delete mutations.
:raises: :class:`ValueError <exceptions.ValueError>` if the delete
timestamp range is set on the current batch, but a
column family delete is attempted.
"""
column_pairs = _get_column_pairs(columns)
for column_family_id, column_qualifier in column_pairs:
if column_qualifier is None:
if self._delete_range is not None:
raise ValueError('The Cloud Bigtable API does not support '
'adding a timestamp to '
'"DeleteFromFamily" ')
row_object.delete_cells(column_family_id,
columns=row_object.ALL_COLUMNS)
else:
row_object.delete_cell(column_family_id,
column_qualifier,
time_range=self._delete_range)
def delete(self, row, columns=None, wal=_WAL_SENTINEL):
"""Delete data from a row in the table owned by this batch.
:type row: str
:param row: The row key where the delete will occur.
:type columns: list
:param columns: (Optional) Iterable containing column names (as
strings). Each column name can be either
* an entire column family: ``fam`` or ``fam:``
* a single column: ``fam:col``
If not used, will delete the entire row.
:type wal: object
:param wal: Unused parameter (to over-ride the default on the
instance). Provided for compatibility with HappyBase, but
irrelevant for Cloud Bigtable since it does not have a
Write Ahead Log.
:raises: :class:`ValueError <exceptions.ValueError>` if ``wal``
is used, or if the delete timestamp range is set on the
current batch, but a full row delete is attempted.
"""
if wal is not _WAL_SENTINEL:
raise ValueError('The wal argument cannot be used with '
'Cloud Bigtable.')
row_object = self._get_row(row)
if columns is None:
# Delete entire row.
if self._delete_range is not None:
raise ValueError('The Cloud Bigtable API does not support '
'adding a timestamp to "DeleteFromRow" '
'mutations')
row_object.delete()
self._mutation_count += 1
else:
self._delete_columns(columns, row_object)
self._mutation_count += len(columns)
self._try_send()
def __enter__(self):
"""Enter context manager, no set-up required."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Exit context manager, no set-up required.
:type exc_type: type
:param exc_type: The type of the exception if one occurred while the
context manager was active. Otherwise, :data:`None`.
:type exc_value: :class:`Exception <exceptions.Exception>`
:param exc_value: An instance of ``exc_type`` if an exception occurred
while the context was active.
Otherwise, :data:`None`.
:type traceback: ``traceback`` type
:param traceback: The traceback where the exception occurred (if one
did occur). Otherwise, :data:`None`.
"""
# If the context manager encountered an exception and the batch is
# transactional, we don't commit the mutations.
if self._transaction and exc_type is not None:
return
# NOTE: For non-transactional batches, this will even commit mutations
# if an error occurred during the context manager.
self.send()
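# A hedged usage sketch (the table object is hypothetical and must come
# from the happybase-compatible Table wrapper in this package):
#
#   batch = Batch(table, transaction=True)
#   batch.put('row-key', {'cf:col': 'value'})
#   batch.delete('other-row', columns=['cf:col'])
#   batch.send()
#
# or as a context manager, which commits automatically on a clean exit
# and discards mutations if a transactional batch raises:
#
#   with Batch(table, transaction=True) as batch:
#       batch.put('row-key', {'cf:col': 'value'})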
|
trentm/node-gyp
|
refs/heads/master
|
gyp/test/mac/gyptest-missing-cfbundlesignature.py
|
298
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that an Info.plist missing its CFBundleSignature works.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='missing-cfbundlesignature')
test.build('test.gyp', test.ALL, chdir='missing-cfbundlesignature')
test.built_file_must_match('mytarget.app/Contents/PkgInfo', 'APPL????',
chdir='missing-cfbundlesignature')
test.built_file_must_match('myothertarget.app/Contents/PkgInfo', 'APPL????',
chdir='missing-cfbundlesignature')
test.built_file_must_match('thirdtarget.app/Contents/PkgInfo', 'APPL????',
chdir='missing-cfbundlesignature')
test.pass_test()
|
Acimaz/Google_Apple_Financial_Reporter
|
refs/heads/master
|
lib/third_party/oauth2client/client.py
|
8
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An OAuth 2.0 client.
Tools for interacting with OAuth 2.0 protected resources.
"""
import base64
import collections
import copy
import datetime
import json
import logging
import os
import socket
import sys
import tempfile
import time
import shutil
import six
from six.moves import urllib
import httplib2
from oauth2client import clientsecrets
from oauth2client import GOOGLE_AUTH_URI
from oauth2client import GOOGLE_DEVICE_URI
from oauth2client import GOOGLE_REVOKE_URI
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client import util
HAS_OPENSSL = False
HAS_CRYPTO = False
try:
from oauth2client import crypt
HAS_CRYPTO = True
if crypt.OpenSSLVerifier is not None:
HAS_OPENSSL = True
except ImportError:
pass
logger = logging.getLogger(__name__)
# Expiry is stored in RFC3339 UTC format
EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# Which certs to use to validate id_tokens received.
ID_TOKEN_VERIFICATION_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
# This symbol previously had a typo in the name; we keep the old name
# around for now, but will remove it in the future.
ID_TOKEN_VERIFICATON_CERTS = ID_TOKEN_VERIFICATION_CERTS
# Constant to use for the out of band OAuth 2.0 flow.
OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob'
# Google Data client libraries may need to set this to [401, 403].
REFRESH_STATUS_CODES = [401]
# The value representing user credentials.
AUTHORIZED_USER = 'authorized_user'
# The value representing service account credentials.
SERVICE_ACCOUNT = 'service_account'
# The environment variable pointing the file with local
# Application Default Credentials.
GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS'
# The ~/.config subdirectory containing gcloud credentials. Intended
# to be swapped out in tests.
_CLOUDSDK_CONFIG_DIRECTORY = 'gcloud'
# The environment variable name which can replace ~/.config if set.
_CLOUDSDK_CONFIG_ENV_VAR = 'CLOUDSDK_CONFIG'
# The error message we show users when we can't find the Application
# Default Credentials.
ADC_HELP_MSG = (
'The Application Default Credentials are not available. They are available '
'if running in Google Compute Engine. Otherwise, the environment variable '
+ GOOGLE_APPLICATION_CREDENTIALS +
' must be defined pointing to a file defining the credentials. See '
'https://developers.google.com/accounts/docs/application-default-credentials' # pylint:disable=line-too-long
' for more information.')
# The access token along with the seconds in which it expires.
AccessTokenInfo = collections.namedtuple(
'AccessTokenInfo', ['access_token', 'expires_in'])
DEFAULT_ENV_NAME = 'UNKNOWN'
# If set to True _get_environment avoid GCE check (_detect_gce_environment)
NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False')
class SETTINGS(object):
"""Settings namespace for globally defined values."""
env_name = None
class Error(Exception):
"""Base error for this module."""
class FlowExchangeError(Error):
"""Error trying to exchange an authorization grant for an access token."""
class AccessTokenRefreshError(Error):
"""Error trying to refresh an expired access token."""
class TokenRevokeError(Error):
"""Error trying to revoke a token."""
class UnknownClientSecretsFlowError(Error):
"""The client secrets file called for an unknown type of OAuth 2.0 flow. """
class AccessTokenCredentialsError(Error):
"""Having only the access_token means no refresh is possible."""
class HttpAccessTokenRefreshError(AccessTokenRefreshError):
"""Error (with HTTP status) trying to refresh an expired access token."""
def __init__(self, *args, **kwargs):
super(HttpAccessTokenRefreshError, self).__init__(*args)
self.status = kwargs.get('status')
class VerifyJwtTokenError(Error):
"""Could not retrieve certificates for validation."""
class NonAsciiHeaderError(Error):
"""Header names and values must be ASCII strings."""
class ApplicationDefaultCredentialsError(Error):
"""Error retrieving the Application Default Credentials."""
class OAuth2DeviceCodeError(Error):
"""Error trying to retrieve a device code."""
class CryptoUnavailableError(Error, NotImplementedError):
"""Raised when a crypto library is required, but none is available."""
def _abstract():
raise NotImplementedError('You need to override this function')
class MemoryCache(object):
"""httplib2 Cache implementation which only caches locally."""
def __init__(self):
self.cache = {}
def get(self, key):
return self.cache.get(key)
def set(self, key, value):
self.cache[key] = value
def delete(self, key):
self.cache.pop(key, None)
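# Illustrative use (httplib2's first constructor argument is the cache):
#
#   http = httplib2.Http(MemoryCache())
#
# Any object exposing this get/set/delete interface is accepted.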
class Credentials(object):
"""Base class for all Credentials objects.
Subclasses must define an authorize() method that applies the credentials to
an HTTP transport.
Subclasses must also specify a classmethod named 'from_json' that takes a JSON
string as input and returns an instantiated Credentials object.
"""
NON_SERIALIZED_MEMBERS = ['store']
def authorize(self, http):
"""Take an httplib2.Http instance (or equivalent) and authorizes it.
Authorizes it for the set of credentials, usually by replacing
http.request() with a method that adds in the appropriate headers and then
delegates to the original Http.request() method.
Args:
http: httplib2.Http, an http object to be used to make the refresh
request.
"""
_abstract()
def refresh(self, http):
"""Forces a refresh of the access_token.
Args:
http: httplib2.Http, an http object to be used to make the refresh
request.
"""
_abstract()
def revoke(self, http):
"""Revokes a refresh_token and makes the credentials void.
Args:
http: httplib2.Http, an http object to be used to make the revoke
request.
"""
_abstract()
def apply(self, headers):
"""Add the authorization to the headers.
Args:
headers: dict, the headers to add the Authorization header to.
"""
_abstract()
def _to_json(self, strip):
"""Utility function that creates JSON repr. of a Credentials object.
Args:
strip: array, An array of names of members to not include in the JSON.
Returns:
string, a JSON representation of this instance, suitable to pass to
from_json().
"""
t = type(self)
d = copy.copy(self.__dict__)
for member in strip:
if member in d:
del d[member]
if (d.get('token_expiry') and
isinstance(d['token_expiry'], datetime.datetime)):
d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
# Add in information we will need later to reconstitute this instance.
d['_class'] = t.__name__
d['_module'] = t.__module__
for key, val in d.items():
if isinstance(val, bytes):
d[key] = val.decode('utf-8')
return json.dumps(d)
def to_json(self):
"""Creating a JSON representation of an instance of Credentials.
Returns:
string, a JSON representation of this instance, suitable to pass to
from_json().
"""
return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
@classmethod
def new_from_json(cls, s):
"""Utility class method to instantiate a Credentials subclass from a JSON
representation produced by to_json().
Args:
s: string, JSON from to_json().
Returns:
An instance of the subclass of Credentials that was serialized with
to_json().
"""
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s)
# Find and call the right classmethod from_json() to restore the object.
module = data['_module']
try:
m = __import__(module)
except ImportError:
# In case there's an object from the old package structure, update it
module = module.replace('.googleapiclient', '')
m = __import__(module)
m = __import__(module, fromlist=module.split('.')[:-1])
kls = getattr(m, data['_class'])
from_json = getattr(kls, 'from_json')
return from_json(s)
@classmethod
def from_json(cls, unused_data):
"""Instantiate a Credentials object from a JSON description of it.
The JSON should have been produced by calling .to_json() on the object.
Args:
unused_data: dict, A deserialized JSON object.
Returns:
An instance of a Credentials subclass.
"""
return Credentials()
class Flow(object):
"""Base class for all Flow objects."""
pass
class Storage(object):
"""Base class for all Storage objects.
Store and retrieve a single credential. This class supports locking
such that multiple processes and threads can operate on a single
store.
"""
def acquire_lock(self):
"""Acquires any lock necessary to access this Storage.
This lock is not reentrant.
"""
pass
def release_lock(self):
"""Release the Storage lock.
Trying to release a lock that isn't held will result in a
RuntimeError.
"""
pass
def locked_get(self):
"""Retrieve credential.
The Storage lock must be held when this is called.
Returns:
oauth2client.client.Credentials
"""
_abstract()
def locked_put(self, credentials):
"""Write a credential.
The Storage lock must be held when this is called.
Args:
credentials: Credentials, the credentials to store.
"""
_abstract()
def locked_delete(self):
"""Delete a credential.
The Storage lock must be held when this is called.
"""
_abstract()
def get(self):
"""Retrieve credential.
The Storage lock must *not* be held when this is called.
Returns:
oauth2client.client.Credentials
"""
self.acquire_lock()
try:
return self.locked_get()
finally:
self.release_lock()
def put(self, credentials):
"""Write a credential.
The Storage lock must *not* be held when this is called.
Args:
credentials: Credentials, the credentials to store.
"""
self.acquire_lock()
try:
self.locked_put(credentials)
finally:
self.release_lock()
def delete(self):
"""Delete credential.
Frees any resources associated with storing the credential.
The Storage lock must *not* be held when this is called.
Returns:
None
"""
self.acquire_lock()
try:
return self.locked_delete()
finally:
self.release_lock()
def clean_headers(headers):
"""Forces header keys and values to be strings, i.e not unicode.
The httplib module just concats the header keys and values in a way that may
make the message header a unicode string, which, if it then tries to
contatenate to a binary request body may result in a unicode decode error.
Args:
headers: dict, A dictionary of headers.
Returns:
The same dictionary but with all the keys and values converted to strings.
"""
clean = {}
try:
for k, v in six.iteritems(headers):
clean_k = k if isinstance(k, bytes) else str(k).encode('ascii')
clean_v = v if isinstance(v, bytes) else str(v).encode('ascii')
clean[clean_k] = clean_v
except UnicodeEncodeError:
raise NonAsciiHeaderError(k + ': ' + v)
return clean
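# Illustrative behavior under the Python 2 semantics this module assumes:
#
#   clean_headers({u'X-Header': u'value'})   # -> {'X-Header': 'value'} (ASCII bytes)
#   clean_headers({u'X-Header': u'\u2603'})  # raises NonAsciiHeaderError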
def _update_query_params(uri, params):
"""Updates a URI with new query parameters.
Args:
uri: string, A valid URI, with potential existing query parameters.
params: dict, A dictionary of query parameters.
Returns:
The same URI but with the new query parameters added.
"""
parts = urllib.parse.urlparse(uri)
query_params = dict(urllib.parse.parse_qsl(parts.query))
query_params.update(params)
new_parts = parts._replace(query=urllib.parse.urlencode(query_params))
return urllib.parse.urlunparse(new_parts)
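# Illustrative behavior (result parameter order may vary, since the query
# round-trips through a dict):
#
#   _update_query_params('https://example.com/path?a=1', {'b': '2'})
#   # -> 'https://example.com/path?a=1&b=2'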
class OAuth2Credentials(Credentials):
"""Credentials object for OAuth 2.0.
Credentials can be applied to an httplib2.Http object using the authorize()
method, which then adds the OAuth 2.0 access token to each request.
OAuth2Credentials objects may be safely pickled and unpickled.
"""
@util.positional(8)
def __init__(self, access_token, client_id, client_secret, refresh_token,
token_expiry, token_uri, user_agent, revoke_uri=None,
id_token=None, token_response=None):
"""Create an instance of OAuth2Credentials.
This constructor is not usually called by the user, instead
OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
Args:
access_token: string, access token.
client_id: string, client identifier.
client_secret: string, client secret.
refresh_token: string, refresh token.
token_expiry: datetime, when the access_token expires.
token_uri: string, URI of token endpoint.
user_agent: string, The HTTP User-Agent to provide for this application.
revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
can't be revoked if this is None.
id_token: object, The identity of the resource owner.
token_response: dict, the decoded response to the token request. None
if a token hasn't been requested yet. Stored because some providers
(e.g. wordpress.com) include extra fields that clients may want.
Notes:
store: callable, A callable that when passed a Credential
will store the credential back to where it came from.
This is needed to store the latest access_token if it
has expired and been refreshed.
"""
self.access_token = access_token
self.client_id = client_id
self.client_secret = client_secret
self.refresh_token = refresh_token
self.store = None
self.token_expiry = token_expiry
self.token_uri = token_uri
self.user_agent = user_agent
self.revoke_uri = revoke_uri
self.id_token = id_token
self.token_response = token_response
# True if the credentials have been revoked or expired and can't be
# refreshed.
self.invalid = False
def authorize(self, http):
"""Authorize an httplib2.Http instance with these credentials.
The modified http.request method will add authentication headers to each
request and will refresh access_tokens when a 401 is received on a
request. In addition the http.request method has a credentials property,
http.request.credentials, which is the Credentials object that authorized
it.
Args:
http: An instance of ``httplib2.Http`` or something that acts
like it.
Returns:
A modified instance of http that was passed in.
Example::
h = httplib2.Http()
h = credentials.authorize(h)
You can't create a new OAuth subclass of httplib2.Authentication
because it never gets passed the absolute URI, which is needed for
signing. So instead we have to overload 'request' with a closure
that adds in the Authorization header and then calls the original
version of 'request()'.
"""
request_orig = http.request
# The closure that will replace 'httplib2.Http.request'.
def new_request(uri, method='GET', body=None, headers=None,
redirections=httplib2.DEFAULT_MAX_REDIRECTS,
connection_type=None):
if not self.access_token:
logger.info('Attempting refresh to obtain initial access_token')
self._refresh(request_orig)
# Clone and modify the request headers to add the appropriate
# Authorization header.
if headers is None:
headers = {}
else:
headers = dict(headers)
self.apply(headers)
if self.user_agent is not None:
if 'user-agent' in headers:
headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
else:
headers['user-agent'] = self.user_agent
body_stream_position = None
if all(getattr(body, stream_prop, None) for stream_prop in
('read', 'seek', 'tell')):
body_stream_position = body.tell()
resp, content = request_orig(uri, method, body, clean_headers(headers),
redirections, connection_type)
# A stored token may expire between the time it is retrieved and the time
# the request is made, so we may need to try twice.
max_refresh_attempts = 2
for refresh_attempt in range(max_refresh_attempts):
if resp.status not in REFRESH_STATUS_CODES:
break
logger.info('Refreshing due to a %s (attempt %s/%s)', resp.status,
refresh_attempt + 1, max_refresh_attempts)
self._refresh(request_orig)
self.apply(headers)
if body_stream_position is not None:
body.seek(body_stream_position)
resp, content = request_orig(uri, method, body, clean_headers(headers),
redirections, connection_type)
return (resp, content)
# Replace the request method with our own closure.
http.request = new_request
# Set credentials as a property of the request method.
setattr(http.request, 'credentials', self)
return http
def refresh(self, http):
"""Forces a refresh of the access_token.
Args:
http: httplib2.Http, an http object to be used to make the refresh
request.
"""
self._refresh(http.request)
def revoke(self, http):
"""Revokes a refresh_token and makes the credentials void.
Args:
http: httplib2.Http, an http object to be used to make the revoke
request.
"""
self._revoke(http.request)
def apply(self, headers):
"""Add the authorization to the headers.
Args:
headers: dict, the headers to add the Authorization header to.
"""
headers['Authorization'] = 'Bearer ' + self.access_token
def to_json(self):
return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
@classmethod
def from_json(cls, s):
"""Instantiate a Credentials object from a JSON description of it. The JSON
should have been produced by calling .to_json() on the object.
Args:
data: dict, A deserialized JSON object.
Returns:
An instance of a Credentials subclass.
"""
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s)
if (data.get('token_expiry') and
not isinstance(data['token_expiry'], datetime.datetime)):
try:
data['token_expiry'] = datetime.datetime.strptime(
data['token_expiry'], EXPIRY_FORMAT)
except ValueError:
data['token_expiry'] = None
retval = cls(
data['access_token'],
data['client_id'],
data['client_secret'],
data['refresh_token'],
data['token_expiry'],
data['token_uri'],
data['user_agent'],
revoke_uri=data.get('revoke_uri', None),
id_token=data.get('id_token', None),
token_response=data.get('token_response', None))
retval.invalid = data['invalid']
return retval
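# Editor's sketch: to_json()/from_json() are symmetric, so credentials can be
# persisted as plain text and restored later. ``creds`` below is a
# hypothetical OAuth2Credentials instance.
def _example_json_round_trip(creds):
    serialized = creds.to_json()  # JSON string, safe to write to storage
    restored = OAuth2Credentials.from_json(serialized)
    assert restored.access_token == creds.access_token
    return restored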
@property
def access_token_expired(self):
"""True if the credential is expired or invalid.
If the token_expiry isn't set, we assume the token doesn't expire.
"""
if self.invalid:
return True
if not self.token_expiry:
return False
now = datetime.datetime.utcnow()
if now >= self.token_expiry:
logger.info('access_token is expired. Now: %s, token_expiry: %s',
now, self.token_expiry)
return True
return False
def get_access_token(self, http=None):
"""Return the access token and its expiration information.
If the token does not exist, get one.
If the token expired, refresh it.
"""
if not self.access_token or self.access_token_expired:
if not http:
http = httplib2.Http()
self.refresh(http)
return AccessTokenInfo(access_token=self.access_token,
expires_in=self._expires_in())
def set_store(self, store):
"""Set the Storage for the credential.
Args:
store: Storage, an implementation of Storage object.
This is needed to store the latest access_token if it
has expired and been refreshed. This implementation uses
locking to check for updates before updating the
access_token.
"""
self.store = store
def _expires_in(self):
"""Return the number of seconds until this token expires.
If token_expiry is in the past, this method will return 0, meaning the
token has already expired.
If token_expiry is None, this method will return None. Note that returning
0 in such a case would not be fair: the token may still be valid;
we just don't know anything about it.
"""
if self.token_expiry:
now = datetime.datetime.utcnow()
if self.token_expiry > now:
time_delta = self.token_expiry - now
# TODO(user): return time_delta.total_seconds()
# once dropping support for Python 2.6
return time_delta.days * 86400 + time_delta.seconds
else:
return 0
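# Editor's check of the arithmetic above: for deltas with whole-second
# resolution, days * 86400 + seconds equals total_seconds(), which is why
# the Python 2.6 workaround is safe.
def _example_expires_in_arithmetic():
    delta = datetime.timedelta(days=1, seconds=30)
    assert delta.days * 86400 + delta.seconds == int(delta.total_seconds())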
def _updateFromCredential(self, other):
"""Update this Credential from another instance."""
self.__dict__.update(other.__getstate__())
def __getstate__(self):
"""Trim the state down to something that can be pickled."""
d = copy.copy(self.__dict__)
del d['store']
return d
def __setstate__(self, state):
"""Reconstitute the state of the object from being pickled."""
self.__dict__.update(state)
self.store = None
def _generate_refresh_request_body(self):
"""Generate the body that will be used in the refresh request."""
body = urllib.parse.urlencode({
'grant_type': 'refresh_token',
'client_id': self.client_id,
'client_secret': self.client_secret,
'refresh_token': self.refresh_token,
})
return body
def _generate_refresh_request_headers(self):
"""Generate the headers that will be used in the refresh request."""
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
if self.user_agent is not None:
headers['user-agent'] = self.user_agent
return headers
def _refresh(self, http_request):
"""Refreshes the access_token.
This method first checks by reading the Storage object if available.
If a refresh is still needed, it holds the Storage lock until the
refresh is completed.
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the refresh request.
Raises:
AccessTokenRefreshError: When the refresh fails.
"""
if not self.store:
self._do_refresh_request(http_request)
else:
self.store.acquire_lock()
try:
new_cred = self.store.locked_get()
if (new_cred and not new_cred.invalid and
new_cred.access_token != self.access_token and
not new_cred.access_token_expired):
logger.info('Updated access_token read from Storage')
self._updateFromCredential(new_cred)
else:
self._do_refresh_request(http_request)
finally:
self.store.release_lock()
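# Editor's sketch of the minimal Storage contract that _refresh() relies on.
# This hypothetical in-memory implementation is for illustration only; real
# implementations (file, keyring, App Engine) ship with oauth2client.
class _ExampleMemoryStorage(object):
    def __init__(self):
        import threading  # assumed available; not imported by this module
        self._lock = threading.Lock()
        self._credentials = None

    def acquire_lock(self):
        self._lock.acquire()

    def release_lock(self):
        self._lock.release()

    def locked_get(self):
        # Called while the lock is held; may return None.
        return self._credentials

    def locked_put(self, credentials):
        self._credentials = credentials

    def delete(self):
        self._credentials = None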
def _do_refresh_request(self, http_request):
"""Refresh the access_token using the refresh_token.
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the refresh request.
Raises:
AccessTokenRefreshError: When the refresh fails.
"""
body = self._generate_refresh_request_body()
headers = self._generate_refresh_request_headers()
logger.info('Refreshing access_token')
resp, content = http_request(
self.token_uri, method='POST', body=body, headers=headers)
if six.PY3 and isinstance(content, bytes):
content = content.decode('utf-8')
if resp.status == 200:
d = json.loads(content)
self.token_response = d
self.access_token = d['access_token']
self.refresh_token = d.get('refresh_token', self.refresh_token)
if 'expires_in' in d:
self.token_expiry = datetime.timedelta(
seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
else:
self.token_expiry = None
# On temporary refresh errors, the user does not actually have to
# re-authorize, so we unflag here.
self.invalid = False
if self.store:
self.store.locked_put(self)
else:
# An {'error':...} response body means the token is expired or revoked,
# so we flag the credentials as such.
logger.info('Failed to retrieve access token: %s', content)
error_msg = 'Invalid response %s.' % resp['status']
try:
d = json.loads(content)
if 'error' in d:
error_msg = d['error']
if 'error_description' in d:
error_msg += ': ' + d['error_description']
self.invalid = True
if self.store:
self.store.locked_put(self)
except (TypeError, ValueError):
pass
raise AccessTokenRefreshError(error_msg)
def _revoke(self, http_request):
"""Revokes this credential and deletes the stored copy (if it exists).
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the revoke request.
"""
self._do_revoke(http_request, self.refresh_token or self.access_token)
def _do_revoke(self, http_request, token):
"""Revokes this credential and deletes the stored copy (if it exists).
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the revoke request.
token: A string used as the token to be revoked. Can be either an
access_token or refresh_token.
Raises:
TokenRevokeError: If the revoke request does not return with a 200 OK.
"""
logger.info('Revoking token')
query_params = {'token': token}
token_revoke_uri = _update_query_params(self.revoke_uri, query_params)
resp, content = http_request(token_revoke_uri)
if resp.status == 200:
self.invalid = True
else:
error_msg = 'Invalid response %s.' % resp.status
try:
d = json.loads(content)
if 'error' in d:
error_msg = d['error']
except (TypeError, ValueError):
pass
raise TokenRevokeError(error_msg)
if self.store:
self.store.delete()
class AccessTokenCredentials(OAuth2Credentials):
"""Credentials object for OAuth 2.0.
Credentials can be applied to an httplib2.Http object using the
authorize() method, which then signs each request from that object
with the OAuth 2.0 access token. This set of credentials is for the
use case where you have acquired an OAuth 2.0 access_token from
another place such as a JavaScript client or another web
application, and wish to use it from Python. Because only the
access_token is present it can not be refreshed and will in time
expire.
AccessTokenCredentials objects may be safely pickled and unpickled.
Usage::
credentials = AccessTokenCredentials('<an access token>',
'my-user-agent/1.0')
http = httplib2.Http()
http = credentials.authorize(http)
Exceptions:
AccessTokenCredentialsExpired: raised when the access_token expires or is
revoked.
"""
def __init__(self, access_token, user_agent, revoke_uri=None):
"""Create an instance of OAuth2Credentials
This is one of the few types if Credentials that you should contrust,
Credentials objects are usually instantiated by a Flow.
Args:
access_token: string, access token.
user_agent: string, The HTTP User-Agent to provide for this application.
revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
can't be revoked if this is None.
"""
super(AccessTokenCredentials, self).__init__(
access_token,
None,
None,
None,
None,
None,
user_agent,
revoke_uri=revoke_uri)
@classmethod
def from_json(cls, s):
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s)
retval = AccessTokenCredentials(
data['access_token'],
data['user_agent'])
return retval
def _refresh(self, http_request):
raise AccessTokenCredentialsError(
'The access_token is expired or invalid and can\'t be refreshed.')
def _revoke(self, http_request):
"""Revokes the access_token and deletes the store if available.
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the revoke request.
"""
self._do_revoke(http_request, self.access_token)
def _detect_gce_environment(urlopen=None):
"""Determine if the current environment is Compute Engine.
Args:
urlopen: Optional argument. Function used to open a connection to a URL.
Returns:
Boolean indicating whether or not the current environment is Google
Compute Engine.
"""
urlopen = urlopen or urllib.request.urlopen
# Note: the explicit `timeout` below is a workaround. The underlying
# issue is that resolving an unknown host on some networks will take
# 20-30 seconds; making this timeout short fixes the issue, but
# could lead to false negatives in the event that we are on GCE, but
# the metadata resolution was particularly slow. The latter case is
# "unlikely".
try:
response = urlopen('http://169.254.169.254/', timeout=1)
return response.info().get('Metadata-Flavor', '') == 'Google'
except socket.timeout:
logger.info('Timeout attempting to reach GCE metadata service.')
return False
except urllib.error.URLError as e:
    # Any URLError means we are not on GCE; log the timeout case explicitly.
    if isinstance(getattr(e, 'reason', None), socket.timeout):
        logger.info('Timeout attempting to reach GCE metadata service.')
    return False
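# Editor's sketch: because urlopen is injectable, the GCE check can be
# exercised without network access. The fake response below mimics the
# Metadata-Flavor header returned by the real metadata service.
def _example_gce_detection_with_stub():
    class _FakeMetadataResponse(object):
        def info(self):
            return {'Metadata-Flavor': 'Google'}

    def _fake_urlopen(url, timeout=None):
        return _FakeMetadataResponse()

    return _detect_gce_environment(urlopen=_fake_urlopen)  # returns True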
def _in_gae_environment():
"""Detects if the code is running in the App Engine environment.
Returns:
True if running in the GAE environment, False otherwise.
"""
if SETTINGS.env_name is not None:
return SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL')
try:
import google.appengine
server_software = os.environ.get('SERVER_SOFTWARE', '')
if server_software.startswith('Google App Engine/'):
SETTINGS.env_name = 'GAE_PRODUCTION'
return True
elif server_software.startswith('Development/'):
SETTINGS.env_name = 'GAE_LOCAL'
return True
except ImportError:
pass
return False
def _in_gce_environment(urlopen=None):
"""Detect if the code is running in the Compute Engine environment.
Args:
urlopen: Optional argument. Function used to open a connection to a URL.
Returns:
True if running in the GCE environment, False otherwise.
"""
if SETTINGS.env_name is not None:
return SETTINGS.env_name == 'GCE_PRODUCTION'
if NO_GCE_CHECK != 'True' and _detect_gce_environment(urlopen=urlopen):
SETTINGS.env_name = 'GCE_PRODUCTION'
return True
return False
class GoogleCredentials(OAuth2Credentials):
"""Application Default Credentials for use in calling Google APIs.
The Application Default Credentials are being constructed as a function of
the environment where the code is being run.
More details can be found on this page:
https://developers.google.com/accounts/docs/application-default-credentials
Here is an example of how to use the Application Default Credentials for a
service that requires authentication:
from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
credentials = GoogleCredentials.get_application_default()
service = build('compute', 'v1', credentials=credentials)
PROJECT = 'bamboo-machine-422'
ZONE = 'us-central1-a'
request = service.instances().list(project=PROJECT, zone=ZONE)
response = request.execute()
print(response)
"""
def __init__(self, access_token, client_id, client_secret, refresh_token,
token_expiry, token_uri, user_agent,
revoke_uri=GOOGLE_REVOKE_URI):
"""Create an instance of GoogleCredentials.
This constructor is not usually called by the user, instead
GoogleCredentials objects are instantiated by
GoogleCredentials.from_stream() or
GoogleCredentials.get_application_default().
Args:
access_token: string, access token.
client_id: string, client identifier.
client_secret: string, client secret.
refresh_token: string, refresh token.
token_expiry: datetime, when the access_token expires.
token_uri: string, URI of token endpoint.
user_agent: string, The HTTP User-Agent to provide for this application.
revoke_uri: string, URI for revoke endpoint.
Defaults to GOOGLE_REVOKE_URI; a token can't be revoked if this is None.
"""
super(GoogleCredentials, self).__init__(
access_token, client_id, client_secret, refresh_token, token_expiry,
token_uri, user_agent, revoke_uri=revoke_uri)
def create_scoped_required(self):
"""Whether this Credentials object is scopeless.
create_scoped(scopes) method needs to be called in order to create
a Credentials object for API calls.
"""
return False
def create_scoped(self, scopes):
"""Create a Credentials object for the given scopes.
The Credentials type is preserved.
"""
return self
@property
def serialization_data(self):
"""Get the fields and their values identifying the current credentials."""
return {
'type': 'authorized_user',
'client_id': self.client_id,
'client_secret': self.client_secret,
'refresh_token': self.refresh_token
}
@staticmethod
def _implicit_credentials_from_gae():
"""Attempts to get implicit credentials in Google App Engine env.
If the current environment is not detected as App Engine, returns None,
indicating no Google App Engine credentials can be detected from the
current environment.
Returns:
None, if not in GAE, else an appengine.AppAssertionCredentials object.
"""
if not _in_gae_environment():
return None
return _get_application_default_credential_GAE()
@staticmethod
def _implicit_credentials_from_gce():
"""Attempts to get implicit credentials in Google Compute Engine env.
If the current environment is not detected as Compute Engine, returns None,
indicating no Google Compute Engine credentials can be detected from the
current environment.
Returns:
None, if not in GCE, else a gce.AppAssertionCredentials object.
"""
if not _in_gce_environment():
return None
return _get_application_default_credential_GCE()
@staticmethod
def _implicit_credentials_from_files():
"""Attempts to get implicit credentials from local credential files.
First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
is set with a filename and then falls back to a configuration file (the
"well known" file) associated with the 'gcloud' command line tool.
Returns:
Credentials object associated with the GOOGLE_APPLICATION_CREDENTIALS
file or the "well known" file if either exist. If neither file is
define, returns None, indicating no credentials from a file can
detected from the current environment.
"""
credentials_filename = _get_environment_variable_file()
if not credentials_filename:
credentials_filename = _get_well_known_file()
if os.path.isfile(credentials_filename):
extra_help = (' (produced automatically when running'
' "gcloud auth login" command)')
else:
credentials_filename = None
else:
extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
' environment variable)')
if not credentials_filename:
return
# If we can read the credentials from a file, we don't need to know what
# environment we are in.
SETTINGS.env_name = DEFAULT_ENV_NAME
try:
return _get_application_default_credential_from_file(credentials_filename)
except (ApplicationDefaultCredentialsError, ValueError) as error:
_raise_exception_for_reading_json(credentials_filename, extra_help, error)
@classmethod
def _get_implicit_credentials(cls):
"""Gets credentials implicitly from the environment.
Checks environment in order of precedence:
- Google App Engine (production and testing)
- Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to
a file with stored credentials information.
- Stored "well known" file associated with `gcloud` command line tool.
- Google Compute Engine production environment.
Exceptions:
ApplicationDefaultCredentialsError: raised when the credentials fail
to be retrieved.
"""
# Environ checks (in order).
environ_checkers = [
cls._implicit_credentials_from_gae,
cls._implicit_credentials_from_files,
cls._implicit_credentials_from_gce,
]
for checker in environ_checkers:
credentials = checker()
if credentials is not None:
return credentials
# If no credentials, fail.
raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
@staticmethod
def get_application_default():
"""Get the Application Default Credentials for the current environment.
Exceptions:
ApplicationDefaultCredentialsError: raised when the credentials fail
to be retrieved.
"""
return GoogleCredentials._get_implicit_credentials()
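# Illustrative sketch of the usual Application Default Credentials pattern,
# including the scoping step some credential types require. The scope URL is
# an example value; this raises ApplicationDefaultCredentialsError when no
# environment yields credentials.
def _example_application_default():
    credentials = GoogleCredentials.get_application_default()
    if credentials.create_scoped_required():
        credentials = credentials.create_scoped(
            ['https://www.googleapis.com/auth/cloud-platform'])
    return credentials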
@staticmethod
def from_stream(credential_filename):
"""Create a Credentials object by reading the information from a given file.
It returns an object of type GoogleCredentials.
Args:
credential_filename: the path to the file from where the credentials
are to be read
Exceptions:
ApplicationDefaultCredentialsError: raised when the credentials fail
to be retrieved.
"""
if credential_filename and os.path.isfile(credential_filename):
try:
return _get_application_default_credential_from_file(
credential_filename)
except (ApplicationDefaultCredentialsError, ValueError) as error:
extra_help = ' (provided as parameter to the from_stream() method)'
_raise_exception_for_reading_json(credential_filename,
extra_help,
error)
else:
raise ApplicationDefaultCredentialsError(
'The parameter passed to the from_stream() '
'method should point to a file.')
def _save_private_file(filename, json_contents):
"""Saves a file with read-write permissions on for the owner.
Args:
filename: String. Absolute path to file.
json_contents: JSON serializable object to be saved.
"""
temp_filename = tempfile.mktemp()
file_desc = os.open(temp_filename, os.O_WRONLY | os.O_CREAT, 0o600)
with os.fdopen(file_desc, 'w') as file_handle:
json.dump(json_contents, file_handle, sort_keys=True,
indent=2, separators=(',', ': '))
shutil.move(temp_filename, filename)
def save_to_well_known_file(credentials, well_known_file=None):
"""Save the provided GoogleCredentials to the well known file.
Args:
credentials:
the credentials to be saved to the well known file;
it should be an instance of GoogleCredentials
well_known_file:
the name of the file where the credentials are to be saved;
this parameter is supposed to be used for testing only
"""
# TODO(user): move this method to tools.py
# once the argparse import gets fixed (it is not present in Python 2.6)
if well_known_file is None:
well_known_file = _get_well_known_file()
config_dir = os.path.dirname(well_known_file)
if not os.path.isdir(config_dir):
raise OSError('Config directory does not exist: %s' % config_dir)
credentials_data = credentials.serialization_data
_save_private_file(well_known_file, credentials_data)
def _get_environment_variable_file():
application_default_credential_filename = (
os.environ.get(GOOGLE_APPLICATION_CREDENTIALS,
None))
if application_default_credential_filename:
if os.path.isfile(application_default_credential_filename):
return application_default_credential_filename
else:
raise ApplicationDefaultCredentialsError(
'File ' + application_default_credential_filename + ' (pointed to by ' +
GOOGLE_APPLICATION_CREDENTIALS +
' environment variable) does not exist!')
def _get_well_known_file():
"""Get the well known file produced by command 'gcloud auth login'."""
# TODO(user): Revisit this method once gcloud provides a better way
# of pinpointing the exact location of the file.
WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json'
default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)
if default_config_dir is None:
if os.name == 'nt':
try:
default_config_dir = os.path.join(os.environ['APPDATA'],
_CLOUDSDK_CONFIG_DIRECTORY)
except KeyError:
# This should never happen unless someone is really messing with things.
drive = os.environ.get('SystemDrive', 'C:')
default_config_dir = os.path.join(drive, '\\',
_CLOUDSDK_CONFIG_DIRECTORY)
else:
default_config_dir = os.path.join(os.path.expanduser('~'),
'.config',
_CLOUDSDK_CONFIG_DIRECTORY)
return os.path.join(default_config_dir, WELL_KNOWN_CREDENTIALS_FILE)
def _get_application_default_credential_from_file(filename):
"""Build the Application Default Credentials from file."""
from oauth2client import service_account
# read the credentials from the file
with open(filename) as file_obj:
client_credentials = json.load(file_obj)
credentials_type = client_credentials.get('type')
if credentials_type == AUTHORIZED_USER:
required_fields = set(['client_id', 'client_secret', 'refresh_token'])
elif credentials_type == SERVICE_ACCOUNT:
required_fields = set(['client_id', 'client_email', 'private_key_id',
'private_key'])
else:
raise ApplicationDefaultCredentialsError(
"'type' field should be defined (and have one of the '" +
AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)")
missing_fields = required_fields.difference(client_credentials.keys())
if missing_fields:
_raise_exception_for_missing_fields(missing_fields)
if client_credentials['type'] == AUTHORIZED_USER:
return GoogleCredentials(
access_token=None,
client_id=client_credentials['client_id'],
client_secret=client_credentials['client_secret'],
refresh_token=client_credentials['refresh_token'],
token_expiry=None,
token_uri=GOOGLE_TOKEN_URI,
user_agent='Python client library')
else: # client_credentials['type'] == SERVICE_ACCOUNT
return service_account._ServiceAccountCredentials(
service_account_id=client_credentials['client_id'],
service_account_email=client_credentials['client_email'],
private_key_id=client_credentials['private_key_id'],
private_key_pkcs8_text=client_credentials['private_key'],
scopes=[])
def _raise_exception_for_missing_fields(missing_fields):
raise ApplicationDefaultCredentialsError(
'The following field(s) must be defined: ' + ', '.join(missing_fields))
def _raise_exception_for_reading_json(credential_file,
extra_help,
error):
raise ApplicationDefaultCredentialsError(
'An error was encountered while reading JSON file: ' +
credential_file + extra_help + ': ' + str(error))
def _get_application_default_credential_GAE():
from oauth2client.appengine import AppAssertionCredentials
return AppAssertionCredentials([])
def _get_application_default_credential_GCE():
from oauth2client.contrib.gce import AppAssertionCredentials
return AppAssertionCredentials([])
class AssertionCredentials(GoogleCredentials):
"""Abstract Credentials object used for OAuth 2.0 assertion grants.
This credential does not require a flow to instantiate because it
represents a two legged flow, and therefore has all of the required
information to generate and refresh its own access tokens. It must
be subclassed to generate the appropriate assertion string.
AssertionCredentials objects may be safely pickled and unpickled.
"""
@util.positional(2)
def __init__(self, assertion_type, user_agent=None,
token_uri=GOOGLE_TOKEN_URI,
revoke_uri=GOOGLE_REVOKE_URI,
**unused_kwargs):
"""Constructor for AssertionFlowCredentials.
Args:
assertion_type: string, assertion type that will be declared to the auth
server
user_agent: string, The HTTP User-Agent to provide for this application.
token_uri: string, URI for token endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
revoke_uri: string, URI for revoke endpoint.
"""
super(AssertionCredentials, self).__init__(
None,
None,
None,
None,
None,
token_uri,
user_agent,
revoke_uri=revoke_uri)
self.assertion_type = assertion_type
def _generate_refresh_request_body(self):
assertion = self._generate_assertion()
body = urllib.parse.urlencode({
'assertion': assertion,
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
})
return body
def _generate_assertion(self):
"""Generate the assertion string that will be used in the access token
request.
"""
_abstract()
def _revoke(self, http_request):
"""Revokes the access_token and deletes the store if available.
Args:
http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the revoke request.
"""
self._do_revoke(http_request, self.access_token)
def _RequireCryptoOrDie():
"""Ensure we have a crypto library, or throw CryptoUnavailableError.
The oauth2client.crypt module requires either PyCrypto or PyOpenSSL
to be available in order to function, but these are optional
dependencies.
"""
if not HAS_CRYPTO:
raise CryptoUnavailableError('No crypto library available')
class SignedJwtAssertionCredentials(AssertionCredentials):
"""Credentials object used for OAuth 2.0 Signed JWT assertion grants.
This credential does not require a flow to instantiate because it
represents a two legged flow, and therefore has all of the required
information to generate and refresh its own access tokens.
SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto
2.6 or later. For App Engine you may also consider using
AppAssertionCredentials.
"""
MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
@util.positional(4)
def __init__(self,
service_account_name,
private_key,
scope,
private_key_password='notasecret',
user_agent=None,
token_uri=GOOGLE_TOKEN_URI,
revoke_uri=GOOGLE_REVOKE_URI,
**kwargs):
"""Constructor for SignedJwtAssertionCredentials.
Args:
service_account_name: string, id for account, usually an email address.
private_key: string, private key in PKCS12 or PEM format.
scope: string or iterable of strings, scope(s) of the credentials being
requested.
private_key_password: string, password for private_key, unused if
private_key is in PEM format.
user_agent: string, HTTP User-Agent to provide for this application.
token_uri: string, URI for token endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
revoke_uri: string, URI for revoke endpoint.
kwargs: kwargs, Additional parameters to add to the JWT token, for
example sub=joe@example.org.
Raises:
CryptoUnavailableError if no crypto library is available.
"""
_RequireCryptoOrDie()
super(SignedJwtAssertionCredentials, self).__init__(
None,
user_agent=user_agent,
token_uri=token_uri,
revoke_uri=revoke_uri,
)
self.scope = util.scopes_to_string(scope)
# Keep base64 encoded so it can be stored in JSON.
self.private_key = base64.b64encode(private_key)
if isinstance(self.private_key, six.text_type):
self.private_key = self.private_key.encode('utf-8')
self.private_key_password = private_key_password
self.service_account_name = service_account_name
self.kwargs = kwargs
@classmethod
def from_json(cls, s):
data = json.loads(s)
retval = SignedJwtAssertionCredentials(
data['service_account_name'],
base64.b64decode(data['private_key']),
data['scope'],
private_key_password=data['private_key_password'],
user_agent=data['user_agent'],
token_uri=data['token_uri'],
**data['kwargs']
)
retval.invalid = data['invalid']
retval.access_token = data['access_token']
return retval
def _generate_assertion(self):
"""Generate the assertion that will be used in the request."""
now = int(time.time())
payload = {
'aud': self.token_uri,
'scope': self.scope,
'iat': now,
'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
'iss': self.service_account_name
}
payload.update(self.kwargs)
logger.debug(str(payload))
private_key = base64.b64decode(self.private_key)
return crypt.make_signed_jwt(crypt.Signer.from_string(
private_key, self.private_key_password), payload)
# Only used in verify_id_token(), which always calls the same URI for the
# certs.
_cached_http = httplib2.Http(MemoryCache())
@util.positional(2)
def verify_id_token(id_token, audience, http=None,
cert_uri=ID_TOKEN_VERIFICATION_CERTS):
"""Verifies a signed JWT id_token.
This function requires PyOpenSSL and because of that it does not work on
App Engine.
Args:
id_token: string, A Signed JWT.
audience: string, The audience 'aud' that the token should be for.
http: httplib2.Http, instance to use to make the HTTP request. Callers
should supply an instance that has caching enabled.
cert_uri: string, URI of the certificates in JSON format to
verify the JWT against.
Returns:
The deserialized JSON in the JWT.
Raises:
oauth2client.crypt.AppIdentityError: if the JWT fails to verify.
CryptoUnavailableError: if no crypto library is available.
"""
_RequireCryptoOrDie()
if http is None:
http = _cached_http
resp, content = http.request(cert_uri)
if resp.status == 200:
certs = json.loads(content.decode('utf-8'))
return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
else:
raise VerifyJwtTokenError('Status code: %d' % resp.status)
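# Illustrative sketch (editor's addition): a typical verify_id_token() call.
# Requires a crypto library; the audience string is a hypothetical client id.
def _example_verify_id_token(id_token):
    return verify_id_token(
        id_token, 'your-client-id.apps.googleusercontent.com')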
def _urlsafe_b64decode(b64string):
# Guard against unicode strings, which base64 can't handle.
if isinstance(b64string, six.text_type):
b64string = b64string.encode('ascii')
padded = b64string + b'=' * (4 - len(b64string) % 4)
return base64.urlsafe_b64decode(padded)
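# Editor's check of the padding arithmetic above: base64 input must be a
# multiple of 4 characters, so '=' characters are appended before decoding.
def _example_urlsafe_b64_padding():
    # 'aGVsbG8' is 7 chars; one '=' is appended to reach a multiple of 4.
    assert _urlsafe_b64decode(u'aGVsbG8') == b'hello'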
def _extract_id_token(id_token):
"""Extract the JSON payload from a JWT.
Does the extraction w/o checking the signature.
Args:
id_token: string or bytestring, OAuth 2.0 id_token.
Returns:
object, The deserialized JSON payload.
"""
if isinstance(id_token, bytes):
segments = id_token.split(b'.')
else:
segments = id_token.split(u'.')
if len(segments) != 3:
raise VerifyJwtTokenError(
'Wrong number of segments in token: %s' % id_token)
return json.loads(_urlsafe_b64decode(segments[1]).decode('utf-8'))
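# Editor's sketch: extraction works on any three-segment JWT, signed or not,
# because only the middle segment is decoded. The token below is hand-built
# and unsigned.
def _example_extract_id_token():
    header = base64.urlsafe_b64encode(b'{"alg":"none"}').rstrip(b'=')
    payload = base64.urlsafe_b64encode(b'{"sub":"1234"}').rstrip(b'=')
    token = b'.'.join([header, payload, b''])
    return _extract_id_token(token)  # -> {u'sub': u'1234'}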
def _parse_exchange_token_response(content):
"""Parses response of an exchange token request.
Most providers return JSON but some (e.g. Facebook) return a
url-encoded string.
Args:
content: The body of a response
Returns:
Content as a dictionary object. Note that the dict could be empty,
i.e. {}, which indicates a failure to parse any token data.
"""
resp = {}
try:
resp = json.loads(content.decode('utf-8'))
except Exception:
# different JSON libs raise different exceptions,
# so we just do a catch-all here
content = content.decode('utf-8')
resp = dict(urllib.parse.parse_qsl(content))
# some providers respond with 'expires', others with 'expires_in'
if resp and 'expires' in resp:
resp['expires_in'] = resp.pop('expires')
return resp
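# Editor's sketch showing both response shapes the parser accepts; the byte
# strings below are hypothetical provider responses.
def _example_parse_exchange_token_response():
    as_json = _parse_exchange_token_response(
        b'{"access_token": "x", "expires": 3600}')
    as_form = _parse_exchange_token_response(b'access_token=x&expires=3600')
    # Both normalize 'expires' to 'expires_in'; note the form-encoded value
    # stays a string.
    assert as_json['expires_in'] == 3600
    assert as_form['expires_in'] == '3600'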
@util.positional(4)
def credentials_from_code(client_id, client_secret, scope, code,
redirect_uri='postmessage', http=None,
user_agent=None, token_uri=GOOGLE_TOKEN_URI,
auth_uri=GOOGLE_AUTH_URI,
revoke_uri=GOOGLE_REVOKE_URI,
device_uri=GOOGLE_DEVICE_URI):
"""Exchanges an authorization code for an OAuth2Credentials object.
Args:
client_id: string, client identifier.
client_secret: string, client secret.
scope: string or iterable of strings, scope(s) to request.
code: string, An authorization code, most likely passed down from
the client
redirect_uri: string, this is generally set to 'postmessage' to match the
redirect_uri that the client specified
http: httplib2.Http, optional http instance to use to do the fetch
token_uri: string, URI for token endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
auth_uri: string, URI for authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
device_uri: string, URI for device authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
Returns:
An OAuth2Credentials object.
Raises:
FlowExchangeError if the authorization code cannot be exchanged for an
access token
"""
flow = OAuth2WebServerFlow(client_id, client_secret, scope,
redirect_uri=redirect_uri, user_agent=user_agent,
auth_uri=auth_uri, token_uri=token_uri,
revoke_uri=revoke_uri, device_uri=device_uri)
credentials = flow.step2_exchange(code, http=http)
return credentials
@util.positional(3)
def credentials_from_clientsecrets_and_code(filename, scope, code,
message=None,
redirect_uri='postmessage',
http=None,
cache=None,
device_uri=None):
"""Returns OAuth2Credentials from a clientsecrets file and an auth code.
Will create the right kind of Flow based on the contents of the clientsecrets
file or will raise InvalidClientSecretsError for unknown types of Flows.
Args:
filename: string, File name of clientsecrets.
scope: string or iterable of strings, scope(s) to request.
code: string, An authorization code, most likely passed down from
the client
message: string, A friendly string to display to the user if the
clientsecrets file is missing or invalid. If message is provided then
sys.exit will be called in the case of an error. If message is not
provided then clientsecrets.InvalidClientSecretsError will be raised.
redirect_uri: string, this is generally set to 'postmessage' to match the
redirect_uri that the client specified
http: httplib2.Http, optional http instance to use to do the fetch
cache: An optional cache service client that implements get() and set()
methods. See clientsecrets.loadfile() for details.
device_uri: string, OAuth 2.0 device authorization endpoint
Returns:
An OAuth2Credentials object.
Raises:
FlowExchangeError if the authorization code cannot be exchanged for an
access token
UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
clientsecrets.InvalidClientSecretsError if the clientsecrets file is
invalid.
"""
flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache,
redirect_uri=redirect_uri,
device_uri=device_uri)
credentials = flow.step2_exchange(code, http=http)
return credentials
class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
'device_code', 'user_code', 'interval', 'verification_url',
'user_code_expiry'))):
"""Intermediate information the OAuth2 for devices flow."""
@classmethod
def FromResponse(cls, response):
"""Create a DeviceFlowInfo from a server response.
The response should be a dict containing entries as described here:
http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
"""
# device_code, user_code, and verification_url are required.
kwargs = {
'device_code': response['device_code'],
'user_code': response['user_code'],
}
# The response may list the verification address as either
# verification_url or verification_uri, so we check for both.
verification_url = response.get(
'verification_url', response.get('verification_uri'))
if verification_url is None:
raise OAuth2DeviceCodeError(
'No verification_url provided in server response')
kwargs['verification_url'] = verification_url
# expires_in and interval are optional.
kwargs.update({
'interval': response.get('interval'),
'user_code_expiry': None,
})
if 'expires_in' in response:
kwargs['user_code_expiry'] = datetime.datetime.now() + datetime.timedelta(
seconds=int(response['expires_in']))
return cls(**kwargs)
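# Editor's sketch of a minimal server response accepted by FromResponse();
# all field values are hypothetical.
def _example_device_flow_info():
    response = {
        'device_code': 'dev123',
        'user_code': 'ABCD-EFGH',
        'verification_url': 'https://www.google.com/device',
        'expires_in': 1800,
        'interval': 5,
    }
    return DeviceFlowInfo.FromResponse(response)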
class OAuth2WebServerFlow(Flow):
"""Does the Web Server Flow for OAuth 2.0.
OAuth2WebServerFlow objects may be safely pickled and unpickled.
"""
@util.positional(4)
def __init__(self, client_id,
client_secret=None,
scope=None,
redirect_uri=None,
user_agent=None,
auth_uri=GOOGLE_AUTH_URI,
token_uri=GOOGLE_TOKEN_URI,
revoke_uri=GOOGLE_REVOKE_URI,
login_hint=None,
device_uri=GOOGLE_DEVICE_URI,
authorization_header=None,
**kwargs):
"""Constructor for OAuth2WebServerFlow.
The kwargs argument is used to set extra query parameters on the
auth_uri. For example, the access_type and approval_prompt
query parameters can be set via kwargs.
Args:
client_id: string, client identifier.
client_secret: string client secret.
scope: string or iterable of strings, scope(s) of the credentials being
requested.
redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
a non-web-based application, or a URI that handles the callback from
the authorization server.
user_agent: string, HTTP User-Agent to provide for this application.
auth_uri: string, URI for authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
token_uri: string, URI for token endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
login_hint: string, Either an email address or domain. Passing this hint
will either pre-fill the email box on the sign-in form or select the
proper multi-login session, thereby simplifying the login flow.
device_uri: string, URI for device authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
authorization_header: string, For use with OAuth 2.0 providers that
require a client to authenticate using a header value instead of passing
client_secret in the POST body.
**kwargs: dict, Optional keyword arguments that are passed through as
extra query parameters on the OAuth calls.
"""
# scope is a required argument, but to preserve backwards-compatibility
# we don't want to rearrange the positional arguments
if scope is None:
raise TypeError("The value of scope must not be None")
self.client_id = client_id
self.client_secret = client_secret
self.scope = util.scopes_to_string(scope)
self.redirect_uri = redirect_uri
self.login_hint = login_hint
self.user_agent = user_agent
self.auth_uri = auth_uri
self.token_uri = token_uri
self.revoke_uri = revoke_uri
self.device_uri = device_uri
self.authorization_header = authorization_header
self.params = {
'access_type': 'offline',
'response_type': 'code',
}
self.params.update(kwargs)
@util.positional(1)
def step1_get_authorize_url(self, redirect_uri=None):
"""Returns a URI to redirect to the provider.
Args:
redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
a non-web-based application, or a URI that handles the callback from
the authorization server. This parameter is deprecated, please move to
passing the redirect_uri in via the constructor.
Returns:
A URI as a string to redirect the user to begin the authorization flow.
"""
if redirect_uri is not None:
logger.warning((
'The redirect_uri parameter for '
'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. Please '
'move to passing the redirect_uri in via the constructor.'))
self.redirect_uri = redirect_uri
if self.redirect_uri is None:
raise ValueError('The value of redirect_uri must not be None.')
query_params = {
'client_id': self.client_id,
'redirect_uri': self.redirect_uri,
'scope': self.scope,
}
if self.login_hint is not None:
query_params['login_hint'] = self.login_hint
query_params.update(self.params)
return _update_query_params(self.auth_uri, query_params)
@util.positional(1)
def step1_get_device_and_user_codes(self, http=None):
"""Returns a user code and the verification URL where to enter it
Returns:
A user code as a string for the user to authorize the application
An URL as a string where the user has to enter the code
"""
if self.device_uri is None:
raise ValueError('The value of device_uri must not be None.')
body = urllib.parse.urlencode({
'client_id': self.client_id,
'scope': self.scope,
})
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
if self.user_agent is not None:
headers['user-agent'] = self.user_agent
if http is None:
http = httplib2.Http()
resp, content = http.request(self.device_uri, method='POST', body=body,
headers=headers)
if resp.status == 200:
try:
flow_info = json.loads(content)
except ValueError as e:
raise OAuth2DeviceCodeError(
'Could not parse server response as JSON: "%s", error: "%s"' % (
content, e))
return DeviceFlowInfo.FromResponse(flow_info)
else:
error_msg = 'Invalid response %s.' % resp.status
try:
d = json.loads(content)
if 'error' in d:
error_msg += ' Error: %s' % d['error']
except ValueError:
# Couldn't decode a JSON response, stick with the default message.
pass
raise OAuth2DeviceCodeError(error_msg)
@util.positional(2)
def step2_exchange(self, code=None, http=None, device_flow_info=None):
"""Exchanges a code for OAuth2Credentials.
Args:
code: string, a dict-like object, or None. For a non-device
flow, this is either the response code as a string, or a
dictionary of query parameters to the redirect_uri. For a
device flow, this should be None.
http: httplib2.Http, optional http instance to use when fetching
credentials.
device_flow_info: DeviceFlowInfo, return value from step1 in the
case of a device flow.
Returns:
An OAuth2Credentials object that can be used to authorize requests.
Raises:
FlowExchangeError: if a problem occurred exchanging the code for a
refresh_token.
ValueError: if code and device_flow_info are both provided or both
missing.
"""
if code is None and device_flow_info is None:
raise ValueError('No code or device_flow_info provided.')
if code is not None and device_flow_info is not None:
raise ValueError('Cannot provide both code and device_flow_info.')
if code is None:
code = device_flow_info.device_code
elif not isinstance(code, six.string_types):
if 'code' not in code:
raise FlowExchangeError(code.get(
'error', 'No code was supplied in the query parameters.'))
code = code['code']
post_data = {
'client_id': self.client_id,
'code': code,
'scope': self.scope,
}
if self.client_secret is not None:
post_data['client_secret'] = self.client_secret
if device_flow_info is not None:
post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0'
else:
post_data['grant_type'] = 'authorization_code'
post_data['redirect_uri'] = self.redirect_uri
body = urllib.parse.urlencode(post_data)
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
if self.authorization_header is not None:
headers['Authorization'] = self.authorization_header
if self.user_agent is not None:
headers['user-agent'] = self.user_agent
if http is None:
http = httplib2.Http()
resp, content = http.request(self.token_uri, method='POST', body=body,
headers=headers)
d = _parse_exchange_token_response(content)
if resp.status == 200 and 'access_token' in d:
access_token = d['access_token']
refresh_token = d.get('refresh_token', None)
if not refresh_token:
logger.info(
'Received token response with no refresh_token. Consider '
"reauthenticating with approval_prompt='force'.")
token_expiry = None
if 'expires_in' in d:
token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
seconds=int(d['expires_in']))
extracted_id_token = None
if 'id_token' in d:
extracted_id_token = _extract_id_token(d['id_token'])
logger.info('Successfully retrieved access token')
return OAuth2Credentials(access_token, self.client_id,
self.client_secret, refresh_token, token_expiry,
self.token_uri, self.user_agent,
revoke_uri=self.revoke_uri,
id_token=extracted_id_token,
token_response=d)
else:
logger.info('Failed to retrieve access token: %s', content)
if 'error' in d:
# Error responses vary between providers; report whatever they sent.
error_msg = str(d['error']) + str(d.get('error_description', ''))
else:
error_msg = 'Invalid response: %s.' % str(resp.status)
raise FlowExchangeError(error_msg)
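# Illustrative sketch (editor's addition) of the two-step web server flow;
# the client id/secret and the pasted code are hypothetical placeholders.
def _example_web_server_flow():
    flow = OAuth2WebServerFlow(
        client_id='your-client-id',
        client_secret='your-client-secret',
        scope='https://www.googleapis.com/auth/userinfo.email',
        redirect_uri='urn:ietf:wg:oauth:2.0:oob')
    auth_url = flow.step1_get_authorize_url()
    # Send the user to auth_url; they come back with an authorization code.
    code = 'code-pasted-by-user'
    return flow.step2_exchange(code)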
@util.positional(2)
def flow_from_clientsecrets(filename, scope, redirect_uri=None,
message=None, cache=None, login_hint=None,
device_uri=None):
"""Create a Flow from a clientsecrets file.
Will create the right kind of Flow based on the contents of the clientsecrets
file or will raise InvalidClientSecretsError for unknown types of Flows.
Args:
filename: string, File name of client secrets.
scope: string or iterable of strings, scope(s) to request.
redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
a non-web-based application, or a URI that handles the callback from
the authorization server.
message: string, A friendly string to display to the user if the
clientsecrets file is missing or invalid. If message is provided then
sys.exit will be called in the case of an error. If message is not
provided then clientsecrets.InvalidClientSecretsError will be raised.
cache: An optional cache service client that implements get() and set()
methods. See clientsecrets.loadfile() for details.
login_hint: string, Either an email address or domain. Passing this hint
will either pre-fill the email box on the sign-in form or select the
proper multi-login session, thereby simplifying the login flow.
device_uri: string, URI for device authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
Returns:
A Flow object.
Raises:
UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
clientsecrets.InvalidClientSecretsError if the clientsecrets file is
invalid.
"""
try:
client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
if client_type in (clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED):
constructor_kwargs = {
'redirect_uri': redirect_uri,
'auth_uri': client_info['auth_uri'],
'token_uri': client_info['token_uri'],
'login_hint': login_hint,
}
revoke_uri = client_info.get('revoke_uri')
if revoke_uri is not None:
constructor_kwargs['revoke_uri'] = revoke_uri
if device_uri is not None:
constructor_kwargs['device_uri'] = device_uri
return OAuth2WebServerFlow(
client_info['client_id'], client_info['client_secret'],
scope, **constructor_kwargs)
except clientsecrets.InvalidClientSecretsError:
if message:
sys.exit(message)
else:
raise
else:
raise UnknownClientSecretsFlowError(
'This OAuth 2.0 flow is unsupported: %r' % client_type)
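# Illustrative sketch of the typical call; 'client_secrets.json' is a
# hypothetical path to a downloaded clientsecrets file.
def _example_flow_from_clientsecrets():
    return flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://www.googleapis.com/auth/drive.metadata.readonly',
        redirect_uri='urn:ietf:wg:oauth:2.0:oob',
        message='client_secrets.json is missing or invalid.')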
|
Maccimo/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyClassHasNoInitInspection/unresolvedParent.py
|
83
|
__author__ = 'ktisha'
class B(ABC):
def foo(self):
self.b = 1
|
Nolski/olympia
|
refs/heads/master
|
apps/users/utils.py
|
15
|
import base64
from functools import partial
import hashlib
import hmac
import time
import uuid
from django.conf import settings
from django.db.models import Q
import commonware.log
from users.models import UserProfile, BlacklistedName
log = commonware.log.getLogger('z.users')
class EmailResetCode():
@classmethod
def create(cls, user_id, email):
"""Encode+Hash an email for a reset code. This is the new email."""
data = [user_id, email]
data.append(int(time.time()))
token = ",".join([str(i) for i in data])
secret = cls.make_secret(token)
return base64.urlsafe_b64encode(token), secret
@classmethod
def parse(cls, code, hash):
"""Extract a user id and an email from a code and validate against a
hash. The hash ensures us the email address hasn't changed and that
the email address matches the user id. This will raise
``ValueError`` if the hash fails or if the code is over 48 hours
old."""
try:
decoded = base64.urlsafe_b64decode(str(code))
user_id, mail, req_time = decoded.split(',')
except (ValueError, TypeError):
# Data is broken
raise ValueError
if cls.make_secret(decoded) != hash:
log.info(u"[Tampering] Email reset data does not match hash")
raise ValueError
# Is the request over 48 hours old?
age = time.time() - int(req_time)
if age > 48 * 60 * 60:
raise ValueError
return int(user_id), mail
@classmethod
def make_secret(cls, token):
return hmac.new(settings.SECRET_KEY, msg=token,
digestmod=hashlib.sha256).hexdigest()
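# Editor's sketch of the expected round trip; the user id and address are
# hypothetical. parse() raises ValueError on tampering or after 48 hours.
def _example_email_reset_round_trip():
    code, secret = EmailResetCode.create(42, 'new@example.com')
    user_id, email = EmailResetCode.parse(code, secret)
    assert (user_id, email) == (42, 'new@example.com')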
class UnsubscribeCode():
@classmethod
def create(cls, email):
"""Encode+Hash an email for an unsubscribe code."""
secret = cls.make_secret(email)
return base64.urlsafe_b64encode(email), secret
@classmethod
def parse(cls, code, hash):
try:
decoded = base64.urlsafe_b64decode(str(code))
mail = decoded
except (ValueError, TypeError):
# Data is broken
raise ValueError
if cls.make_secret(decoded) != hash:
log.info(u"[Tampering] Unsubscribe link data does not match hash")
raise ValueError
return mail
@classmethod
def make_secret(cls, token):
return hmac.new(settings.SECRET_KEY, msg=token,
digestmod=hashlib.sha256).hexdigest()
def get_task_user():
"""
Returns a user object. This user is suitable for assigning to
cron jobs or long running tasks.
"""
return UserProfile.objects.get(pk=settings.TASK_USER_ID)
def find_users(email):
"""
Given an email, find all the possible users by looking at both
current emails and email history.
"""
return UserProfile.objects.filter(Q(email=email) |
Q(history__email=email)).distinct()
def autocreate_username(candidate, tries=1):
"""Returns a unique valid username."""
max_tries = settings.MAX_GEN_USERNAME_TRIES
from amo.utils import slugify, SLUG_OK
make_u = partial(slugify, ok=SLUG_OK, lower=True, spaces=False,
delimiter='-')
adjusted_u = make_u(candidate)
if tries > 1:
adjusted_u = '%s%s' % (adjusted_u, tries)
if (BlacklistedName.blocked(adjusted_u) or adjusted_u == ''
or tries > max_tries or len(adjusted_u) > 255):
log.info('username blocked, empty, max tries reached, or too long;'
' username=%s; max=%s' % (adjusted_u, max_tries))
return autocreate_username(uuid.uuid4().hex[0:15])
if UserProfile.objects.filter(username=adjusted_u).count():
return autocreate_username(candidate, tries=tries + 1)
return adjusted_u
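# Editor's sketch: candidates are slugified, then suffixed with the retry
# count until a free, non-blacklisted name is found. 'Jane Doe' is a
# hypothetical candidate.
def _example_autocreate_username():
    # First try is 'jane-doe'; collisions retry as 'jane-doe2', 'jane-doe3',
    # ... up to settings.MAX_GEN_USERNAME_TRIES before falling back to a
    # uuid-derived name.
    return autocreate_username('Jane Doe')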
|
fmacias64/keras
|
refs/heads/master
|
tests/manual/check_model_utils.py
|
77
|
from __future__ import absolute_import
from __future__ import print_function
from keras.models import Sequential, Graph
from keras.layers.core import Layer, Activation, Dense, Flatten, Reshape, Merge
from keras.layers.convolutional import Convolution2D, MaxPooling2D
import keras.utils.layer_utils as layer_utils
print('-- Sequential model')
left = Sequential()
left.add(Convolution2D(32, 1, 3, 3, border_mode='valid'))
left.add(MaxPooling2D(poolsize=(2, 2)))
left.add(Flatten())
left.add(Dense(32 * 13 * 13, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 30))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='concat'))
model.add(Dense(80, 10))
model.add(Activation('softmax'))
layer_utils.print_layer_shapes(model, [(1, 1, 28, 28), (1, 784)])
print('-- Graph model')
graph = Graph()
graph.add_input(name='input1', ndim=2)
graph.add_input(name='input2', ndim=4)
graph.add_node(Dense(32, 16), name='dense1', input='input1')
graph.add_node(Dense(16, 4), name='dense3', input='dense1')
graph.add_node(Convolution2D(32, 1, 3, 3), name='conv1', input='input2')
graph.add_node(Flatten(), name='flatten1', input='conv1')
graph.add_node(Dense(32 * 13 * 13, 10), name='dense4', input='flatten1')
graph.add_output(name='output1', inputs=['dense1', 'dense3'], merge_mode='sum')
graph.add_output(name='output2', inputs=['dense1', 'dense4'], merge_mode='concat')
layer_utils.print_layer_shapes(graph, {'input1': (1, 32), 'input2': (1, 1, 28, 28)})
print('Test script complete')
|
andris210296/andris-projeto
|
refs/heads/master
|
backend/venv/test/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.py
|
287
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll
if windll is not None:
winterm = WinTerm()
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_RE = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = sys.platform.startswith('win')
# should we strip ANSI sequences from our output?
if strip is None:
strip = on_windows
self.strip = strip
# should we convert ANSI sequences into win32 calls?
if convert is None:
convert = on_windows and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
}
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif is_a_tty(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
for match in self.ANSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(paramstring)
self.call_win32(command, params)
def extract_params(self, paramstring):
def split(paramstring):
for p in paramstring.split(';'):
if p != '':
yield int(p)
return tuple(split(paramstring))
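# Editor's sketch of how ANSI_RE and extract_params cooperate on a typical
# SGR sequence; the escape string below is an example value.
def _example_ansi_parsing():
    match = AnsiToWin32.ANSI_RE.search('\033[31;1m')
    paramstring, command = match.groups()  # ('31;1', 'm')
    # Same parsing that extract_params() performs:
    params = tuple(int(p) for p in paramstring.split(';') if p != '')
    assert (params, command) == ((31, 1), 'm')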
def call_win32(self, command, params):
if not params:
    # extract_params() returns a tuple, so treat any empty sequence (or
    # None) as the single default parameter 0.
    params = [0]
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in ('H', 'f'): # set cursor position
func = winterm.set_cursor_position
func(params, on_stderr=self.on_stderr)
elif command == 'J':  # erase data
func = winterm.erase_data
func(params, on_stderr=self.on_stderr)
elif command == 'A':
if not params:
num_rows = 1
else:
num_rows = params[0]
func = winterm.cursor_up
func(num_rows, on_stderr=self.on_stderr)
|
Mrs-X/Darknet
|
refs/heads/master
|
contrib/zmq/zmq_sub.py
|
69
|
#!/usr/bin/env python
import array
import binascii
import zmq
import struct
port = 28332
zmqContext = zmq.Context()
zmqSubSocket = zmqContext.socket(zmq.SUB)
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtxlock")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"rawblock")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"rawtx")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"rawtxlock")
zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)
try:
while True:
msg = zmqSubSocket.recv_multipart()
topic = str(msg[0].decode("utf-8"))
body = msg[1]
sequence = "Unknown";
if len(msg[-1]) == 4:
msgSequence = struct.unpack('<I', msg[-1])[-1]
sequence = str(msgSequence)
if topic == "hashblock":
print('- HASH BLOCK ('+sequence+') -')
print(binascii.hexlify(body).decode("utf-8"))
elif topic == "hashtx":
            print('- HASH TX ('+sequence+') -')
print(binascii.hexlify(body).decode("utf-8"))
elif topic == "hashtxlock":
print('- HASH TX LOCK ('+sequence+') -')
print(binascii.hexlify(body).decode("utf-8"))
elif topic == "rawblock":
print('- RAW BLOCK HEADER ('+sequence+') -')
print(binascii.hexlify(body[:80]).decode("utf-8"))
elif topic == "rawtx":
print('- RAW TX ('+sequence+') -')
print(binascii.hexlify(body).decode("utf-8"))
elif topic == "rawtxlock":
print('- RAW TX LOCK ('+sequence+') -')
print(binascii.hexlify(body).decode("utf-8"))
except KeyboardInterrupt:
zmqContext.destroy()
|
RapidApplicationDevelopment/tensorflow
|
refs/heads/master
|
tensorflow/tools/quantization/quantize_graph.py
|
6
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Transforms a float-trained graph into an equivalent quantized version.
An example of command-line usage is:
bazel build tensorflow/tools/quantization:quantize_graph \
&& bazel-bin/tensorflow/tools/quantization/quantize_graph \
--input=tensorflow_inception_graph.pb \
--output_node_names="softmax2" --print_nodes --output=/tmp/quantized_graph.pb \
--mode=eightbit --logtostderr
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_boolean("print_nodes", False, """Lists all nodes in the model.""")
flags.DEFINE_string("input", "", """TensorFlow 'GraphDef' file to load.""")
flags.DEFINE_string("output_node_names", "",
"""Output node names, comma separated.""")
flags.DEFINE_string("output", "", """File to save the output graph to.""")
flags.DEFINE_integer("bitdepth", 8,
"""How many bits to quantize the graph to.""")
flags.DEFINE_string("mode", "round",
"""What transformation to apply (round, quantize,"""
""" eightbit, weights, or weights_rounded).""")
flags.DEFINE_string("test_input_dims", "1,224,224,3",
"""The size of the input tensor to use when testing a"""
""" graph loaded from a file.""")
flags.DEFINE_boolean("strip_redundant_quantization", True,
"""Removes redundant dequantize/quantize pairs.""")
flags.DEFINE_boolean("quantized_input", False,
"If true, assume Placeholders are quantized with values "
"covering [--quantized_input_min,--quantized_input_max]. "
"Only supported when --mode=eightbit")
flags.DEFINE_float("quantized_input_min", 0,
"The minimum of the actual input range when "
"--quantized_input")
flags.DEFINE_float("quantized_input_max", 1,
"The maximum of the actual input range when "
"--quantized_input")
flags.DEFINE_float(
"quantized_fallback_min", None,
"The fallback 'min' value to use for layers which lack min-max "
"information. Note: this should be considered a coarse tool just good "
"enough for experimentation purposes, since graphs quantized in this way "
"would be very inaccurate.")
flags.DEFINE_float(
"quantized_fallback_max", None,
"The fallback 'max' value to use for layers which lack min-max "
"information. Note: this should be considered a coarse tool just good "
"enough for experimentation purposes, since graphs quantized in this way "
"would be very inaccurate.")
def print_input_nodes(current_node, nodes_map, indent, already_visited):
print(" " * indent + current_node.op + ":" + current_node.name)
already_visited[current_node.name] = True
for input_node_name in current_node.input:
if input_node_name in already_visited:
continue
input_node = nodes_map[input_node_name]
print_input_nodes(input_node, nodes_map, indent + 1, already_visited)
def create_node(op, name, inputs):
new_node = tf.NodeDef()
new_node.op = op
new_node.name = name
for input_name in inputs:
new_node.input.extend([input_name])
return new_node
def create_constant_node(name, value, dtype, shape=None):
node = create_node("Const", name, [])
set_attr_dtype(node, "dtype", dtype)
set_attr_tensor(node, "value", value, dtype, shape)
return node
def copy_attr(node, key, attr_value):
try:
node.attr[key].CopyFrom(attr_value)
except KeyError:
pass
def set_attr_dtype(node, key, value):
try:
node.attr[key].CopyFrom(tf.AttrValue(type=value.as_datatype_enum))
except KeyError:
pass
def set_attr_shape(node, key, value):
try:
node.attr[key].CopyFrom(
tf.AttrValue(shape=tensor_shape.as_shape(value).as_proto()))
except KeyError:
pass
def set_attr_tensor(node, key, value, dtype, shape=None):
try:
node.attr[key].CopyFrom(tf.AttrValue(
tensor=tensor_util.make_tensor_proto(value,
dtype=dtype,
shape=shape)))
except KeyError:
pass
def set_attr_string(node, key, value):
try:
node.attr[key].CopyFrom(tf.AttrValue(s=value))
except KeyError:
pass
def set_attr_int_list(node, key, value):
list_value = tf.AttrValue.ListValue(i=value)
try:
node.attr[key].CopyFrom(tf.AttrValue(list=list_value))
except KeyError:
pass
def set_attr_bool(node, key, value):
try:
node.attr[key].CopyFrom(tf.AttrValue(b=value))
except KeyError:
pass
def set_attr_int(node, key, value):
try:
node.attr[key].CopyFrom(tf.AttrValue(i=value))
except KeyError:
pass
def set_attr_float(node, key, value):
try:
node.attr[key].CopyFrom(tf.AttrValue(f=value))
except KeyError:
pass
def node_name_from_input(node_name):
"""Strips off ports and other decorations to get the underlying node name."""
if node_name.startswith("^"):
node_name = node_name[1:]
m = re.search(r"(.*):\d+$", node_name)
if m:
node_name = m.group(1)
return node_name
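# For example (illustrative): node_name_from_input("^foo:0") -> "foo",
# node_name_from_input("bar:2") -> "bar", and plain names pass through.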
def ensure_tensor_name_has_port(node_name):
"""Makes sure that a tensor name has :0 if no explicit port exists."""
m = re.search(r"(.*):\d+$", node_name)
if m:
name_with_port = node_name
else:
name_with_port = node_name + ":0"
return name_with_port
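# For example (illustrative): ensure_tensor_name_has_port("foo") -> "foo:0",
# while names such as "foo:1" are returned unchanged.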
def unique_node_name_from_input(node_name):
"""Replaces invalid characters in input names to get a unique node name."""
return node_name.replace(":", "__port__").replace("^", "__hat__")
def quantize_array(arr, num_buckets):
"""Quantizes a numpy array.
This function maps each scalar in arr to the center of one of num_buckets
buckets. For instance,
quantize_array([0, 0.3, 0.6, 1], 2) => [0.25, 0.25, 0.75, 0.75]
Args:
arr: The numpy array to quantize.
num_buckets: The number of buckets to map "var" to.
Returns:
The quantized numpy array.
Raises:
ValueError: when num_buckets < 1.
"""
if num_buckets < 1:
raise ValueError("num_buckets must be >= 1")
arr_max = arr.max()
arr_min = arr.min()
if arr_max == arr_min:
return arr
bucket_width = (arr_max - arr_min) / num_buckets
# Map scalars to bucket indices. Take special care of max(arr).
bucket_indices = np.floor((arr - arr_min) / bucket_width)
bucket_indices[bucket_indices == num_buckets] = num_buckets - 1
# Map each scalar to the center of a bucket.
arr = arr_min + bucket_width * (bucket_indices + 0.5)
return arr
def quantize_weight_rounded(input_node):
"""Returns a replacement node for input_node containing bucketed floats."""
input_tensor = input_node.attr["value"].tensor
tensor_value = tensor_util.MakeNdarray(input_tensor)
shape = input_tensor.tensor_shape
# Currently, the parameter FLAGS.bitdepth is used to compute the
# number of buckets as 1 << FLAGS.bitdepth, meaning the number of
# buckets can only be a power of 2.
# This could be fixed by introducing a new parameter, num_buckets,
  # which would allow for more flexibility in choosing the right model
# size/accuracy tradeoff. But I didn't want to add more parameters
# to this script than absolutely necessary.
num_buckets = 1 << FLAGS.bitdepth
tensor_value_rounded = quantize_array(tensor_value, num_buckets)
tensor_shape_list = tensor_util.TensorShapeProtoToList(shape)
return [create_constant_node(input_node.name, tensor_value_rounded,
tf.float32, shape=tensor_shape_list)]
def quantize_weight_eightbit(input_node, quantization_mode):
"""Returns replacement nodes for input_node using the Dequantize op."""
base_name = input_node.name + "_"
quint8_const_name = base_name + "quint8_const"
min_name = base_name + "min"
max_name = base_name + "max"
float_tensor = tensor_util.MakeNdarray(
input_node.attr["value"].tensor)
min_value = np.min(float_tensor.flatten())
max_value = np.max(float_tensor.flatten())
# min_value == max_value is a tricky case. It can occur for general
# tensors, and of course for scalars. The quantized ops cannot deal
# with this case, so we set max_value to something else.
# It's a tricky question what is the numerically best solution to
# deal with this degeneracy.
# TODO(petewarden): Better use a tolerance than a hard comparison?
if min_value == max_value:
if abs(min_value) < 0.000001:
max_value = min_value + 1.0
elif min_value > 0:
max_value = 2 * min_value
else:
max_value = min_value / 2.0
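  # Worked examples of the degenerate case above (illustrative): an all-zero
  # tensor gives (min, max) = (0.0, 1.0); an all-3.0 tensor gives (3.0, 6.0);
  # an all -3.0 tensor gives (-3.0, -1.5), keeping min < max in every case.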
sess = tf.Session()
with sess.as_default():
quantize_op = tf.contrib.quantization.python.quantize_v2(
float_tensor,
min_value,
max_value,
tf.quint8,
mode=quantization_mode)
quint8_tensor = quantize_op[0].eval()
shape = tensor_util.TensorShapeProtoToList(input_node.attr[
"value"].tensor.tensor_shape)
quint8_const_node = create_constant_node(quint8_const_name,
quint8_tensor,
tf.quint8,
shape=shape)
min_node = create_constant_node(min_name, min_value, tf.float32)
max_node = create_constant_node(max_name, max_value, tf.float32)
dequantize_node = create_node("Dequantize", input_node.name,
[quint8_const_name, min_name, max_name])
set_attr_dtype(dequantize_node, "T", tf.quint8)
set_attr_string(dequantize_node, "mode", quantization_mode)
return [quint8_const_node, min_node, max_node, dequantize_node]
EightbitizeRecursionState = collections.namedtuple(
"EightbitizeRecursionState", ["already_visited", "output_node_stack",
"merged_with_fake_quant"])
class GraphRewriter(object):
"""Takes a float graph, and rewrites it in quantized form."""
def __init__(self, input_graph, mode, quantized_input_range,
fallback_quantization_range=None):
"""Sets up the class to rewrite a float graph.
Args:
input_graph: A float graph to transform.
mode: A string controlling how quantization is performed -
round, quantize, eightbit, or weights.
quantized_input_range: if set, assume the input is
quantized and represents the range
[quantized_input_range[0], quantized_input_range[1]]
fallback_quantization_range: if set, then for nodes where the quantization
range can't be inferred from the graph, use the range
        [fallback_quantization_range[0], fallback_quantization_range[1]] instead
of using a RequantizationRange node in the graph.
Raises:
ValueError: Two nodes with the same name were found in the graph.
"""
self.input_graph = input_graph
self.nodes_map = self.create_nodes_map(input_graph)
self.output_graph = None
self.mode = mode
self.final_node_renames = {}
if quantized_input_range:
self.input_range = (quantized_input_range[0], quantized_input_range[1])
if self.input_range[0] >= self.input_range[1]:
raise ValueError("Invalid quantized_input_range: [%s,%s]" %
self.input_range)
if self.mode != "eightbit":
raise ValueError(
"quantized_input_range can only be specified in eightbit mode")
else:
self.input_range = None
if fallback_quantization_range:
self.fallback_quantization_range = [fallback_quantization_range[0],
fallback_quantization_range[1]]
if (self.fallback_quantization_range[0] >=
self.fallback_quantization_range[1]):
raise ValueError("Invalid fallback_quantization_range: [%s,%s]" %
self.fallback_quantization_range)
if self.mode != "eightbit":
raise ValueError(
"fallback_quantization_range can only be "
"specified in eightbit mode")
else:
self.fallback_quantization_range = None
# Data that is valid only during the recursive call to rewrite the graph.
self.state = None
def create_nodes_map(self, graph):
"""Builds a mapping of node names to their defs from the graph."""
nodes_map = {}
for node in graph.node:
if node.name not in nodes_map.keys():
nodes_map[node.name] = node
else:
raise ValueError("Duplicate node names detected.")
return nodes_map
def rewrite(self, output_node_names):
"""Triggers rewriting of the float graph.
Args:
output_node_names: A list of names of the nodes that produce the final
results.
Returns:
A quantized version of the float graph.
"""
self.output_graph = tf.GraphDef()
output_nodes = [self.nodes_map[output_node_name]
for output_node_name in output_node_names]
if self.mode == "round":
self.already_visited = {}
for output_node in output_nodes:
self.round_nodes_recursively(output_node)
elif self.mode == "quantize":
self.already_visited = {}
self.already_quantized = {}
for output_node in output_nodes:
self.quantize_nodes_recursively(output_node)
elif self.mode == "eightbit":
self.set_input_graph(graph_util.remove_training_nodes(self.input_graph))
output_nodes = [self.nodes_map[output_node_name]
for output_node_name in output_node_names]
self.state = EightbitizeRecursionState(already_visited={},
output_node_stack=[],
merged_with_fake_quant={})
for output_node in output_nodes:
self.eightbitize_nodes_recursively(output_node)
self.state = None
if self.input_range:
self.add_output_graph_node(create_constant_node(
"quantized_input_min_value", self.input_range[0], tf.float32, []))
self.add_output_graph_node(create_constant_node(
"quantized_input_max_value", self.input_range[1], tf.float32, []))
if self.fallback_quantization_range:
self.add_output_graph_node(create_constant_node(
"fallback_quantization_min_value",
self.fallback_quantization_range[0], tf.float32, []))
self.add_output_graph_node(create_constant_node(
"fallback_quantization_max_value",
self.fallback_quantization_range[1], tf.float32, []))
if FLAGS.strip_redundant_quantization:
self.output_graph = self.remove_redundant_quantization(
self.output_graph)
self.remove_dead_nodes(output_node_names)
self.apply_final_node_renames()
elif self.mode == "weights":
self.output_graph = self.quantize_weights(self.input_graph,
b"MIN_COMBINED")
self.remove_dead_nodes(output_node_names)
elif self.mode == "weights_rounded":
self.output_graph = self.quantize_weights(self.input_graph, self.mode)
self.remove_dead_nodes(output_node_names)
else:
print("Bad mode - " + self.mode + ".")
return self.output_graph
def round_nodes_recursively(self, current_node):
"""The entry point for simple rounding quantization."""
if self.already_visited[current_node.name]:
return
self.already_visited[current_node.name] = True
for input_node_name in current_node.input:
input_node_name = node_name_from_input(input_node_name)
input_node = self.nodes_map[input_node_name]
self.round_nodes_recursively(input_node)
nodes_to_quantize = ["Conv2D", "BiasAdd", "MatMul"]
if any(current_node.op in s for s in nodes_to_quantize):
new_node = tf.NodeDef()
new_node.CopyFrom(current_node)
new_node.name = current_node.name + "_original"
self.add_output_graph_node(new_node)
levels = 1 << FLAGS.bitdepth
constant_name = current_node.name + "_round_depth"
constant_tensor = tf.constant(levels, dtype=tf.int32, name=constant_name)
constant_node = constant_tensor.op.node_def
self.add_output_graph_node(constant_node)
quantize_node = tf.NodeDef()
quantize_node.op = "RoundToSteps"
quantize_node.name = current_node.name
quantize_node.input.extend([current_node.name + "_original"])
quantize_node.input.extend([constant_node.name])
self.add_output_graph_node(quantize_node)
else:
new_node = tf.NodeDef()
new_node.CopyFrom(current_node)
self.add_output_graph_node(new_node)
def quantize_nodes_recursively(self, current_node):
"""The entry point for quantizing nodes to eight bit and back."""
if self.already_visited[current_node.name]:
return
self.already_visited[current_node.name] = True
for input_node_name in current_node.input:
input_node_name = node_name_from_input(input_node_name)
input_node = self.nodes_map[input_node_name]
self.quantize_nodes_recursively(input_node)
nodes_to_quantize = ["Conv2D", "BiasAdd", "MatMul"]
if any(current_node.op in s for s in nodes_to_quantize):
for input_name in current_node.input:
input_name = node_name_from_input(input_name)
input_node = self.nodes_map[input_name]
self.quantize_node(input_node)
self.quantize_node(current_node)
else:
new_node = tf.NodeDef()
new_node.CopyFrom(current_node)
self.add_output_graph_node(new_node)
def quantize_node(self, input_node):
"""Handles quantizing a single node."""
input_name = input_node.name
if input_name in self.already_quantized:
return
self.already_quantized[input_name] = True
original_input_name = input_name + "_original"
reshape_name = input_name + "_reshape"
reshape_dims_name = input_name + "_reshape_dims"
max_name = input_name + "_max"
min_name = input_name + "_min"
dims_name = input_name + "_dims"
quantize_name = input_name + "_quantize"
dequantize_name = input_name
original_input_node = tf.NodeDef()
original_input_node.CopyFrom(input_node)
original_input_node.name = original_input_name
self.add_output_graph_node(original_input_node)
reshape_dims_node = create_constant_node(reshape_dims_name, -1, tf.int32,
[1])
self.add_output_graph_node(reshape_dims_node)
reshape_node = create_node("Reshape", reshape_name, [original_input_name,
reshape_dims_name])
set_attr_dtype(reshape_node, "T", tf.float32)
self.add_output_graph_node(reshape_node)
dims_node = create_constant_node(dims_name, 0, tf.int32, [1])
self.add_output_graph_node(dims_node)
max_node = create_node("Max", max_name, [reshape_name, dims_name])
set_attr_dtype(max_node, "T", tf.float32)
set_attr_bool(max_node, "keep_dims", False)
self.add_output_graph_node(max_node)
min_node = create_node("Min", min_name, [reshape_name, dims_name])
set_attr_dtype(min_node, "T", tf.float32)
set_attr_bool(min_node, "keep_dims", False)
self.add_output_graph_node(min_node)
quantize_node = create_node("Quantize", quantize_name, [original_input_name,
min_name, max_name])
set_attr_dtype(quantize_node, "T", tf.quint8)
set_attr_string(quantize_node, "mode", b"MIN_FIRST")
self.add_output_graph_node(quantize_node)
dequantize_node = create_node("Dequantize", dequantize_name,
[quantize_name, min_name, max_name])
set_attr_dtype(dequantize_node, "T", tf.quint8)
set_attr_string(dequantize_node, "mode", b"MIN_FIRST")
self.add_output_graph_node(dequantize_node)
def should_merge_with_fake_quant_node(self):
"""Should the current node merge with self.state.output_node_stack[-1]?"""
if not self.state.output_node_stack: return False
top = self.state.output_node_stack[-1]
return top[1] == 0 and top[0].op in ["FakeQuantWithMinMaxVars"]
def should_quantize_const(self, node):
if not self.state.output_node_stack: return False
top = self.state.output_node_stack[-1]
if not top[2]: return False
dtype = tf.as_dtype(node.attr["dtype"].type)
    assert dtype == tf.float32, (
        "Failed to quantize constant %s of type %s" % (node.name, dtype))
return True
def eightbitize_nodes_recursively(self, current_node):
"""The entry point for transforming a graph into full eight bit."""
if current_node.name in self.state.already_visited:
if (self.should_merge_with_fake_quant_node() or
current_node.name in self.state.merged_with_fake_quant):
raise ValueError("Unsupported graph structure: output of node %s "
"is processed by a FakeQuant* node and should have "
"no other outputs.", current_node.name)
return
self.state.already_visited[current_node.name] = True
for i, input_node_name in enumerate(current_node.input):
quantize_input = False
if current_node.op in ("MatMul", "Conv2D", "BiasAdd", "MaxPool",
"AvgPool", "Relu", "Relu6",
"BatchNormWithGlobalNormalization"):
quantize_input = True
elif current_node.op == "Concat" and i > 0:
quantize_input = (
tf.as_dtype(current_node.attr["T"].type) == tf.float32)
elif current_node.op == "Reshape" and i == 0:
quantize_input = (
tf.as_dtype(current_node.attr["T"].type) == tf.float32)
self.state.output_node_stack.append((current_node, i, quantize_input))
input_node_name = node_name_from_input(input_node_name)
input_node = self.nodes_map[input_node_name]
self.eightbitize_nodes_recursively(input_node)
self.state.output_node_stack.pop()
if current_node.op == "MatMul":
self.eightbitize_mat_mul_node(current_node)
elif current_node.op == "Conv2D":
self.eightbitize_conv_node(current_node)
elif current_node.op == "BiasAdd":
self.eightbitize_bias_add_node(current_node)
elif current_node.op == "MaxPool" or current_node.op == "AvgPool":
self.eightbitize_single_input_tensor_node(current_node,
self.add_pool_function)
elif current_node.op == "Relu" or current_node.op == "Relu6":
self.eightbitize_single_input_tensor_node(current_node,
self.add_relu_function)
elif (current_node.op == "Concat" and
tf.as_dtype(current_node.attr["T"].type) == tf.float32):
self.eightbitize_concat_node(current_node)
elif current_node.op == "BatchNormWithGlobalNormalization":
self.eightbitize_batch_norm_node(current_node)
elif (current_node.op == "Reshape" and
tf.as_dtype(current_node.attr["T"].type) == tf.float32):
self.eightbitize_reshape_node(current_node)
elif (self.input_range and
current_node.op in ("Placeholder", "PlaceholderV2")):
self.eightbitize_placeholder_node(current_node)
elif current_node.op == "FakeQuantWithMinMaxVars":
# It will have been merged into the underlying node.
pass
elif current_node.op == "Const":
if self.should_quantize_const(current_node):
for n in quantize_weight_eightbit(current_node, b"MIN_FIRST"):
self.add_output_graph_node(n)
else:
new_node = tf.NodeDef()
new_node.CopyFrom(current_node)
self.add_output_graph_node(new_node)
###################################################################
# Note: if more cases are added here, you may need to update the op
# name lists in the loop over children at the start of the function.
###################################################################
else:
new_node = tf.NodeDef()
new_node.CopyFrom(current_node)
self.add_output_graph_node(new_node)
if (self.should_merge_with_fake_quant_node() and
current_node.name not in self.state.merged_with_fake_quant):
raise ValueError(
"FakeQuant* node %s failed to merge with node %s of type %s" % (
self.state.output_node_stack[-1][0], current_node.name,
current_node.op))
def add_eightbit_prologue_nodes(self, original_node):
"""Adds input conversion nodes to handle quantizing the underlying node."""
namespace_prefix = original_node.name + "_eightbit"
reshape_dims_name, reduction_dims_name = self.add_common_quantization_nodes(
namespace_prefix)
input_names = []
min_max_names = []
for original_input_name in original_node.input:
quantize_input_name, min_input_name, max_input_name = (
self.eightbitize_input_to_node(namespace_prefix, original_input_name,
reshape_dims_name,
reduction_dims_name))
input_names.append(quantize_input_name)
min_max_names.append(min_input_name)
min_max_names.append(max_input_name)
all_input_names = []
all_input_names.extend(input_names)
all_input_names.extend(min_max_names)
return all_input_names
def add_common_quantization_nodes(self, namespace_prefix):
"""Builds constant nodes needed for quantization of inputs."""
reshape_dims_name = namespace_prefix + "_reshape_dims"
reduction_dims_name = namespace_prefix + "_reduction_dims"
reshape_dims_node = create_constant_node(reshape_dims_name, -1, tf.int32,
[1])
self.add_output_graph_node(reshape_dims_node)
reduction_dims_node = create_constant_node(reduction_dims_name, 0, tf.int32,
[1])
self.add_output_graph_node(reduction_dims_node)
return reshape_dims_name, reduction_dims_name
def eightbitize_input_to_node(self, namespace_prefix, original_input_name,
reshape_dims_name, reduction_dims_name):
"""Takes one float input to an op, and converts it to quantized form."""
unique_input_name = unique_node_name_from_input(original_input_name)
reshape_input_name = namespace_prefix + "_reshape_" + unique_input_name
min_input_name = namespace_prefix + "_min_" + unique_input_name
max_input_name = namespace_prefix + "_max_" + unique_input_name
quantize_input_name = namespace_prefix + "_quantize_" + unique_input_name
reshape_input_node = create_node("Reshape", reshape_input_name,
[original_input_name, reshape_dims_name])
set_attr_dtype(reshape_input_node, "T", tf.float32)
self.add_output_graph_node(reshape_input_node)
min_input_node = create_node("Min", min_input_name, [reshape_input_name,
reduction_dims_name])
set_attr_dtype(min_input_node, "T", tf.float32)
set_attr_bool(min_input_node, "keep_dims", False)
self.add_output_graph_node(min_input_node)
max_input_node = create_node("Max", max_input_name, [reshape_input_name,
reduction_dims_name])
set_attr_dtype(max_input_node, "T", tf.float32)
set_attr_bool(max_input_node, "keep_dims", False)
self.add_output_graph_node(max_input_node)
quantize_input_node = create_node("QuantizeV2", quantize_input_name,
[original_input_name, min_input_name,
max_input_name])
set_attr_dtype(quantize_input_node, "T", tf.quint8)
set_attr_string(quantize_input_node, "mode", b"MIN_FIRST")
self.add_output_graph_node(quantize_input_node)
min_output_name = quantize_input_name + ":1"
max_output_name = quantize_input_name + ":2"
return quantize_input_name, min_output_name, max_output_name
def add_quantize_down_nodes(self, original_node, quantized_output_name):
quantized_outputs = [
quantized_output_name, quantized_output_name + ":1",
quantized_output_name + ":2"
]
min_max_inputs = None
if self.should_merge_with_fake_quant_node():
# Use the inputs to the FakeQuantWithMinMaxVars node as the inputs to
# Requantize.
fake_quant_node = self.state.output_node_stack[-1][0]
min_max_inputs = [fake_quant_node.input[1], fake_quant_node.input[2]]
assert original_node.name not in self.state.merged_with_fake_quant
self.state.merged_with_fake_quant[original_node.name] = True
elif self.fallback_quantization_range:
min_max_inputs = ["fallback_quantization_min_value:0",
"fallback_quantization_max_value:0"]
else:
# Add a RequantizationRange node for finding the min and max values.
requant_range_node = create_node(
"RequantizationRange", original_node.name + "_eightbit_requant_range",
quantized_outputs)
set_attr_dtype(requant_range_node, "Tinput", tf.qint32)
self.add_output_graph_node(requant_range_node)
min_max_inputs = [requant_range_node.name + ":0",
requant_range_node.name + ":1"]
requantize_node = create_node(
"Requantize", original_node.name + "_eightbit_requantize",
quantized_outputs + min_max_inputs)
set_attr_dtype(requantize_node, "Tinput", tf.qint32)
set_attr_dtype(requantize_node, "out_type", tf.quint8)
self.add_output_graph_node(requantize_node)
return requantize_node.name
def add_dequantize_result_node(self, quantized_output_name,
original_node_name, min_tensor_index=1):
min_max_inputs = [
"%s:%s" % (quantized_output_name, min_tensor_index),
"%s:%s" % (quantized_output_name, (min_tensor_index + 1))]
dequantize_name = original_node_name
if self.should_merge_with_fake_quant_node():
fake_quant_node = self.state.output_node_stack[-1][0]
if original_node_name not in self.state.merged_with_fake_quant:
min_max_inputs = [fake_quant_node.input[1], fake_quant_node.input[2]]
self.state.merged_with_fake_quant[original_node_name] = True
dequantize_name = fake_quant_node.name
dequantize_node = create_node(
"Dequantize", dequantize_name,
[quantized_output_name, min_max_inputs[0], min_max_inputs[1]])
set_attr_dtype(dequantize_node, "T", tf.quint8)
set_attr_string(dequantize_node, "mode", b"MIN_FIRST")
self.add_output_graph_node(dequantize_node)
def eightbitize_mat_mul_node(self, original_node):
"""Replaces a MatMul node with the eight bit equivalent sub-graph."""
quantized_mat_mul_name = original_node.name + "_eightbit_quantized_mat_mul"
all_input_names = self.add_eightbit_prologue_nodes(original_node)
quantized_mat_mul_node = create_node(
"QuantizedMatMul", quantized_mat_mul_name,
all_input_names)
set_attr_dtype(quantized_mat_mul_node, "T1", tf.quint8)
set_attr_dtype(quantized_mat_mul_node, "T2", tf.quint8)
set_attr_dtype(quantized_mat_mul_node, "Toutput", tf.qint32)
copy_attr(quantized_mat_mul_node, "transpose_a",
original_node.attr["transpose_a"])
copy_attr(quantized_mat_mul_node, "transpose_b",
original_node.attr["transpose_b"])
self.add_output_graph_node(quantized_mat_mul_node)
quantize_down_name = self.add_quantize_down_nodes(original_node,
quantized_mat_mul_name)
self.add_dequantize_result_node(quantize_down_name, original_node.name)
def eightbitize_conv_node(self, original_node):
"""Replaces a Conv2D node with the eight bit equivalent sub-graph."""
all_input_names = self.add_eightbit_prologue_nodes(original_node)
quantized_conv_name = original_node.name + "_eightbit_quantized_conv"
quantized_conv_node = create_node("QuantizedConv2D", quantized_conv_name,
all_input_names)
copy_attr(quantized_conv_node, "strides", original_node.attr["strides"])
copy_attr(quantized_conv_node, "padding", original_node.attr["padding"])
set_attr_dtype(quantized_conv_node, "Tinput", tf.quint8)
set_attr_dtype(quantized_conv_node, "Tfilter", tf.quint8)
set_attr_dtype(quantized_conv_node, "out_type", tf.qint32)
self.add_output_graph_node(quantized_conv_node)
quantize_down_name = self.add_quantize_down_nodes(original_node,
quantized_conv_name)
self.add_dequantize_result_node(quantize_down_name, original_node.name)
def eightbitize_bias_add_node(self, original_node):
"""Replaces a BiasAdd node with the eight bit equivalent sub-graph."""
quantized_bias_add_name = (original_node.name +
"_eightbit_quantized_bias_add")
all_input_names = self.add_eightbit_prologue_nodes(original_node)
quantized_bias_add_node = create_node(
"QuantizedBiasAdd", quantized_bias_add_name,
all_input_names)
set_attr_dtype(quantized_bias_add_node, "T1", tf.quint8)
set_attr_dtype(quantized_bias_add_node, "T2", tf.quint8)
set_attr_dtype(quantized_bias_add_node, "out_type", tf.qint32)
self.add_output_graph_node(quantized_bias_add_node)
quantize_down_name = self.add_quantize_down_nodes(original_node,
quantized_bias_add_name)
self.add_dequantize_result_node(quantize_down_name, original_node.name)
def eightbitize_single_input_tensor_node(self, original_node,
add_op_function):
"""Replaces a single-tensor node with the eight bit equivalent sub-graph.
Converts a node like this:
Shape(f) Input(f)
| |
+--------v v
Operation
|
v
(f)
Into a quantized equivalent:
Input(f) ReshapeDims
+------v v-------------+
| Reshape
| |
| | ReductionDims
| +-----+ |
| | +---c---------+
| v v v v-------+
| Min Max
| +----+ |
v v v--------+
Quantize
|
v
QuantizedOperation
| | |
v v v
Dequantize
|
v
(f)
Args:
original_node: Float node to be converted.
add_op_function: Function to create the actual node.
Returns:
Subgraph representing the quantized version of the original node.
"""
quantized_op_name = original_node.name + "_eightbit_quantized"
quantized_op_type = "Quantized" + original_node.op
all_input_names = self.add_eightbit_prologue_nodes(original_node)
quantized_op_node = create_node(
quantized_op_type, quantized_op_name, all_input_names)
add_op_function(original_node, quantized_op_node)
self.add_output_graph_node(quantized_op_node)
self.add_dequantize_result_node(quantized_op_name, original_node.name)
def add_pool_function(self, original_node, quantized_op_node):
set_attr_dtype(quantized_op_node, "T", tf.quint8)
copy_attr(quantized_op_node, "ksize", original_node.attr["ksize"])
copy_attr(quantized_op_node, "strides", original_node.attr["strides"])
copy_attr(quantized_op_node, "padding", original_node.attr["padding"])
def add_relu_function(self, unused_arg_node, quantized_op_node):
set_attr_dtype(quantized_op_node, "Tinput", tf.quint8)
def eightbitize_concat_node(self, original_node):
"""Replaces a Concat node with the eight bit equivalent sub-graph.
Converts a node like this:
Shape(f) Input0(f) Input1(f)
| | |
+--------v v v----------+
Concat
|
v
(f)
Into a quantized equivalent:
Shape(f) Input0(f) ReshapeDims Input1(f)
| +------v v--------------+------------------v v------+
| | Reshape Reshape |
| | | | |
| | | ReductionDims | |
| | +------+ | +--------+ |
| | | +---c---------+-----------c-----+ | |
| | +v v v v-------+---------v v v v+ |
| | Min Max Min Max |
| | +----+ | | +-----+ |
| v v v--------+ +----------v v v
| Quantize Quantize
| +------------------+ +----------------------+
+-------------------------------+ | |
v v v
QuantizedConcat
| | |
v v v
Dequantize
|
v
(f)
Args:
original_node: Float node to be converted.
Returns:
Subgraph representing the quantized version of the original node.
"""
namespace_prefix = original_node.name + "_eightbit"
quantized_concat_name = namespace_prefix + "_quantized_concat"
reshape_dims_name, reduction_dims_name = self.add_common_quantization_nodes(
namespace_prefix)
shape_input_name = original_node.input[0]
original_inputs = original_node.input[1:]
input_names = []
min_names = []
max_names = []
for original_input_name in original_inputs:
quantize_input_name, min_input_name, max_input_name = (
self.eightbitize_input_to_node(namespace_prefix, original_input_name,
reshape_dims_name,
reduction_dims_name))
input_names.append(quantize_input_name)
min_names.append(min_input_name)
max_names.append(max_input_name)
all_input_names = [shape_input_name]
all_input_names.extend(input_names)
all_input_names.extend(min_names)
all_input_names.extend(max_names)
quantized_concat_node = create_node(
"QuantizedConcat", quantized_concat_name, all_input_names)
set_attr_int(quantized_concat_node, "N", len(original_inputs))
set_attr_dtype(quantized_concat_node, "T", tf.quint8)
self.add_output_graph_node(quantized_concat_node)
self.add_dequantize_result_node(quantized_concat_name, original_node.name)
def eightbitize_placeholder_node(self, current_node):
"""Replaces a placeholder node with a quint8 placeholder node+dequantize."""
name = current_node.name
# Convert the placeholder into a quantized type.
output_node = tf.NodeDef()
output_node.CopyFrom(current_node)
set_attr_dtype(output_node, "dtype", tf.quint8)
output_node.name += "_original_input"
self.add_output_graph_node(output_node)
# Add a dequantize to convert back to float.
dequantize_node = create_node(
"Dequantize", name,
[output_node.name, "quantized_input_min_value",
"quantized_input_max_value"])
set_attr_dtype(dequantize_node, "T", tf.quint8)
set_attr_string(dequantize_node, "mode", b"MIN_FIRST")
self.add_output_graph_node(dequantize_node)
# For the descent over the graph to work, the dequantize node must be named
# current_node.name. However, for the feeding of the graph to work, the
# placeholder must have the name current_node.name; so record a final set
# of renames to apply after all processing has been done.
self.final_node_renames[output_node.name] = name
self.final_node_renames[dequantize_node.name] = name + "_dequantize"
def eightbitize_reshape_node(self, original_node):
"""Replaces a Reshape node with the eight bit equivalent sub-graph.
Args:
original_node: Float node to be converted.
Returns:
Subgraph representing the quantized version of the original node.
"""
namespace_prefix = original_node.name + "_eightbit"
quantized_reshape_name = namespace_prefix + "_quantized_reshape"
reshape_dims_name, reduction_dims_name = self.add_common_quantization_nodes(
namespace_prefix)
shape_input_name = original_node.input[1]
quantize_input_name, min_input_name, max_input_name = (
self.eightbitize_input_to_node(namespace_prefix, original_node.input[0],
reshape_dims_name, reduction_dims_name))
quantized_reshape_node = create_node(
"QuantizedReshape", quantized_reshape_name,
[quantize_input_name, shape_input_name, min_input_name, max_input_name])
set_attr_dtype(quantized_reshape_node, "T", tf.quint8)
self.add_output_graph_node(quantized_reshape_node)
self.add_dequantize_result_node(quantized_reshape_name, original_node.name)
def eightbitize_batch_norm_node(self, original_node):
"""Replaces a MatMul node with the eight bit equivalent sub-graph."""
namespace_prefix = original_node.name + "_eightbit"
original_input_name = original_node.input[0]
original_mean_name = original_node.input[1]
original_variance_name = original_node.input[2]
original_beta_name = original_node.input[3]
original_gamma_name = original_node.input[4]
quantized_batch_norm_name = namespace_prefix + "_quantized_batch_norm"
reshape_dims_name, reduction_dims_name = self.add_common_quantization_nodes(
namespace_prefix)
quantize_input_name, min_input_name, max_input_name = (
self.eightbitize_input_to_node(namespace_prefix, original_input_name,
reshape_dims_name, reduction_dims_name))
quantize_mean_name, min_mean_name, max_mean_name = (
self.eightbitize_input_to_node(namespace_prefix, original_mean_name,
reshape_dims_name, reduction_dims_name))
quantize_variance_name, min_variance_name, max_variance_name = (
self.eightbitize_input_to_node(namespace_prefix, original_variance_name,
reshape_dims_name, reduction_dims_name))
quantize_beta_name, min_beta_name, max_beta_name = (
self.eightbitize_input_to_node(namespace_prefix, original_beta_name,
reshape_dims_name, reduction_dims_name))
quantize_gamma_name, min_gamma_name, max_gamma_name = (
self.eightbitize_input_to_node(namespace_prefix, original_gamma_name,
reshape_dims_name, reduction_dims_name))
quantized_batch_norm_node = create_node(
"QuantizedBatchNormWithGlobalNormalization", quantized_batch_norm_name,
[quantize_input_name, min_input_name, max_input_name,
quantize_mean_name, min_mean_name, max_mean_name,
quantize_variance_name, min_variance_name, max_variance_name,
quantize_beta_name, min_beta_name, max_beta_name, quantize_gamma_name,
min_gamma_name, max_gamma_name])
set_attr_dtype(quantized_batch_norm_node, "Tinput", tf.quint8)
set_attr_dtype(quantized_batch_norm_node, "out_type", tf.qint32)
copy_attr(quantized_batch_norm_node, "scale_after_normalization",
original_node.attr["scale_after_normalization"])
copy_attr(quantized_batch_norm_node, "variance_epsilon",
original_node.attr["variance_epsilon"])
self.add_output_graph_node(quantized_batch_norm_node)
quantize_down_name = self.add_quantize_down_nodes(original_node,
quantized_batch_norm_name)
self.add_dequantize_result_node(quantize_down_name, original_node.name)
def add_output_graph_node(self, output_node):
"""Inserts one node into the new graph."""
self.output_graph.node.extend([output_node])
def remove_redundant_quantization(self, old_graph):
"""Removes unneeded pairs of quantize/dequantize ops from the graph.
This is a bit of a tricky function, because it's attempting to spot the
pattern of dequantizing from eight-bit up to float, and then immediately
quantizing back down to eight bits again, that's introduced by previous
passes that do 'key-hole' conversions of individual nodes but have to
convert back to float to match the previous output interface, since they
don't know that the next op can handle quantized tensors.
It works by:
- Looking for Quantize nodes.
- Checking to see if their first input is a Dequantize node.
- Seeing if their min/max inputs come from Min/Max nodes.
- Making sure those Min/Max nodes are being fed from the same Dequantize.
- Or that the Min is indirectly being fed from the same Dequantize as Max.
- Making sure the Dequantize is going through a Reshape (which we add
during the previous pass when we create the quantize sub-graph).
- Looking for the dims Const op for the Min/Max dims.
If all of these conditions are met, then it's a sub-graph pattern that
we know how to optimize out (and is likely the common one we've introduced).
We then rewire the graph to skip it entirely, and then rely on the dead node
removal pass to get rid of any nodes that are no longer needed.
Args:
old_graph: The model we'll be stripping redundant nodes from.
Returns:
A graph with the unnecessary nodes removed.
Raises:
ValueError: Two nodes with the same name were found in the graph.
"""
old_nodes_map = self.create_nodes_map(old_graph)
self.output_graph = tf.GraphDef()
inputs_to_rename = {}
# We go through all the nodes, looking for any that match the patterns we
# know how to optimize away.
for node in old_graph.node:
# We always start with a Quantize node, and examine its inputs to see if
# they are in a form that can be removed.
if node.op not in ["Quantize", "QuantizeV2"]:
continue
dequantize_node_name = node_name_from_input(node.input[0])
if dequantize_node_name not in old_nodes_map:
raise ValueError("Input node name '" + dequantize_node_name +
"' not found in node '" + node.name + "'")
dequantize_node = old_nodes_map[dequantize_node_name]
# Do we have a Dequantize feeding in, with the same type as the Quantize?
if dequantize_node.op != "Dequantize":
continue
if node.attr["T"] != dequantize_node.attr["T"]:
continue
# Now look at the other inputs, and ensure they're Min/Max nodes.
min_node_name = node_name_from_input(node.input[1])
max_node_name = node_name_from_input(node.input[2])
min_node = old_nodes_map[min_node_name]
max_node = old_nodes_map[max_node_name]
is_min_right_type = (min_node.op in ["Min", "Dequantize"])
is_max_right_type = (max_node.op in ["Max", "Dequantize"])
if not is_min_right_type or not is_max_right_type:
print("Didn't find expected types on inputs : %s, %s." % (
min_node.op, max_node.op))
continue
min_node_input_name = node_name_from_input(min_node.input[0])
max_node_input_name = node_name_from_input(max_node.input[0])
# There are two different patterns for Min nodes we can recognize, one
# where the input comes directly from the same one as the Max, and
# another where we run it through another Min first, so check for both.
is_same_input = False
if min_node_input_name == max_node_input_name:
is_same_input = True
else:
first_min_node_input = old_nodes_map[min_node_input_name]
if first_min_node_input.op == "Concat":
second_min_node_name = node_name_from_input(
first_min_node_input.input[1])
second_min_node = old_nodes_map[second_min_node_name]
if second_min_node.op == "Min":
second_min_node_input_name = node_name_from_input(
second_min_node.input[0])
is_same_input = (second_min_node_input_name == max_node_input_name)
if not is_same_input:
print("Different min/max inputs: " + min_node_input_name)
continue
# We recognize this pattern, so mark the graph edges to be rewired to
# route around it entirely, since we know it's a no-op.
dequantize_source_name = node_name_from_input(dequantize_node.input[0])
node_tensor_name = ensure_tensor_name_has_port(node.name)
min_tensor_name = node.name + ":1"
max_tensor_name = node.name + ":2"
inputs_to_rename[node_tensor_name] = dequantize_source_name
inputs_to_rename[min_tensor_name] = dequantize_node.input[1]
inputs_to_rename[max_tensor_name] = dequantize_node.input[2]
# Finally we apply all the rewiring we've marked to the graph.
for node in old_graph.node:
for index, input_full_name in enumerate(node.input):
input_name = ensure_tensor_name_has_port(input_full_name)
if input_name in inputs_to_rename:
node.input[index] = inputs_to_rename[input_name]
self.add_output_graph_node(node)
return self.output_graph
def apply_final_node_renames(self):
"""Applies node renames in self.final_node_renames to self.output_graph."""
old_graph = self.output_graph
self.output_graph = tf.GraphDef()
for node in old_graph.node:
node.name = self.final_node_renames.get(node.name, node.name)
for index, input_name in enumerate(node.input):
node_name = node_name_from_input(input_name)
input_full_name = ensure_tensor_name_has_port(input_name)
if node_name in self.final_node_renames:
node.input[index] = "%s%s" % (self.final_node_renames[node_name],
input_full_name[len(node_name):])
self.add_output_graph_node(node)
return self.output_graph
def remove_dead_nodes(self, output_names):
"""Removes nodes that are no longer needed for inference from the graph."""
old_output_graph = self.output_graph
self.output_graph = graph_util.extract_sub_graph(old_output_graph,
output_names)
def quantize_weights(self, input_graph, quantization_mode):
"""Quantize float Const ops.
There are two modes of operations, both replace float Const ops with
quantized values.
1. If quantization_mode is "weights_rounded", this function replaces float
Const ops with quantized float Const ops - same as the original op, but
float values being mapped to the center of one of 1<<FLAGS.bitdepth buckets.
This does not change the raw model size, but compression algorithms such as
zip (as used for compressing apks) or bzip2 will achieve a very good
compression ratio.
2. For other quantization modes ("MIN_COMBINED" or "MIN_FIRST"), float
Const ops are quantized and replaced by a tuple of four ops to perform
the dequantization at runtime:
    * eight-bit Const (bucket indices, same shape as original float Const op)
* two float Const ops (min and max value of original float Const op)
* Dequantize op to convert the eight-bit consts to float tensors.
The quantization mode is important because we see accuracy problems when
quantizing weights for different situations depending on the algorithm
used. We haven't figured out exactly what the underlying cause is yet,
unfortunately.
Args:
input_graph: A GraphDef of the model containing float Const ops.
quantization_mode: How to quantize and dequantize the values.
Returns:
A GraphDef of the converted graph.
Raises:
ValueError: If quantization_mode is unsupported.
"""
output_graph = tf.GraphDef()
for input_node in input_graph.node:
should_quantize = False
if input_node.op == "Const":
dtype = tf.as_dtype(input_node.attr["dtype"].type)
if dtype == tf.float32:
should_quantize = True
if should_quantize:
if quantization_mode == "weights_rounded":
output_graph.node.extend(quantize_weight_rounded(input_node))
elif quantization_mode in (b"MIN_COMBINED", b"MIN_FIRST"):
output_graph.node.extend(quantize_weight_eightbit(input_node,
quantization_mode))
else:
raise ValueError("Unsupported quantization mode %s." %
quantization_mode)
else:
output_node = tf.NodeDef()
output_node.CopyFrom(input_node)
output_graph.node.extend([output_node])
return output_graph
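  # Hypothetical usage sketch (illustrative, not part of the original file):
  # assuming `graph_def` is a loaded float GraphDef with an output node named
  # "out", weight-only quantization could be driven as:
  #   rewriter = GraphRewriter(graph_def, "weights", None)
  #   out_graph = rewriter.rewrite(["out"])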
def set_input_graph(self, new_input_graph):
self.input_graph = new_input_graph
self.nodes_map = self.create_nodes_map(self.input_graph)
def main(unused_args):
if not tf.gfile.Exists(FLAGS.input):
print("Input graph file '" + FLAGS.input + "' does not exist!")
return -1
known_modes = ["round", "quantize", "eightbit", "weights", "test",
"weights_rounded"]
  if FLAGS.mode not in known_modes:
print("mode is '" + FLAGS.mode + "', not in " + ", ".join(known_modes) +
".")
return -1
tf_graph = tf.GraphDef()
with tf.gfile.Open(FLAGS.input, "rb") as f:
data = f.read()
tf_graph.ParseFromString(data)
graph = tf.Graph()
with graph.as_default():
tf.import_graph_def(tf_graph, input_map={}, name="")
quantized_input_range = None
if FLAGS.quantized_input:
quantized_input_range = [FLAGS.quantized_input_min,
FLAGS.quantized_input_max]
fallback_quantization_range = None
if (FLAGS.quantized_fallback_min is not None or
FLAGS.quantized_fallback_max is not None):
assert FLAGS.quantized_fallback_min is not None
assert FLAGS.quantized_fallback_max is not None
fallback_quantization_range = [FLAGS.quantized_fallback_min,
FLAGS.quantized_fallback_max]
rewriter = GraphRewriter(tf_graph, FLAGS.mode, quantized_input_range,
fallback_quantization_range)
output_graph = rewriter.rewrite(FLAGS.output_node_names.split(","))
f = tf.gfile.FastGFile(FLAGS.output, "wb")
f.write(output_graph.SerializeToString())
return 0
if __name__ == "__main__":
tf.app.run()
|
akeym/cyder
|
refs/heads/master
|
cyder/api/v1/endpoints/dns/soa/api.py
|
4
|
from rest_framework import serializers
from cyder.api.v1.endpoints.api import CommonAPINestedAVSerializer
from cyder.api.v1.endpoints.dns import api
from cyder.cydns.soa.models import SOA, SOAAV
class SOAAVSerializer(serializers.ModelSerializer):
id = serializers.Field(source='id')
entity = serializers.HyperlinkedRelatedField(
view_name='api-dns-soa-detail')
attribute = serializers.SlugRelatedField(slug_field='name')
class Meta: # don't import from api.CommonDNSMeta so we get all fields
model = SOAAV
class SOAAVViewSet(api.CommonDNSViewSet):
model = SOAAV
serializer_class = SOAAVSerializer
class SOANestedKeyValueSerializer(CommonAPINestedAVSerializer):
id = serializers.HyperlinkedIdentityField(
view_name='api-dns-soa_attributes-detail')
class Meta:
model = SOAAV
fields = api.NestedKeyValueFields
class SOASerializer(serializers.HyperlinkedModelSerializer):
soaav_set = SOANestedKeyValueSerializer(many=True)
class Meta(api.CommonDNSMeta):
model = SOA
class SOAViewSet(api.CommonDNSViewSet):
model = SOA
serializer_class = SOASerializer
avmodel = SOAAV
|
mmilaprat/policycompass-services
|
refs/heads/master
|
apps/ratingsmanager/models.py
|
2
|
from django.db import models
from django.core.validators import RegexValidator, MinValueValidator, \
MaxValueValidator
import logging
log = logging.getLogger(__name__)
class Rating(models.Model):
identifier = models.CharField(max_length=255, unique=True)
votes_counter = models.PositiveIntegerField(default=0)
score = models.DecimalField(max_digits=2, decimal_places=1, default=0.0)
def calculate(self):
"""
Recalculate the totals, and save.
"""
aggregates = self.rating_votes.aggregate(count=models.Count('score'),
score=models.Avg('score'))
        score = aggregates.get('score') or 0.0  # Avg() is None when there are no votes yet
# round score to the nearest 0.5
score *= 2
score = round(score)
score /= 2
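        # Worked example (illustrative): an average of 4.3 doubles to 8.6,
        # rounds to 9, and halves back to 4.5, so stored scores always land
        # on a half-point step between 0.0 and 5.0.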
self.votes_counter = aggregates.get('count') or 0
self.score = score or 0.0
self.save()
class RatingVote(models.Model):
rating = models.ForeignKey(Rating, related_name='rating_votes')
score = models.SmallIntegerField(
validators=[MinValueValidator(1), MaxValueValidator(5)])
user_identifier = models.CharField(max_length=1024, validators=[
RegexValidator("^(/[^/]*)+/?$")])
timestamp_created = models.DateTimeField(auto_now_add=True)
timestamp_modified = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ("rating", "user_identifier")
|
wtpayne/hiai
|
refs/heads/master
|
a3_src/h20_capability/runtime/mil/spec/spec_simulator.py
|
1
|
# -*- coding: utf-8 -*-
"""
Unit tests for the runtime.mil.simulator module.
---
type:
python_module
validation_level:
v00_minimum
protection:
k00_public
copyright:
"Copyright 2016 High Integrity Artificial Intelligence Systems"
license:
"Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an AS IS BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."
...
"""
# =============================================================================
class SpecifyMain:
"""
Specify the runtime.mil.main() function.
"""
# -------------------------------------------------------------------------
def it_is_callable(self):
"""
The main() function is callable.
"""
import runtime.mil.simulator
assert callable(runtime.mil.simulator.main)
|
HeraldStudio/HeraldAppApi
|
refs/heads/master
|
mod/huodong/upload.py
|
1
|
# -*- coding: utf-8 -*-
__author__ = 'LiangJ'
import tornado.web
import hashlib
import json,io
from ..Basehandler import BaseHandler
from ..databases.tables import ActivCommitUser,Activity
from sqlalchemy.orm.exc import NoResultFound
from PIL import Image
import traceback
class UploadException(RuntimeError):
def __init__(self,code,content):
self.code = code
self.content = content
MAX_FILE_SIZE = 500*1024
class UploadPichandler(BaseHandler):
def post(self):
upload_path = 'http://www.heraldstudio.com/herald/static/img'
save_path = 'static/img'
retjson = {'code':200,'content':'success'}
try:
request_cookie = self.get_secure_cookie('ActivityCommitter')
if request_cookie:
matched_user = self.db.query(ActivCommitUser).filter(ActivCommitUser.cookie == request_cookie).one()
file_metas = self.request.files['file']
if file_metas:
for meta in file_metas:
                        filename = meta['filename']
                        houzhui = filename.split('.')[-1]  # file extension
                        file_size = len(meta['body'])
                        if file_size > MAX_FILE_SIZE:
                            raise UploadException(401, u'文件过大')  # file too large
                        if houzhui not in ['jpg', 'png']:
                            raise UploadException(401, u'文件格式不支持')  # unsupported file type
img = Image.open(io.BytesIO(meta['body']))
pic_size = img.size
                        if float(pic_size[0]) / pic_size[1] != 2.5:
                            raise UploadException(401, u'图片比例不是5:2')  # aspect ratio must be 5:2
img = img.resize((500,200))
shaobj = hashlib.md5()
shaobj.update(meta['body'])
filehash = shaobj.hexdigest()
filepath = save_path+'/'+filehash+'.jpg'
database_path = upload_path+'/'+filehash+'.jpg'
img.save(filepath,'jpeg')
retjson['content'] = database_path
else:
picurl = self.get_argument('pic_url')
if picurl:
retjson['content'] = picurl
                    else:
                        retjson['code'] = 500
                        retjson['content'] = u'文件为空'  # empty file
            else:
                raise UploadException(400, u'请先登录')  # please log in first
        except UploadException as e:
            retjson['code'] = e.code
            retjson['content'] = e.content
        except NoResultFound:
            retjson['code'] = 400
            retjson['content'] = u'请先登录'  # please log in first
        except Exception:
            retjson['code'] = 500
            retjson['content'] = u'系统错误'  # internal error
self.write(json.dumps(retjson,ensure_ascii=False, indent=2))
|
shaunstanislaus/nomad
|
refs/heads/master
|
client/driver/test-resources/docker/main.py
|
50
|
import signal
import time
# Setup handler for sigterm so we can exit when docker stop is called.
def term(signum, stack_frame):
    exit(1)
signal.signal(signal.SIGTERM, term)
print("Starting")
max = 3
for i in range(max):
time.sleep(1)
print("Heartbeat {0}/{1}".format(i + 1, max))
print("Exiting")
|
MilyMilo/sci-organizer
|
refs/heads/master
|
accounts/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
MacHu-GWU/uszipcode-project
|
refs/heads/master
|
uszipcode/pkg/sqlalchemy_mate/pkg/prettytable/cli.py
|
1
|
from __future__ import print_function
import argparse
import sys
from .factory import from_csv
from ._compact import StringIO
def main():
text_in = sys.stdin.read()
if text_in:
print(from_csv(StringIO.StringIO(text_in)))
return
parser = argparse.ArgumentParser(description='A simple Python library designed to make it quick and easy to '
'represent tabular data in visually appealing ASCII tables.')
parser.add_argument('--csv', help='CSV file name')
args = parser.parse_args()
with open(args.csv) as fp:
print(from_csv(fp))
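# Example invocations (illustrative, not part of the original file):
#   python cli.py --csv data.csv
#   cat data.csv | python cli.py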
if __name__ == '__main__':
main()
|
pfnet/chainer
|
refs/heads/master
|
tests/chainer_tests/optimizer_hooks_tests/test_gradient_hard_clipping.py
|
6
|
import unittest
import numpy as np
from chainer import optimizer_hooks
from chainer import optimizers
from chainer import testing
import utils
_backend_params = [
# NumPy
{},
{'use_ideep': 'always'},
# CuPy
{'use_cuda': True, 'cuda_device': 0},
{'use_cuda': True, 'cuda_device': 1},
# ChainerX
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
]
@testing.backend.inject_backend_tests(None, _backend_params)
@testing.backend.inject_backend_tests(None, _backend_params)
@testing.backend.inject_backend_tests(None, _backend_params)
class TestGradientHardClipping(unittest.TestCase):
def setUp(self):
self.target = utils.ParametersLink.from_param_props(
((2, 3), (2, 0, 1), ()))
def check_hardclipping(self, backend_configs):
target = self.target
assert len(backend_configs) == len(list(target.params()))
devices = [bc.device for bc in backend_configs]
lower_bound = -0.9
upper_bound = 1.1
expects = []
# Compute expected
for param, device in zip(target.params(), devices):
expects.append(param.array - np.clip(param.grad,
lower_bound, upper_bound))
param.to_device(device)
# Apply optimizer_hook
opt = optimizers.SGD(lr=1)
opt.setup(self.target)
opt.add_hook(
optimizer_hooks.GradientHardClipping(lower_bound, upper_bound))
opt.update()
# Validate
for expect, param in zip(expects, target.params()):
testing.assert_allclose(expect, param.array)
def test_hardclipping(self, backend_config0,
backend_config1, backend_config2):
self.check_hardclipping(
[backend_config0, backend_config1, backend_config2])
testing.run_module(__name__, __file__)
|
songyi199111/sentry
|
refs/heads/master
|
src/sentry/web/frontend/__init__.py
|
40
|
"""
sentry.web.frontend
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
|
saurabh6790/omnitech-libs
|
refs/heads/master
|
webnotes/utils/email_lib/smtp.py
|
10
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Sends email via outgoing server specified in "Control Panel"
Allows easy adding of Attachments of "File" objects
"""
import webnotes
from webnotes import conf
from webnotes import msgprint
from webnotes.utils import cint, scrub_urls
import email.utils
class OutgoingEmailError(webnotes.ValidationError): pass
def get_email(recipients, sender='', msg='', subject='[No Subject]', text_content=None, footer=None):
	"""Build and return a multipart EMail (HTML plus plain-text fallback,
	footer appended); sending is left to the caller."""
email = EMail(sender, recipients, subject)
	if ('<br>' not in msg) and ('<p>' not in msg) and ('<div' not in msg):
msg = msg.replace('\n', '<br>')
email.set_html(msg, text_content, footer=footer)
return email
class EMail:
"""
Wrapper on the email module. Email object represents emails to be sent to the client.
Also provides a clean way to add binary `FileData` attachments
Also sets all messages as multipart/alternative for cleaner reading in text-only clients
"""
def __init__(self, sender='', recipients=[], subject='', alternative=0, reply_to=None):
from email.mime.multipart import MIMEMultipart
from email import Charset
Charset.add_charset('utf-8', Charset.QP, Charset.QP, 'utf-8')
if isinstance(recipients, basestring):
recipients = recipients.replace(';', ',').replace('\n', '')
recipients = recipients.split(',')
# remove null
recipients = filter(None, (r.strip() for r in recipients))
self.sender = sender
self.reply_to = reply_to or sender
self.recipients = recipients
self.subject = subject
self.msg_root = MIMEMultipart('mixed')
self.msg_multipart = MIMEMultipart('alternative')
self.msg_root.attach(self.msg_multipart)
self.cc = []
self.html_set = False
def set_html(self, message, text_content = None, footer=None):
"""Attach message in the html portion of multipart/alternative"""
message = message + self.get_footer(footer)
message = scrub_urls(message)
		# this is the first html part of a multipart message;
		# also attach a plain-text rendering as a fallback
if not self.html_set:
if text_content:
self.set_text(text_content)
else:
self.set_html_as_text(message)
self.set_part_html(message)
self.html_set = True
def set_text(self, message):
"""
Attach message in the text portion of multipart/alternative
"""
from email.mime.text import MIMEText
part = MIMEText(message.encode('utf-8'), 'plain', 'utf-8')
self.msg_multipart.attach(part)
def set_part_html(self, message):
from email.mime.text import MIMEText
part = MIMEText(message.encode('utf-8'), 'html', 'utf-8')
self.msg_multipart.attach(part)
def set_html_as_text(self, html):
"""return html2text"""
import HTMLParser
from webnotes.utils.email_lib.html2text import html2text
try:
self.set_text(html2text(html))
except HTMLParser.HTMLParseError:
pass
def set_message(self, message, mime_type='text/html', as_attachment=0, filename='attachment.html'):
"""Append the message with MIME content to the root node (as attachment)"""
from email.mime.text import MIMEText
maintype, subtype = mime_type.split('/')
part = MIMEText(message, _subtype = subtype)
if as_attachment:
part.add_header('Content-Disposition', 'attachment', filename=filename)
self.msg_root.attach(part)
def get_footer(self, footer=None):
"""append a footer (signature)"""
footer = footer or ""
footer += webnotes.conn.get_value('Control Panel',None,'mail_footer') or ''
try:
import startup
footer += getattr(startup, 'mail_footer', '')
except ImportError:
pass
return footer
def attach_file(self, n):
"""attach a file from the `FileData` table"""
from webnotes.utils.file_manager import get_file
res = get_file(n)
if not res:
return
self.add_attachment(res[0], res[1])
def add_attachment(self, fname, fcontent, content_type=None):
"""add attachment"""
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.text import MIMEText
import mimetypes
if not content_type:
content_type, encoding = mimetypes.guess_type(fname)
if content_type is None:
# No guess could be made, or the file is encoded (compressed), so
# use a generic bag-of-bits type.
content_type = 'application/octet-stream'
maintype, subtype = content_type.split('/', 1)
if maintype == 'text':
# Note: we should handle calculating the charset
if isinstance(fcontent, unicode):
fcontent = fcontent.encode("utf-8")
part = MIMEText(fcontent, _subtype=subtype, _charset="utf-8")
elif maintype == 'image':
part = MIMEImage(fcontent, _subtype=subtype)
elif maintype == 'audio':
part = MIMEAudio(fcontent, _subtype=subtype)
else:
part = MIMEBase(maintype, subtype)
part.set_payload(fcontent)
# Encode the payload using Base64
from email import encoders
encoders.encode_base64(part)
# Set the filename parameter
if fname:
part.add_header(b'Content-Disposition',
("attachment; filename=%s" % fname).encode('utf-8'))
self.msg_root.attach(part)
def validate(self):
"""validate the email ids"""
from webnotes.utils import validate_email_add
def _validate(email):
"""validate an email field"""
if email and not validate_email_add(email):
webnotes.msgprint("%s is not a valid email id" % email,
raise_exception = 1)
return email
if not self.sender:
self.sender = webnotes.conn.get_value('Email Settings', None,
'auto_email_id') or conf.get('auto_email_id') or None
if not self.sender:
webnotes.msgprint("""Please specify 'Auto Email Id' \
in Setup > Email Settings""")
if not "expires_on" in conf:
webnotes.msgprint("""Alternatively, \
you can also specify 'auto_email_id' in conf.py""")
raise webnotes.ValidationError
self.sender = _validate(self.sender)
self.reply_to = _validate(self.reply_to)
for e in self.recipients + (self.cc or []):
_validate(e.strip())
def make(self):
"""build into msg_root"""
self.msg_root['Subject'] = self.subject.encode("utf-8")
self.msg_root['From'] = self.sender.encode("utf-8")
self.msg_root['To'] = ', '.join([r.strip() for r in self.recipients]).encode("utf-8")
self.msg_root['Date'] = email.utils.formatdate()
if not self.reply_to:
self.reply_to = self.sender
self.msg_root['Reply-To'] = self.reply_to.encode("utf-8")
if self.cc:
self.msg_root['CC'] = ', '.join([r.strip() for r in self.cc]).encode("utf-8")
def as_string(self):
"""validate, build message and convert to string"""
self.validate()
self.make()
return self.msg_root.as_string()
def send(self, as_bulk=False):
"""send the message or add it to Outbox Email"""
if webnotes.flags.mute_emails or conf.get("mute_emails") or False:
webnotes.msgprint("Emails are muted")
return
import smtplib
try:
smtpserver = SMTPServer()
if hasattr(smtpserver, "always_use_login_id_as_sender") and \
cint(smtpserver.always_use_login_id_as_sender) and smtpserver.login:
if not self.reply_to:
self.reply_to = self.sender
self.sender = smtpserver.login
smtpserver.sess.sendmail(self.sender, self.recipients + (self.cc or []),
self.as_string())
except smtplib.SMTPSenderRefused:
webnotes.msgprint("""Invalid Outgoing Mail Server's Login Id or Password. \
Please rectify and try again.""")
raise
except smtplib.SMTPRecipientsRefused:
webnotes.msgprint("""Invalid Recipient (To) Email Address. \
Please rectify and try again.""")
raise
class SMTPServer:
def __init__(self, login=None, password=None, server=None, port=None, use_ssl=None):
import webnotes.model.doc
from webnotes.utils import cint
# get defaults from control panel
try:
es = webnotes.model.doc.Document('Email Settings','Email Settings')
except webnotes.DoesNotExistError:
es = None
self._sess = None
if server:
self.server = server
self.port = port
self.use_ssl = cint(use_ssl)
self.login = login
self.password = password
elif es and es.outgoing_mail_server:
self.server = es.outgoing_mail_server
self.port = es.mail_port
self.use_ssl = cint(es.use_ssl)
self.login = es.mail_login
self.password = es.mail_password
self.always_use_login_id_as_sender = es.always_use_login_id_as_sender
else:
self.server = conf.get("mail_server") or ""
self.port = conf.get("mail_port") or None
self.use_ssl = cint(conf.get("use_ssl") or 0)
self.login = conf.get("mail_login") or ""
self.password = conf.get("mail_password") or ""
@property
def sess(self):
"""get session"""
if self._sess:
return self._sess
from webnotes.utils import cint
import smtplib
import _socket
# check if email server specified
if not self.server:
err_msg = 'Outgoing Mail Server not specified'
webnotes.msgprint(err_msg)
raise webnotes.OutgoingEmailError, err_msg
try:
if self.use_ssl and not self.port:
self.port = 587
self._sess = smtplib.SMTP((self.server or "").encode('utf-8'),
cint(self.port) or None)
if not self._sess:
err_msg = 'Could not connect to outgoing email server'
webnotes.msgprint(err_msg)
raise webnotes.OutgoingEmailError, err_msg
if self.use_ssl:
self._sess.ehlo()
self._sess.starttls()
self._sess.ehlo()
if self.login:
ret = self._sess.login((self.login or "").encode('utf-8'),
(self.password or "").encode('utf-8'))
				# check whether login succeeded (235 = "Authentication successful")
if ret[0]!=235:
msgprint(ret[1])
raise webnotes.OutgoingEmailError, ret[1]
return self._sess
except _socket.error:
# Invalid mail server -- due to refusing connection
webnotes.msgprint('Invalid Outgoing Mail Server or Port. Please rectify and try again.')
raise
except smtplib.SMTPAuthenticationError:
webnotes.msgprint("Invalid Outgoing Mail Server's Login Id or Password. \
Please rectify and try again.")
raise
except smtplib.SMTPException:
webnotes.msgprint('There is something wrong with your Outgoing Mail Settings. \
Please contact us at support@erpnext.com')
raise
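# Hedged usage sketch (assumes a configured webnotes site with outgoing mail
# settings; the addresses and FileData name below are placeholders):
#   mail = get_email("user@example.com", sender="app@example.com",
#                    msg="Hello <b>world</b>", subject="Test")
#   mail.attach_file("Some File.pdf")   # name of a FileData record
#   mail.send()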
|
Mozhuowen/brython
|
refs/heads/master
|
www/src/Lib/test/encoded_modules/__init__.py
|
179
|
# -*- encoding: utf-8 -*-
# This is a package that contains a number of modules that are used to
# test import from the source files that have different encodings.
# This file (the __init__ module of the package), is encoded in utf-8
# and contains a list of strings from various unicode planes that are
# encoded differently to compare them to the same strings encoded
# differently in submodules. The following list, test_strings,
# contains a list of tuples. The first element of each tuple is the
# suffix that, prefixed with 'module_', gives the encoded submodule
# name, the second item is the encoding and the last
# is the test string. The same string is assigned to the variable
# named 'test' inside the submodule. If the decoding of modules works
# correctly, from module_xyz import test should result in the same
# string as listed below in the 'xyz' entry.
# module, encoding, test string
test_strings = (
('iso_8859_1', 'iso-8859-1', "Les hommes ont oublié cette vérité, "
"dit le renard. Mais tu ne dois pas l'oublier. Tu deviens "
"responsable pour toujours de ce que tu as apprivoisé."),
('koi8_r', 'koi8-r', "Познание бесконечности требует бесконечного времени.")
)
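# Hedged sketch of how a consumer might check the table (the import path is
# an assumption for this tree; CPython's real checks live in its test suite):
#   import importlib
#   for suffix, encoding, expected in test_strings:
#       mod = importlib.import_module('test.encoded_modules.module_' + suffix)
#       assert mod.test == expected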
|
nickanderson/ansible
|
refs/heads/devel
|
lib/ansible/runner/connection_plugins/winrm.py
|
8
|
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import base64
import hashlib
import imp
import os
import re
import shlex
import traceback
import urlparse
from ansible import errors
from ansible import utils
from ansible.callbacks import vvv, vvvv, verbose
from ansible.runner.shell_plugins import powershell
try:
from winrm import Response
from winrm.exceptions import WinRMTransportError
from winrm.protocol import Protocol
except ImportError:
raise errors.AnsibleError("winrm is not installed")
_winrm_cache = {
# 'user:pwhash@host:port': <protocol instance>
}
def vvvvv(msg, host=None):
verbose(msg, host=host, caplevel=4)
class Connection(object):
'''WinRM connections over HTTP/HTTPS.'''
def __init__(self, runner, host, port, user, password, *args, **kwargs):
self.runner = runner
self.host = host
self.port = port
self.user = user
self.password = password
self.has_pipelining = False
self.default_shell = 'powershell'
self.default_suffixes = ['.ps1', '']
self.protocol = None
self.shell_id = None
self.delegate = None
def _winrm_connect(self):
'''
Establish a WinRM connection over HTTP/HTTPS.
'''
port = self.port or 5986
vvv("ESTABLISH WINRM CONNECTION FOR USER: %s on PORT %s TO %s" % \
(self.user, port, self.host), host=self.host)
netloc = '%s:%d' % (self.host, port)
cache_key = '%s:%s@%s:%d' % (self.user, hashlib.md5(self.password).hexdigest(), self.host, port)
if cache_key in _winrm_cache:
vvvv('WINRM REUSE EXISTING CONNECTION: %s' % cache_key, host=self.host)
return _winrm_cache[cache_key]
transport_schemes = [('plaintext', 'https'), ('plaintext', 'http')] # FIXME: ssl/kerberos
if port == 5985:
transport_schemes = reversed(transport_schemes)
exc = None
for transport, scheme in transport_schemes:
endpoint = urlparse.urlunsplit((scheme, netloc, '/wsman', '', ''))
vvvv('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint),
host=self.host)
protocol = Protocol(endpoint, transport=transport,
username=self.user, password=self.password)
try:
protocol.send_message('')
_winrm_cache[cache_key] = protocol
return protocol
except WinRMTransportError, exc:
err_msg = str(exc)
if re.search(r'Operation\s+?timed\s+?out', err_msg, re.I):
raise errors.AnsibleError("the connection attempt timed out")
m = re.search(r'Code\s+?(\d{3})', err_msg)
if m:
code = int(m.groups()[0])
if code == 401:
raise errors.AnsibleError("the username/password specified for this server was incorrect")
elif code == 411:
_winrm_cache[cache_key] = protocol
return protocol
vvvv('WINRM CONNECTION ERROR: %s' % err_msg, host=self.host)
continue
if exc:
raise errors.AnsibleError(str(exc))
def _winrm_exec(self, command, args=(), from_exec=False):
if from_exec:
vvvv("WINRM EXEC %r %r" % (command, args), host=self.host)
else:
vvvvv("WINRM EXEC %r %r" % (command, args), host=self.host)
if not self.protocol:
self.protocol = self._winrm_connect()
if not self.shell_id:
self.shell_id = self.protocol.open_shell()
command_id = None
try:
command_id = self.protocol.run_command(self.shell_id, command, args)
response = Response(self.protocol.get_command_output(self.shell_id, command_id))
if from_exec:
vvvv('WINRM RESULT %r' % response, host=self.host)
else:
vvvvv('WINRM RESULT %r' % response, host=self.host)
vvvvv('WINRM STDOUT %s' % response.std_out, host=self.host)
vvvvv('WINRM STDERR %s' % response.std_err, host=self.host)
return response
finally:
if command_id:
self.protocol.cleanup_command(self.shell_id, command_id)
def connect(self):
if not self.protocol:
self.protocol = self._winrm_connect()
return self
def exec_command(self, cmd, tmp_path, sudo_user=None, sudoable=False, executable=None, in_data=None, su=None, su_user=None):
cmd = cmd.encode('utf-8')
cmd_parts = shlex.split(cmd, posix=False)
if '-EncodedCommand' in cmd_parts:
encoded_cmd = cmd_parts[cmd_parts.index('-EncodedCommand') + 1]
decoded_cmd = base64.b64decode(encoded_cmd)
vvv("EXEC %s" % decoded_cmd, host=self.host)
else:
vvv("EXEC %s" % cmd, host=self.host)
# For script/raw support.
if cmd_parts and cmd_parts[0].lower().endswith('.ps1'):
script = powershell._build_file_cmd(cmd_parts)
cmd_parts = powershell._encode_script(script, as_list=True)
try:
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True)
        except Exception, e:
            traceback.print_exc()
            raise errors.AnsibleError("failed to exec cmd %s: %s" % (cmd, e))
return (result.status_code, '', result.std_out.encode('utf-8'), result.std_err.encode('utf-8'))
def put_file(self, in_path, out_path):
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
with open(in_path) as in_file:
in_size = os.path.getsize(in_path)
script_template = '''
$s = [System.IO.File]::OpenWrite("%s");
[void]$s.Seek(%d, [System.IO.SeekOrigin]::Begin);
$b = [System.Convert]::FromBase64String("%s");
[void]$s.Write($b, 0, $b.length);
[void]$s.SetLength(%d);
[void]$s.Close();
'''
# Determine max size of data we can pass per command.
script = script_template % (powershell._escape(out_path), in_size, '', in_size)
cmd = powershell._encode_script(script)
# Encode script with no data, subtract its length from 8190 (max
# windows command length), divide by 2.67 (UTF16LE base64 command
# encoding), then by 1.35 again (data base64 encoding).
buffer_size = int(((8190 - len(cmd)) / 2.67) / 1.35)
for offset in xrange(0, in_size, buffer_size):
try:
out_data = in_file.read(buffer_size)
if offset == 0:
if out_data.lower().startswith('#!powershell') and not out_path.lower().endswith('.ps1'):
out_path = out_path + '.ps1'
b64_data = base64.b64encode(out_data)
script = script_template % (powershell._escape(out_path), offset, b64_data, in_size)
vvvv("WINRM PUT %s to %s (offset=%d size=%d)" % (in_path, out_path, offset, len(out_data)), host=self.host)
cmd_parts = powershell._encode_script(script, as_list=True)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
raise IOError(result.std_err.encode('utf-8'))
except Exception:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file to %s" % out_path)
def fetch_file(self, in_path, out_path):
out_path = out_path.replace('\\', '/')
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
buffer_size = 2**19 # 0.5MB chunks
if not os.path.exists(os.path.dirname(out_path)):
os.makedirs(os.path.dirname(out_path))
out_file = None
try:
offset = 0
while True:
try:
script = '''
If (Test-Path -PathType Leaf "%(path)s")
{
$stream = [System.IO.File]::OpenRead("%(path)s");
$stream.Seek(%(offset)d, [System.IO.SeekOrigin]::Begin) | Out-Null;
$buffer = New-Object Byte[] %(buffer_size)d;
$bytesRead = $stream.Read($buffer, 0, %(buffer_size)d);
$bytes = $buffer[0..($bytesRead-1)];
[System.Convert]::ToBase64String($bytes);
$stream.Close() | Out-Null;
}
ElseIf (Test-Path -PathType Container "%(path)s")
{
Write-Host "[DIR]";
}
Else
{
Write-Error "%(path)s does not exist";
Exit 1;
}
''' % dict(buffer_size=buffer_size, path=powershell._escape(in_path), offset=offset)
vvvv("WINRM FETCH %s to %s (offset=%d)" % (in_path, out_path, offset), host=self.host)
cmd_parts = powershell._encode_script(script, as_list=True)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
raise IOError(result.std_err.encode('utf-8'))
if result.std_out.strip() == '[DIR]':
data = None
else:
data = base64.b64decode(result.std_out.strip())
if data is None:
if not os.path.exists(out_path):
os.makedirs(out_path)
break
else:
if not out_file:
# If out_path is a directory and we're expecting a file, bail out now.
if os.path.isdir(out_path):
break
out_file = open(out_path, 'wb')
out_file.write(data)
if len(data) < buffer_size:
break
offset += len(data)
except Exception:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file to %s" % out_path)
finally:
if out_file:
out_file.close()
def close(self):
if self.protocol and self.shell_id:
self.protocol.close_shell(self.shell_id)
self.shell_id = None
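# Hedged usage sketch (host, port and credentials are placeholders; the
# `runner` object Ansible normally supplies is omitted here):
#   conn = Connection(runner, 'winhost', 5986, 'Administrator', 'secret')
#   conn.connect()
#   rc, _, out, err = conn.exec_command('ipconfig', tmp_path=None)
#   conn.close()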
|
wfn/stem
|
refs/heads/master
|
test/unit/response/control_line.py
|
7
|
"""
Unit tests for the stem.response.ControlLine class.
"""
import unittest
import stem.response
# response made by having 'DataDirectory /tmp/my data\"dir/' in the torrc
PROTOCOLINFO_RESPONSE = (
'PROTOCOLINFO 1',
'AUTH METHODS=COOKIE COOKIEFILE="/tmp/my data\\\\\\"dir//control_auth_cookie"',
'VERSION Tor="0.2.1.30"',
'OK',
)
class TestControlLine(unittest.TestCase):
def test_pop_examples(self):
"""
Checks that the pop method's pydoc examples are correct.
"""
line = stem.response.ControlLine("\"We're all mad here.\" says the grinning cat.")
self.assertEquals(line.pop(True), "We're all mad here.")
self.assertEquals(line.pop(), "says")
self.assertEquals(line.remainder(), "the grinning cat.")
line = stem.response.ControlLine("\"this has a \\\" and \\\\ in it\" foo=bar more_data")
self.assertEquals(line.pop(True, True), "this has a \" and \\ in it")
def test_string(self):
"""
Basic checks that we behave as a regular immutable string.
"""
line = stem.response.ControlLine(PROTOCOLINFO_RESPONSE[0])
self.assertEquals(line, 'PROTOCOLINFO 1')
self.assertTrue(line.startswith('PROTOCOLINFO '))
    # checks that popping items doesn't affect us
line.pop()
self.assertEquals(line, 'PROTOCOLINFO 1')
self.assertTrue(line.startswith('PROTOCOLINFO '))
def test_general_usage(self):
"""
Checks a basic use case for the popping entries.
"""
# pops a series of basic, space separated entries
line = stem.response.ControlLine(PROTOCOLINFO_RESPONSE[0])
self.assertEquals(line.remainder(), 'PROTOCOLINFO 1')
self.assertFalse(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertEquals(None, line.peek_key())
self.assertRaises(ValueError, line.pop_mapping)
self.assertEquals(line.pop(), 'PROTOCOLINFO')
self.assertEquals(line.remainder(), '1')
self.assertFalse(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertEquals(None, line.peek_key())
self.assertRaises(ValueError, line.pop_mapping)
self.assertEquals(line.pop(), '1')
self.assertEquals(line.remainder(), '')
self.assertTrue(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertEquals(None, line.peek_key())
self.assertRaises(IndexError, line.pop_mapping)
self.assertRaises(IndexError, line.pop)
self.assertEquals(line.remainder(), '')
self.assertTrue(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertEquals(None, line.peek_key())
def test_pop_mapping(self):
"""
Checks use cases when parsing KEY=VALUE mappings.
"""
# version entry with a space
version_entry = 'Tor="0.2.1.30 (0a083b0188cacd2f07838ff0446113bd5211a024)"'
line = stem.response.ControlLine(version_entry)
self.assertEquals(line.remainder(), version_entry)
self.assertFalse(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertTrue(line.is_next_mapping())
self.assertTrue(line.is_next_mapping(key = "Tor"))
self.assertTrue(line.is_next_mapping(key = "Tor", quoted = True))
self.assertTrue(line.is_next_mapping(quoted = True))
self.assertEquals("Tor", line.peek_key())
# try popping this as a non-quoted mapping
self.assertEquals(line.pop_mapping(), ('Tor', '"0.2.1.30'))
self.assertEquals(line.remainder(), '(0a083b0188cacd2f07838ff0446113bd5211a024)"')
self.assertFalse(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertRaises(ValueError, line.pop_mapping)
self.assertEquals(None, line.peek_key())
# try popping this as a quoted mapping
line = stem.response.ControlLine(version_entry)
self.assertEquals(line.pop_mapping(True), ('Tor', '0.2.1.30 (0a083b0188cacd2f07838ff0446113bd5211a024)'))
self.assertEquals(line.remainder(), '')
self.assertTrue(line.is_empty())
self.assertFalse(line.is_next_quoted())
self.assertFalse(line.is_next_mapping())
self.assertEquals(None, line.peek_key())
def test_escapes(self):
"""
    Checks that we can parse quoted values with escaped quotes in them. This
    explicitly comes up with the COOKIEFILE attribute of PROTOCOLINFO
responses.
"""
auth_line = PROTOCOLINFO_RESPONSE[1]
line = stem.response.ControlLine(auth_line)
self.assertEquals(line, auth_line)
self.assertEquals(line.remainder(), auth_line)
self.assertEquals(line.pop(), "AUTH")
self.assertEquals(line.pop_mapping(), ("METHODS", "COOKIE"))
self.assertEquals(line.remainder(), r'COOKIEFILE="/tmp/my data\\\"dir//control_auth_cookie"')
self.assertTrue(line.is_next_mapping())
self.assertTrue(line.is_next_mapping(key = "COOKIEFILE"))
self.assertTrue(line.is_next_mapping(quoted = True))
self.assertTrue(line.is_next_mapping(quoted = True, escaped = True))
cookie_file_entry = line.remainder()
# try a general pop
self.assertEquals(line.pop(), 'COOKIEFILE="/tmp/my')
self.assertEquals(line.pop(), r'data\\\"dir//control_auth_cookie"')
self.assertTrue(line.is_empty())
# try a general pop with escapes
line = stem.response.ControlLine(cookie_file_entry)
self.assertEquals(line.pop(escaped = True), 'COOKIEFILE="/tmp/my')
self.assertEquals(line.pop(escaped = True), r'data\"dir//control_auth_cookie"')
self.assertTrue(line.is_empty())
# try a mapping pop
line = stem.response.ControlLine(cookie_file_entry)
self.assertEquals(line.pop_mapping(), ('COOKIEFILE', '"/tmp/my'))
self.assertEquals(line.remainder(), r'data\\\"dir//control_auth_cookie"')
self.assertFalse(line.is_empty())
# try a quoted mapping pop (this should trip up on the escaped quote)
line = stem.response.ControlLine(cookie_file_entry)
self.assertEquals(line.pop_mapping(True), ('COOKIEFILE', '/tmp/my data\\\\\\'))
self.assertEquals(line.remainder(), 'dir//control_auth_cookie"')
self.assertFalse(line.is_empty())
# try an escaped quoted mapping pop
line = stem.response.ControlLine(cookie_file_entry)
self.assertEquals(line.pop_mapping(True, True), ('COOKIEFILE', r'/tmp/my data\"dir//control_auth_cookie'))
self.assertTrue(line.is_empty())
# try an escaped slash followed by a character that could be part of an
# escape sequence
line = stem.response.ControlLine(r'COOKIEFILE="C:\\Users\\Atagar\\AppData\\tor\\control_auth_cookie"')
self.assertEquals(line.pop_mapping(True, True), ('COOKIEFILE', r'C:\Users\Atagar\AppData\tor\control_auth_cookie'))
self.assertTrue(line.is_empty())
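# Quick reference distilled from the assertions above (illustrative only):
#   pop_mapping()           -> naive split: quotes and escapes kept verbatim
#   pop_mapping(True)       -> quoted pop: trips over the escaped quote
#   pop_mapping(True, True) -> quoted + escaped: yields the real cookie path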
|
theo-l/django
|
refs/heads/master
|
tests/migrations/test_migrations_conflict/0002_conflicting_second.py
|
145
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("migrations", "0001_initial")]
operations = [
migrations.CreateModel(
"Something",
[
("id", models.AutoField(primary_key=True)),
],
)
]
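# This file deliberately shares its 0002 number with a sibling migration on
# the same 0001_initial dependency, producing the "conflicting migrations"
# state the migration tests exercise (normally resolved via --merge).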
|
henaras/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/project/network_topology/ports/tables.py
|
73
|
# Copyright 2013 NTT Innovation Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project.routers.ports import tables
class RemoveInterface(tables.RemoveInterface):
failure_url = 'horizon:project:network_topology:router'
class PortsTable(tables.PortsTable):
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
row_actions = (RemoveInterface, )
|
JioCloud/swift
|
refs/heads/master
|
swift/proxy/controllers/__init__.py
|
5
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift.proxy.controllers.base import Controller
from swift.proxy.controllers.obj import ObjectController
from swift.proxy.controllers.account import AccountController
from swift.proxy.controllers.container import ContainerController
__all__ = [
'AccountController',
'ContainerController',
'Controller',
'ObjectController',
]
|
derDavidT/sympy
|
refs/heads/master
|
sympy/concrete/summations.py
|
8
|
from __future__ import print_function, division
from sympy.concrete.expr_with_limits import AddWithLimits
from sympy.concrete.expr_with_intlimits import ExprWithIntLimits
from sympy.core.function import Derivative
from sympy.core.relational import Eq
from sympy.core.singleton import S
from sympy.core.symbol import Dummy, Wild
from sympy.core.add import Add
from sympy.concrete.gosper import gosper_sum
from sympy.functions.elementary.piecewise import Piecewise
from sympy.polys import apart, PolynomialError
from sympy.solvers import solve
from sympy.core.compatibility import range
from sympy.tensor.indexed import Idx
class Sum(AddWithLimits, ExprWithIntLimits):
r"""Represents unevaluated summation.
``Sum`` represents a finite or infinite series, with the first argument
being the general form of terms in the series, and the second argument
being ``(dummy_variable, start, end)``, with ``dummy_variable`` taking
all integer values from ``start`` through ``end``. In accordance with
long-standing mathematical convention, the end term is included in the
summation.
Finite sums
===========
For finite sums (and sums with symbolic limits assumed to be finite) we
follow the summation convention described by Karr [1], especially
definition 3 of section 1.4. The sum:
.. math::
\sum_{m \leq i < n} f(i)
has *the obvious meaning* for `m < n`, namely:
.. math::
\sum_{m \leq i < n} f(i) = f(m) + f(m+1) + \ldots + f(n-2) + f(n-1)
with the upper limit value `f(n)` excluded. The sum over an empty set is
zero if and only if `m = n`:
.. math::
\sum_{m \leq i < n} f(i) = 0 \quad \mathrm{for} \quad m = n
Finally, for all other sums over empty sets we assume the following
definition:
.. math::
\sum_{m \leq i < n} f(i) = - \sum_{n \leq i < m} f(i) \quad \mathrm{for} \quad m > n
It is important to note that Karr defines all sums with the upper
limit being exclusive. This is in contrast to the usual mathematical notation,
but does not affect the summation convention. Indeed we have:
.. math::
\sum_{m \leq i < n} f(i) = \sum_{i = m}^{n - 1} f(i)
where the difference in notation is intentional to emphasize the meaning,
with limits typeset on the top being inclusive.
Examples
========
>>> from sympy.abc import i, k, m, n, x
>>> from sympy import Sum, factorial, oo, IndexedBase, Function
>>> Sum(k, (k, 1, m))
Sum(k, (k, 1, m))
>>> Sum(k, (k, 1, m)).doit()
m**2/2 + m/2
>>> Sum(k**2, (k, 1, m))
Sum(k**2, (k, 1, m))
>>> Sum(k**2, (k, 1, m)).doit()
m**3/3 + m**2/2 + m/6
>>> Sum(x**k, (k, 0, oo))
Sum(x**k, (k, 0, oo))
>>> Sum(x**k, (k, 0, oo)).doit()
Piecewise((1/(-x + 1), Abs(x) < 1), (Sum(x**k, (k, 0, oo)), True))
>>> Sum(x**k/factorial(k), (k, 0, oo)).doit()
exp(x)
    Here are examples of summation with symbolic indices. You
    can use either Function or IndexedBase classes:
>>> f = Function('f')
>>> Sum(f(n), (n, 0, 3)).doit()
f(0) + f(1) + f(2) + f(3)
>>> Sum(f(n), (n, 0, oo)).doit()
Sum(f(n), (n, 0, oo))
>>> f = IndexedBase('f')
>>> Sum(f[n]**2, (n, 0, 3)).doit()
f[0]**2 + f[1]**2 + f[2]**2 + f[3]**2
    The following example shows that the symbolic result of a summation
    remains valid even for seemingly nonsensical values of the limits; the
    Karr convention gives such sums a perfectly valid interpretation by
    interchanging the limits according to the above rules:
>>> S = Sum(i, (i, 1, n)).doit()
>>> S
n**2/2 + n/2
>>> S.subs(n, -4)
6
>>> Sum(i, (i, 1, -4)).doit()
6
>>> Sum(-i, (i, -3, 0)).doit()
6
An explicit example of the Karr summation convention:
>>> S1 = Sum(i**2, (i, m, m+n-1)).doit()
>>> S1
m**2*n + m*n**2 - m*n + n**3/3 - n**2/2 + n/6
>>> S2 = Sum(i**2, (i, m+n, m-1)).doit()
>>> S2
-m**2*n - m*n**2 + m*n - n**3/3 + n**2/2 - n/6
>>> S1 + S2
0
>>> S3 = Sum(i, (i, m, m-1)).doit()
>>> S3
0
See Also
========
summation
Product, product
References
==========
.. [1] Michael Karr, "Summation in Finite Terms", Journal of the ACM,
Volume 28 Issue 2, April 1981, Pages 305-350
http://dl.acm.org/citation.cfm?doid=322248.322255
.. [2] http://en.wikipedia.org/wiki/Summation#Capital-sigma_notation
.. [3] http://en.wikipedia.org/wiki/Empty_sum
"""
__slots__ = ['is_commutative']
def __new__(cls, function, *symbols, **assumptions):
obj = AddWithLimits.__new__(cls, function, *symbols, **assumptions)
if not hasattr(obj, 'limits'):
return obj
if any(len(l) != 3 or None in l for l in obj.limits):
raise ValueError('Sum requires values for lower and upper bounds.')
return obj
def _eval_is_zero(self):
# a Sum is only zero if its function is zero or if all terms
# cancel out. This only answers whether the summand is zero; if
# not then None is returned since we don't analyze whether all
# terms cancel out.
if self.function.is_zero:
return True
def doit(self, **hints):
if hints.get('deep', True):
f = self.function.doit(**hints)
else:
f = self.function
if self.function.is_Matrix:
return self.expand().doit()
for n, limit in enumerate(self.limits):
i, a, b = limit
dif = b - a
if dif.is_integer and (dif < 0) == True:
a, b = b + 1, a - 1
f = -f
if isinstance(i, Idx):
i = i.label
newf = eval_sum(f, (i, a, b))
if newf is None:
if f == self.function:
return self
else:
return self.func(f, *self.limits[n:])
f = newf
if hints.get('deep', True):
# eval_sum could return partially unevaluated
# result with Piecewise. In this case we won't
# doit() recursively.
if not isinstance(f, Piecewise):
return f.doit(**hints)
return f
def _eval_derivative(self, x):
"""
Differentiate wrt x as long as x is not in the free symbols of any of
the upper or lower limits.
Sum(a*b*x, (x, 1, a)) can be differentiated wrt x or b but not `a`
since the value of the sum is discontinuous in `a`. In a case
involving a limit variable, the unevaluated derivative is returned.
"""
# diff already confirmed that x is in the free symbols of self, but we
# don't want to differentiate wrt any free symbol in the upper or lower
# limits
# XXX remove this test for free_symbols when the default _eval_derivative is in
if x not in self.free_symbols:
return S.Zero
# get limits and the function
f, limits = self.function, list(self.limits)
limit = limits.pop(-1)
if limits: # f is the argument to a Sum
f = self.func(f, *limits)
if len(limit) == 3:
_, a, b = limit
if x in a.free_symbols or x in b.free_symbols:
return None
df = Derivative(f, x, evaluate=True)
rv = self.func(df, limit)
if limit[0] not in df.free_symbols:
rv = rv.doit()
return rv
        else:
            # raise (rather than return) the exception; returning it would
            # leak an exception instance as the "derivative" value
            raise NotImplementedError('Lower and upper bound expected.')
def _eval_difference_delta(self, n, step):
k, _, upper = self.args[-1]
new_upper = upper.subs(n, n + step)
if len(self.args) == 2:
f = self.args[0]
else:
f = self.func(*self.args[:-1])
return Sum(f, (k, upper + 1, new_upper)).doit()
def _eval_simplify(self, ratio, measure):
from sympy.simplify.simplify import sum_simplify
return sum_simplify(self)
def _eval_summation(self, f, x):
return None
def euler_maclaurin(self, m=0, n=0, eps=0, eval_integral=True):
"""
Return an Euler-Maclaurin approximation of self, where m is the
number of leading terms to sum directly and n is the number of
terms in the tail.
With m = n = 0, this is simply the corresponding integral
plus a first-order endpoint correction.
Returns (s, e) where s is the Euler-Maclaurin approximation
and e is the estimated error (taken to be the magnitude of
the first omitted term in the tail):
>>> from sympy.abc import k, a, b
>>> from sympy import Sum
>>> Sum(1/k, (k, 2, 5)).doit().evalf()
1.28333333333333
>>> s, e = Sum(1/k, (k, 2, 5)).euler_maclaurin()
>>> s
-log(2) + 7/20 + log(5)
>>> from sympy import sstr
>>> print(sstr((s.evalf(), e.evalf()), full_prec=True))
(1.26629073187415, 0.0175000000000000)
The endpoints may be symbolic:
>>> s, e = Sum(1/k, (k, a, b)).euler_maclaurin()
>>> s
-log(a) + log(b) + 1/(2*b) + 1/(2*a)
>>> e
Abs(1/(12*b**2) - 1/(12*a**2))
If the function is a polynomial of degree at most 2n+1, the
Euler-Maclaurin formula becomes exact (and e = 0 is returned):
>>> Sum(k, (k, 2, b)).euler_maclaurin()
(b**2/2 + b/2 - 1, 0)
>>> Sum(k, (k, 2, b)).doit()
b**2/2 + b/2 - 1
With a nonzero eps specified, the summation is ended
as soon as the remainder term is less than the epsilon.
"""
from sympy.functions import bernoulli, factorial
from sympy.integrals import Integral
m = int(m)
n = int(n)
f = self.function
if len(self.limits) != 1:
raise ValueError("More than 1 limit")
i, a, b = self.limits[0]
if (a > b) == True:
if a - b == 1:
                return S.Zero, S.Zero
a, b = b + 1, a - 1
f = -f
s = S.Zero
if m:
if b.is_Integer and a.is_Integer:
m = min(m, b - a + 1)
if not eps or f.is_polynomial(i):
for k in range(m):
s += f.subs(i, a + k)
else:
term = f.subs(i, a)
if term:
test = abs(term.evalf(3)) < eps
if test == True:
return s, abs(term)
elif not (test == False):
# a symbolic Relational class, can't go further
return term, S.Zero
s += term
for k in range(1, m):
term = f.subs(i, a + k)
if abs(term.evalf(3)) < eps and term != 0:
return s, abs(term)
s += term
if b - a + 1 == m:
return s, S.Zero
a += m
x = Dummy('x')
I = Integral(f.subs(i, x), (x, a, b))
if eval_integral:
I = I.doit()
s += I
def fpoint(expr):
if b is S.Infinity:
return expr.subs(i, a), 0
return expr.subs(i, a), expr.subs(i, b)
fa, fb = fpoint(f)
iterm = (fa + fb)/2
g = f.diff(i)
for k in range(1, n + 2):
ga, gb = fpoint(g)
term = bernoulli(2*k)/factorial(2*k)*(gb - ga)
if (eps and term and abs(term.evalf(3)) < eps) or (k > n):
break
s += term
g = g.diff(i, 2, simplify=False)
return s + iterm, abs(term)
def reverse_order(self, *indices):
"""
Reverse the order of a limit in a Sum.
Usage
=====
``reverse_order(self, *indices)`` reverses some limits in the expression
``self`` which can be either a ``Sum`` or a ``Product``. The selectors in
the argument ``indices`` specify some indices whose limits get reversed.
These selectors are either variable names or numerical indices counted
starting from the inner-most limit tuple.
Examples
========
>>> from sympy import Sum
>>> from sympy.abc import x, y, a, b, c, d
>>> Sum(x, (x, 0, 3)).reverse_order(x)
Sum(-x, (x, 4, -1))
>>> Sum(x*y, (x, 1, 5), (y, 0, 6)).reverse_order(x, y)
Sum(x*y, (x, 6, 0), (y, 7, -1))
>>> Sum(x, (x, a, b)).reverse_order(x)
Sum(-x, (x, b + 1, a - 1))
>>> Sum(x, (x, a, b)).reverse_order(0)
Sum(-x, (x, b + 1, a - 1))
While one should prefer variable names when specifying which limits
to reverse, the index counting notation comes in handy in case there
are several symbols with the same name.
>>> S = Sum(x**2, (x, a, b), (x, c, d))
>>> S
Sum(x**2, (x, a, b), (x, c, d))
>>> S0 = S.reverse_order(0)
>>> S0
Sum(-x**2, (x, b + 1, a - 1), (x, c, d))
>>> S1 = S0.reverse_order(1)
>>> S1
Sum(x**2, (x, b + 1, a - 1), (x, d + 1, c - 1))
Of course we can mix both notations:
>>> Sum(x*y, (x, a, b), (y, 2, 5)).reverse_order(x, 1)
Sum(x*y, (x, b + 1, a - 1), (y, 6, 1))
>>> Sum(x*y, (x, a, b), (y, 2, 5)).reverse_order(y, x)
Sum(x*y, (x, b + 1, a - 1), (y, 6, 1))
See Also
========
index, reorder_limit, reorder
References
==========
.. [1] Michael Karr, "Summation in Finite Terms", Journal of the ACM,
Volume 28 Issue 2, April 1981, Pages 305-350
http://dl.acm.org/citation.cfm?doid=322248.322255
"""
l_indices = list(indices)
for i, indx in enumerate(l_indices):
if not isinstance(indx, int):
l_indices[i] = self.index(indx)
e = 1
limits = []
for i, limit in enumerate(self.limits):
l = limit
if i in l_indices:
e = -e
l = (limit[0], limit[2] + 1, limit[1] - 1)
limits.append(l)
return Sum(e * self.function, *limits)
def summation(f, *symbols, **kwargs):
r"""
Compute the summation of f with respect to symbols.
The notation for symbols is similar to the notation used in Integral.
summation(f, (i, a, b)) computes the sum of f with respect to i from a to b,
i.e.,
::
                                 b
                               ____
                               \   `
    summation(f, (i, a, b)) =   )    f
                               /___,
                               i = a
If it cannot compute the sum, it returns an unevaluated Sum object.
Repeated sums can be computed by introducing additional symbols tuples::
>>> from sympy import summation, oo, symbols, log
>>> i, n, m = symbols('i n m', integer=True)
>>> summation(2*i - 1, (i, 1, n))
n**2
>>> summation(1/2**i, (i, 0, oo))
2
>>> summation(1/log(n)**n, (n, 2, oo))
Sum(log(n)**(-n), (n, 2, oo))
>>> summation(i, (i, 0, n), (n, 0, m))
m**3/6 + m**2/2 + m/3
>>> from sympy.abc import x
>>> from sympy import factorial
>>> summation(x**n/factorial(n), (n, 0, oo))
exp(x)
See Also
========
Sum
Product, product
"""
return Sum(f, *symbols, **kwargs).doit(deep=False)
def telescopic_direct(L, R, n, limits):
"""Returns the direct summation of the terms of a telescopic sum
L is the term with lower index
R is the term with higher index
n difference between the indexes of L and R
For example:
>>> from sympy.concrete.summations import telescopic_direct
>>> from sympy.abc import k, a, b
>>> telescopic_direct(1/k, -1/(k+2), 2, (k, a, b))
-1/(b + 2) - 1/(b + 1) + 1/(a + 1) + 1/a
"""
(i, a, b) = limits
s = 0
for m in range(n):
s += L.subs(i, a + m) + R.subs(i, b - m)
return s
def telescopic(L, R, limits):
'''Tries to perform the summation using the telescopic property
return None if not possible
'''
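    # Worked illustration (hand-checked, not a doctest): with L = 1/i and
    # R = -1/(i + 1), solving L(i + m) + R == 0 gives the shift m = 1, so
    # telescopic(1/i, -1/(i + 1), (i, a, b)) collapses to 1/a - 1/(b + 1).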
(i, a, b) = limits
if L.is_Add or R.is_Add:
return None
# We want to solve(L.subs(i, i + m) + R, m)
# First we try a simple match since this does things that
# solve doesn't do, e.g. solve(f(k+m)-f(k), m) fails
k = Wild("k")
sol = (-R).match(L.subs(i, i + k))
s = None
if sol and k in sol:
s = sol[k]
        if not (s.is_Integer and L.subs(i, i + s) == -R):
            # match sometimes fails, e.g. f(x + 2).match(-f(x + k)) -> {k: -2 - 2*x}
s = None
# But there are things that match doesn't do that solve
# can do, e.g. determine that 1/(x + m) = 1/(1 - x) when m = 1
if s is None:
m = Dummy('m')
try:
sol = solve(L.subs(i, i + m) + R, m) or []
except NotImplementedError:
return None
sol = [si for si in sol if si.is_Integer and
(L.subs(i, i + si) + R).expand().is_zero]
if len(sol) != 1:
return None
s = sol[0]
if s < 0:
return telescopic_direct(R, L, abs(s), (i, a, b))
elif s > 0:
return telescopic_direct(L, R, s, (i, a, b))
def eval_sum(f, limits):
from sympy.concrete.delta import deltasummation, _has_simple_delta
from sympy.functions import KroneckerDelta
(i, a, b) = limits
if f is S.Zero:
return S.Zero
if i not in f.free_symbols:
return f*(b - a + 1)
if a == b:
return f.subs(i, a)
if isinstance(f, Piecewise):
if not any(i in arg.args[1].free_symbols for arg in f.args):
# Piecewise conditions do not depend on the dummy summation variable,
# therefore we can fold: Sum(Piecewise((e, c), ...), limits)
# --> Piecewise((Sum(e, limits), c), ...)
newargs = []
for arg in f.args:
newexpr = eval_sum(arg.expr, limits)
if newexpr is None:
return None
newargs.append((newexpr, arg.cond))
return f.func(*newargs)
if f.has(KroneckerDelta) and _has_simple_delta(f, limits[0]):
return deltasummation(f, limits)
dif = b - a
definite = dif.is_Integer
# Doing it directly may be faster if there are very few terms.
if definite and (dif < 100):
return eval_sum_direct(f, (i, a, b))
if isinstance(f, Piecewise):
return None
# Try to do it symbolically. Even when the number of terms is known,
# this can save time when b-a is big.
# We should try to transform to partial fractions
value = eval_sum_symbolic(f.expand(), (i, a, b))
if value is not None:
return value
# Do it directly
if definite:
return eval_sum_direct(f, (i, a, b))
def eval_sum_direct(expr, limits):
from sympy.core import Add
(i, a, b) = limits
dif = b - a
return Add(*[expr.subs(i, a + j) for j in range(dif + 1)])
def eval_sum_symbolic(f, limits):
from sympy.functions import harmonic, bernoulli
f_orig = f
(i, a, b) = limits
if not f.has(i):
return f*(b - a + 1)
# Linearity
if f.is_Mul:
L, R = f.as_two_terms()
if not L.has(i):
sR = eval_sum_symbolic(R, (i, a, b))
if sR:
return L*sR
if not R.has(i):
sL = eval_sum_symbolic(L, (i, a, b))
if sL:
return R*sL
try:
f = apart(f, i) # see if it becomes an Add
except PolynomialError:
pass
if f.is_Add:
L, R = f.as_two_terms()
lrsum = telescopic(L, R, (i, a, b))
if lrsum:
return lrsum
lsum = eval_sum_symbolic(L, (i, a, b))
rsum = eval_sum_symbolic(R, (i, a, b))
if None not in (lsum, rsum):
r = lsum + rsum
            if r is not S.NaN:
return r
# Polynomial terms with Faulhaber's formula
n = Wild('n')
result = f.match(i**n)
if result is not None:
n = result[n]
if n.is_Integer:
if n >= 0:
                if (b is S.Infinity and a is not S.NegativeInfinity) or \
                        (a is S.NegativeInfinity and b is not S.Infinity):
return S.Infinity
return ((bernoulli(n + 1, b + 1) - bernoulli(n + 1, a))/(n + 1)).expand()
elif a.is_Integer and a >= 1:
if n == -1:
return harmonic(b) - harmonic(a - 1)
else:
return harmonic(b, abs(n)) - harmonic(a - 1, abs(n))
if not (a.has(S.Infinity, S.NegativeInfinity) or
b.has(S.Infinity, S.NegativeInfinity)):
# Geometric terms
c1 = Wild('c1', exclude=[i])
c2 = Wild('c2', exclude=[i])
c3 = Wild('c3', exclude=[i])
e = f.match(c1**(c2*i + c3))
if e is not None:
p = (c1**c3).subs(e)
q = (c1**c2).subs(e)
r = p*(q**a - q**(b + 1))/(1 - q)
l = p*(b - a + 1)
return Piecewise((l, Eq(q, S.One)), (r, True))
r = gosper_sum(f, (i, a, b))
    if r not in (None, S.NaN):
return r
return eval_sum_hyper(f_orig, (i, a, b))
def _eval_sum_hyper(f, i, a):
""" Returns (res, cond). Sums from a to oo. """
from sympy.functions import hyper
from sympy.simplify import hyperexpand, hypersimp, fraction, simplify
from sympy.polys.polytools import Poly, factor
if a != 0:
return _eval_sum_hyper(f.subs(i, i + a), i, 0)
if f.subs(i, 0) == 0:
if simplify(f.subs(i, Dummy('i', integer=True, positive=True))) == 0:
return S(0), True
return _eval_sum_hyper(f.subs(i, i + 1), i, 0)
hs = hypersimp(f, i)
if hs is None:
return None
numer, denom = fraction(factor(hs))
top, topl = numer.as_coeff_mul(i)
bot, botl = denom.as_coeff_mul(i)
ab = [top, bot]
factors = [topl, botl]
params = [[], []]
for k in range(2):
for fac in factors[k]:
mul = 1
if fac.is_Pow:
mul = fac.exp
fac = fac.base
if not mul.is_Integer:
return None
p = Poly(fac, i)
if p.degree() != 1:
return None
m, n = p.all_coeffs()
ab[k] *= m**mul
params[k] += [n/m]*mul
# Add "1" to numerator parameters, to account for implicit n! in
# hypergeometric series.
ap = params[0] + [1]
bq = params[1]
x = ab[0]/ab[1]
h = hyper(ap, bq, x)
return f.subs(i, 0)*hyperexpand(h), h.convergence_statement
def eval_sum_hyper(f, i_a_b):
from sympy.logic.boolalg import And
i, a, b = i_a_b
if (b - a).is_Integer:
# We are never going to do better than doing the sum in the obvious way
return None
old_sum = Sum(f, (i, a, b))
if b != S.Infinity:
if a == S.NegativeInfinity:
res = _eval_sum_hyper(f.subs(i, -i), i, -b)
if res is not None:
return Piecewise(res, (old_sum, True))
else:
res1 = _eval_sum_hyper(f, i, a)
res2 = _eval_sum_hyper(f, i, b + 1)
if res1 is None or res2 is None:
return None
(res1, cond1), (res2, cond2) = res1, res2
cond = And(cond1, cond2)
if cond == False:
return None
return Piecewise((res1 - res2, cond), (old_sum, True))
if a == S.NegativeInfinity:
res1 = _eval_sum_hyper(f.subs(i, -i), i, 1)
res2 = _eval_sum_hyper(f, i, 0)
if res1 is None or res2 is None:
return None
res1, cond1 = res1
res2, cond2 = res2
cond = And(cond1, cond2)
if cond == False:
return None
return Piecewise((res1 + res2, cond), (old_sum, True))
# Now b == oo, a != -oo
res = _eval_sum_hyper(f, i, a)
if res is not None:
r, c = res
if c == False:
if r.is_number:
f = f.subs(i, Dummy('i', integer=True, positive=True) + a)
if f.is_positive or f.is_zero:
return S.Infinity
elif f.is_negative:
return S.NegativeInfinity
return None
return Piecewise(res, (old_sum, True))
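# Hedged end-to-end illustration of the hypergeometric fallback (mirrors the
# Sum docstring example): Sum(x**k/factorial(k), (k, 0, oo)).doit() falls
# through the symbolic branches to eval_sum_hyper, which simplifies the term
# ratio with hypersimp, builds a hyper(...) object, and hyperexpands it to
# exp(x), folding the convergence statement into a Piecewise.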
|
jakirkham/ilastik
|
refs/heads/master
|
ilastik/applets/thresholdMasking/__init__.py
|
4
|
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from thresholdMaskingApplet import ThresholdMaskingApplet
|
telefonicaid/selenium
|
refs/heads/master
|
py/test/selenium/webdriver/common/opacity_tests.py
|
15
|
#!/usr/bin/python
# Copyright 2008-2012 WebDriver committers
# Copyright 2008-2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class OpacityTests(unittest.TestCase):
@pytest.mark.ignore_ie
@pytest.mark.ignore_opera
def testShouldBeAbleToClickOnElementsWithOpacityZero(self):
self._loadPage("click_jacker")
element = self.driver.find_element(By.ID, "clickJacker")
self.assertEquals('0', element.value_of_css_property("opacity"),
"Precondition failed: clickJacker should be transparent.\
Value was %s" % element.value_of_css_property("opacity"))
element.click()
self.assertEquals('1', element.value_of_css_property("opacity"))
@pytest.mark.ignore_ie
def testShouldBeAbleToSelectOptionsFromAnInvisibleSelect(self):
self._loadPage("formPage")
select = self.driver.find_element(By.ID, "invisi_select")
options = select.find_elements(By.TAG_NAME, "option")
apples = options[0]
oranges = options[1]
self.assertTrue(apples.is_selected())
self.assertFalse(oranges.is_selected())
oranges.click()
self.assertFalse(apples.is_selected())
self.assertTrue(oranges.is_selected())
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
|
ar45/django
|
refs/heads/master
|
tests/generic_views/test_detail.py
|
284
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin
from .models import Artist, Author, Book, Page
@override_settings(ROOT_URLCONF='generic_views.urls')
class DetailViewTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name='Rene Magritte')
cls.author1 = Author.objects.create(name='Roberto Bolaño', slug='roberto-bolano')
cls.author2 = Author.objects.create(name='Scott Rosenberg', slug='scott-rosenberg')
cls.book1 = Book.objects.create(name='2066', slug='2066', pages=800, pubdate=datetime.date(2008, 10, 1))
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name='Dreaming in Code', slug='dreaming-in-code', pages=300, pubdate=datetime.date(2006, 5, 1)
)
cls.page1 = Page.objects.create(
content='I was once bitten by a moose.', template='generic_views/page_template.html'
)
def test_simple_object(self):
res = self.client.get('/detail/obj/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], {'foo': 'bar'})
self.assertIsInstance(res.context['view'], View)
self.assertTemplateUsed(res, 'generic_views/detail.html')
def test_detail_by_pk(self):
res = self.client.get('/detail/author/%s/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_missing_object(self):
res = self.client.get('/detail/author/500/')
self.assertEqual(res.status_code, 404)
def test_detail_object_does_not_exist(self):
self.assertRaises(ObjectDoesNotExist, self.client.get, '/detail/doesnotexist/1/')
def test_detail_by_custom_pk(self):
res = self.client.get('/detail/author/bycustompk/%s/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_slug(self):
res = self.client.get('/detail/author/byslug/scott-rosenberg/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Author.objects.get(slug='scott-rosenberg'))
self.assertEqual(res.context['author'], Author.objects.get(slug='scott-rosenberg'))
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_custom_slug(self):
res = self.client.get('/detail/author/bycustomslug/scott-rosenberg/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Author.objects.get(slug='scott-rosenberg'))
self.assertEqual(res.context['author'], Author.objects.get(slug='scott-rosenberg'))
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_ignore_slug(self):
res = self.client.get('/detail/author/bypkignoreslug/%s-roberto-bolano/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_ignore_slug_mismatch(self):
res = self.client.get('/detail/author/bypkignoreslug/%s-scott-rosenberg/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_and_slug(self):
res = self.client.get('/detail/author/bypkandslug/%s-roberto-bolano/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_detail_by_pk_and_slug_mismatch_404(self):
res = self.client.get('/detail/author/bypkandslug/%s-scott-rosenberg/' % self.author1.pk)
self.assertEqual(res.status_code, 404)
def test_verbose_name(self):
res = self.client.get('/detail/artist/%s/' % self.artist1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.artist1)
self.assertEqual(res.context['artist'], self.artist1)
self.assertTemplateUsed(res, 'generic_views/artist_detail.html')
def test_template_name(self):
res = self.client.get('/detail/author/%s/template_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/about.html')
def test_template_name_suffix(self):
res = self.client.get('/detail/author/%s/template_name_suffix/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['author'], self.author1)
self.assertTemplateUsed(res, 'generic_views/author_view.html')
def test_template_name_field(self):
res = self.client.get('/detail/page/%s/field/' % self.page1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.page1)
self.assertEqual(res.context['page'], self.page1)
self.assertTemplateUsed(res, 'generic_views/page_template.html')
def test_context_object_name(self):
res = self.client.get('/detail/author/%s/context_object_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertEqual(res.context['thingy'], self.author1)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_duplicated_context_object_name(self):
res = self.client.get('/detail/author/%s/dupe_context_object_name/' % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author1)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/author_detail.html')
def test_deferred_queryset_template_name(self):
class FormContext(SingleObjectTemplateResponseMixin):
request = RequestFactory().get('/')
model = Author
object = Author.objects.defer('name').get(pk=self.author1.pk)
self.assertEqual(FormContext().get_template_names()[0], 'generic_views/author_detail.html')
def test_deferred_queryset_context_object_name(self):
class FormContext(ModelFormMixin):
request = RequestFactory().get('/')
model = Author
object = Author.objects.defer('name').get(pk=self.author1.pk)
fields = ('name',)
form_context_data = FormContext().get_context_data()
self.assertEqual(form_context_data['object'], self.author1)
self.assertEqual(form_context_data['author'], self.author1)
def test_invalid_url(self):
self.assertRaises(AttributeError, self.client.get, '/detail/author/invalid/url/')
def test_invalid_queryset(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/detail/author/invalid/qs/')
def test_non_model_object_with_meta(self):
res = self.client.get('/detail/nonmodel/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'].id, "non_model_1")
|
ocefpaf/paegan
|
refs/heads/master
|
tests/test_asainterpolate.py
|
3
|
import unittest
import numpy as np
from paegan.utils.asainterpolate import GenInterpolator, CfGeoInterpolator, create_grid
class CfInterpolator(unittest.TestCase):
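    # All of the tests in this class check a round-trip identity: sampling a
    # nearest-neighbour interpolant back onto its own source grid must
    # reproduce the input data exactly.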
    def test_interpolator_2d(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(nx, ny)
        i = CfGeoInterpolator(data, lon, lat, method='nearest')
        data2 = i.interpgrid(lon, lat)
        assert np.all(data == data2)
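    def test_interpolator_2d_subgrid(self):
        # Hedged sketch (not in the original suite): assuming interpgrid
        # accepts query axes of a different size than the source grid,
        # nearest-neighbour sampling onto every other grid point should pick
        # out the corresponding subset of the data.
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(nx, ny)
        i = CfGeoInterpolator(data, lon, lat, method='nearest')
        data2 = i.interpgrid(lon[::2], lat[::2])
        assert np.all(data[::2, ::2] == data2)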
    def test_interpolator_2dmesh(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        lon, lat = np.meshgrid(lon, lat, indexing='ij')
        data = np.random.rand(nx, ny)
        i = CfGeoInterpolator(data, lon, lat, method='nearest')
        data2 = i.interpgrid(lon, lat)
        assert np.all(data == data2)
    def test_interpolator_3d_1dz_2dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        z = np.arange(10)
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        lon, lat = np.meshgrid(lon, lat, indexing='ij')
        data = np.random.rand(10, nx, ny)
        i = CfGeoInterpolator(data, lon, lat, z=z, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z)
        assert np.all(data == data2)
    def test_interpolator_3d_3dz_2dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        z, dummy, dummy2 = np.meshgrid(np.arange(10), lon, lat, indexing='ij')
        # indexing='ij' keeps the horizontal grid oriented consistently with
        # the 3-D z grid built above (the sibling tests all use 'ij')
        lon, lat = np.meshgrid(lon, lat, indexing='ij')
        data = np.random.rand(10, nx, ny)
        i = CfGeoInterpolator(data, lon, lat, z=z, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z)
        assert np.all(data == data2)
    def test_interpolator_3d_1dz_1dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(10, nx, ny)
        z = np.arange(10)
        i = CfGeoInterpolator(data, lon, lat, z=z, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z)
        assert np.all(data == data2)
    def test_interpolator_3d_3dz_1dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(10, nx, ny)
        z, dummy, dummy2 = np.meshgrid(np.arange(10), lon, lat, indexing='ij')
        i = CfGeoInterpolator(data, lon, lat, z=z, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z)
        assert np.all(data == data2)
    def test_interpolator_3d_1dt_3dz_1dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(9, 10, nx, ny)
        z, dummy, dummy2 = np.meshgrid(np.arange(10), lon, lat, indexing='ij')
        t = np.arange(9)
        i = CfGeoInterpolator(data, lon, lat, z=z, t=t, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z, t=t)
        assert np.all(data == data2)
    def test_interpolator_3d_1dt_1dz_1dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(9, 10, nx, ny)
        z = np.arange(10)
        t = np.arange(9)
        i = CfGeoInterpolator(data, lon, lat, z=z, t=t, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z, t=t)
        assert np.all(data == data2)
    def test_interpolator_3d_1dt_4dz_1dll(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(9, 10, nx, ny)
        t = np.arange(9)
        dummy3, z, dummy, dummy2 = np.meshgrid(t, np.arange(10), lon, lat, indexing='ij')
        i = CfGeoInterpolator(data, lon, lat, z=z, t=t, method='nearest')
        data2 = i.interpgrid(lon, lat, z=z, t=t)
        assert np.all(data == data2)
class GeneralInterpolator(unittest.TestCase):
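    # Note the argument order: unlike CfGeoInterpolator(data, lon, lat) above,
    # GenInterpolator is exercised with axes passed in (lat, lon) order.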
    def test_interpolator_2d(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        data = np.random.rand(nx, ny)
        i = GenInterpolator(data, lat, lon, method='nearest')
        data2 = i.interpgrid(lat, lon)
        assert np.all(data == data2)
class AsaCreateGrid(unittest.TestCase):
    def test_create_grid(self):
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        assert lon[0] == lonbounds[0]
        assert lon[-1] == lonbounds[1]
        assert lat[0] == latbounds[0]
        assert lat[-1] == latbounds[1]
        assert lon.shape[0] == nx
        assert lat.shape[0] == ny
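    def test_create_grid_uniform_spacing(self):
        # Hedged extra check (not in the original suite): assuming create_grid
        # produces linearly spaced axes, neighbouring points should be evenly
        # spaced along both axes.
        lonbounds = [-70, -60]
        latbounds = [40, 50]
        nx, ny = 50, 50
        lon, lat = create_grid(lonbounds[0], lonbounds[1], latbounds[0], latbounds[1], nx=nx, ny=ny)
        assert np.allclose(np.diff(lon), np.diff(lon)[0])
        assert np.allclose(np.diff(lat), np.diff(lat)[0])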
|
kayarre/Tools
|
refs/heads/master
|
vtk/transform_vtu_dsa2vwi.py
|
1
|
import vtk
import h5py
"""
read the transform from slicer and apply it to a surface mesh
"""
file_path1 = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/calc_test_node_stats.vtu"
file_path2 = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/calc_test_node.vtu"
file_path3 = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/interior_outfile_node.vtu"
out_path = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/calc_test_node_stats_dsa2vwi.vtu"
out_path2 = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/calc_test_node_dsa2vwi.vtu"
out_path3 = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa/vtk_out/interior_outfile_node_dsa2vwi.vtu"
print('Reading vtu mesh file.')
reader1 = vtk.vtkXMLUnstructuredGridReader()
reader1.SetFileName(file_path1)
reader1.Update()
reader2 = vtk.vtkXMLUnstructuredGridReader()
reader2.SetFileName(file_path2)
reader2.Update()
reader3 = vtk.vtkXMLUnstructuredGridReader()
reader3.SetFileName(file_path3)
reader3.Update()
#reader1 = vtk.vtkXMLPolyDataReader()
#reader1.SetFileName(file_path1)
trans_file = h5py.File("/home/ksansom/caseFiles/mri/VWI_proj/case1/registration_2/Transform.h5", 'r')
# Note: "TranformParameters" (missing "s") is the historical spelling used by
# ITK's HDF5 transform writer, so the misspelled keys below are intentional.
trans_data = trans_file['/TransformGroup/0/TranformParameters'][()]
trans_type = trans_file['/TransformGroup/0/TransformType'][()]
trans_fixed = trans_file['/TransformGroup/0/TranformFixedParameters'][()]
def list_trans_2_4x4matrix(trans_list, scale_trans):
    """Pack a flat ITK affine parameter list into a vtkMatrix4x4.

    trans_list[0:9] is the row-major 3x3 rotation/scale block and
    trans_list[9:12] is the translation, which is rescaled by scale_trans.
    """
    trans = vtk.vtkMatrix4x4()
    # set rotation/scale block
    for i in range(3):
        for j in range(3):
            trans.SetElement(i, j, trans_list[i*3 + j])
    # set translation, converted from the transform file's units
    for i in range(3):
        trans.SetElement(i, 3, trans_list[9 + i]/scale_trans)
    # zero the perspective components (bottom row of the homogeneous matrix)
    for i in range(3):
        trans.SetElement(3, i, 0.0)
    # set global scale
    trans.SetElement(3, 3, 1.0)
    return trans
input_units = "mm"
if input_units == "mm":
    # divide mm translations by 1000 (mesh coordinates assumed to be in metres)
    scale_translation = 1000.0
else:
    scale_translation = 1.0
trans_m = list_trans_2_4x4matrix(trans_data, scale_translation)
# convert from itk format to vtk format
lps2ras = vtk.vtkMatrix4x4()
lps2ras.SetElement(0, 0, -1)
lps2ras.SetElement(1, 1, -1)
ras2lps = vtk.vtkMatrix4x4()
ras2lps.DeepCopy(lps2ras)  # diagonal with +/-1 entries, so it is its own inverse
vtkmat = vtk.vtkMatrix4x4()
# https://www.slicer.org/wiki/Documentation/Nightly/Modules/Transforms
vtk.vtkMatrix4x4.Multiply4x4(lps2ras, trans_m, vtkmat)
vtk.vtkMatrix4x4.Multiply4x4(vtkmat, ras2lps, vtkmat)
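# Hedged sketch of the change of basis above: with D = diag(-1, -1, 1, 1),
# the two Multiply4x4 calls compute M_ras = D * M_lps * D, which flips the
# sign of the x/y translations and of the x-z / y-z rotation couplings.
# Equivalent numpy check:
#   D = np.diag([-1.0, -1.0, 1.0, 1.0])
#   M_ras = D @ M_lps @ D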
# Convert from LPS (ITK) to RAS (Slicer)
#vtk.vtkMatrix4x4.Multiply4x4(ras2lps, trans_m, vtkmat)
#vtk.vtkMatrix4x4.Multiply4x4(vtkmat, lps2ras, vtkmat)
# Convert the sense of the transform (from ITK resampling to Slicer modeling transform)
invert = vtk.vtkMatrix4x4()
vtk.vtkMatrix4x4.Invert(vtkmat, invert)
#print(invert)
# linear transform matrix
invert_lt = vtk.vtkMatrixToLinearTransform()
invert_lt.SetInput(invert)
#pre = vtk.vtkTransform()
#pre.RotateZ(0)#180)
print(invert_lt.GetMatrix())
trans_1 = vtk.vtkTransform()
trans_1.SetInput(invert_lt)
#trans_1.Concatenate(pre)
#trans_1.Concatenate(second_lt)
#trans_1.PostMultiply()  # would make later Concatenate() calls apply after trans_1
trans_1.Update()
# hard-coded secondary registration matrix (row-major 4x4)
second_trans = [
    0.989624049914536, -0.01820165778214229, 0.14252346994349668, -2.755470746170803,
    0.019607903388433528, 0.9997718820638946, -0.008468409480471466, -0.3766689731697975,
    -0.142336818692364, 0.011175128115635211, 0.9897551952660507, 1.4619829512121851,
    0.0, 0.0, 0.0, 1.0,
]
# rescale the translation entries (flat indices 3, 7 and 11 of the row-major 4x4)
for i in range(3):
    second_trans[4*i + 3] = second_trans[4*i + 3] / scale_translation
trans_m_2 = vtk.vtkTransform()
trans_m_2.SetMatrix(second_trans)
#trans_m_2.Scale([1.0/scale_translation,1.0/scale_translation,1.0/scale_translation] )
#trans_m_2.GetMatrix().Invert()
print(trans_m_2.GetMatrix())
poly_filt = vtk.vtkTransformFilter()
poly_filt.SetInputConnection(reader1.GetOutputPort())
poly_filt.SetTransform(trans_1)
poly_filt2 = vtk.vtkTransformFilter()
poly_filt2.SetInputConnection(poly_filt.GetOutputPort())
poly_filt2.SetTransform(trans_m_2)
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetInputConnection(poly_filt2.GetOutputPort())
writer.SetFileName(out_path)
writer.Update()
time_set_range = reader2.GetTimeStepRange()
current_time = reader2.GetTimeStep()
print(current_time)
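# GetTimeStepRange() reports the range of timestep indices available in the
# file; GetTimeStep() is the index of the currently loaded step.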
poly_filt3 = vtk.vtkTransformFilter()
poly_filt3.SetInputConnection(reader2.GetOutputPort())
poly_filt3.SetTransform(trans_1)
poly_filt4 = vtk.vtkTransformFilter()
poly_filt4.SetInputConnection(poly_filt3.GetOutputPort())
poly_filt4.SetTransform(trans_m_2)
writer2 = vtk.vtkXMLUnstructuredGridWriter()
writer2.SetInputConnection(poly_filt4.GetOutputPort())
writer2.SetFileName(out_path2)
writer2.SetNumberOfTimeSteps(int(time_set_range[1] - time_set_range[0]))
writer2.Start()
poly_filt5 = vtk.vtkTransformFilter()
poly_filt5.SetInputConnection(reader3.GetOutputPort())
poly_filt5.SetTransform(trans_1)
poly_filt6 = vtk.vtkTransformFilter()
poly_filt6.SetInputConnection(poly_filt5.GetOutputPort())
poly_filt6.SetTransform(trans_m_2)
writer3 = vtk.vtkXMLUnstructuredGridWriter()
writer3.SetInputConnection(poly_filt6.GetOutputPort())
writer3.SetFileName(out_path3)
writer3.SetNumberOfTimeSteps(int(time_set_range[1] - time_set_range[0]))
writer3.Start()
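# Hedged note on the pattern below: Start()/WriteNextTime()/Stop() stream one
# timestep per call, so the readers are stepped manually and the transform
# filters re-executed before each write.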
print("Number of Times: {0}".format(time_set_range[1]))
for i in range(time_set_range[0], time_set_range[1]):
    next_time = i
    print("write : {0}".format(next_time))
    if current_time == next_time:
        # the first timestep is already loaded by the initial Update() calls
        print("first time")
    else:
        # advance the readers and re-execute the transform pipelines
        reader2.SetTimeStep(next_time)
        reader2.Update()
        poly_filt3.Update()
        poly_filt4.Update()
        reader3.SetTimeStep(next_time)
        reader3.Update()
        poly_filt5.Update()
        poly_filt6.Update()
        current_time = next_time
    writer2.WriteNextTime(current_time)
    writer3.WriteNextTime(current_time)
writer2.Stop()
writer3.Stop()
|
OpusVL/odoo
|
refs/heads/master
|
addons/procurement_jit/procurement_jit.py
|
4
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class procurement_order(osv.osv):
_inherit = "procurement.order"
def create(self, cr, uid, vals, context=None):
procurement_id = super(procurement_order, self).create(cr, uid, vals, context=context)
self.run(cr, uid, [procurement_id], context=context)
self.check(cr, uid, [procurement_id], context=context)
return procurement_id
|