text stringlengths 4 1.02M | meta dict |
|---|---|
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from mock import patch
from nose.tools import eq_
import amo
import amo.tests
from mkt.comm.forms import CommAttachmentForm
@patch.object(settings, 'MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE', 1024)
class TestReviewAppAttachmentForm(amo.tests.TestCase):
    """Validation tests for CommAttachmentForm: attachment size limit and
    optional description handling."""

    def setUp(self):
        self.max_size = settings.MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE

    def post_data(self, **kwargs):
        """Return a baseline POST payload, overridable via keyword args."""
        data = {'description': 'My Test File'}
        data.update(kwargs)
        return data

    def file_data(self, size=1024):
        """Return a FILES payload holding a `size`-byte attachment.

        With size=0 the attachment is left as None (no file uploaded).
        """
        attachment = (SimpleUploadedFile('bacon.txt', ' ' * size)
                      if size else None)
        return {'attachment': attachment}

    def test_no_attachment(self):
        self.check_valid(False, file_data=self.file_data(size=0))

    def test_no_description(self):
        self.check_valid(True, post_data=self.post_data(description=None))

    def test_attachment_okay(self):
        self.check_valid(True, file_data=self.file_data(size=self.max_size))

    def test_attachment_too_large(self):
        self.check_valid(False, file_data=self.file_data(size=self.max_size + 1))

    def check_valid(self, valid, post_data=None, file_data=None):
        """Bind the form with the given payloads (defaults when omitted) and
        assert its validity matches `valid`."""
        form = CommAttachmentForm(post_data or self.post_data(),
                                  file_data or self.file_data())
        eq_(form.is_valid(), valid)
| {
"content_hash": "a8e31d166490aa2637f681d0b79956f0",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 69,
"avg_line_length": 30.649122807017545,
"alnum_prop": 0.6187750429307384,
"repo_name": "ngokevin/zamboni",
"id": "0306b2b6dba5319a44ef0c117f656ab266eda3ef",
"size": "1747",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mkt/reviewers/tests/test_forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "356777"
},
{
"name": "JavaScript",
"bytes": "536388"
},
{
"name": "Python",
"bytes": "3883015"
},
{
"name": "Shell",
"bytes": "13597"
}
],
"symlink_target": ""
} |
"""incertitude geonames loader is a working example of creating a simple
geocoder for possibly ambiguous input queries.
Usage:
incertitude.py <mapping_file.json> <geonames_file_path> <settings_file.json> <host:port>
incertitude.py (-h | --help)
incertitude.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import csv
import json
from docopt import docopt
from elasticsearch import Elasticsearch
if __name__ == '__main__':
    arguments = docopt(__doc__, version='incertitude geonames 1.0')
    db_path = arguments["<geonames_file_path>"]
    mapping_file = arguments["<mapping_file.json>"]
    settings_file = arguments["<settings_file.json>"]
    host, port = arguments["<host:port>"].split(":")

    # Load index mapping and settings.
    # FIX: the Python-2-only builtin file() was replaced with open(), which
    # behaves identically here and also works on Python 3.
    with open(mapping_file) as f:
        mapping = json.load(f)
    with open(settings_file) as f:
        settings = json.load(f)

    es = Elasticsearch([{"host": host, "port": port}])
    body = {
        "settings": settings,
        "mappings": mapping
    }
    # Recreate the 'geocode' index from scratch: ignore status 404 on delete
    # (index does not exist yet) and 400 on create (index already exists).
    es.indices.delete(index='geocode', ignore=404)
    es.indices.create(index='geocode', ignore=400, body=body)

    # Bulk-load the GeoNames tab-separated dump. Column indices below follow
    # the GeoNames "geoname" table layout (0=id, 1=name, 4=lat, 5=lng,
    # 8=country code, 10=admin1/state code, 14=population) -- TODO confirm
    # against the dump actually used.
    with open(db_path) as db:
        reader = csv.reader(db, 'excel-tab')
        for row in reader:
            _id = row[0]
            name = row[1]
            country = row[8]
            lat = row[4]
            lng = row[5]
            state = row[10]
            population = row[14]
            data = {
                "name": name + " " + state,
                "country": country,
                "population": population,
                "location": {
                    "lat": lat,
                    "lon": lng
                }
            }
            es.index(index="geocode", doc_type="place", id=_id, body=data)
| {
"content_hash": "ace7f5b9fc72d9729bab8d36ee40d2d3",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 90,
"avg_line_length": 29.428571428571427,
"alnum_prop": 0.5631067961165048,
"repo_name": "ClipCard/incertitude",
"id": "75ca0dd82524ec6c5bae43f3be907c1fe99caf2d",
"size": "1855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "incertitude.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1855"
}
],
"symlink_target": ""
} |
from django.utils.translation import gettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class ShareSnapshots(horizon.Panel):
    """Horizon panel for share snapshots on the Admin dashboard.

    Only shown when the 'openstack.services.share' (Manila) service
    permission is present.
    """
    name = _("Share Snapshots")
    slug = 'share_snapshots'
    permissions = ('openstack.services.share',)


# Attach the panel to the Admin dashboard.
dashboard.Admin.register(ShareSnapshots)
| {
"content_hash": "436c910a1d6683633e4bd357b9fd0910",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 58,
"avg_line_length": 23.857142857142858,
"alnum_prop": 0.7365269461077845,
"repo_name": "openstack/manila-ui",
"id": "71e693bcc733b3f34502b78f0f9ab520c8bc168a",
"size": "941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila_ui/dashboards/admin/share_snapshots/panel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "72666"
},
{
"name": "Python",
"bytes": "756045"
},
{
"name": "Shell",
"bytes": "20977"
}
],
"symlink_target": ""
} |
import os
import sys
import re
import platform
import shutil
import traceback
import time
import json
from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as Util
from web_console.web_console_handler import WebConsoleHandler
# Global variables definition
# Short name used for waagent log lines and HandlerUtility identification.
EXTENSION_SHORT_NAME = 'SteppingStone'
def install():
    # The install step has no extension-specific work; just report success.
    # (The web console itself is installed later by web_console().)
    hutil.do_parse_context('Install')
    hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded')
def enable():
    """Parse protected settings and launch the web-console worker.

    Reports 'Enable Succeeded' immediately; the long-running setup happens
    asynchronously by re-invoking this script with -web_console.
    """
    hutil.do_parse_context('Enable')
    try:
        handler_settings = \
            hutil._context._config['runtimeSettings'][0]['handlerSettings']
        protect_settings = handler_settings.get('protectedSettings')
        web_console_handler.parse_settings(protect_settings)
        # Ensure the same configuration is executed only once
        hutil.exit_if_seq_smaller()
        # Re-run this script detached in web-console mode so enable() can
        # return quickly while the actual setup continues in the background.
        script_file_path = os.path.realpath(sys.argv[0])
        os.system(' '.join(['python', script_file_path, '-web_console',
                            '>/dev/null 2>&1 &']))
        hutil.do_exit(0, 'Enable', 'success', '0', 'Enable Succeeded')
    # FIX: 'except Exception, e' is Python-2-only syntax; the 'as' form is
    # valid on Python 2.6+ and Python 3.
    except Exception as e:
        hutil.error('Failed to enable the extension with error: %s, stack trace: %s' % (str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '0', 'Enable Failed')
def uninstall():
    # No extension-specific cleanup is performed; just report success.
    hutil.do_parse_context('Uninstall')
    hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall Succeeded')
def disable():
    """Handle the 'disable' operation: report success, logging any failure."""
    hutil.do_parse_context('Disable')
    try:
        hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded')
    # FIX: modernized Python-2-only 'except Exception, e' to the 'as' form
    # (valid on Python 2.6+ and Python 3).
    except Exception as e:
        hutil.error('Failed to disable the extension with error: %s, stack trace: %s' % (str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Disable', 'error', '0', 'Disable Failed')
def update():
    """Handle the 'update' operation: nothing to migrate, report success."""
    # FIX: the context label was misspelled 'Upadate'.
    hutil.do_parse_context('Update')
    hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded')
def web_console():
    """Worker mode: install and start the web console, then report its URI.

    Runs in the background process spawned by enable() via -web_console.
    """
    hutil.do_parse_context('Web Console')
    try:
        handler_settings = \
            hutil._context._config['runtimeSettings'][0]['handlerSettings']
        web_console_handler.parse_settings(
            handler_settings.get('protectedSettings'))
        web_console_handler.install()
        web_console_handler.enable()
        # Give the console a moment to come up before querying its URI.
        time.sleep(10)
        messages = web_console_handler.get_web_console_uri()
        # Flatten the URI payload (dict or plain string) into one status line.
        messages_str = ''
        if isinstance(messages, dict):
            for k, v in messages.items():
                messages_str += k + ': ' + v + '; '
        elif isinstance(messages, str):
            messages_str = messages
        hutil.do_exit(0, 'Enable', 'success', '0', messages_str.strip())
    # FIX: Python-2-only 'except Exception, e' replaced with the 'as' form.
    except Exception as e:
        # FIX: log message said "Failed to install" on the enable path.
        hutil.error('Failed to enable the web console with error: %s, stack trace: %s' % (str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '0', 'Failed to enable web console')
# Main function is the only entrance to this extension handler
def main():
    """Initialize logging/handler utilities and dispatch to the operation
    named on the command line (e.g. -enable, -uninstall)."""
    waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')
    waagent.Log("%s started to handle." %(EXTENSION_SHORT_NAME))
    global hutil
    hutil = Util.HandlerUtility(waagent.Log, waagent.Error, EXTENSION_SHORT_NAME)
    global web_console_handler
    web_console_handler = WebConsoleHandler(hutil)
    # Keyword -> handler table; checked in the same order as the original
    # if/elif chain. Arguments may be prefixed with '-' or '/'.
    dispatch = [
        ('disable', disable),
        ('uninstall', uninstall),
        ('install', install),
        ('enable', enable),
        ('update', update),
        ('web_console', web_console),
    ]
    for arg in sys.argv[1:]:
        for keyword, handler in dispatch:
            if re.match("^([-/]*)(%s)" % keyword, arg):
                handler()
                break


if __name__ == '__main__':
    main()
| {
"content_hash": "fff679ea8fbbfab6bbecb0dc1c6f527b",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 120,
"avg_line_length": 36.2970297029703,
"alnum_prop": 0.6099290780141844,
"repo_name": "Snesha/azure-linux-extensions",
"id": "5070aef882bf35dc7c735abbc98bdfe8be7c923a",
"size": "4327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SteppingStone/handler.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "33945"
},
{
"name": "JavaScript",
"bytes": "19742"
},
{
"name": "Makefile",
"bytes": "1554"
},
{
"name": "Python",
"bytes": "1920287"
},
{
"name": "Shell",
"bytes": "13080"
}
],
"symlink_target": ""
} |
from django import forms
from external_push.models import GenericPushTarget, BrewersFriendPushTarget, BrewfatherPushTarget, ThingSpeakPushTarget, GrainfatherPushTarget
from django.core import validators
import fermentrack_django.settings as settings
from django.forms import ModelForm
class GenericPushTargetModelForm(ModelForm):
    """ModelForm for creating/editing a GenericPushTarget."""
    class Meta:
        model = GenericPushTarget
        fields = ['name', 'push_frequency', 'api_key', 'brewpi_push_selection', 'brewpi_to_push',
                  'gravity_push_selection', 'gravity_sensors_to_push', 'target_host']
class BrewersFriendPushTargetModelForm(ModelForm):
    """ModelForm for creating/editing a BrewersFriendPushTarget."""
    class Meta:
        model = BrewersFriendPushTarget
        fields = ['gravity_sensor_to_push', 'push_frequency', 'api_key']
class BrewfatherPushTargetModelForm(ModelForm):
    """ModelForm for creating/editing a BrewfatherPushTarget."""
    class Meta:
        model = BrewfatherPushTarget
        fields = ['gravity_sensor_to_push', 'push_frequency', 'logging_url', 'device_type', 'brewpi_to_push']
class ThingSpeakPushTargetModelForm(ModelForm):
    """ModelForm for creating/editing a ThingSpeakPushTarget."""
    class Meta:
        model = ThingSpeakPushTarget
        fields = ['name', 'push_frequency', 'api_key', 'brewpi_to_push']
class GrainfatherPushTargetModelForm(ModelForm):
    """ModelForm for creating/editing a GrainfatherPushTarget."""
    class Meta:
        model = GrainfatherPushTarget
        fields = ['gravity_sensor_to_push', 'push_frequency', 'logging_url', 'gf_name']
| {
"content_hash": "a046a439c9ffaa57409a2107eaeee54c",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 142,
"avg_line_length": 35.05263157894737,
"alnum_prop": 0.7184684684684685,
"repo_name": "thorrak/fermentrack",
"id": "1f83115a743ec5cf0977b09f9685a258d938a4a9",
"size": "1332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "external_push/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6186"
},
{
"name": "CSS",
"bytes": "19929"
},
{
"name": "Dockerfile",
"bytes": "3095"
},
{
"name": "HTML",
"bytes": "341762"
},
{
"name": "JavaScript",
"bytes": "1728"
},
{
"name": "Python",
"bytes": "888851"
},
{
"name": "Shell",
"bytes": "20031"
}
],
"symlink_target": ""
} |
import os
import slack
import logging
from datetime import datetime
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
def case_not_found(channel_id, user_id, case):
    """
    Informs the user of their case could not be found.

    Parameters
    ----------
    channel_id : str
        unique string used to idenify a Slack channel. Used to send messages to the channel
    user_id : str
        the Slack user_id of the user who submitted the request. Used to send ephemeral
        messages to the user
    case : str
        unique id of the case
    """
    client = slack.WebClient(token=os.environ.get('SLACK_TOKEN'))
    # Build the ephemeral notice shown only to the requesting user.
    notice = (
        f"Case {case} could not be found in your org. If this case was recently"
        " created, please give the system 60 seconds to fetch it. Otherwise,"
        " double check your case number or confirm the org being tracked"
        " with your Slack admin.")
    try:
        client.chat_postEphemeral(
            channel=channel_id, user=user_id, text=notice)
    except slack.errors.SlackApiError as e:
        logger.error(f"{e} : {datetime.now()}")
if __name__ == "__main__":
    # Manual smoke test: requires TEST_CHANNEL_ID / TEST_USER_ID environment
    # variables and a valid SLACK_TOKEN; uses a deliberately bogus case id.
    channel_id = os.environ.get('TEST_CHANNEL_ID')
    user_id = os.environ.get('TEST_USER_ID')
    case = "xxxxxxxx"
    case_not_found(channel_id, user_id, case)
| {
"content_hash": "315a53466714370aa2994ddedab8382a",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 91,
"avg_line_length": 32.78048780487805,
"alnum_prop": 0.6294642857142857,
"repo_name": "GoogleCloudPlatform/professional-services",
"id": "b22089eaaaccc91f7f2b2933dfba71df99e763a1",
"size": "1942",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tools/google-cloud-support-slackbot/case_not_found.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "117994"
},
{
"name": "C++",
"bytes": "174"
},
{
"name": "CSS",
"bytes": "13405"
},
{
"name": "Component Pascal",
"bytes": "798"
},
{
"name": "Dockerfile",
"bytes": "15093"
},
{
"name": "Go",
"bytes": "352968"
},
{
"name": "HCL",
"bytes": "204776"
},
{
"name": "HTML",
"bytes": "1229668"
},
{
"name": "Java",
"bytes": "338810"
},
{
"name": "JavaScript",
"bytes": "59905"
},
{
"name": "Jinja",
"bytes": "60083"
},
{
"name": "Makefile",
"bytes": "14129"
},
{
"name": "Python",
"bytes": "2250081"
},
{
"name": "Scala",
"bytes": "978327"
},
{
"name": "Shell",
"bytes": "109299"
},
{
"name": "Smarty",
"bytes": "19839"
},
{
"name": "TypeScript",
"bytes": "147194"
}
],
"symlink_target": ""
} |
import sys
import os
import argparse
import shutil
import subprocess
import platform
# Root of the fips repo (this script lives in fips/tools/).
fips_dir = os.path.normpath(os.path.dirname(os.path.abspath(__file__)) + '/..')
# find the path of rt.jar: the GetRT helper class in tools/ prints jar paths
# separated by the platform path separator (';' on Windows, ':' elsewhere)
jre_paths = subprocess.check_output(['java', 'GetRT'], cwd=fips_dir+'/tools').decode("utf-8")
if platform.system() == 'Windows':
    jre_paths = jre_paths.replace('\\','/').split(';')
else:
    jre_paths = jre_paths.split(':')
# Pick the first classpath entry that is the Java runtime jar.
RT_JAR = None
for jre_path in jre_paths:
    if jre_path.endswith('rt.jar'):
        RT_JAR = jre_path
        break
# Android SDK layout: SDK lives in a sibling fips-sdks directory; the
# build-tools version is pinned to 29.0.3.
SDK_HOME = os.path.abspath(fips_dir + '/../fips-sdks/android/') + '/'
BUILD_TOOLS = SDK_HOME + 'build-tools/29.0.3/'
# Windows SDK tools carry .exe/.bat suffixes; empty on other platforms.
EXE = '.exe' if platform.system() == 'Windows' else ''
BAT = '.bat' if platform.system() == 'Windows' else ''
AAPT = BUILD_TOOLS + 'aapt' + EXE
DX = BUILD_TOOLS + 'dx' + BAT
ZIPALIGN = BUILD_TOOLS + 'zipalign' + EXE
APKSIGNER = BUILD_TOOLS + 'apksigner' + BAT
# Fail fast (exit code 10) if any part of the toolchain is missing.
if not RT_JAR:
    print("Can't find rt.jar (is the Java JDK installed?)")
    sys.exit(10)
if not os.path.isfile(RT_JAR):
    print("Can't find Java runtime package '{}'!".format(RT_JAR))
    sys.exit(10)
if not os.path.isdir(SDK_HOME):
    print("Can't find Android SDK '{}'!".format(SDK_HOME))
    sys.exit(10)
for tool in [AAPT, DX, ZIPALIGN, APKSIGNER]:
    if not os.path.isfile(tool):
        print("Can't find required tool in Android SDK: {}".format(tool))
        sys.exit(10)
# Command-line interface: all arguments except --abi/--version are required.
parser = argparse.ArgumentParser(description="Android APK package helper.")
parser.add_argument('--path', help='path to the cmake build dir', required=True)
parser.add_argument('--deploy', help='path where resulting APK will be copied to', required=True)
parser.add_argument('--name', help='cmake target name', required=True)
parser.add_argument('--abi', help='the NDK ABI string (armeabi-v7a, mips or x86', default='armeabi-v7a')
parser.add_argument('--version', help='the Android SDK platform version (e.g. 28)', default='28')
parser.add_argument('--package', help='the Java package name', required=True)
args = parser.parse_args()
# Normalize both directory arguments to end with a slash, and make sure the
# deploy directory exists.
if not args.path.endswith('/'):
    args.path += '/'
if not args.deploy.endswith('/'):
    args.deploy += '/'
if not os.path.exists(args.deploy):
    os.makedirs(args.deploy)
# Java package names cannot contain dashes.
pkg_name = args.package.replace('-','_')
# create the empty project directory tree
def _ensure_dir(path):
    """Create directory *path* if it does not exist yet (no-op otherwise)."""
    if not os.path.exists(path):
        os.makedirs(path)

# APK staging area inside the cmake build dir.
apk_dir = args.path + 'android/' + args.name + '/'
_ensure_dir(apk_dir)
# Native libraries go under lib/<abi>/.
libs_dir = apk_dir + 'lib/' + args.abi + '/'
_ensure_dir(libs_dir)
# Java sources mirror the package name as a directory path.
src_dir = apk_dir + 'src/' + pkg_name.replace('.', '/')
_ensure_dir(src_dir)
# Compiled .class files and the final dex/apk staging dirs.
# (apk_dir already ends with '/', so no extra separator is needed.)
obj_dir = apk_dir + 'obj'
_ensure_dir(obj_dir)
bin_dir = apk_dir + 'bin'
_ensure_dir(bin_dir)
# copy the native shared library produced by the cmake build into lib/<abi>/
so_name = 'lib' + args.name + '.so'
src_so = args.path + so_name
dst_so = libs_dir + so_name
shutil.copy(src_so, dst_so)
# copy the dummy assets directory (template shipped with fips)
res_dir = apk_dir + 'res/'
if not os.path.exists(res_dir):
    shutil.copytree(fips_dir + '/templates/android_assets/res', res_dir)
# generate AndroidManifest.xml for a pure NativeActivity app
# (android:hasCode="false": all logic lives in the shared library)
with open(apk_dir + 'AndroidManifest.xml', 'w') as f:
    f.write('<manifest xmlns:android="http://schemas.android.com/apk/res/android"\n')
    f.write(' package="{}"\n'.format(pkg_name))
    f.write(' android:versionCode="1"\n')
    f.write(' android:versionName="1.0">\n')
    f.write(' <uses-sdk android:minSdkVersion="11" android:targetSdkVersion="{}"/>\n'.format(args.version))
    f.write(' <uses-permission android:name="android.permission.INTERNET"></uses-permission>\n')
    f.write(' <uses-feature android:glEsVersion="0x00030000"></uses-feature>\n')
    f.write(' <application android:label="{}" android:debuggable="true" android:hasCode="false">\n'.format(args.name))
    f.write(' <activity android:name="android.app.NativeActivity"\n');
    f.write(' android:label="{}"\n'.format(args.name))
    f.write(' android:launchMode="singleTask"\n')
    f.write(' android:screenOrientation="fullUser"\n')
    f.write(' android:configChanges="orientation|screenSize|keyboard|keyboardHidden">\n')
    # android.app.lib_name tells NativeActivity which .so to load
    f.write(' <meta-data android:name="android.app.lib_name" android:value="{}"/>\n'.format(args.name))
    f.write(' <intent-filter>\n')
    f.write(' <action android:name="android.intent.action.MAIN"/>\n')
    f.write(' <category android:name="android.intent.category.LAUNCHER"/>\n')
    f.write(' </intent-filter>\n')
    f.write(' </activity>\n')
    f.write(' </application>\n')
    f.write('</manifest>\n')
# prepare APK structure: generate R.java into src/ from the resources
cmd = [
    AAPT,
    'package',
    '-v', '-f', '-m',
    '-S', 'res', '-J', 'src',
    '-M', 'AndroidManifest.xml',
    '-I', SDK_HOME + 'platforms/android-' + args.version + '/android.jar'
]
subprocess.call(cmd, cwd=apk_dir)
# compile Java sources (just the generated R.java) to .class files in obj/
cmd = [
    'javac', '-d', './obj',
    '-source', '1.7',
    '-target', '1.7',
    '-sourcepath', 'src',
    '-bootclasspath', RT_JAR,
    src_dir + '/R.java'
]
subprocess.call(cmd, cwd=apk_dir)
# convert Java byte code to DEX (Dalvik) format
cmd = [
    DX,
    '--verbose',
    '--dex', '--output=bin/classes.dex',
    './obj'
]
subprocess.call(cmd, cwd=apk_dir)
# package the APK (resources + classes.dex from bin/)
cmd = [
    AAPT,
    'package',
    '-v', '-f',
    '-S', 'res',
    '-M', 'AndroidManifest.xml',
    '-I', SDK_HOME + 'platforms/android-' + args.version + '/android.jar',
    '-F', args.path + args.name + '-unaligned.apk',
    'bin'
]
subprocess.call(cmd, cwd=apk_dir)
# add the native shared library to the unaligned APK
cmd = [
    AAPT, 'add', '-v',
    args.path + args.name + '-unaligned.apk',
    'lib/'+args.abi+'/'+so_name
]
subprocess.call(cmd, cwd=apk_dir)
# run zipalign on the package (4-byte alignment, required by the platform)
cmd = [
    ZIPALIGN,
    '-f', '4',
    args.path + args.name + '-unaligned.apk',
    args.path + args.name + '.apk'
]
subprocess.call(cmd, cwd=apk_dir)
# create debug signing key (once; reused from the build dir afterwards)
keystore_path = args.path + 'debug.keystore'
if not os.path.exists(keystore_path):
    cmd = [
        'keytool', '-genkeypair',
        '-keystore', keystore_path,
        '-storepass', 'android',
        '-alias', 'androiddebugkey',
        '-keypass', 'android',
        '-keyalg', 'RSA',
        '-validity', '10000',
        '-dname', 'CN=,OU=,O=,L=,S=,C='
    ]
    subprocess.call(cmd, cwd=apk_dir)
# sign the APK with the standard Android debug-key credentials
cmd = [
    APKSIGNER, 'sign',
    '-v',
    '--ks', keystore_path,
    '--ks-pass', 'pass:android',
    '--key-pass', 'pass:android',
    '--ks-key-alias', 'androiddebugkey',
    args.path + args.name + '.apk'
]
subprocess.call(cmd, cwd=apk_dir)
# verify the APK signature
cmd = [
    APKSIGNER, 'verify',
    '-v',
    args.path + args.name + '.apk'
]
subprocess.call(cmd, cwd=apk_dir)
# copy APK to the fips-deploy directory
shutil.copy(args.path+args.name+'.apk', args.deploy+args.name+'.apk')
| {
"content_hash": "d9aa2122a7fd8215370e3df360b86361",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 119,
"avg_line_length": 32.66826923076923,
"alnum_prop": 0.623252391464312,
"repo_name": "floooh/fips",
"id": "7d73ba07170c88558885ca65ced3dc890076e2c7",
"size": "7456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/android-create-apk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "34"
},
{
"name": "CMake",
"bytes": "126512"
},
{
"name": "Java",
"bytes": "162"
},
{
"name": "Python",
"bytes": "648132"
},
{
"name": "Shell",
"bytes": "2581"
},
{
"name": "Vim Script",
"bytes": "211"
}
],
"symlink_target": ""
} |
"""Set of functions for encapsulating data according to the PEM format.
PEM (Privacy Enhanced Mail) was an IETF standard for securing emails via a
Public Key Infrastructure. It is specified in RFC 1421-1424.
Even though it has been abandoned, the simple message encapsulation it defined
is still widely used today for encoding *binary* cryptographic objects like
keys and certificates into text.
"""
__all__ = ['encode', 'decode']
from Cryptodome.Util.py3compat import b, hexlify, unhexlify, tobytes, tostr
import re
from binascii import a2b_base64, b2a_base64
from Cryptodome.Hash import MD5
from Cryptodome.Util.Padding import pad, unpad
from Cryptodome.Cipher import DES, DES3, AES
from Cryptodome.Protocol.KDF import PBKDF1
from Cryptodome.Random import get_random_bytes
def encode(data, marker, passphrase=None, randfunc=None):
    """Encode a piece of binary data into PEM format.

    :Parameters:
      data : byte string
        The piece of binary data to encode.
      marker : string
        The marker for the PEM block (e.g. "PUBLIC KEY").
        Note that there is no official master list for all allowed markers.
        Still, you can refer to the OpenSSL_ source code.
      passphrase : byte string
        If given, the PEM block will be encrypted. The key is derived from
        the passphrase.
      randfunc : callable
        Random number generation function; it accepts an integer N and returns
        a byte string of random data, N bytes long. If not given, a new one is
        instantiated.
    :Returns:
      The PEM block, as a string.

    .. _OpenSSL: http://cvs.openssl.org/fileview?f=openssl/crypto/pem/pem.h&v=1.66.2.1.4.2
    """
    if randfunc is None:
        randfunc = get_random_bytes

    pem = "-----BEGIN %s-----\n" % marker
    payload = data
    if passphrase:
        # We only support 3DES for encryption
        salt = randfunc(8)
        key = PBKDF1(passphrase, salt, 16, 1, MD5)
        key += PBKDF1(key + passphrase, salt, 8, 1, MD5)
        cipher = DES3.new(key, DES3.MODE_CBC, salt)
        pem += "Proc-Type: 4,ENCRYPTED\nDEK-Info: DES-EDE3-CBC,%s\n\n" %\
            tostr(hexlify(salt).upper())
        # Encrypt with PKCS#7 padding
        payload = cipher.encrypt(pad(data, cipher.block_size))
    elif passphrase is not None:
        raise ValueError("Empty password")

    # Each BASE64 line can take up to 64 characters (=48 bytes of data);
    # b2a_base64 already appends the trailing newline.
    body = "".join(tostr(b2a_base64(payload[i:i + 48]))
                   for i in range(0, len(payload), 48))
    return pem + body + "-----END %s-----" % marker
def decode(pem_data, passphrase=None):
    """Decode a PEM block into binary.

    :Parameters:
      pem_data : string
        The PEM block.
      passphrase : byte string
        If given and the PEM block is encrypted,
        the key will be derived from the passphrase.
    :Returns:
      A tuple with the binary data, the marker string, and a boolean to
      indicate if decryption was performed.
    :Raises ValueError:
      If decoding fails, if the PEM file is encrypted and no passphrase has
      been provided or if the passphrase is incorrect.
    """

    # Verify Pre-Encapsulation Boundary.
    # FIX: raw strings are used for the regexes below; "\s" inside a plain
    # string literal is an invalid escape sequence (DeprecationWarning since
    # Python 3.6, slated to become an error).
    r = re.compile(r"\s*-----BEGIN (.*)-----\s+")
    m = r.match(pem_data)
    if not m:
        raise ValueError("Not a valid PEM pre boundary")
    marker = m.group(1)

    # Verify Post-Encapsulation Boundary
    r = re.compile(r"-----END (.*)-----\s*$")
    m = r.search(pem_data)
    if not m or m.group(1) != marker:
        raise ValueError("Not a valid PEM post boundary")

    # Remove spaces and split on lines
    lines = pem_data.replace(" ", '').split()

    # Decrypt, if necessary
    if lines[1].startswith('Proc-Type:4,ENCRYPTED'):
        if not passphrase:
            raise ValueError("PEM is encrypted, but no passphrase available")
        DEK = lines[2].split(':')
        if len(DEK) != 2 or DEK[0] != 'DEK-Info':
            raise ValueError("PEM encryption format not supported.")
        algo, salt = DEK[1].split(',')
        salt = unhexlify(tobytes(salt))
        if algo == "DES-CBC":
            # This is EVP_BytesToKey in OpenSSL
            key = PBKDF1(passphrase, salt, 8, 1, MD5)
            objdec = DES.new(key, DES.MODE_CBC, salt)
        elif algo == "DES-EDE3-CBC":
            # Note that EVP_BytesToKey is not exactly the same as PBKDF1
            key = PBKDF1(passphrase, salt, 16, 1, MD5)
            key += PBKDF1(key + passphrase, salt, 8, 1, MD5)
            objdec = DES3.new(key, DES3.MODE_CBC, salt)
        elif algo == "AES-128-CBC":
            key = PBKDF1(passphrase, salt[:8], 16, 1, MD5)
            objdec = AES.new(key, AES.MODE_CBC, salt)
        else:
            raise ValueError("Unsupport PEM encryption algorithm (%s)." % algo)
        lines = lines[2:]
    else:
        objdec = None

    # Decode body
    data = a2b_base64(b(''.join(lines[1:-1])))
    enc_flag = False
    if objdec:
        # Decrypt and strip the PKCS#7 padding
        data = unpad(objdec.decrypt(data), objdec.block_size)
        enc_flag = True
    return (data, marker, enc_flag)
| {
"content_hash": "5fe98e6349cb923a5feecd61a0b56c6b",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 90,
"avg_line_length": 37.014084507042256,
"alnum_prop": 0.6092085235920852,
"repo_name": "chronicwaffle/PokemonGo-DesktopMap",
"id": "149d8eea2e2e7f473102ae4f9185d9ce8c4572a6",
"size": "6853",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/pylibs/win32/Cryptodome/IO/PEM.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "29260"
},
{
"name": "JavaScript",
"bytes": "52980"
},
{
"name": "Python",
"bytes": "11998498"
},
{
"name": "Shell",
"bytes": "4097"
}
],
"symlink_target": ""
} |
"""Soft Nearest Neighbors Loss.
FaceNet: A Unified Embedding for Face Recognition and Clustering
https://arxiv.org/abs/1902.01889
"""
from typing import Any, Callable, Union
import tensorflow as tf
from tensorflow_similarity.algebra import build_masks
from tensorflow_similarity.distances import Distance, distance_canonicalizer
from tensorflow_similarity.types import FloatTensor, IntTensor
from .metric_loss import MetricLoss
def soft_nn_loss(
    query_labels: IntTensor,
    query_embeddings: FloatTensor,
    key_labels: IntTensor,
    key_embeddings: FloatTensor,
    distance: Callable,
    temperature: float,
    remove_diagonal: bool = True,
) -> Any:
    """Computes the soft nearest neighbors loss.

    Args:
        query_labels: labels associated with the query embed.
        query_embeddings: Embedded query examples.
        key_labels: labels associated with the key embed.
        key_embeddings: Embedded key examples.
        distance: Which distance function to use to compute the pairwise.
        temperature: Controls relative importance given
            to the pair of points.
        remove_diagonal: Bool. If True, will set diagonal to False in positive pair mask

    Returns:
        loss: loss value for the current batch.
    """
    batch_size = tf.size(query_labels)
    # Small constant to keep the log() below away from log(0).
    eps = 1e-9
    # Temperature-scaled pairwise distances; exp(-d) turns distances into
    # similarity weights (closer pairs -> larger weights).
    pairwise_dist = distance(query_embeddings, key_embeddings)
    pairwise_dist = pairwise_dist / temperature
    negexpd = tf.math.exp(-pairwise_dist)
    # Mask out diagonal entries (self-similarity)
    diag = tf.linalg.diag(tf.ones(batch_size, dtype=tf.bool))
    diag_mask = tf.cast(tf.logical_not(diag), dtype=tf.float32)
    negexpd = tf.math.multiply(negexpd, diag_mask)
    # creating mask to sample same class neighboorhood
    pos_mask, _ = build_masks(
        query_labels,
        key_labels,
        batch_size=batch_size,
        remove_diagonal=remove_diagonal,
    )
    pos_mask = tf.cast(pos_mask, dtype=tf.float32)
    # all class neighborhood: total similarity mass per query
    alcn = tf.reduce_sum(negexpd, axis=1)
    # same class neighborhood: similarity mass restricted to positives
    sacn = tf.reduce_sum(tf.math.multiply(negexpd, pos_mask), axis=1)
    # exclude examples with unique class from loss calculation
    # (a query with no positive pair contributes zero loss)
    excl = tf.math.not_equal(tf.reduce_sum(pos_mask, axis=1), tf.zeros(batch_size))
    excl = tf.cast(excl, tf.float32)
    # loss per query: -log(same-class mass / total mass)
    loss = tf.math.divide(sacn, alcn)
    loss = -tf.multiply(tf.math.log(eps + loss), excl)
    return loss
@tf.keras.utils.register_keras_serializable(package="Similarity")
class SoftNearestNeighborLoss(MetricLoss):
    """Computes the soft nearest neighbors loss in an online fashion.

    Similar to TripletLoss, this loss compares intra- and inter-class
    distances. However, unlike TripletLoss, this loss uses all
    positive and negative examples in the batch.
    See: https://arxiv.org/abs/1902.01889 for the original paper.

    `labels` must be a 1-D integer `Tensor` of shape (batch_size,).
    Its values represent the classes associated with the examples as
    **integer values**.

    `embeddings` must be 2-D float `Tensor` of embedding vectors.
    """

    def __init__(
        self,
        distance: Union[Distance, str] = "sql2",
        temperature: float = 1,
        name: str = "SoftNearestNeighborLoss",
        **kwargs
    ):
        """Initializes the SoftNearestNeighborLoss Loss

        Args:
            `distance`: Which distance function to use to compute
                the pairwise distances between embeddings.
                Defaults to 'sql2'.
            `temperature`: Alters the value of loss function.
                Defaults to 1.
            `name`: Loss name. Defaults to SoftNearestNeighborLoss.
        """
        # distance canonicalization: accept either a Distance instance or
        # its string alias.
        distance = distance_canonicalizer(distance)
        self.distance = distance
        self.temperature = temperature
        # MetricLoss binds soft_nn_loss with these keyword arguments.
        super().__init__(fn=soft_nn_loss, name=name, distance=distance, temperature=temperature, **kwargs)
| {
"content_hash": "c9dfc9ee974c45366825d66cd3fa3f94",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 106,
"avg_line_length": 33.83050847457627,
"alnum_prop": 0.6698396793587175,
"repo_name": "tensorflow/similarity",
"id": "572cbe195fefd27b401da3ada7a3b1db15471bde",
"size": "4681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_similarity/losses/softnn_loss.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "607742"
}
],
"symlink_target": ""
} |
"""Helpers for evaluating the log likelihood of pianorolls under a model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from magenta.models.coconet import lib_tfutil
from magenta.models.coconet import lib_util
import numpy as np
from scipy.misc import logsumexp
import tensorflow as tf
def evaluate(evaluator, pianorolls):
    """Evaluate a sequence of pianorolls.

    The returned dictionary contains two kinds of evaluation results: the "unit"
    losses and the "example" losses. The unit loss measures the negative
    log-likelihood of each unit (e.g. note or frame). The example loss is the
    average of the unit loss across the example. Additionally, the dictionary
    contains various aggregates such as the mean and standard error of the mean
    of both losses, as well as min/max and quartile bounds.

    Args:
        evaluator: an instance of BaseEvaluator
        pianorolls: sequence of pianorolls to evaluate

    Returns:
        A dictionary with evaluation results.
    """
    example_losses = []
    unit_losses = []
    for pi, pianoroll in enumerate(pianorolls):
        tf.logging.info("evaluating piece %d", pi)
        start_time = time.time()
        # The evaluator returns per-unit log-likelihoods; negate to get loss.
        unit_loss = -evaluator(pianoroll)
        example_loss = np.mean(unit_loss)
        example_losses.append(example_loss)
        unit_losses.append(unit_loss)
        duration = (time.time() - start_time) / 60.
        _report(unit_loss, prefix="%i %5.2fmin " % (pi, duration))
        # Abort on the first infinite loss; the aggregate is already ruined.
        if np.isinf(example_loss):
            break
    _report(example_losses, prefix="FINAL example-level ")
    _report(unit_losses, prefix="FINAL unit-level ")
    # Collect raw losses plus example_*/unit_* aggregate statistics.
    rval = dict(example_losses=example_losses, unit_losses=unit_losses)
    rval.update(("example_%s" % k, v) for k, v in _stats(example_losses).items())
    rval.update(
        ("unit_%s" % k, v) for k, v in _stats(_flatcat(unit_losses)).items())
    return rval
def _report(losses, prefix=""):
tf.logging.info("%s loss %s", prefix, _statstr(_flatcat(losses)))
def _stats(x):
return dict(
mean=np.mean(x),
sem=np.std(x) / np.sqrt(len(x)),
min=np.min(x),
max=np.max(x),
q1=np.percentile(x, 25),
q2=np.percentile(x, 50),
q3=np.percentile(x, 75))
def _statstr(x):
    # Render the _stats() summary of `x` as one human-readable line.
    return ("mean/sem: {mean:8.5f}+-{sem:8.5f} {min:.5f} < {q1:.5f} < {q2:.5f} < "
            "{q3:.5f} < {max:.5g}").format(**_stats(x))
def _flatcat(xs):
return np.concatenate([x.flatten() for x in xs])
class BaseEvaluator(lib_util.Factory):
    """Evaluator base class."""

    def __init__(self, wmodel, chronological):
        """Initialize BaseEvaluator instance.

        Args:
            wmodel: WrappedModel instance
            chronological: whether to evaluate in chronological order or in any order
        """
        self.wmodel = wmodel
        self.chronological = chronological

        def predictor(pianorolls, masks):
            # Run the wrapped model's prediction op on a batch of
            # pianorolls/masks in the model's own session.
            p = self.wmodel.sess.run(
                self.wmodel.model.predictions,
                feed_dict={
                    self.wmodel.model.pianorolls: pianorolls,
                    self.wmodel.model.masks: masks
                })
            return p

        # NOTE(review): RobustPredictor presumably adds retry/robustness
        # around the raw predictor -- see lib_tfutil; not visible here.
        self.predictor = lib_tfutil.RobustPredictor(predictor)

    @property
    def hparams(self):
        # Hyperparameters of the wrapped model.
        return self.wmodel.hparams

    @property
    def separate_instruments(self):
        # Whether pianoroll variables are per-instrument (True) or
        # per-pitch (False); controls the indexing in _update_lls.
        return self.wmodel.hparams.separate_instruments

    def __call__(self, pianoroll):
        """Evaluate a single pianoroll.

        Args:
            pianoroll: a single pianoroll, shaped (tt, pp, ii)

        Returns:
            unit losses
        """
        raise NotImplementedError()

    def _update_lls(self, lls, x, pxhat, t, d):
        """Update accumulated log-likelihoods.

        Note: the shape of `lls` and the range of `d` depends on the "number of
        variables per time step" `dd`, which is the number of instruments if
        instruments are separated or the number of pitches otherwise.

        Args:
            lls: (tt, dd)-shaped array of unit log-likelihoods.
            x: the pianoroll being evaluated, shape (B, tt, P, I).
            pxhat: the probabilities output by the model, shape (B, tt, P, I).
            t: the batch of time indices being evaluated, shape (B,).
            d: the batch of variable indices being evaluated, shape (B,).
        """
        # The code below assumes x is binary, so instead of x * log(px) which is
        # inconveniently NaN if both x and log(px) are zero, we can use
        # where(x, log(px), 0).
        assert np.array_equal(x, x.astype(bool))
        if self.separate_instruments:
            # Select (example, time, all pitches, instrument d).
            index = (np.arange(x.shape[0]), t, slice(None), d)
        else:
            # Select (example, time, pitch d, all instruments).
            index = (np.arange(x.shape[0]), t, d, slice(None))
        lls[t, d] = np.log(np.where(x[index], pxhat[index], 1)).sum(axis=1)
class FrameEvaluator(BaseEvaluator):
  """Framewise evaluator.

  Evaluates pianorolls one frame at a time. That is, the model is judged for
  its prediction of entire frames at a time, conditioning on its own samples
  rather than the ground truth of other instruments/pitches in the same frame.

  The frames are evaluated in random order, and within each frame the
  instruments/pitches are evaluated in random order.
  """
  key = "frame"

  def __call__(self, pianoroll):
    """Evaluate a single pianoroll frame by frame.

    Args:
      pianoroll: a single pianoroll, shaped (tt, pp, ii).

    Returns:
      A (tt,)-shaped array of frame log-likelihoods (sum of the unit
      log-likelihoods within each frame).
    """
    tt, pp, ii = pianoroll.shape
    assert self.separate_instruments or ii == 1
    dd = ii if self.separate_instruments else pp

    # Compile a batch with each frame being an example.
    bb = tt
    xs = np.tile(pianoroll[None], [bb, 1, 1, 1])

    ts, ds = self.draw_ordering(tt, dd)

    # Set up sequence of masks to predict the first (according to ordering)
    # instrument for each frame.
    mask = []
    mask_scratch = np.ones([tt, pp, ii], dtype=np.float32)
    for j, (t, d) in enumerate(zip(ts, ds)):
      # When time rolls over, reveal the entire current frame for purposes of
      # predicting the next one.
      if j % dd != 0:
        continue
      mask.append(mask_scratch.copy())
      mask_scratch[t, :, :] = 0
    assert np.allclose(mask_scratch, 0)
    del mask_scratch
    mask = np.array(mask)

    lls = np.zeros([tt, dd], dtype=np.float32)

    # We can't parallelize within the frame, as we need the predictions of
    # some of the other instruments.
    # Hence we outer loop over the instruments and parallelize across frames.
    xs_scratch = xs.copy()
    for d_idx in range(dd):
      # Call out to the model to get predictions for the first instrument
      # at each time step.
      pxhats = self.predictor(xs_scratch, mask)

      t, d = ts[d_idx::dd], ds[d_idx::dd]
      assert len(t) == bb and len(d) == bb

      # Write in predictions and update mask.
      if self.separate_instruments:
        # One-hot the argmax pitch per (example, instrument).
        xs_scratch[np.arange(bb), t, :, d] = np.eye(pp)[np.argmax(
            pxhats[np.arange(bb), t, :, d], axis=1)]
        mask[np.arange(bb), t, :, d] = 0
        # Every example in the batch sees one frame more than the previous.
        assert np.allclose(
            (1 - mask).sum(axis=(1, 2, 3)),
            [(k * dd + d_idx + 1) * pp for k in range(mask.shape[0])])
      else:
        # Threshold the probabilities into binary note-on decisions.
        xs_scratch[np.arange(bb), t, d, :] = (
            pxhats[np.arange(bb), t, d, :] > 0.5)
        mask[np.arange(bb), t, d, :] = 0
        # Every example in the batch sees one frame more than the previous.
        assert np.allclose(
            (1 - mask).sum(axis=(1, 2, 3)),
            [(k * dd + d_idx + 1) * ii for k in range(mask.shape[0])])

      self._update_lls(lls, xs, pxhats, t, d)

    # conjunction over notes within frames; frame is the unit of prediction
    return lls.sum(axis=1)

  def draw_ordering(self, tt, dd):
    """Draw an evaluation ordering over all (time, variable) pairs.

    Frames are visited in time order (shuffled unless `self.chronological`),
    and the variables within each frame are visited in random order.

    Args:
      tt: number of time steps.
      dd: number of variables per time step.

    Returns:
      Tuple (ts, ds) of (tt * dd,)-shaped index arrays giving the time and
      variable index of each evaluation step.
    """
    o = np.arange(tt, dtype=np.int32)
    if not self.chronological:
      np.random.shuffle(o)
    # random variable orderings within each time step
    o = o[:, None] * dd + np.arange(dd, dtype=np.int32)[None, :]
    for t in range(tt):
      np.random.shuffle(o[t])
    o = o.reshape([tt * dd])
    # Pass the shape positionally: the `dims=` keyword of np.unravel_index
    # was deprecated (renamed to `shape=`) and later removed, so the keyword
    # form breaks on modern numpy while the positional form works everywhere.
    ts, ds = np.unravel_index(o, (tt, dd))
    return ts, ds
class NoteEvaluator(BaseEvaluator):
  """Evaluates note-based negative log-likelihood.

  Each (time, variable) pair is evaluated as a separate example, with all
  variables earlier in the ordering revealed as ground truth.
  """
  key = "note"

  def __call__(self, pianoroll):
    """Evaluate a single pianoroll note by note.

    Args:
      pianoroll: a single pianoroll, shaped (tt, pp, ii).

    Returns:
      A (tt, dd)-shaped array of unit log-likelihoods.
    """
    tt, pp, ii = pianoroll.shape
    assert self.separate_instruments or ii == 1
    dd = ii if self.separate_instruments else pp

    # compile a batch with an example for each variable
    bb = tt * dd
    xs = np.tile(pianoroll[None], [bb, 1, 1, 1])

    ts, ds = self.draw_ordering(tt, dd)
    assert len(ts) == bb and len(ds) == bb

    # set up sequence of masks, one for each variable
    mask = []
    mask_scratch = np.ones([tt, pp, ii], dtype=np.float32)
    for t, d in zip(ts, ds):
      mask.append(mask_scratch.copy())
      if self.separate_instruments:
        mask_scratch[t, :, d] = 0
      else:
        mask_scratch[t, d, :] = 0
    assert np.allclose(mask_scratch, 0)
    del mask_scratch
    mask = np.array(mask)

    pxhats = self.predictor(xs, mask)

    lls = np.zeros([tt, dd], dtype=np.float32)
    self._update_lls(lls, xs, pxhats, ts, ds)
    return lls

  def draw_ordering(self, tt, dd):
    """Draw a flat ordering over all (time, variable) pairs.

    BUG FIX: this method was previously named `_draw_ordering`, but
    `__call__` invokes `self.draw_ordering(tt, dd)`, which raised
    AttributeError. Renamed to match the call site (and FrameEvaluator's
    method of the same name).

    Args:
      tt: number of time steps.
      dd: number of variables per time step.

    Returns:
      Tuple (ts, ds) of (tt * dd,)-shaped index arrays, shuffled unless
      `self.chronological` is set.
    """
    o = np.arange(tt * dd, dtype=np.int32)
    if not self.chronological:
      np.random.shuffle(o)
    # Shape is passed positionally: the `dims=` keyword of np.unravel_index
    # was removed from modern numpy (renamed to `shape=`).
    ts, ds = np.unravel_index(o, (tt, dd))
    return ts, ds
class EnsemblingEvaluator(object):
  """Decorator for ensembled evaluation.

  Calls the decorated evaluator multiple times so as to evaluate according to
  multiple orderings. The likelihoods from different orderings are averaged
  in probability space, which gives a better result than averaging in log
  space (which would correspond to a geometric mean that is unnormalized and
  tends to waste probability mass).
  """
  key = "_ensembling"

  def __init__(self, evaluator, ensemble_size):
    self.evaluator = evaluator
    self.ensemble_size = ensemble_size

  def __call__(self, pianoroll):
    # Evaluate under `ensemble_size` independent orderings, then average in
    # probability space: log(mean_i exp(lls_i)) == logsumexp(lls, b=1/n).
    samples = []
    for _ in range(self.ensemble_size):
      samples.append(self.evaluator(pianoroll))
    return logsumexp(samples, b=1. / len(samples), axis=0)
| {
"content_hash": "94dc1025f69cacfcc697bfa108334ce7",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 80,
"avg_line_length": 32.55813953488372,
"alnum_prop": 0.6457142857142857,
"repo_name": "adarob/magenta",
"id": "8761159540c0c8a9d2690a43018f03d1aa5ed50a",
"size": "10385",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "magenta/models/coconet/lib_evaluation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1933"
},
{
"name": "Python",
"bytes": "2941402"
},
{
"name": "Shell",
"bytes": "24986"
}
],
"symlink_target": ""
} |
from django.urls import include, path
from rest_framework import routers

from feeds.viewsets import CategoryViewSet, SubscriptionViewSet, ItemViewSet

# Router exposing the feeds API (categories, subscriptions, items) at the
# site root. The explicit basenames are required because the viewsets do not
# declare a `queryset` attribute from which DRF could derive them.
# BUG FIX: the `base_name` keyword was deprecated in DRF 3.9 and removed in
# DRF 3.11; the current keyword is `basename`.
router = routers.DefaultRouter()
router.register(r'categories', CategoryViewSet, basename='category')
router.register(r'subscriptions', SubscriptionViewSet, basename='subscription')
router.register(r'items', ItemViewSet, basename='item')

urlpatterns = [
    path('', include(router.urls))
]
| {
"content_hash": "674c8e1ae6c25bfab1d2e71feba1f5a6",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 80,
"avg_line_length": 29.933333333333334,
"alnum_prop": 0.7861915367483296,
"repo_name": "jochenklar/reader2",
"id": "d741499db01a7e899e2b1fdfce7e6a8d8a20666f",
"size": "449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feeds/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2583"
},
{
"name": "HTML",
"bytes": "13740"
},
{
"name": "JavaScript",
"bytes": "9872"
},
{
"name": "Python",
"bytes": "24339"
}
],
"symlink_target": ""
} |
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "field1=\'newvalue\'". Não é possível atualizar ou excluir os resultados de uma junção',
'# of International Staff': '# De equipe internacional',
'# of National Staff': '# De equipe nacional',
'# of Vehicles': '# De Veículos',
'%(msg)s\nIf the request type is "%(type)s", please enter the %(type)s on the next screen.': '%(msg)s\nSe o tipo de pedido é "%(type)s", digite a %(type)s na próxima tela.',
'%(system_name)s - Verify Email': '%(system_name)s - Verificar E-Mail',
'%.1f km': '%.1f km',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%m-%d-%Y': '%m-%d-%Y',
'%m-%d-%Y %H:%M:%S': '%m-%d-%Y %H:%M:%S',
'%s Create a new site or ensure that you have permissions for an existing site.': '%s Cria um novo site ou garante que você tenha permissões para um site existente.',
'%s rows deleted': '%s linhas excluídas',
'%s rows updated': '%s linhas atualizadas',
'& then click on the map below to adjust the Lat/Lon fields': 'Em seguida selecione o mapa abaixo para ajustar os campos Lat/Lon',
"'Cancel' will indicate an asset log entry did not occur": "'cancelar' irá indicar que a entrada de log de ativo não ocorreu",
'* Required Fields': '* campos obrigatórios',
'0-15 minutes': '0-15 minutos',
'1 Assessment': '1 Avaliação',
'1 location, shorter time, can contain multiple Tasks': '1 Local, menos tempo, pode conter várias Tarefas',
'1-3 days': '1 a 3 dias',
'15-30 minutes': '15 a 30 minutos',
'2 different options are provided here currently:': '2 opções diferentes são fornecidos aqui atualmente:',
'2x4 Car': 'Carro 2x4',
'30-60 minutes': '30-60 minutos',
'4-7 days': '4-7 Dias',
'4x4 Car': 'Carro 4x4',
'8-14 days': '8-14 Dias',
'A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class.': 'Um marcador assinalado para um local individual é configurado se há a necessidade de substituir um marcador assinalado para o Recurso Classe.',
'A Reference Document such as a file, URL or contact person to verify this data.': 'A Reference Document such as a file, URL or contact person to verify this data.',
'A Reference Document such as a file, URL or contact person to verify this data. You can type the 1st few characters of the document name to link to an existing document.': 'Um documento de referência como um arquivo, URL ou contacto pessoal para verificar esses dados. Pode inserir as primeiras letras do nome dum documento para chegar a esse documento.',
'A brief description of the group (optional)': 'Uma descrição breve do grupo (opcional)',
'A file downloaded from a GPS containing a series of geographic points in XML format.': 'Um ficheiro descarregado de um GPS contendo uma série de pontos geográficos em formato XML.',
'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'Um ficheiro em formato GPX retirado de um GPS cujas datas e horas podem ser correlacionadas com as de fotografias para localização num mapa.',
'A file in GPX format taken from a GPS.': 'A file in GPX format taken from a GPS.',
'A library of digital resources, such as photos, documents and reports': 'Uma biblioteca de recursos digitais, como fotos, documentos e relatórios',
'A location group can be used to define the extent of an affected area, if it does not fall within one administrative region.': 'Um grupo local pode ser usado para definir a extensão de uma área afetada, se não cair dentro de uma região administrativa.',
'A location group is a set of locations (often, a set of administrative regions representing a combined area).': 'Um grupo de localização é um conjunto de locais (muitas vezes, um conjunto de regiões administrativas que representam uma área Combinada).',
'A location group is a set of locations (often, a set of administrative regions representing a combined area). Member locations are added to a location group here. Location groups may be used to filter what is shown on the map and in search results to only entities covered by locations in the group. A location group can be used to define the extent of an affected area, if it does not fall within one administrative region. Location groups can be used in the Regions menu.': 'Um grupo de localização é um conjunto de locais (muitas vezes, um conjunto de regiões administrativas que representam uma área Combinada). Membros locais são adicionados em grupos locais aqui. Grupos locais podem ser utilizados para filtrar o que é mostrado no mapa e nos resultados da procura apenas as entidades locais abrangidas no grupo. Um grupo local pode ser usado para definir a extensão de uma área afetada, se não cair dentro de uma região administrativa. Grupos local pode ser utilizado no menu Regiões.',
'A location group must have at least one member.': 'Um grupo de localização deve ter, pelo menos, um membro.',
"A location that specifies the geographic area for this region. This can be a location from the location hierarchy, or a 'group location', or a location that has a boundary for the area.": 'Um local que especifica a área geográfica dessa região. Este pode ser um local a partir da hierarquia local, ou um "grupo local", ou um local que tem um limite para a área.',
'A survey series with id %s does not exist. Please go back and create one.': 'Id% não foi encontrado na pesquisa. Por favor voltar e crie um.',
'A task is a piece of work that an individual or team can do in 1-2 days': 'A task is a piece of work that an individual or team can do in 1-2 days',
'ABOUT THIS MODULE': 'SOBRE ESTE MÓDULO',
'ACCESS DATA': 'Dados de Acesso',
'ANY': 'Todos',
'API Key': 'API Key',
'API is documented here': 'API está documentado aqui',
'ATC-20 Rapid Evaluation modified for New Zealand': 'ATC-20 Rápida Avaliação modificado para a Nova Zelândia',
'Abbreviation': 'Abreviatura',
'Ability to Fill Out Surveys': 'Capacidade para preencher Inquéritos',
'Ability to customize the list of details tracked at a Shelter': 'Capacidade de Customizar a lista de detalhes rastreados em um Abrigo',
'Ability to customize the list of human resource tracked at a Shelter': 'Capacidade de Customizar a lista de recursos humanos Rastreados em um Abrigo',
'Ability to customize the list of important facilities needed at a Shelter': 'Capacidade de Customizar a lista das instalações importante necessária em um Abrigo',
'Ability to view Results of Completed and/or partially filled out Surveys': 'Capacidade para visualizar resultados de Concluída e/ou parcialmente preenchido Pesquisas',
'About': 'sobre',
'About Sahana': 'Sobre Sahana',
'Access denied': 'Acesso negado',
'Access to Shelter': 'Acesso a Abrigo',
'Access to education services': 'Acesso a serviços de educação',
'Accessibility of Affected Location': 'Acessibilidade do Local Afectado',
'Accompanying Relative': 'Accompanying Relative',
'Account Registered - Please Check Your Email': 'Conta registrada - verifique seu e-mail',
'Account registered, however registration is still pending approval - please wait until confirmation received.': 'Conta registrada, mas registro pende aprovação - por favor aguarde até confirmação ser recebida.',
'Acronym': 'Iniciais',
"Acronym of the organization's name, eg. IFRC.": 'Acrônimo do nome da organização, por exemplo, FICV.',
'Actionable by all targeted recipients': 'Acionáveis por todos os destinatários de destino',
'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': 'Acionáveis apenas pelos participantes exercício designado; Identificação do excercício deve aparecer em',
'Actioned?': 'Acionado?',
'Actions': 'Ações',
'Actions taken as a result of this request.': 'Ações tomadas como resultado desse pedido.',
'Activate Events from Scenario templates for allocation of appropriate Resources (Human, Assets & Facilities).': 'Ativar eventos dos templates de cenário para alocação adequada de recursos (humanos, ativos e equipamentos)',
'Active': 'ativo',
'Active Problems': 'Problemas ativos',
'Activities': 'atividades',
'Activities matching Assessments:': 'Atividades correspondentes a Avaliações:',
'Activities of boys 13-17yrs before disaster': 'Atividades de garotos 13-17 anos antes do desastre',
'Activities of boys 13-17yrs now': 'Atividades de garotos 13-17yrs agora',
'Activities of boys <12yrs before disaster': 'Atividades de garotos <12 anos antes do desastre',
'Activities of boys <12yrs now': 'Atividades de garotos <12 anos agora',
'Activities of children': 'Atividades de crianças',
'Activities of girls 13-17yrs before disaster': 'Atividades de meninas 13-17yrs antes de desastres',
'Activities of girls 13-17yrs now': 'Atividades de meninas 13-17yrs agora',
'Activities of girls <12yrs before disaster': 'Atividades de meninas <12yrs antes de desastres',
'Activities of girls <12yrs now': 'Agora atividades de meninas de menos de 12 anos',
'Activities:': 'Atividades:',
'Activity': 'atividade',
'Activity Added': 'Atividade Incluída',
'Activity Deleted': 'Atividade Apagada',
'Activity Details': 'Detalhes da Atividade',
'Activity Report': 'Relatório de atividades',
'Activity Reports': 'Relatório de Atividades',
'Activity Type': 'Tipo de atividade',
'Activity Updated': 'Atividade Atualizada',
'Activity added': 'Activity added',
'Activity removed': 'Activity removed',
'Activity updated': 'Activity updated',
'Add': 'incluir',
'Add Activity': 'Incluir Atividade',
'Add Activity Report': 'Incluir Relatório de atividade',
'Add Activity Type': 'Incluir tipo de atividade',
'Add Address': 'Incluir Endereço',
'Add Alternative Item': 'Adicionar item alternativo',
'Add Assessment': 'Incluir Avaliação',
'Add Assessment Summary': 'Incluir Avaliação De Resumo',
'Add Asset': 'Incluir ativo',
'Add Asset Log Entry - Change Label': 'Incluir recurso de entrada de entrada - trocar a Etiqueta',
'Add Availability': 'Incluir Disponibilidade',
'Add Baseline': 'Incluir Linha',
'Add Baseline Type': 'Incluir Linha De Tipo',
'Add Bed Type': 'Incluir Tipo De Cama',
'Add Brand': 'Incluir Marca',
'Add Budget': 'Incluir Orçamento',
'Add Bundle': 'Incluir Pacote Configurável',
'Add Camp': 'Incluir acampamento',
'Add Camp Service': 'Incluir acampamento de serviço',
'Add Camp Type': 'Incluir tipo de acampamento',
'Add Catalog': 'Incluir Catálogo',
'Add Catalog Item': 'Incluir Item de Catálogo',
'Add Certificate': 'Incluir certificado',
'Add Certification': 'Adicionar Certificação',
'Add Cholera Treatment Capability Information': 'Incluir Informação sobre capacidade para tratamento de cólera',
'Add Cluster': 'Incluir cluster',
'Add Cluster Subsector': 'Incluir Subsetor de Cluster',
'Add Competency': 'incluir competência',
'Add Competency Rating': 'Incluir Classificação da Competência',
'Add Contact': 'Incluir contato',
'Add Contact Information': 'Incluir informações de contato',
'Add Course': 'Incluir curso',
'Add Course Certificate': 'Incluir Certificado de Curso',
'Add Credential': 'Incluir referência',
'Add Credentials': 'Incluir Referências',
'Add Dead Body Report': 'Incluir Relatório de Cadáver',
'Add Disaster Victims': 'Incluir Vítimas de Desastre',
'Add Distribution.': 'Incluir distribuição.',
'Add Document': 'Add Document',
'Add Donor': 'Incluir doador',
'Add Facility': 'Incluir Recurso',
'Add Feature Class': 'Incluir classe de recurso',
'Add Feature Layer': 'Incluir camada de recurso',
'Add Flood Report': 'Incluir Relatório Enchente',
'Add GPS data': 'Add GPS data',
'Add Group': 'Incluir Grupo',
'Add Group Member': 'Incluir Membro do Grupo',
'Add Hospital': 'Incluir Hospital',
'Add Human Resource': 'Incluir Recurso Humano',
'Add Identification Report': 'Incluir Identificação Relatório',
'Add Identity': 'Incluir Identidade',
'Add Image': 'Incluir Imagem',
'Add Impact': 'Adicionar Impacto',
'Add Impact Type': 'Incluir Tipo De Impacto',
'Add Incident': 'Adicionar Incidente',
'Add Incident Report': 'Incluir relatório de incidente',
'Add Inventory Item': 'Inclúir item de inventário',
'Add Item': 'Incluir item',
'Add Item Category': 'Incluir categoria de item',
'Add Item Pack': 'Incluir pacote de itens',
'Add Item to Catalog': 'Incluir Item no Catálogo',
'Add Item to Commitment': 'Incluir Item no Compromisso',
'Add Item to Inventory': 'Incluir Item de Inventário',
'Add Item to Request': 'Incluir Item para pedido',
'Add Item to Shipment': 'Adicionar Item para Embarque',
'Add Job Role': 'Incluir tarefa Função',
'Add Key': 'Incluir Chave',
'Add Kit': 'Adicionar Kit',
'Add Layer': 'Incluir Camada',
'Add Level 1 Assessment': 'Incluir nível de Avaliação 1',
'Add Level 2 Assessment': 'Incluir nível de Avaliação 2',
'Add Location': 'Incluir Local',
'Add Log Entry': 'Adicionar Entrada de Log',
'Add Map Configuration': 'Incluir Mapa de configuração',
'Add Marker': 'Incluir Marcador',
'Add Member': 'Incluir membro',
'Add Membership': 'Incluir Associação',
'Add Message': 'Incluir Mensagem',
'Add Mission': 'Incluir Missão',
'Add Need': 'Incluir o necessário',
'Add Need Type': 'Adicionar o tipo Necessário',
'Add New': 'Incluir novo',
'Add New Activity': 'Incluir Nova Atividade',
'Add New Address': 'Incluir Novo Endereço',
'Add New Alternative Item': 'Incluir novo Item Alternativo',
'Add New Assessment': 'Adicionar Nova Avaliação',
'Add New Assessment Summary': 'Incluir novo Resumo de Avaliação',
'Add New Asset': 'Incluir Novo Ativo',
'Add New Baseline': 'Incluir nova linha de base',
'Add New Baseline Type': 'Incluir novo tipo de linha de base',
'Add New Brand': 'Adicionar Nova Marca',
'Add New Budget': 'Adicionar Novo Orçamento',
'Add New Bundle': 'Incluir Novo Pacote',
'Add New Camp': 'Incluir novo Campo',
'Add New Camp Service': 'Inlcuir Novo Campo de Serviço',
'Add New Camp Type': 'Incluir Novo Campo de Tipo',
'Add New Catalog': 'Incluir Novo Catálogo',
'Add New Catalog Item': 'Incluir novo Item de catálogo',
'Add New Cluster': 'Adicionar novo grupo',
'Add New Cluster Subsector': 'Adicionar novo subgrupo',
'Add New Commitment Item': 'Incluir novo item de compromisso',
'Add New Contact': 'Incluir novo contato',
'Add New Credential': 'Incluir nova credencial',
'Add New Document': 'Incluir Novo Documento',
'Add New Donor': 'Adicionar novo doador',
'Add New Entry': 'Incluir Nova Entrada',
'Add New Event': 'Adicionar novo evento',
'Add New Facility': 'Incluir novo Recurso',
'Add New Feature Class': 'Incluir nova classe do recurso',
'Add New Feature Layer': 'Adicionar nova camada de características',
'Add New Flood Report': 'Adicionar novo relatório de cheias',
'Add New Group': 'Adicionar novo grupo',
'Add New Home': 'Add New Home',
'Add New Hospital': 'Adicionar novo hospital',
'Add New Human Resource': 'Incluir novos recursos humanos',
'Add New Identity': 'Adicionar nova identidade',
'Add New Image': 'Adicionar nova imagem',
'Add New Impact': 'Adicionar novo impacto',
'Add New Impact Type': 'Incluir novo Tipo De Impacto',
'Add New Incident Report': 'Adicionar novo relatório de incidentes',
'Add New Inventory Item': 'Incluir novo Item De Inventário',
'Add New Item': 'Incluir novo item',
'Add New Item Category': 'Incluir nova categoria de itens',
'Add New Item Pack': 'Incluir novo pacote de itens',
'Add New Item to Kit': 'Incluir novo Item de Kit',
'Add New Key': 'Adicionar Nova Chave',
'Add New Kit': 'Incluir novo Kit',
'Add New Layer': 'Adicionar Nova Camada',
'Add New Level 1 Assessment': 'Incluir novo nível 1 avaliação',
'Add New Level 2 Assessment': 'Incluir novo nível 2 avaliação',
'Add New Location': 'Incluir Novo Local',
'Add New Log Entry': 'Incluir nova entrada de Log',
'Add New Map Configuration': 'Incluir Novo Mapa de Configuração',
'Add New Marker': 'Incluir novo Marcador',
'Add New Member': 'Incluir Novo Membro',
'Add New Membership': 'Incluir novo membro',
'Add New Need': 'Adicionar novas necessidades',
'Add New Need Type': 'Incluir novo Tipo Necessário',
'Add New Note': 'Adicionar NOVA NOTA',
'Add New Office': 'Adicionar novo escritório',
'Add New Organization': 'Incluir nova Organização',
'Add New Patient': 'Add New Patient',
'Add New Person to Commitment': 'Add New Person to Commitment',
'Add New Photo': 'Adicionar Nova Foto',
'Add New Population Statistic': 'Incluir nova População De Estatística',
'Add New Problem': 'Incluir novo Problema',
'Add New Project': 'Incluir novo projeto',
'Add New Projection': 'Adicionar Nova Projecção',
'Add New Rapid Assessment': 'Incluir nova Avaliação Rápida',
'Add New Received Item': 'Incluir novo Item Recebido',
'Add New Record': 'Incluir Novo Registro',
'Add New Relative': 'Add New Relative',
'Add New Report': 'Incluir Novo Relatório',
'Add New Request': 'Incluir novo pedido',
'Add New Request Item': 'Incluir novo Item de Pedido',
'Add New Resource': 'Incluir Novo Recurso',
'Add New River': 'Incluir novo Rio',
'Add New Role': 'INCLUIR NOVA FUNÇÃO',
'Add New Role to User': 'Incluir nova função para o usuário',
'Add New Room': 'Adicionar nova sala',
'Add New Scenario': 'Adicionar Novo cenário',
'Add New Sector': 'Incluir novo Sector',
'Add New Sent Item': 'Incluir novo Item Enviado',
'Add New Setting': 'Adicionar Nova Configuração',
'Add New Shelter': 'Incluir Novo Abrigo',
'Add New Shelter Service': 'Incluir Novo Serviço de Abrigo',
'Add New Shelter Type': 'Incluir Novo Tipo de Abrigo',
'Add New Skill': 'Adicionar nova habilidade',
'Add New Solution': 'Adicionar nova solução',
'Add New Staff': 'Adicionar Nova Equipe',
'Add New Staff Member': 'Incluir novo equipe do membro',
'Add New Staff Type': 'Incluir novo tipo de equipe.',
'Add New Subsector': 'Incluir novo Subsector',
'Add New Survey Answer': 'Incluir nova resposta na pesquisa.',
'Add New Survey Question': 'Incluir nova pergunta na pesquisa.',
'Add New Survey Section': 'Incluir nova seção na pesquisa.',
'Add New Survey Series': 'Incluir nova série na pesquisa.',
'Add New Survey Template': 'Incluir novo Modelo De Pesquisa',
'Add New Task': 'Incluir Nova Tarefa',
'Add New Team': 'Adicionar nova equipe',
'Add New Theme': 'Incluir novo tema',
'Add New Ticket': 'Incluir nova permissão',
'Add New Track': 'Adicionar Nova Pista',
'Add New User': 'Incluir Novo Usuário',
'Add New User to Role': 'Adicionar Novo usuário para Função',
'Add New Vehicle': 'Add New Vehicle',
'Add New Volunteer': 'Incluir novo Voluntário',
'Add New Warehouse': 'Adicionar novo armazém',
'Add Note': 'Incluir nota',
'Add Office': 'Adicionar Office',
'Add Organization': 'Incluir Organização',
'Add Peer': 'Incluír Par',
'Add Person': 'incluir pessoa',
'Add Person to Commitment': 'Add Person to Commitment',
'Add Personal Effects': 'Incluir efeitos pessoais',
'Add Photo': 'Incluir Foto',
'Add Population Statistic': 'Incluir População Estatística',
'Add Position': 'Adicionar Posição',
'Add Problem': 'Adicionar Problema',
'Add Project': 'Adicionar Projeto',
'Add Projection': 'Adicionar Projeção',
'Add Question': 'Adicionar Pergunta',
'Add Rapid Assessment': 'Adicionar Avaliação Rápida',
'Add Record': 'Incluir Registro',
'Add Reference Document': 'Incluir documento de referência',
'Add Report': 'Incluir Relatório',
'Add Request': 'Incluir Pedido',
'Add Resource': 'Incluir Recurso',
'Add River': 'Incluir Rio',
'Add Role': 'Incluir Função',
'Add Room': 'Incluir Sala',
'Add Section': 'Incluir Secção',
'Add Sector': 'Incluir Sector',
'Add Service Profile': 'Incluir Perfil de Serviço',
'Add Setting': 'Adicionar Definição',
'Add Shelter': 'Incluir Abrigo',
'Add Shelter Service': 'Incluir Serviço de Abrigo',
'Add Shelter Type': 'Incluir Tipo de Abrigo',
'Add Skill': 'Incluir Habilidade',
'Add Skill Equivalence': 'Incluir equivalência de habilidades',
'Add Skill Provision': 'Incluir provisão de habilidades',
'Add Skill Type': 'Incluir Tipo de Habilidade',
'Add Skill to Request': 'Add Skill to Request',
'Add Solution': 'Incluir Solução',
'Add Staff': 'Incluir equipe',
'Add Staff Member': 'Adicionar membro da equipe',
'Add Staff Type': 'Incluir tipo de equipe',
'Add Status': 'Incluir Status',
'Add Subscription': 'Incluir Assinatura',
'Add Subsector': 'Incluir Subsetor',
'Add Survey Answer': 'Incluir resposta de pesquisa',
'Add Survey Question': 'Adicionar pergunta da pesquisa',
'Add Survey Section': 'Incluir seção da pesquisa',
'Add Survey Series': 'Incluir série da pesquisa',
'Add Survey Template': 'Incluir Modelo De Pesquisa',
'Add Task': 'Incluir Tarefa',
'Add Team': 'Incluir equipe',
'Add Theme': 'Incluir Tema',
'Add Ticket': 'Adicionar Bilhete',
'Add Training': 'Incluir Treinamento',
'Add Unit': 'Incluir Unidade',
'Add User': 'Incluir Usuário',
'Add Vehicle': 'Add Vehicle',
'Add Vehicle Detail': 'Add Vehicle Detail',
'Add Vehicle Details': 'Add Vehicle Details',
'Add Volunteer': 'Incluir Voluntário',
'Add Volunteer Availability': 'Incluir disponibilidade do voluntário',
'Add Warehouse': 'Adicionar Data Warehouse',
'Add a Person': 'Incluir uma pessoa',
'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.': 'Adicionar um documento de referência como um arquivo, URL ou contacto pessoal para verificar esses dados. Se você não inserir um documento de referência, seu e-mail será exibido no lugar.',
'Add a Volunteer': 'Incluir um Voluntário',
'Add a new certificate to the catalog.': 'Incluir um novo certificado no catálogo.',
'Add a new competency rating to the catalog.': 'Adicionar uma classificação nova competência para o catálogo.',
'Add a new course to the catalog.': 'Adicionar um novo rumo para o catálogo.',
'Add a new job role to the catalog.': 'Incluir uma função nova tarefa para o catálogo.',
'Add a new skill provision to the catalog.': 'Incluir uma disposição nova habilidade para o catálogo.',
'Add a new skill to the catalog.': 'Incluir uma nova habilidade para o catálogo.',
'Add a new skill type to the catalog.': 'Incluir um tipo novo de hailidade para o catálogo.',
'Add new Group': 'Adicionar novo grupo',
'Add new Individual': 'Incluir novo indivíduo',
'Add new Patient': 'Add new Patient',
'Add new project.': 'Adicionar novo projeto.',
'Add new staff role.': 'Incluir função de novos funcionários.',
'Add staff members': 'Incluir membros da equipe',
'Add to Bundle': 'Incluir no Pacote Configurável',
'Add to budget': 'Incluir no orçamento',
'Add volunteers': 'Incluir voluntários',
'Add/Edit/Remove Layers': 'Incluir/editar/remover camadas',
'Additional Beds / 24hrs': 'Camas adicionais / 24 horas',
'Address': 'endereços',
'Address Details': 'Detalhes do Endereço',
'Address Type': 'Tipo de Endereço',
'Address added': 'Endereço incluído',
'Address deleted': 'Endereço excluído',
'Address updated': 'Endereço actualizado',
'Addresses': 'Endereços',
'Adequate': 'adequar',
'Adequate food and water available': 'Comida e água adequado disponível',
'Admin Email': 'email do administrador',
'Admin Name': 'nome do administrador',
'Admin Tel': 'Telefone do administrador',
'Administration': 'administração',
'Admissions/24hrs': 'admissões/24 horas',
'Adolescent (12-20)': 'adolescente (12-20)',
'Adolescent participating in coping activities': 'Adolescente participando em actividades de superação',
'Adult (21-50)': 'Adulto (21-50)',
'Adult ICU': 'UTI para adultos',
'Adult Psychiatric': 'Psiquiátrico para adultos',
'Adult female': 'Mulher adulta',
'Adult male': 'Homem adulto',
'Adults in prisons': 'Adultos em prisões',
'Advanced:': 'Avançado:',
'Advisory': 'Aconselhamento',
'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.': 'Depois de pressionar o botão será mostrado um conjunto de dois elementos, um de cada vez. Por favor selecione a uma solução de cada par de sua preferência sobre o outro.',
'Age Group': 'Grupo etário',
'Age group': 'Grupo etário',
'Age group does not match actual age.': 'Grupo etário não corresponde à idade real.',
'Aggravating factors': 'Fatores agravantes',
'Agriculture': 'Agricultura',
'Air Transport Service': 'Serviço de Transporte Aéreo',
'Air tajin': 'Tajin AR',
'Aircraft Crash': 'Despenho de Avião',
'Aircraft Hijacking': 'Sequestro de Avião',
'Airport Closure': 'Encerramento de Aeroporto',
'Airspace Closure': 'Encerramento de Espaço Aéreo',
'Alcohol': 'álcool',
'Alert': 'Alertar',
'All': 'Tudo',
'All Inbound & Outbound Messages are stored here': 'Todas as mensagens enviadas e recebidas são armazenados aqui',
'All Resources': 'Todos os recursos',
'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.': 'Todos os dados fornecidos pelos Sahana Software Foundation a partir deste site é licenciado sob uma Licença Atribuição Comuns criativos. No entanto, nem todos os dados se origina aqui. Por favor consulte o campo de origem de cada entrada.',
'Allowed to push': 'Permissão para pressionar',
'Allows a Budget to be drawn up': 'Permite que um orçamento seja estabelecido',
'Allows authorized users to control which layers are available to the situation map.': 'Permite usuários autorizados a controlar quais camadas estão disponíveis no mapa de situação.',
'Alternative Item': 'Item Alternativo',
'Alternative Item Details': 'Detalhes do Item alternativo',
'Alternative Item added': 'Item alternativo incluído',
'Alternative Item deleted': 'Item alternativo excluído',
'Alternative Item updated': 'Item Alternativo atualizado',
'Alternative Items': 'Itens alternativos',
'Alternative places for studying': 'Locais alternativos para estudo',
'Ambulance Service': 'Serviço de Ambulância',
'An asset must be assigned to a person, site OR location.': 'Um ATIVO deve ser designado a uma pessoa, local ou site.',
'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': 'Um sistema de admissão, um sistema de gestão de depósitos, tracking and commodity, gestão da cadeia de fornecimentos, aquisições de ativos e outros e os recursos de gerenciamento de recurso.',
'An item which can be used in place of another item': 'Um item que pode ser utilizado no lugar de outro item',
'Analysis of Completed Surveys': 'Análise das Pesquisas Concluídas',
'Analysis of assessments': 'Analysis of assessments',
'Animal Die Off': 'Morte Animal',
'Animal Feed': 'Alimentação Animal',
'Answer Choices (One Per Line)': 'Resposta opções (Um por linha)',
'Anthropolgy': 'Anthropolgy',
'Antibiotics available': 'Antibióticos disponíveis',
'Antibiotics needed per 24h': 'Antibióticos necessários por 24h',
'Apparent Age': 'Idade aparente',
'Apparent Gender': 'Género aparente',
'Application Deadline': 'Prazo Final da aplicação',
'Applications': 'Requisições',
'Approve': 'Aprovar',
'Approved': 'aprovado',
'Approver': 'Aprovador',
'Arabic': 'Arabic',
'Arctic Outflow': 'Árctico Exfluxo',
'Area': 'Área',
'Areas inspected': 'Inspeccionados áreas',
'As of yet, no sections have been added to this template.': 'As of yet, no sections have been added to this template.',
'Assessment': 'Avaliação',
'Assessment Details': 'Detalhes da Avaliação',
'Assessment Reported': 'Avaliação Relatada',
'Assessment Summaries': 'Sumário de Avaliações',
'Assessment Summary Details': 'Detalhes do sumário de avaliação',
'Assessment Summary added': 'Anexado sumário de avaliações',
'Assessment Summary deleted': 'Avaliação de resumo apagado',
'Assessment Summary updated': 'Sumário de avaliação atualizado',
'Assessment added': 'Avaliação incluída',
'Assessment admin level': 'Avaliação de nível administrativo',
'Assessment deleted': 'Avaliação excluída',
'Assessment timeline': 'sequência temporal de avaliação',
'Assessment updated': 'Avaliação atualizada',
'Assessments': 'avaliações',
'Assessments Needs vs. Activities': 'Necessidades de Avaliações vs. Atividades',
'Assessments and Activities': 'Avaliações e Atividades',
'Assessments:': 'Avaliações:',
'Assessor': 'Avaliador',
'Asset': 'Recurso',
'Asset Assigned': 'Ativo Designado',
'Asset Assignment Details': 'Detalhes da Designação de Recursos',
'Asset Assignment deleted': 'Designação De ativo excluído',
'Asset Assignment updated': 'Atribuição de Ativo atualizada',
'Asset Assignments': 'Designações de Ativo',
'Asset Details': 'Detalhes do Ativo',
'Asset Log': 'Log de ATIVOS',
'Asset Log Details': 'Detalhes do Log de ativos',
'Asset Log Empty': 'Log de Ativos vazio',
'Asset Log Entry Added - Change Label': 'Adicionada uma entrada no Log de ativos -Alterar Etiqueta',
'Asset Log Entry deleted': 'Apagada uma entrada no Log de ativos',
'Asset Log Entry updated': 'Atualizada uma entrada no Log de Ativos',
'Asset Management': 'gerenciamento de recursos',
'Asset Number': 'número do recurso',
'Asset added': 'Ativo Incluído',
'Asset deleted': 'ativo excluído',
'Asset removed': 'Ativo Removido',
'Asset updated': 'recurso atualizado',
'Assets': 'recursos',
'Assets are resources which are not consumable but are expected back, so they need tracking.': 'Os ativos são recursos que não são consumíveis e serão devolvidos, portanto precisam de rastreamento.',
'Assign': 'Designar',
'Assign Asset': 'designar recurso',
'Assign Group': 'Designar Grupo',
'Assign Staff': 'Atribuir Equipe',
'Assign to Org.': 'Designar para Org.',
'Assign to Organisation': 'Atribuir para Organização',
'Assign to Organization': 'Atribuir para Organização',
'Assign to Person': 'Atribuir uma Pessoa',
'Assign to Site': 'Atribuir um Site',
'Assigned': 'Designado',
'Assigned By': 'Designado por',
'Assigned To': 'Designado Para',
'Assigned to': 'Designado para',
'Assigned to Organisation': 'Designado para Organização',
'Assigned to Person': 'Designado para a Pessoa',
'Assigned to Site': 'Designado para o Site',
'Assignments': 'Designações',
'At/Visited Location (not virtual)': 'Em/Visitou Local (não virtual)',
'Attend to information sources as described in <instruction>': 'Participar de fontes de informação, conforme descrito em<instruction>',
'Attribution': 'Atribuição',
"Authenticate system's Twitter account": 'Sistema de Autenticação para conta de Twitter',
'Author': 'autor',
'Availability': 'Disponibilidade',
'Available Alternative Inventories': 'Alternativas de Inventário disponíveis',
'Available Alternative Inventory Items': 'Itens alternativos de Inventário disponíveis',
'Available Beds': 'camas disponíveis',
'Available Forms': 'Available Forms',
'Available Inventories': 'Inventários disponíveis',
'Available Inventory Items': 'Itens de inventário disponíveis',
'Available Messages': 'Mensagens disponíveis',
'Available Records': 'Registros disponíveis',
'Available databases and tables': 'Banco de Dados e Tabelas disponíveis',
'Available for Location': 'Disponível para locação',
'Available from': 'disponível de',
'Available in Viewer?': 'Disponível no visualizador?',
'Available until': 'Disponível até',
'Avalanche': 'Avalanche',
'Avoid the subject event as per the <instruction>': 'Evitar o evento em questão de acordo com a <instruction>',
'Background Color': 'Background Color',
'Background Color': 'Cor de Plano de Fundo',
'Background Color for Text blocks': 'Cor de segundo plano para blocos de texto',
'Bahai': 'Bahai',
'Baldness': 'Calvície',
'Banana': 'Banana',
'Bank/micro finance': 'banco/micro finanças',
'Barricades are needed': 'Barricadas são necessárias',
'Base Layer?': 'Camada De Base?',
'Base Location': 'Local da Base',
'Base Site Set': 'Conjunto de Site básico',
'Baseline Data': 'Dados básicos',
'Baseline Number of Beds': 'Numero de camadas base de camas',
'Baseline Type': 'Tipo de Linha Base',
'Baseline Type Details': 'Detalhes de Tipo de Linha Base',
'Baseline Type added': 'Tipo de Linha Base adicionado',
'Baseline Type deleted': 'Tipo de Linha Base removido',
'Baseline Type updated': 'Tipo de Linha Base actualizado',
'Baseline Types': 'Tipos de Linha Base',
'Baseline added': 'Camada Base incluída',
'Baseline deleted': 'Camada Base Excluída',
'Baseline number of beds of that type in this unit.': 'Numero de camadas base de camas desse tipo nesta unidade.',
'Baseline updated': 'Linha Base actulizada',
'Baselines': 'Camadas Base',
'Baselines Details': 'Detalhes de Camadas Base',
'Basic Assessment': 'Avaliação Básica',
'Basic Assessment Reported': 'Avaliação Básica Relatada',
'Basic Details': 'Detalhes Básicos',
'Basic reports on the Shelter and drill-down by region': 'Relatórios básicos sobre o Abrigo e abertura por região',
'Baud': 'Transmissão',
'Baud rate to use for your modem - The default is safe for most cases': 'Taxa de transmissão para ser usada pelo seu modem - O padrão é seguro para a maioria dos casos',
'Beam': 'feixe',
'Bed Capacity': 'Capacidade de leitos',
'Bed Capacity per Unit': 'Capacidade cama por Unidade',
'Bed Type': 'Tipo de cama',
'Bed type already registered': 'Tipo de cama já registrado',
'Below ground level': 'Abaixo do nível do solo',
'Beneficiary Type': 'Tipo de beneficiário',
"Bing Layers cannot be displayed if there isn't a valid API Key": "Bing Layers cannot be displayed if there isn't a valid API Key",
'Biological Hazard': 'Risco Biológico',
'Biscuits': 'Biscoitos',
'Blizzard': 'Nevasca',
'Blood Type (AB0)': 'Tipo sanguíneo (AB0)',
'Blowing Snow': 'Soprando neve',
'Boat': 'Barco',
'Bodies': 'Bodies',
'Bodies found': 'Corpos encontrados',
'Bodies recovered': 'corpos recuperados',
'Body': 'corpo',
'Body Recovery': 'Body Recovery',
'Body Recovery Request': 'Pedido de recuperação de corpos',
'Body Recovery Requests': 'Pedidos de recuperação de corpos',
'Bomb': 'Bomba',
'Bomb Explosion': 'Explosão de bomba',
'Bomb Threat': 'Ameaça de bomba',
'Border Color for Text blocks': 'Cor da borda para blocos de texto',
'Bounding Box Insets': 'Delimitadora Inserções Caixa',
'Bounding Box Size': 'CAIXA delimitadora Tamanho',
'Brand': 'Marca',
'Brand Details': 'Detalhes da Marca',
'Brand added': 'Marca incluída',
'Brand deleted': 'Marca excluída',
'Brand updated': 'marca atualizada',
'Brands': 'marcas',
'Bricks': 'Tijolos',
'Bridge Closed': 'PONTE FECHADA',
'Bucket': 'Balde',
'Buddhist': 'Budista',
'Budget': 'Orçamento',
'Budget Details': 'Detalhes de Orçamento',
'Budget Updated': 'Orçamento Atualizado',
'Budget added': 'Orçamento incluído',
'Budget deleted': 'Orçamento excluído',
'Budget updated': 'Orçamento atualizado',
'Budgeting Module': 'Módulo de Orçamento',
'Budgets': 'Orçamentos',
'Buffer': 'buffer',
'Bug': 'erro',
'Building Assessments': 'Avaliações de construção',
'Building Collapsed': 'Edifício Desabado',
'Building Name': 'Nome do edifício',
'Building Safety Assessments': 'Regras de Segurança do Edifício',
'Building Short Name/Business Name': 'Nome curto/Nome completo do Edifício',
'Building or storey leaning': 'Edifício ou andar em inclinação',
'Built using the Template agreed by a group of NGOs working together as the': 'Construído de acordo com o formulário acordado por um grupo de ONGs',
'Bulk Uploader': 'Carregador em massa',
'Bundle': 'Pacote',
'Bundle Contents': 'Conteúdo do Pacote',
'Bundle Details': 'Detalhes do Pacote',
'Bundle Updated': 'Pacote configurável ATUALIZADO',
'Bundle added': 'Pacote incluído',
'Bundle deleted': 'Pacote Excluído',
'Bundle updated': 'Pacote atualizado',
'Bundles': 'Pacotes',
'Burn': 'Gravar',
'Burn ICU': 'UTI de Queimados',
'Burned/charred': 'Queimados/carbonizados',
'By Facility': 'Por Facilidade',
'By Inventory': 'Por Inventário',
'By Person': 'Por pessoa',
'By Site': 'Por Site',
'CBA Women': 'CBA Mulheres',
'CLOSED': 'CLOSED',
'CN': 'CN',
'CSS file %s not writable - unable to apply theme!': 'Arquivo CSS %s não é gravável - Impossível aplicar o tema!',
'Calculate': 'calcular',
'Camp': 'Acampamento',
'Camp Coordination/Management': 'Campo Coordenação/gestão',
'Camp Details': 'Detalhes do Alojamento',
'Camp Service': 'Serviço de Alojamento',
'Camp Service Details': 'Detalhe do Serviço de Campo',
'Camp Service added': 'Serviço de Alojamento incluído',
'Camp Service deleted': 'Serviço de Alojamento excluído',
'Camp Service updated': 'Serviço de campo atualizado',
'Camp Services': 'Serviço de campo',
'Camp Type': 'Tipo de Campo',
'Camp Type Details': 'Detalhes do tipo de campo',
'Camp Type added': 'Tipo de Campo incluso.',
'Camp Type deleted': 'Tipo de campo excluído.',
'Camp Type updated': 'Tipo De acampamento atualizado',
'Camp Types': 'TIPOS DE acampamento',
'Camp Types and Services': 'Tipos e serviços de acampamentos',
'Camp added': 'Alojamento incluído',
'Camp deleted': 'Alojamento excluído',
'Camp updated': 'Acampamento atualizado',
'Camps': 'Alojamentos',
'Can only disable 1 record at a time!': 'Pode desativar apenas 1 registro por vez!',
'Can only enable 1 record at a time!': 'Can only enable 1 record at a time!',
"Can't import tweepy": 'Não pode importar tweepy',
'Cancel': 'Cancelar',
'Cancel Log Entry': 'Cancelar Registro De Entrada',
'Cancel Shipment': 'Cancelar Embarque',
'Canceled': 'cancelado',
'Candidate Matches for Body %s': 'Candidato Corresponde ao Corpo %s',
'Canned Fish': 'Conservas de Peixe',
'Cannot be empty': 'Não pode ser vazio',
'Cannot disable your own account!': 'Voce não pode desativar sua própria conta!',
'Capacity (Max Persons)': 'Capacidade (Máximo De pessoas)',
'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': 'CAPTURA informações sobre grupos Desastre Vítima (Turistas, passageiros, Famílias, etc. ).',
'Capture Information on each disaster victim': 'Informações de captura em cada vítima Desastre',
'Capturing organizational information of a relief organization and all the projects they have in the region': 'Capturando informações organizacionais de uma organização de ajuda e todos os projetos têm na região',
'Capturing the projects each organization is providing and where': 'Capturando os projetos que cada organização está fornecendo e onde',
'Cardiology': 'Cardiologia',
'Cassava': 'Mandioca',
'Casual Labor': 'Trabalho Casual',
'Casualties': 'Acidentes',
'Catalog': 'catálogo',
'Catalog Details': 'Detalhes do Catálogo',
'Catalog Item': 'Item do catálogo de',
'Catalog Item added': 'Item incluído no catálogo',
'Catalog Item deleted': 'Catálogo de Item excluído',
'Catalog Item updated': 'Item do catálogo de atualização',
'Catalog Items': 'Itens do Catálogo',
'Catalog added': 'Catálogo Incluído',
'Catalog deleted': 'Catálogo excluído',
'Catalog updated': 'Catálogo Atualizado',
'Catalogs': 'Catálogos',
'Categories': 'Categorias',
'Category': 'Categoria',
"Caution: doesn't respect the framework rules!": 'Cuidado: não respeitar as regras de enquadramento!',
'Ceilings, light fixtures': 'Tetos, luminarias',
'Cell Phone': 'Cell Phone',
'Central point to record details on People': 'Ponto Central para registrar detalhes sobre pessoas',
'Certificate': 'Certificate',
'Certificate Catalog': 'Catálogo de Certificados',
'Certificate Details': 'Detalhes do Certificado',
'Certificate Status': 'Status do Certificado',
'Certificate added': 'Certificado incluído',
'Certificate deleted': 'Certificado Removido',
'Certificate updated': 'Certificado Actualizado',
'Certificates': 'Certificados',
'Certification': 'Certificação',
'Certification Details': 'Detalhes da Certificação',
'Certification added': 'Certificação incluída',
'Certification deleted': 'Certificação excluída',
'Certification updated': 'Certificação atualizada',
'Certifications': 'Certificações',
'Certifying Organization': 'Certificação da Organização',
'Change Password': 'Alterar Senha',
'Check': 'Verifique',
'Check Request': 'Verificar Pedido',
'Check for errors in the URL, maybe the address was mistyped.': 'Verifique se há erros na URL, talvez o endereço foi digitado incorretamente.',
'Check if the URL is pointing to a directory instead of a webpage.': 'Verifique se a URL está apontando para um diretório em vez de uma página da Web.',
'Check outbox for the message status': 'Outbox para verificar o status da mensagem',
'Check to delete': 'Verificar para Excluir',
'Check-in': 'Registrar Entrada',
'Check-in at Facility': 'Check-in at Facility',
'Check-out': 'Registrar Saída',
'Checked': 'verificado',
'Checklist': 'lista de verificação',
'Checklist created': 'Lista de verificação criada',
'Checklist deleted': 'Lista de verificação excluída',
'Checklist of Operations': 'Lista de Verificação das Operações',
'Checklist updated': 'Lista de verificação atualizado',
'Chemical Hazard': 'Risco Químico',
'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': 'Ameaça ou ataque Químico, Biológico, Radiológico, Nuclear ou de alto concentração Explosiva',
'Chicken': 'Frango',
'Child': 'Criança',
'Child (2-11)': 'Criança (2-11)',
'Child (< 18 yrs)': 'Criança (< 18 anos)',
'Child Abduction Emergency': 'Emergência de Rapto De Criança',
'Child headed households (<18 yrs)': 'Famílias chefiadas por Filho (<18 anos)',
'Children (2-5 years)': 'Crianças (2 a 5 anos)',
'Children (5-15 years)': 'Crianças (5 a 15 anos)',
'Children (< 2 years)': 'Crianças (< 2 anos)',
'Children in adult prisons': 'Crianças nas prisões para adultos',
'Children in boarding schools': 'Crianças em internatos',
'Children in homes for disabled children': 'Crianças em lares para crianças deficientes',
'Children in juvenile detention': 'Crianças em detenção juvenil',
'Children in orphanages': 'Crianças nos orfanatos',
'Children living on their own (without adults)': 'Crianças vivendo por conta própria (sem adultos)',
'Children not enrolled in new school': 'Crianças não matriculadas em Nova Escola',
'Children orphaned by the disaster': 'Crianças órfãs pela catástrofe',
'Children separated from their parents/caregivers': 'Crianças SEPARADAS de seus pais/responsáveis',
'Children that have been sent to safe places': 'Crianças que foram enviadas para locais seguros',
'Children who have disappeared since the disaster': 'Crianças que desapareceram desde o desastre',
'Chinese (Simplified)': 'Chinese (Simplified)',
'Chinese (Taiwan)': 'Chinês (Taiwan)',
'Cholera Treatment': 'Tratamento da cólera',
'Cholera Treatment Capability': 'Capacidade de Tratamento da Cólera',
'Cholera Treatment Center': 'Centro de Tratamento de Cólera',
'Cholera-Treatment-Center': 'Centro de tratamento de cólera',
'Choose a new posting based on the new evaluation and team judgement. Severe conditions affecting the whole building are grounds for an UNSAFE posting. Localised Severe and overall Moderate conditions may require a RESTRICTED USE. Place INSPECTED placard at main entrance. Post all other placards at every significant entrance.': 'Escolha uma nova alocação baseada na nova avaliação e julgamento do time. Condições severas que afetem o prédio inteiro são base para uma colocação INSEGURA. Grave localizada e no geral condições moderadas podem exigir um USO RESTRITO. Local INSPECCIONADO cartaz na entrada principal. Coloque todos os outros cartazes em cada entrada importante.',
'Christian': 'Cristão',
'Church': 'Igreja',
'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.': 'Circunstâncias do desaparecimento, outras vítimas/testemunhas quais viram pela última vez a pessoa desaparecida viva.',
'City': 'CIDADE',
'Civil Emergency': 'Emergência Civil',
'Cladding, glazing': 'Revestimentos, vidros',
'Click on the link': 'Clique no link',
'Client IP': 'Client IP',
'Climate': 'Climate',
'Clinical Laboratory': 'Laboratório clínico',
'Clinical Operations': 'operações clinicas',
'Clinical Status': 'estado clínico',
'Close map': 'Close map',
'Closed': 'fechado',
'Clothing': 'vestuário',
'Cluster': 'agrupamento',
'Cluster Details': 'Detalhes do Grupo',
'Cluster Distance': 'Distância entre Grupos',
'Cluster Subsector': 'Subsector de Grupos',
'Cluster Subsector Details': 'Detalhes do sub-setor do cluster',
'Cluster Subsector added': 'Subsector de Grupos incluído',
'Cluster Subsector deleted': 'Subsector de Grupos removido',
'Cluster Subsector updated': 'Sub-setores do cluster atualizado',
'Cluster Subsectors': 'Sub-setores do cluster',
'Cluster Threshold': 'Limite do Cluster',
'Cluster added': 'adicionar agrupamento',
'Cluster deleted': 'Grupo removido',
'Cluster updated': 'Cluster atualizado',
'Cluster(s)': 'Grupo(s)',
'Clusters': 'clusters',
'Code': 'Código',
'Cold Wave': 'onda fria',
'Collapse, partial collapse, off foundation': 'Colapso, colapso parcial, fora da fundação',
'Collective center': 'Centro coletivo',
'Color for Underline of Subheadings': 'Cor para Sublinhar de Subposições',
'Color of Buttons when hovering': 'Cor dos botões quando erguidos',
'Color of bottom of Buttons when not pressed': 'Cor da parte inferior dos botões quando não for pressionado',
'Color of bottom of Buttons when pressed': 'Cor da parte de baixo dos botões quando pressionados',
'Color of dropdown menus': 'Cor de menus DROP-',
'Color of selected Input fields': 'Cor dos campos de entrada selecionados',
'Color of selected menu items': 'cor dos ítens selecionados do menu',
'Column Choices (One Per Line': 'Coluna de opções (uma por linha)',
'Columns, pilasters, corbels': 'Colunas, pilastras , cavaletes',
'Combined Method': 'Método combinado',
'Come back later.': 'Volte mais tarde.',
'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': 'Volte mais tarde. Todos que visitam este site esta, provavelmente, enfrentando o mesmo problema que você.',
'Comments': 'Comentários',
'Commercial/Offices': 'Comercial/Escritórios',
'Commit': 'Consolidar',
'Commit Date': 'Commit Data',
'Commit from %s': 'Consolidação de %s',
'Commit. Status': 'Commit. Status',
'Commiting a changed spreadsheet to the database': 'Consolidando uma planilha alterada no banco de dados',
'Commitment': 'Comprometimento',
'Commitment Added': 'Compromisso Incluído',
'Commitment Canceled': 'Compromisso cancelado',
'Commitment Details': 'Detalhes do compromisso',
'Commitment Item': 'Item do compromisso',
'Commitment Item Details': 'Detalhes do item de compromisso',
'Commitment Item added': 'Item de compromisso incluído',
'Commitment Item deleted': 'Item do compromisso excluído',
'Commitment Item updated': 'Compromisso Item atualizado',
'Commitment Items': 'Itens compromisso',
'Commitment Status': 'Empenhamento Status',
'Commitment Updated': 'Compromisso Atualizado',
'Commitments': 'Compromissos',
'Committed': 'Comprometido',
'Committed By': 'Cometido por',
'Committed People': 'Committed People',
'Committed Person Details': 'Committed Person Details',
'Committed Person updated': 'Committed Person updated',
'Committing Inventory': 'Confirmando Inventário',
'Committing Organization': 'Committing Organization',
'Committing Person': 'Committing Person',
'Communication problems': 'Problemas de Comunicação',
'Community Centre': 'Comunidade Centro',
'Community Health Center': 'Centro Comunitário de Saúde',
'Community Member': 'Membro da Comunidade',
'Competencies': 'Competências',
'Competency': 'Competência',
'Competency Details': 'Competência Detalhes',
'Competency Rating Catalog': 'Catálogo de Classificação de Competências',
'Competency Rating Details': 'Detalhes da classificação de competências',
'Competency Rating added': 'Classificação de Habilidades incluída',
'Competency Rating deleted': 'Classificação de competência excluída',
'Competency Rating updated': 'Atualização da classificação de competências',
'Competency Ratings': 'Classificação de competências',
'Competency added': 'Competência incluída',
'Competency deleted': 'Competência excluído',
'Competency updated': 'Competência atualizada',
'Complete': 'Concluir',
'Completed': 'Concluído',
'Complexion': 'Compleição',
'Compose': 'Redigir',
'Compromised': 'Comprometida',
'Concrete frame': 'Quadro concreto',
'Concrete shear wall': 'Parede de cisalhamento de concreto',
'Condition': 'Condição',
'Configurations': 'Configurações',
'Configure Run-time Settings': 'Configurar as configurações de tempo de execução',
'Confirm Shipment Received': 'Confirmar Remessa Recebida',
'Confirmed': 'Confirmado',
'Confirming Organization': 'Confirmando Organização',
'Conflict Details': 'Detalhes Do conflito',
'Conflict Resolution': 'Resolução de Conflito',
'Consignment Note': 'NOTA REMESSA',
'Constraints Only': 'Somente restrições',
'Consumable': 'Consumível',
'Contact': 'contato',
'Contact Data': 'Dados contato',
'Contact Details': 'Detalhes do contato',
'Contact Info': 'Informações de Contato',
'Contact Information': 'Informações de Contato',
'Contact Information Added': 'Informação de contato incluída',
'Contact Information Deleted': 'Informação de contato excluída',
'Contact Information Updated': 'Informações de contato atualizadas',
'Contact Method': 'Método de Contato',
'Contact Name': 'Nome do contato',
'Contact Person': 'Pessoa de Contato',
'Contact Phone': 'Telefone para Contato',
'Contact details': 'Detalhes do contato',
'Contact information added': 'Informações de contato incluídas',
'Contact information deleted': 'Informações de contato excluídas',
'Contact information updated': 'Informações de contato atualizadas',
'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Pessoa(s) a contactar em caso de notícias ou mais perguntas (se for diferente da pessoa que reportou). Incluir número de telefone, endereço e correio electrónico se disponível.',
'Contact us': 'Fale Conosco',
'Contacts': 'contatos',
'Contents': 'Conteúdo',
'Contributor': 'Contribuidor',
'Conversion Tool': 'Ferramenta de Conversão',
'Cooking NFIs': 'Cozinhando NFIs',
'Cooking Oil': 'Cozinhando Óleo',
'Coordinate Conversion': 'COORDENAR a Conversão',
'Coping Activities': 'Atividades de lida',
'Copy': 'copiar',
'Corn': 'Milho',
'Cost Type': 'Tipo de custo',
'Cost per Megabyte': 'Custo por megabyte',
'Cost per Minute': 'Custo por Minuto',
'Country': 'País',
'Country of Residence': 'País de Residência',
'County': 'Município',
'Course': 'Curso',
'Course Catalog': 'Catálogo de Cursos',
'Course Certificate Details': 'Detalhes do Certificado do Curso',
'Course Certificate added': 'Certificado do Curso adicionado',
'Course Certificate deleted': 'Certificado do Curso excluído',
'Course Certificate updated': 'Certificado do Curso atualizado',
'Course Certificates': 'Certificados de Curso',
'Course Certificates': 'Certificados de Curso',
'Course Details': 'Detalhes do curso',
'Course added': 'Curso incluído',
'Course deleted': 'Curso excluído',
'Course updated': 'Curso atualizado',
'Courses': 'Cursos',
'Create & manage Distribution groups to receive Alerts': 'Criar & GERENCIAR grupos de distribuição de receber alertas',
'Create Checklist': 'Criar Lista de Verificação',
'Create Group Entry': 'Criar Grupo De Entrada',
'Create Impact Assessment': 'Criar Avaliação de Impacto',
'Create Mobile Impact Assessment': 'Criar Avaliação de Impacto Movel',
'Create New Asset': 'Criar Novo Recurso',
'Create New Catalog Item': 'Criar Novo Item de Catálogo',
'Create New Event': 'Criar Novo Evento',
'Create New Item Category': 'Criar Nova Categoria de Item',
'Create New Request': 'Criar Novo Pedido',
'Create New Scenario': 'Criar Novo cenário',
'Create New Vehicle': 'Create New Vehicle',
'Create Rapid Assessment': 'Criar Avaliação Rápida',
'Create Request': 'Criar solicitação',
'Create Task': 'Criar Tarefa',
'Create a group entry in the registry.': 'Criar uma entrada de grupo no registro.',
'Create new Office': 'Criar novo escritório',
'Create new Organization': 'Criar nova organização',
'Create, enter, and manage surveys.': 'Criar, digitar e gerenciar pesquisas.',
'Creation of Surveys': 'Criação de Pesquisas',
'Creation of assessments': 'Creation of assessments',
'Credential Details': 'Detalhes da Credencial',
'Credential added': 'Credencial incluída',
'Credential deleted': 'Credencial Excluída',
'Credential updated': 'Credencial ATUALIZADA',
'Credentialling Organization': 'Organização acreditada',
'Credentials': 'credenciais',
'Credit Card': 'Cartão de crédito',
'Crime': 'crime',
'Criteria': 'Critério',
'Currency': 'moeda',
'Current Entries': 'Entradas Atuais',
'Current Group Members': 'Membros do Grupo atual',
'Current Identities': 'Identidades atuais',
'Current Location': 'Posição Atual',
'Current Location Country': 'Current Location Country',
'Current Location Phone Number': 'Current Location Phone Number',
'Current Location Treating Hospital': 'Current Location Treating Hospital',
'Current Log Entries': 'Entradas de Log atuais',
'Current Memberships': 'Participações atuais',
'Current Mileage': 'Current Mileage',
'Current Notes': 'Notes atual',
'Current Records': 'Registros atuais',
'Current Registrations': 'Registros atuais',
'Current Status': 'Status atual',
'Current Team Members': 'Os atuais membros da equipe',
'Current Twitter account': 'Conta atual no Twitter',
'Current community priorities': 'Atuais prioridades da comunidade',
'Current general needs': 'Atuais necessidades gerais',
'Current greatest needs of vulnerable groups': 'Maiores necessidades atuais dos grupos vulneráveis',
'Current health problems': 'Problemas de saúde atuais',
'Current number of patients': 'Número atual de pacientes',
'Current problems, categories': 'Problemas atuais, categorias',
'Current problems, details': 'Problemas atuais, detalhes',
'Current request': 'Pedido atual',
'Current response': 'Resposta atual',
'Current session': 'Sessão atual',
'Currently no Certifications registered': 'Nenhuma certificação registrada atualmente',
'Currently no Competencies registered': 'Nenhuma competência registrada atualmente',
'Currently no Course Certificates registered': 'Nenhum Curso Certificado registrado atualmente',
'Currently no Credentials registered': 'Nenhuma credencial registrada atualmente',
'Currently no Missions registered': 'Nenhuma missão registrada atualmente',
'Currently no Skill Equivalences registered': 'Nenhuma equivelência de habilidade registrada atualmente',
'Currently no Skills registered': 'Currently no Skills registered',
'Currently no Trainings registered': 'Atualmente não há treinamentos registrados',
'Currently no entries in the catalog': 'Nenhuma entrada no catálogo atualmente',
'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'Bnaco de Dados customizado de Recursos (por exemplo, nada definido como recurso no Sahana)',
'DC': 'DC',
'DNA Profile': 'Perfil de DNA',
'DNA Profiling': 'Perfil de DNA',
'DVI Navigator': 'Navegador DVI',
'Dam Overflow': 'Transbordamento de Barragem',
'Damage': 'dano',
'Dangerous Person': 'Pessoa perigosa',
'Dashboard': 'Painel',
'Data': 'Dados',
'Data Type': 'Data Type',
'Data uploaded': 'Dados carregados',
'Database': 'DATABASE',
'Date': 'date',
'Date & Time': 'Date & Time',
'Date Available': 'Data Disponível',
'Date Received': 'Data do recebimento',
'Date Requested': 'Data do pedido',
'Date Required': 'Data Necessária',
'Date Sent': 'Data de Envio',
'Date Until': 'Data Até',
'Date and Time': 'Data e Hora',
'Date and time this report relates to.': 'Data e hora relacionadas a este relatório.',
'Date of Birth': 'Data de Nascimento',
'Date of Latest Information on Beneficiaries Reached': 'Data da última informação sobre Beneficiários Alcançado',
'Date of Report': 'Data do relatório',
'Date of Treatment': 'Date of Treatment',
'Date/Time': 'data/hora',
'Date/Time of Find': 'Pesquisa de data/hora',
'Date/Time of disappearance': 'Data/hora do desaparecimento',
'Date/Time when found': 'Data/hora quando foi encontrado',
'Date/Time when last seen': 'Data/ hora em que foi visto pela última vez',
'De-duplicator': 'Anti duplicador',
'Dead Bodies': 'Dead Bodies',
'Dead Body': 'Cadáver',
'Dead Body Details': 'Detalhes do Cadáver',
'Dead Body Reports': 'Relatórios de Cadáver',
'Dead body report added': 'Relatório de cadaver incluso.',
'Dead body report deleted': 'Relatório de cadáver excluído.',
'Dead body report updated': 'Relatório de cadáver atualizado',
'Deaths in the past 24h': 'Mortes nas últimas 24 horas',
'Deaths/24hrs': 'Mortes/24hrs',
'Decimal Degrees': 'Graus decimais',
'Decision': 'DECISÃO',
'Decomposed': 'Decomposto',
'Default Height of the map window.': 'Altura Padrão da janela do mapa.',
'Default Location': 'Default Location',
'Default Map': 'Mapa padrão',
'Default Marker': 'Padrão de mercado',
'Default Width of the map window.': 'Padrão de largura da janela do mapa.',
'Default synchronization policy': 'Política de sincronização de padrão',
'Defecation area for animals': 'Área de defecação para animais',
'Define Scenarios for allocation of appropriate Resources (Human, Assets & Facilities).': 'Cenários De definir para alocação adequado de recursos (humanos, Ativos & instalações).',
'Defines the icon used for display of features on handheld GPS.': 'Define o ícone utilizado para exibição de recursos no GPS portátil.',
'Defines the icon used for display of features on interactive map & KML exports.': 'Define o ícone utilizado para exibição de recursos no mapa interativo & exportações KML.',
'Defines the marker used for display & the attributes visible in the popup.': 'Define o marcador utilizado para exibir & os atributos visíveis no pop-up.',
'Degrees must be a number between -180 and 180': 'Os graus devem ser um número entre -180 e 180',
'Dehydration': 'Desidratação',
'Delete': 'Excluir',
'Delete Alternative Item': 'EXCLUIR Item Alternativo',
'Delete Assessment': 'Excluir Avaliação',
'Delete Assessment Summary': 'Excluir Resumo da Avaliação',
'Delete Asset': 'Excluir Ativo',
'Delete Asset Assignment': 'Excluir o recurso designado',
'Delete Asset Log Entry': 'EXCLUIR recurso de entrada de Log',
'Delete Baseline': 'apagar linha base',
'Delete Baseline Type': 'apagar tipo de linha base',
'Delete Brand': 'apagar marca',
'Delete Budget': 'apagar orçamento',
'Delete Bundle': 'apagar pacote',
'Delete Catalog': 'Excluir o Catálogo',
'Delete Catalog Item': 'apagar item do catálogo',
'Delete Certificate': 'Excluir Certificado',
'Delete Certification': 'Excluir Certificação',
'Delete Cluster': 'Exclui Cluster',
'Delete Cluster Subsector': 'EXCLUIR Cluster Subsector',
'Delete Commitment': 'Excluir Compromisso',
'Delete Commitment Item': 'Excluir Item de Compromisso',
'Delete Competency': 'Excluir Competência',
'Delete Competency Rating': 'Excluir Classificação da Competência',
'Delete Contact Information': 'Excluir Informações de Contato',
'Delete Course': 'Excluir Curso',
'Delete Course Certificate': 'Excluir Certificado do Curso',
'Delete Credential': 'Excluir Credencial',
'Delete Document': 'Excluir documento',
'Delete Donor': 'EXCLUIR Dador',
'Delete Entry': 'Excluir Entrada',
'Delete Event': 'Excluir Evento',
'Delete Feature Class': 'Excluir Classe de Recurso',
'Delete Feature Layer': 'Excluir Camada de Componentes',
'Delete GPS data': 'Delete GPS data',
'Delete Group': 'Excluir Grupo',
'Delete Home': 'Delete Home',
'Delete Hospital': 'Excluir Hospital',
'Delete Image': 'Excluir Imagem',
'Delete Impact': 'Excluir Impacto',
'Delete Impact Type': 'Excluir Tipo De Impacto',
'Delete Incident Report': 'Excluir Relatório de Incidentes',
'Delete Inventory Item': 'EXCLUIR Item De Inventário',
'Delete Item': 'Excluir Item',
'Delete Item Category': 'EXCLUIR categoria de Itens',
'Delete Item Pack': 'EXCLUIR Pacote de Itens',
'Delete Job Role': 'Excluir Cargo',
'Delete Key': 'Tecla de exclusão',
'Delete Kit': 'EXCLUIR Kit',
'Delete Layer': 'Excluir Camada',
'Delete Level 1 Assessment': 'EXCLUIR Nível 1 Avaliação',
'Delete Level 2 Assessment': 'EXCLUIR Nível 2 Avaliação',
'Delete Location': 'Excluir locação',
'Delete Map Configuration': 'EXCLUIR Mapa de configuração',
'Delete Marker': 'EXCLUIR Marcador',
'Delete Membership': 'Excluir membro',
'Delete Message': 'Excluir mensagem',
'Delete Mission': 'EXCLUIR Missão',
'Delete Need': 'Excluir necessidades',
'Delete Need Type': 'Excluir tipos de necessidades',
'Delete Office': 'Excluir escritório',
'Delete Organization': 'Excluir organização',
'Delete Patient': 'Delete Patient',
'Delete Peer': 'Excluir par',
'Delete Person': 'excluir pessoa',
'Delete Photo': 'Excluir Foto',
'Delete Population Statistic': 'EXCLUIR População Estatística',
'Delete Position': 'EXCLUIR Posição',
'Delete Project': 'Excluir Projeto',
'Delete Projection': 'Excluir Projeção',
'Delete Rapid Assessment': 'Excluir Avaliação Rápida',
'Delete Received Item': 'Excluir Item Recebido',
'Delete Received Shipment': 'Excluir Embarque Recebido',
'Delete Record': 'Excluir Registro',
'Delete Relative': 'Delete Relative',
'Delete Report': 'Excluir Relatório',
'Delete Request': 'Excluir Solicitação',
'Delete Request Item': 'Excluir item de solicitação',
'Delete Resource': 'Excluir Recurso',
'Delete Room': 'Excluir Sala',
'Delete Scenario': 'Excluir Cenário',
'Delete Section': 'Excluir seção',
'Delete Sector': 'Excluir Setor',
'Delete Sent Item': 'Excluir Item Enviado',
'Delete Sent Shipment': 'Excluir Embarque Enviado',
'Delete Service Profile': 'Excluir perfil de serviço',
'Delete Setting': 'Excluir Definição',
'Delete Skill': 'Excluir habilidade',
'Delete Skill Equivalence': 'Excluir equivalência de habilidade',
'Delete Skill Provision': 'Excluir Provisão de Habilidade',
'Delete Skill Type': 'Excluir Tipo de Habilidade',
'Delete Staff Type': 'Excluir Tipo De Equipe',
'Delete Status': 'Excluir Posição/Estado',
'Delete Subscription': 'Excluir assinatura',
'Delete Subsector': 'Excluir subsetor',
'Delete Survey Answer': 'Excluir reposta da pesquisa',
'Delete Survey Question': 'Excluir pergunta da pesquisa',
'Delete Survey Section': 'Excluir seção da pesquisa',
'Delete Survey Series': 'Excluir série da pesquisa',
'Delete Survey Template': 'Excluir modelo da pesquisa',
'Delete Training': 'Excluir Treinamento',
'Delete Unit': 'Excluir Unidade',
'Delete User': 'Excluir usuário',
'Delete Vehicle': 'Delete Vehicle',
'Delete Vehicle Details': 'Delete Vehicle Details',
'Delete Volunteer': 'EXCLUIR Voluntário',
'Delete Warehouse': 'Excluír Armazém',
'Delete from Server?': 'Excluir do Servidor?',
'Delphi Decision Maker': 'tomador de decisão Delphi',
'Demographic': 'Demográfico',
'Demonstrations': 'Demonstrações',
'Dental Examination': 'Exame Dentário',
'Dental Profile': 'Perfil Dentário',
'Deployment Location': 'Deployment Location',
'Describe the condition of the roads to your hospital.': 'Descreva as condições da estrada até o seu hospital.',
'Describe the procedure which this record relates to (e.g. "medical examination")': 'Descreva o procedimento ao qual este registro está relacionado (Ex: "exame médico")',
'Description': 'Descrição',
'Description of Contacts': 'Descrição dos Contatos',
'Description of defecation area': 'Descrição da área de defecação',
'Description of drinking water source': 'Descrição da fonte de água potável',
'Description of sanitary water source': 'Descrição da fonte de água sanitária',
'Description of water source before the disaster': 'Descrição da fonte de água antes do desastre',
'Descriptive Text (e.g., Prose, etc)': 'Texto Descritivo (por exemplo, Prosa, etc.)',
'Desire to remain with family': 'O desejo de permanecer com a família',
'Destination': 'destino',
'Destroyed': 'Destruído',
'Details': 'detalhes',
'Details field is required!': 'Campo de detalhes é obrigatório!',
'Dialysis': 'Diálise',
'Diaphragms, horizontal bracing': 'Diafragmas, interditará horizontal',
'Diarrhea': 'Diarréia',
'Dignitary Visit': 'Visita de Dignatários',
'Direction': 'Endereço',
'Disable': 'Desativar',
'Disabled': 'desativado',
'Disabled participating in coping activities': 'Deficiente participando de enfrentamento',
'Disabled?': 'Desativado?',
'Disaster Victim Identification': 'Identificação de Vítima de Desastre',
'Disaster Victim Registry': 'Registro de Vítima de Desastre',
'Disaster clean-up/repairs': 'Desastre limpeza/reparos',
'Discharge (cusecs)': 'Quitação (cusecs)',
'Discharges/24hrs': 'Descargas/24horas',
'Discussion Forum': 'Fórum de Discussão',
'Discussion Forum on item': 'Fórum de discussão do item',
'Disease vectors': 'Vectores doença',
'Dispensary': 'Dispensário',
'Displaced': 'Deslocadas',
'Displaced Populations': 'Populações deslocadas',
'Display Polygons?': 'exibir Polígonos?',
'Display Routes?': 'Exibir Rotas?',
'Display Tracks?': 'exibir Trilhas?',
'Display Waypoints?': 'Exibir Rota?',
'Distance between defecation area and water source': 'Distância entre área de esgoto e fonte de água',
'Distance from %s:': 'Distância de %s:',
'Distance(Kms)': 'Distância(Kms)',
'Distribution': 'Distribuição de',
'Distribution groups': 'Grupos de distribuição',
'District': 'Distrito',
'Do you really want to delete these records?': 'Você realmente deseja excluir esses registros?',
'Do you want to cancel this received shipment? The items will be removed from the Inventory. This action CANNOT be undone!': 'Você deseja cancelar este carregamento que foi recebido? Os itens serão removidos do inventário. Esta ação não pode ser desfeita!',
'Do you want to cancel this sent shipment? The items will be returned to the Inventory. This action CANNOT be undone!': 'Você deseja cancelar esse carregamento enviado? Os itens serão retornados para o inventário. Esta ação não pode ser desfeita!',
'Do you want to receive this shipment?': 'Você deseja receber esse carregamento?',
'Do you want to send these Committed items?': 'Você deseja enviar esses itens Consolidados?',
'Do you want to send this shipment?': 'Você deseja enviar este carregamento?',
'Document': 'documento',
'Document Details': 'Detalhes do Documento',
'Document Scan': 'Scanear Documento',
'Document added': 'Documento incluído',
'Document deleted': 'Documento excluído',
'Document removed': 'Document removed',
'Document updated': 'Documento Atualizado',
'Documents': 'Documentos',
'Documents and Photos': 'Documentos e Fotos',
'Does this facility provide a cholera treatment center?': 'Esta facilidade proporciona um centro de tratamento da cólera?',
'Doing nothing (no structured activity)': 'Fazendo nada (sem atividade estruturada)',
'Dollars': 'dólares',
'Domain': 'domínio',
'Domestic chores': 'Afazeres domésticos',
'Donated': 'Doado',
'Donation Certificate': 'Certificado de doaçao',
'Donation Phone #': 'Número de Telefone de doaçao',
'Donor': 'Dador',
'Donor Details': 'Doador Detalhes',
'Donor added': 'Doador incluído',
'Donor deleted': 'Doador excluído',
'Donor updated': 'Doador ATUALIZADO',
'Donors': 'Doadores',
'Donors Report': 'Relatório de Doadores',
'Door frame': 'Quadro de porta',
'Download PDF': 'Fazer download do PDF',
'Download Template': 'Download Template',
'Draft': 'rascunho',
'Drainage': 'Drenagem',
'Drawing up a Budget for Staff & Equipment across various Locations.': 'Elaborar um orçamento para Equipe & Equipamento de vários locais.',
'Drill Down by Group': 'Detalhar por grupo',
'Drill Down by Incident': 'Detalhar por incidente',
'Drill Down by Shelter': 'Detalhar por abrigo',
'Driving License': 'Carteira de Motorista',
'Drought': 'Seca',
'Drugs': 'Drogas',
'Dug Well': 'Cavaram Bem',
'Duplicate?': 'Duplicado?',
'Duration': 'Duração',
'Dust Storm': 'Tempestade de Poeira',
'Dwelling': 'Habitação',
'Dwellings': 'Habitações',
'E-mail': 'Correio eletrônico',
'EMS Reason': 'Razão EMS',
'EMS Status': 'EMS Status',
'ER Status': 'ER Status',
'ER Status Reason': 'Razão ER Status',
'EXERCISE': 'EXERCISE',
'Early Recovery': 'Início De Recuperação',
'Earth Enabled?': 'Earth Enabled?',
'Earthquake': 'Terremotos',
'Edit': 'Editar',
'Edit Activity': 'Editar Atividade',
'Edit Address': 'Editar Endereço',
'Edit Alternative Item': 'Editar Item Alternativo',
'Edit Application': 'Editar Aplicação',
'Edit Assessment': 'Editar avaliação',
'Edit Assessment Summary': 'Editar resumo da avaliação',
'Edit Asset': 'Editar recurso',
'Edit Asset Assignment': 'Editar designação do recurso',
'Edit Asset Log Entry': 'EDITAR ENTRADA DE Log de ATIVOs',
'Edit Baseline': 'Editar base de avaliação',
'Edit Baseline Type': 'Editar tipo de base de avaliação',
'Edit Brand': 'Editar marca',
'Edit Budget': 'Editar orçamento',
'Edit Bundle': 'Editar Pacote',
'Edit Camp': 'EDITAR acampamento',
'Edit Camp Service': 'EDITAR Serviço de acampamento',
'Edit Camp Type': 'Editar Tipo de Campo',
'Edit Catalog': 'Editar catálogo',
'Edit Catalog Item': 'Editar item do catálogo',
'Edit Certificate': 'Editar Certificado',
'Edit Certification': 'Editar Certificação',
'Edit Cluster': 'Editar grupo',
'Edit Cluster Subsector': 'Editar subgrupo',
'Edit Commitment': 'Editar compromisso',
'Edit Commitment Item': 'Editar Item De Compromisso',
'Edit Committed Person': 'Edit Committed Person',
'Edit Competency': 'Editar Competência',
'Edit Competency Rating': 'Editar Classificação da Competência',
'Edit Contact': 'Editar Contato',
'Edit Contact Information': 'Editar Informações de Contato',
'Edit Contents': 'Editar Conteúdo',
'Edit Course': 'Editar Curso',
'Edit Course Certificate': 'Editar Certificado de Curso',
'Edit Credential': 'Editar Credencial',
'Edit Dead Body Details': 'Editar Detalhes do Cadáver',
'Edit Description': 'Editar Descrição',
'Edit Details': 'Editar detalhes',
'Edit Disaster Victims': 'Editar vítimas do desastre',
'Edit Document': 'Editar documento',
'Edit Donor': 'Editar Doador',
'Edit Email Settings': 'Editar As Configurações De E-Mail',
'Edit Entry': 'Editar Entrada',
'Edit Event': 'Editar evento',
'Edit Facility': 'Editar recurso',
'Edit Feature Class': 'EDITAR CLASSE DE RECURSO',
'Edit Feature Layer': 'Editar Recurso Camada',
'Edit Flood Report': 'Editar Relatório de Enchente',
'Edit GPS data': 'Edit GPS data',
'Edit Gateway Settings': 'Editar Configurações de Gateway',
'Edit Group': 'Grupo de edição',
'Edit Home': 'Edit Home',
'Edit Hospital': 'Editar Hospital',
'Edit Human Resource': 'Editar Recursos Humanos',
'Edit Identification Report': 'Editar Relatório de identificação',
'Edit Identity': 'Editar Identidade',
'Edit Image': 'Editar Imagem',
'Edit Image Details': 'Editar Detalhes da Imagem',
'Edit Impact': 'Editar Impacto',
'Edit Impact Type': 'Editar Tipo De Impacto',
'Edit Import File': 'Edit Import File',
'Edit Incident Report': 'Editar Relatório de Incidente',
'Edit Inventory Item': 'Editar Item De Inventário',
'Edit Item': 'Editar Item',
'Edit Item Category': 'Editar Item de categoria',
'Edit Item Pack': 'Editar Pacote de Itens',
'Edit Job Role': 'Editar cargo',
'Edit Key': 'Editar Tecla',
'Edit Kit': 'Editar Kit',
'Edit Layer': 'Editar Camada',
'Edit Level %d Locations?': 'Editar Locais de Nível %d?',
'Edit Level 1 Assessment': 'Editar Avaliação Nível 1',
'Edit Level 2 Assessment': 'Editar nível 2 de acesso',
'Edit Location': 'Local de edição',
'Edit Log Entry': 'EDITAR ENTRADA DE Log',
'Edit Map Configuration': 'Editar Mapa de configuração',
'Edit Map Services': 'Editar mapa de serviços',
'Edit Marker': 'Marcador de Edição',
'Edit Membership': 'Editar inscrição',
'Edit Message': 'Editar mensagem',
'Edit Messaging Settings': 'Editar Configurações De Mensagens',
'Edit Mission': 'Editar Missão',
'Edit Modem Settings': 'Editar Configurações Do Modem',
'Edit Need': 'Ediçao Necessária',
'Edit Need Type': 'Editar tipo de necessidade',
'Edit Note': 'Editar nota',
'Edit Office': 'Escritório de edição',
'Edit Options': 'Opções de edição',
'Edit Organization': 'Organizar edições',
'Edit Parameters': 'Parametros de edição',
'Edit Patient': 'Edit Patient',
'Edit Peer Details': 'Detalhes do par editado',
'Edit Person Details': 'Editar detalhes pessoais',
'Edit Personal Effects Details': 'Editar detalhes de objectos pessoais',
'Edit Photo': 'Editar Foto',
'Edit Population Statistic': 'Editar Estatística da População',
'Edit Position': 'Editar Posição',
'Edit Problem': 'Editar Problema',
'Edit Project': 'Editar Projecto',
'Edit Projection': 'Editar Projeção',
'Edit Rapid Assessment': 'Editar Rápida Avaliação',
'Edit Received Item': 'Editar Item Recebido',
'Edit Received Shipment': 'Editar Embarque Recebido',
'Edit Record': 'Editar Registro',
'Edit Registration': 'Editar Registro',
'Edit Registration Details': 'Editar Detalhes De Registro',
'Edit Relative': 'Edit Relative',
'Edit Report': 'Editar Relatório',
'Edit Request': 'Editar Pedido',
'Edit Request Item': 'Editar Item Pedido',
'Edit Requested Skill': 'Edit Requested Skill',
'Edit Resource': 'Editar Recurso',
'Edit River': 'EDITAR RIO',
'Edit Role': 'Editar Função',
'Edit Room': 'Editar Sala',
'Edit SMS Settings': 'Edit SMS Settings',
'Edit SMTP to SMS Settings': 'Edit SMTP to SMS Settings',
'Edit Scenario': 'Editar cenário',
'Edit Sector': 'Editar Setor',
'Edit Sent Item': 'Editar Item Enviado',
'Edit Setting': 'Editar Definição',
'Edit Settings': 'Editar Configurações',
'Edit Shelter': 'EDITAR ABRIGO',
'Edit Shelter Service': 'Editar Serviço de Abrigo',
'Edit Shelter Type': 'EDITAR Tipo De Abrigo',
'Edit Skill': 'editar competência',
'Edit Skill Equivalence': 'Editar Equivalência de Habilidade',
'Edit Skill Provision': 'Editar Habilidade de Fornecimento',
'Edit Skill Type': 'editar tipo de competência',
'Edit Solution': 'editar solução',
'Edit Staff': 'editar pessoal',
'Edit Staff Member Details': 'Editar detalhes do membro da equipe',
'Edit Staff Type': 'EDITAR Tipo De Equipe',
'Edit Subscription': 'Editar assinatura',
'Edit Subsector': 'EDITAR Subsector',
'Edit Survey Answer': 'Editar resposta da pesquisa',
'Edit Survey Question': 'Editar pergunta da pesquisa',
'Edit Survey Section': 'EDITAR Seção de Pesquisa',
'Edit Survey Series': 'EDITAR Pesquisa de Série',
'Edit Survey Template': 'EDITAR MODELO DE PESQUISA',
'Edit Task': 'Editar Tarefa',
'Edit Team': 'Editar equipe',
'Edit Theme': 'Editar tema',
'Edit Themes': 'EDITAR TEMAs',
'Edit Ticket': 'EDITAR Bilhete',
'Edit Track': 'EDITAR RASTREAMENTO',
'Edit Training': 'Editar Treinamento',
'Edit Tropo Settings': 'Editar Configurações Tropo',
'Edit User': 'Editar Usuário',
'Edit Vehicle': 'Edit Vehicle',
'Edit Vehicle Details': 'Edit Vehicle Details',
'Edit Volunteer Availability': 'Editar Disponibilidade de Voluntário',
'Edit Volunteer Details': 'Editar Detalhes de Voluntário',
'Edit Warehouse': 'Editar Armazém',
'Edit Web API Settings': 'Edit Web API Settings',
'Edit current record': 'Editar Registro Atual',
'Edit message': 'Editar mensagem',
'Edit the Application': 'Editar a Aplicação',
'Editable?': 'Editável?',
'Education': 'Educação',
'Education materials received': 'Materiais de educação recebido',
'Education materials, source': 'materiais de Educação, origem',
'Effects Inventory': 'Inventário de efeitos',
'Eggs': 'Ovos',
'Either a shelter or a location must be specified': 'Um abrigo ou um local deve ser especificado',
'Either file upload or document URL required.': 'Um arquivo de upload ou URL do documento são necessários.',
'Either file upload or image URL required.': 'Um arquivo de upload ou URL de imagem são necessárias.',
'Elderly person headed households (>60 yrs)': 'Chefes de Familia de idade avançada (>60 anos)',
'Electrical': 'Elétrico',
'Electrical, gas, sewerage, water, hazmats': 'Elétrica, gás, esgotos, água, hazmats',
'Elevated': 'Elevado',
'Elevators': 'Elevadores',
'Email': 'E-MAIL',
'Email Address': 'Endereço de e-mail',
'Email Address to which to send SMS messages. Assumes sending to phonenumber@address': 'Email Address to which to send SMS messages. Assumes sending to phonenumber@address',
'Email Settings': 'Configurações de e-mail',
'Email settings updated': 'As configurações de e-mail atualizado',
'Embalming': 'Embalsamento',
'Embassy': 'Embaixada',
'Emergency Capacity Building project': 'Plano de emergência de capacidade dos prédios',
'Emergency Department': 'Departamento de Emergência',
'Emergency Shelter': 'Abrigo de Emergência',
'Emergency Support Facility': 'Recurso De Suporte de emergência',
'Emergency Support Service': 'Suporte do Serviço de Emergência',
'Emergency Telecommunications': 'Emergência De Telecomunicações',
'Enable': 'Enable',
'Enable/Disable Layers': 'Ativar/Desativar Camadas',
'Enabled': 'Habilitado',
'Enabled?': 'Enabled?',
'Enabling MapMaker layers disables the StreetView functionality': 'Enabling MapMaker layers disables the StreetView functionality',
'End Date': 'Data de encerramento',
'End date': 'Data de Término',
'End date should be after start date': 'Data Final deve ser maior do que a data de início',
'End of Period': 'Fim de Período',
'English': 'Inglês',
'Enter Coordinates:': 'Entre as coordenadas:',
'Enter a GPS Coord': 'Digite uma Coordada GPS',
'Enter a name for the spreadsheet you are uploading (mandatory).': 'Digite um nome para a planilha que está fazendo Upload (obrigatório).',
'Enter a name for the spreadsheet you are uploading.': 'Enter a name for the spreadsheet you are uploading.',
'Enter a new support request.': 'Digite um pedido novo de suporte.',
'Enter a unique label!': 'Digite um rótulo exclusivo!',
'Enter a valid date before': 'Digite uma data válida antes de',
'Enter a valid email': 'Insira um email válido',
'Enter a valid future date': 'Digite uma data futura válida',
'Enter a valid past date': 'Enter a valid past date',
'Enter some characters to bring up a list of possible matches': 'Digite alguns caracteres para trazer uma lista de correspondências possíveis',
'Enter some characters to bring up a list of possible matches.': 'Digite alguns caracteres para trazer uma lista de correspondências possíveis.',
'Enter tags separated by commas.': 'Insira as tags separadas por vírgulas.',
'Enter the data for an assessment': 'Enter the data for an assessment',
'Enter the same password as above': 'Digite a mesma senha acima',
'Enter your firstname': 'Enter your firstname',
'Enter your organization': 'Enter your organization',
'Entered': 'Inserido',
'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': 'Digitar um número de telefone é opcional, mas ao fazer isto permite a voçe se registrar para receber mensagens SMS.',
'Entry deleted': 'Entrada removida',
'Environment': 'Ambiente do',
'Equipment': 'Equipamento',
'Error encountered while applying the theme.': 'Erro encontrado ao aplicar o tema.',
'Error in message': 'Erro na mensagem',
'Error logs for "%(app)s"': 'Registro de erros de "%(app)s"',
'Error: no such record': 'Erro: nenhum registro',
'Errors': 'Erros',
'Est. Delivery Date': 'Est. Data de entrega',
'Estimated # of households who are affected by the emergency': '# estimado das famílias que são afetados pela emergência',
'Estimated # of people who are affected by the emergency': '# estimado de pessoas que são afetados pela emergência',
'Estimated Overall Building Damage': 'Dano total de construção estimado',
'Estimated total number of people in institutions': 'Número total estimado de pessoas em instituições',
'Euros': 'Euros',
'Evacuating': 'abandono',
'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'Valide as informações desta mensagem. (Este valor não deve ser utilizado em aplicações de aviso público. ).',
'Event': 'Evento',
'Event Details': 'Detalhes do evento',
'Event added': 'Evento incluído',
'Event deleted': 'Evento excluído',
'Event updated': 'Evento atualizado',
'Events': 'eventos',
'Example': 'Exemplo:',
'Exceeded': 'Excedido',
'Excellent': 'Excelente',
'Exclude contents': 'Excluir conteúdo',
'Excreta disposal': 'Eliminação de dejetos',
'Execute a pre-planned activity identified in <instruction>': 'Executar uma atividade pré-planejada identificada no',
'Exercise': 'Excercício',
'Exercise?': 'Exercício ?',
'Exercises mean all screens have a watermark & all notifications have a prefix.': "Exercícios significa que todas as telas têm uma marca d'água & todas as comunicações têm um prefixo.",
'Existing Placard Type': 'Cartaz existente Tipo',
'Existing Sections': 'Existing Sections',
'Existing food stocks': 'Estoques de alimentos existente',
'Existing location cannot be converted into a group.': 'Local Existente não pode ser convertido em um grupo.',
'Exits': 'Saídas',
'Expected Return Home': 'Expected Return Home',
'Experience': 'Experiência',
'Expiry Date': 'Data de expiração',
'Explosive Hazard': 'Perigo explosivo',
'Export': 'Exportar',
'Export Data': 'Exportar dados.',
'Export Database as CSV': 'Exportar o banco de dados como CSV',
'Export in GPX format': 'Exportar no formato GPX',
'Export in KML format': 'Exportar no formato KML',
'Export in OSM format': 'Exportar no formato OSM',
'Export in PDF format': 'Exportar no formato PDF',
'Export in RSS format': 'Exportar no formato RSS',
'Export in XLS format': 'Exportar no formato XLS',
'Exterior Only': 'Exterior Apenas',
'Exterior and Interior': 'Exterior e Interior',
'Eye Color': 'Cor dos Olhos',
'Facebook': 'Facebook',
'Facial hair, color': 'Cabelo Facial, cor',
'Facial hair, type': 'Cabelo Facial, digite',
'Facial hear, length': 'Facial ouvir, COMPRIMENTO',
'Facilities': 'Instalações',
'Facility': 'Instalação',
'Facility Details': 'Detalhes da Instalação',
'Facility Operations': 'Facilidades nas Operações',
'Facility Status': 'Status Facility',
'Facility Type': 'Tipo de Instalação',
'Facility added': 'Instalação incluída',
'Facility or Location': 'Instalação ou Local',
'Facility removed': 'Recurso removido',
'Facility updated': 'Recurso atualizado',
'Fail': 'Falha',
'Failed!': 'Falha!',
'Fair': 'Razoável',
'Falling Object Hazard': 'Queda Objeto Risco',
'Families/HH': 'Famílias/HH',
'Family': 'Familia',
'Family tarpaulins received': 'lonas de familia recebidas',
'Family tarpaulins, source': 'lonas de familia, fuente',
'Family/friends': 'Família/amigos',
'Farmland/fishing material assistance, Rank': 'TERRAS/assistência de material de Pesca, posição',
'Fatalities': 'Fatalidades',
'Fax': 'Número do Fax',
'Feature Class': 'Classe de Recursos',
'Feature Class Details': 'Detalhes da classe de recurso',
'Feature Class added': 'Classe de Recurso incluída',
'Feature Class deleted': 'Classe de recurso excluída',
'Feature Class updated': 'Classe De recurso atualizada',
'Feature Classes': 'Classes de Recursos',
'Feature Classes are collections of Locations (Features) of the same type': 'Classes De recurso são grupos de localidades (recursos) do mesmo tipo',
'Feature Layer Details': 'Recurso Camada Detalhes',
'Feature Layer added': 'Recurso Camada incluída',
'Feature Layer deleted': 'Recurso Camada excluído',
'Feature Layer updated': 'Recurso Camada atualizada',
'Feature Layers': 'Camadas recurso',
'Feature Namespace': 'Espaço De recurso',
'Feature Request': 'Pedido de Componente',
'Feature Type': 'Tipo de Componente',
'Features Include': 'Componentes Incluidos',
'Female': 'Sexo Feminino',
'Female headed households': 'Famílias chefiadas por mulheres',
'Few': 'Poucos',
'Field': 'Campo',
'Field Hospital': 'Campo Hospital',
'File': 'arquivo',
'File Imported': 'File Imported',
'File Importer': 'File Importer',
'File name': 'File name',
'Fill in Latitude': 'Preencher na Latitude',
'Fill in Longitude': 'Preencher na Longitude',
'Filter': 'Filtro',
'Filter Field': 'Filtro de Campo',
'Filter Value': 'Filtro de Valor',
'Find': 'Localizar',
'Find All Matches': 'Localizar todos os equivalentes',
'Find Dead Body Report': 'Localizar Relatório de Cadáver',
'Find Hospital': 'Localizar Hospital',
'Find Person Record': 'Localizar registro de pessoa',
'Find Volunteers': 'Localizar Voluntários',
'Find a Person Record': 'Localizar um Registro de Pessoa',
'Finder': 'Localizador',
'Fingerprint': 'Impressão digital',
'Fingerprinting': 'Impressões digitais',
'Fingerprints': 'Impressões Digitais',
'Finish': 'Terminar',
'Finished Jobs': 'Tarefa Terminada',
'Fire': 'Fogo',
'Fire suppression and rescue': 'Supressão e salvamento de incêndio',
'First Name': 'Primeiro Nome',
'First name': 'Primeiro Nome',
'Fishing': 'Pesca',
'Flash Flood': 'Enchente',
'Flash Freeze': 'congelar o momento',
'Flexible Impact Assessments': 'Flexibilidade no Impacto de avaliações',
'Flood': 'Enchente',
'Flood Alerts': 'Alertas de Enchente',
'Flood Alerts show water levels in various parts of the country': 'Os alertas de inundação mostram o nível da água em várias partes do país',
'Flood Report': 'Relatório de Inundação',
'Flood Report Details': 'Detalhes do Relatório de Inundação',
'Flood Report added': 'Relatório de Inundação incluído',
'Flood Report deleted': 'Relatório de Inundação removido',
'Flood Report updated': 'Relatório de Inundação actualizado',
'Flood Reports': 'Relatórios de Inundação',
'Flow Status': 'posição de fluxo',
'Focal Point': 'Ponto Central',
'Fog': 'Nevoeiro',
'Food': 'Food',
'Food Supply': 'Alimentação',
'Food assistance': 'Ajuda alimentar',
'Footer': 'Rodapé',
'Footer file %s missing!': 'Arquivo de rodapé %s ausente!',
'For': 'Por',
'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).': 'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).',
'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.': 'Para um país este seria o código ISO2, para uma cidade, este seria o codigo do aeroporto (UNE/Locode).',
'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'Para cada parceiro de sincronização, há uma tarefa de sincronização padrão que é executada após um intervalo de tempo especificado. Você também pode configurar mais tarefas de sincronização que podem ser customizadas de acordo com as suas necessidades. Clique no link à direita para começar.',
'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'Para segurança reforçada, é recomendável digitar um nome de usuário e senha, e notificar os administradores de outras máquinas em sua organização para incluir esse usuário e senha no UUID em Sincronização -> Parceiros De Sincronização',
'For live help from the Sahana community on using this application, go to': 'Para ajuda ao vivo da comunidade do Sahana sobre como utilizar esse aplicativo, vá para',
'For messages that support alert network internal functions': 'Para mensagens que suportam funções internas de alertas de rede',
'For more details on the Sahana Eden system, see the': 'Para obter mais detalhes sobre o sistema Sahana Eden, consulte o',
'For more information, see': 'Para obter mais informações, consulte o',
'For more information, see ': 'For more information, see ',
'For other types, the next screen will allow you to enter the relevant details...': 'Para outros tipos, a próxima tela permitirá que você digite os detalhes relevantes.',
'Forest Fire': 'Incêndios florestais',
'Formal camp': 'Acampamento formal',
'Format': 'Formato',
"Format the list of attribute values & the RGB value to use for these as a JSON object, e.g.: {Red: '#FF0000', Green: '#00FF00', Yellow: '#FFFF00'}": "Formatar a lista de valores de atributos & o valor RGB a ser usado para esses como o objeto JSON, Exemplo: {Red: '#FF0000, Green: '#00FF00', Yellow: '#FFFF00'}",
'Forms': 'formulários',
'Found': 'localizado',
'Foundations': 'Fundações',
'Freezing Drizzle': 'Garoa gelada',
'Freezing Rain': 'Chuva Gelada',
'Freezing Spray': 'Spray Gelado',
'French': 'Francês',
'Friday': 'sexta-feira',
'From': 'from',
'From Facility': 'From Facility',
'From Inventory': 'A partir do Inventário',
'From Location': 'Do Local',
'From Organisation': 'Da organização',
'From Organization': 'Da Organização',
'From Person': 'Da Pessoa',
'Frost': 'Geada',
'Fulfil. Status': 'Encher. Status',
'Fulfillment Status': 'Status de preenchimento',
'Full': 'Cheio',
'Full beard': 'Barba completa',
'Fullscreen Map': 'Mapa em tela cheia',
'Functions available': 'Funções disponíveis',
'Funding Organization': 'Financiar a Organização',
'Funeral': 'Funeral',
'Further Action Recommended': 'Mais Acção Recomendada',
'GIS Reports of Shelter': 'Relatórios GIS de abrigos',
'GIS integration to view location details of the Shelter': 'Integration GIS para visualizar detalhes do local do Abrigo',
'GPS': 'GPS',
'GPS Data': 'GPS Data',
'GPS ID': 'GPS ID',
'GPS Marker': 'Marcador De GPS',
'GPS Track': 'Rastrear por GPS',
'GPS Track File': 'Rastrear Arquivo GPS',
'GPS data': 'GPS data',
'GPS data added': 'GPS data added',
'GPS data deleted': 'GPS data deleted',
'GPS data updated': 'GPS data updated',
'GPX Track': 'GPX RASTREAR',
'GRN': 'NRG',
'GRN Status': 'Status GRN',
'Gale Wind': 'Temporal',
'Gap Analysis': 'Análise de Falhas',
'Gap Analysis Map': 'Mapa de Análise de Falhas',
'Gap Analysis Report': 'Relatório de Análise de Falhas',
'Gap Map': 'Mapa de Falhas',
'Gap Report': 'Relatório de Falhas',
'Gateway': 'Portão',
'Gateway Settings': 'Configurações de Gateway',
'Gateway settings updated': 'Configurações de Gateway atualizadas',
'Gender': 'Sexo',
'General': 'geral',
'General Comment': 'Comentário Geral',
'General Medical/Surgical': 'Médico/Cirúrgico Geral',
'General emergency and public safety': 'Geral de emergência e segurança pública',
'General information on demographics': 'Informações gerais sobre demografia',
'Generator': 'Gerador',
'Geocode': 'Geocodificar',
'Geocoder Selection': 'Seleção De geocodificador',
'Geometry Name': 'Nome da geometria',
'Geophysical (inc. landslide)': 'Geofísica (inc. deslizamento)',
'Geotechnical': 'Geotécnica',
'Geotechnical Hazards': 'RISCOS geotécnicos',
'Geraldo module not available within the running Python - this needs installing for PDF output!': 'Módulo Geraldo não disponível no Python em execução - é necessário instalá-lo para saída em PDF!',
'Geraldo not installed': 'Geraldo não instalado',
'German': 'German',
'Get incoming recovery requests as RSS feed': 'Obter pedidos recebidos de recuperação como feed RSS',
'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': 'Fornecer uma descrição breve da imagem, por exemplo, o que pode ser visto no local da imagem (opcional).',
'Give information about where and when you have seen them': 'Fornecer informações sobre onde e quando você os viu',
'Global Messaging Settings': 'Configurações Globais de Menssagem',
'Go': 'ir',
'Go to Request': 'Ir para Pedido',
'Goatee': 'Barbicha',
'Good': 'Válido',
'Good Condition': 'Boa Condição',
'Goods Received Note': 'Nota de Recebimento de Mercadorias',
"Google Layers cannot be displayed if there isn't a valid API Key": "Google Layers cannot be displayed if there isn't a valid API Key",
'Government': 'Governamental',
'Government UID': 'GOVERNO UID',
'Government building': 'Prédios Públicos',
'Grade': 'Grau',
'Greek': 'grego',
'Green': 'verde',
'Ground movement, fissures': 'Movimento do solo terrestre, fissuras',
'Ground movement, settlement, slips': 'Movimento do solo terrestre, assentamentos, escorregões',
'Group': 'Grupo',
'Group Description': 'Descrição do Grupo',
'Group Details': 'Detalhes do grupo',
'Group ID': 'Group ID',
'Group Member added': 'Membro do grupo incluído',
'Group Members': 'membros do grupo',
'Group Memberships': 'Associados do Grupo',
'Group Name': 'Nome do grupo',
'Group Title': 'Título do grupo',
'Group Type': 'Tipo de grupo',
'Group added': 'Grupo adicionado',
'Group deleted': 'Grupo Excluído',
'Group description': 'Descrição do Grupo',
'Group updated': 'GRUPO ATUALIZADO',
'Groups': 'Grupos',
'Groups removed': 'Grupos Removido',
'Guest': 'Convidado',
'HR Data': 'Dados de RH',
'HR Manager': 'Responsável de RH',
'Hail': 'granizo',
'Hair Color': 'Cor do Cabelo',
'Hair Length': 'Comprimento do cabelo',
'Hair Style': 'Estilo do Cabelo',
'Has additional rights to modify records relating to this Organization or Site.': 'Tem direitos adicionais para modificar os registros relativos a esta organização ou site.',
'Has data from this Reference Document been entered into Sahana?': 'Os dados deste documento de referência foi digitado no Sahana?',
'Has only read-only access to records relating to this Organization or Site.': 'Tem apenas acesso de leitura para os registros relativos a esta organização ou site.',
'Has the Certificate for receipt of the shipment been given to the sender?': 'O certificado de recepção do carregamento foi dado para o remetente?',
'Has the GRN (Goods Received Note) been completed?': 'O GRN (nota de mercadorias recebidas) foi concluído?',
'Hazard Pay': 'Pagar Risco',
'Hazardous Material': 'Material perigoso',
'Hazardous Road Conditions': 'Estradas em Condições de Risco',
'Header Background': 'Plano de Fundo do Cabeçalho',
'Header background file %s missing!': 'Arquivo de Cabeçalho de Base %s ausente!',
'Headquarters': 'Matriz',
'Health': 'Saúde',
'Health care assistance, Rank': 'Assistência Saúde, Classificação',
'Health center': 'Centro de Saúde',
'Health center with beds': 'Centro de saúde com camas',
'Health center without beds': 'Centro de saúde sem camas',
'Health services status': 'Situação dos serviços de saúde',
'Healthcare Worker': 'Profissional de Saúde',
'Heat Wave': 'Onda de calor',
'Heat and Humidity': 'Calor e Umidade',
'Height': 'Altura',
'Height (cm)': 'Altura (cm)',
'Height (m)': 'Altura (m)',
'Help': 'Ajuda',
'Helps to monitor status of hospitals': 'Ajuda para monitorar status de hospitais',
'Helps to report and search for Missing Persons': 'Ajuda a reportar e procurar pessoas desaparecidas.',
'Helps to report and search for missing persons': 'Ajuda a reportar e procurar pessoas desaparecidas.',
'Here are the solution items related to the problem.': 'Aqui estão as soluções relacionadas ao problema.',
'Heritage Listed': 'Património Listado',
'Hierarchy Level %d Name': 'Nome do Nível %d da Hierarquia',
'Hierarchy Level 0 Name (e.g. Country)': 'Hierarquia Nível 0 Nome (por exemplo, País)',
'Hierarchy Level 0 Name (i.e. Country)': 'Hierarquia Nível 0 nome (por exemplo País)',
'Hierarchy Level 1 Name (e.g. Province)': 'Hierarquia Nível 1 Nome (por exemplo, Província)',
'Hierarchy Level 1 Name (e.g. State or Province)': 'Hierarquia Nível 1 nome (por exemplo, Estado ou Província)',
'Hierarchy Level 2 Name (e.g. District or County)': 'Hierarquia de Nível 2 Nome (por exemplo, Região ou Município)',
'Hierarchy Level 3 Name (e.g. City / Town / Village)': 'Hierarquia Nível 3 Nome (por exemplo, Cidade / Municipio / Vila)',
'Hierarchy Level 4 Name (e.g. Neighbourhood)': 'Hierarquia de Nível 4 Nome (por exemplo, Bairro)',
'Hierarchy Level 5 Name': 'Nome de Nível 5 na Hierarquia',
'High': 'Alta',
'High Water': "d'água alta",
'Hindu': 'Hindu',
'History': 'História',
'Hit the back button on your browser to try again.': 'Clique no ícone de voltar em seu navegador para tentar novamente.',
'Holiday Address': 'Endereço durante Feriado',
'Home': 'Residência',
'Home Address': 'Endereço Residencial',
'Home City': 'Home City',
'Home Country': 'País natal',
'Home Crime': 'Crime Doméstico',
'Home Details': 'Home Details',
'Home Phone Number': 'Home Phone Number',
'Home Relative': 'Home Relative',
'Home added': 'Home added',
'Home deleted': 'Home deleted',
'Home updated': 'Home updated',
'Homes': 'Homes',
'Hospital': 'Hospital',
'Hospital Details': 'Detalhes do Hospital',
'Hospital Status Report': 'Relatório de Status do Hospital',
'Hospital information added': 'Informações do hospital inclusas.',
'Hospital information deleted': 'Informações do hospital excluídas',
'Hospital information updated': 'informações do Hospital atualizadas',
'Hospital status assessment.': 'Avaliação de status do Hospital.',
'Hospitals': 'Hospitais',
'Hot Spot': 'ponto de acesso',
'Hour': 'Hora',
'Hours': 'Horas',
'Household kits received': 'Kits caseiros recebidos',
'Household kits, source': 'Kit de família, origem',
'How does it work?': 'Como funciona?',
'How is this person affected by the disaster? (Select all that apply)': 'Como esta pessoa é afetada pelo desastre? (selecione todos que se aplicam)',
'How long will the food last?': 'Quanto tempo irá durar a comida?',
'How many Boys (0-17 yrs) are Dead due to the crisis': 'Quantos rapazes (0-17 anos) estão Mortos devido à crise',
'How many Boys (0-17 yrs) are Injured due to the crisis': 'Quantos rapazes (0-17 anos) estão Feridos devido à crise',
'How many Boys (0-17 yrs) are Missing due to the crisis': 'Quantos rapazes (0-17 anos) estão Desaparecidos devido à crise',
'How many Girls (0-17 yrs) are Dead due to the crisis': 'Quantas garotas (0-17 anos) morreram devido à crise',
'How many Girls (0-17 yrs) are Injured due to the crisis': 'Quantas garotas (0-17 anos) estão feridas devido à crise',
'How many Girls (0-17 yrs) are Missing due to the crisis': 'Quantas garotas (0-17 anos) estão perdidas devido à crise',
'How many Men (18 yrs+) are Dead due to the crisis': 'Quantos homens (18 anos+) estão mortos devido à crise',
'How many Men (18 yrs+) are Injured due to the crisis': 'Quantos homens (18 anos +) são feridos devido à crise',
'How many Men (18 yrs+) are Missing due to the crisis': 'Quantos homens (18 anos +) estão ausentes devido à crise',
'How many Women (18 yrs+) are Dead due to the crisis': 'Quantas mulheres (+18 anos) estão mortas devido à crise',
'How many Women (18 yrs+) are Injured due to the crisis': 'Quantas mulheres (+18 anos) estão feridas devido à crise',
'How many Women (18 yrs+) are Missing due to the crisis': 'Quantas mulheres acima de 18 anos estão ausentes devido à crise',
'How many days will the supplies last?': 'Quantos dias irão durar os abastecimentos?',
'How many new cases have been admitted to this facility in the past 24h?': 'Quantos novos casos tenham sido admitidos a esta facilidade nas últimas 24 horas?',
'How many of the patients with the disease died in the past 24h at this facility?': 'Quantos dos pacientes com a doença morreram nas últimas 24 horas nesta unidade?',
'How many patients with the disease are currently hospitalized at this facility?': 'Quantos pacientes com a doença estão atualmente internados nesta instalação?',
'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.': 'Quanto detalhe é visto. Um nível alto de Zoom mostra muitos detalhes, mas não uma grande área. Um nível de Zoom baixo significa ver uma grande área, mas não com um alto nível de detalhe.',
'Human Resource': 'Recursos humanos',
'Human Resource Details': 'Detalhes de Recursos Humanos',
'Human Resource Management': 'Gerenciamento de recursos humanos',
'Human Resource added': 'Recurso humano adicionado',
'Human Resource removed': 'Recursos Humanos removido',
'Human Resource updated': 'Recursos Humanos atualizado',
'Human Resources': 'Recursos Humanos',
'Human Resources Management': 'Gerenciamento de Recursos Humanos',
'Humanitarian NGO': 'ONG humanitária',
'Hurricane': 'Furacão',
'Hurricane Force Wind': 'Furacão Força Vento',
'Hybrid Layer': 'Hybrid Layer',
'Hygiene': 'Higiene',
'Hygiene NFIs': 'Higiene NFIs',
'Hygiene kits received': 'Kits de higiene recebido',
'Hygiene kits, source': 'Kits de higiene, origem',
'Hygiene practice': 'Prática de higiene',
'Hygiene problems': 'PROBLEMAS DE HIGIENE',
'I accept. Create my account.': 'I accept. Create my account.',
'I am available in the following area(s)': 'Estou disponível na(s) seguinte(s) área(s)',
'ID Tag': 'Etiqueta de Identificação',
'ID Tag Number': 'Número da Etiqueta de Identificação',
'ID type': 'Tipo de ID',
'Ice Pressure': 'Pressão de gelo',
'Iceberg': 'Icebergue',
'Identification': 'Identification',
'Identification Report': 'Identificação Relatório',
'Identification Reports': 'Relatórios de Identificação',
'Identification Status': 'Status da Identificação',
'Identified as': 'Identificado como',
'Identified by': 'Identificado por',
'Identity': 'Identidade',
'Identity Details': 'Detalhes da identidade',
'Identity added': 'Identidade incluída',
'Identity deleted': 'Identidade excluída',
'Identity updated': 'Identidade atualizada',
'If Staff have login accounts then they are given access to edit the details of the': 'Se o pessoal tiver contas de login, então lhes é dado acesso para editar os detalhes do',
'If a ticket was issued then please provide the Ticket ID.': 'Se um bilhete foi emitido então por favor forneça o ID do bilhete.',
'If a user verifies that they own an Email Address with this domain, the Approver field is used to determine whether & by whom further approval is required.': 'Se um usuário verifica que eles possuem um endereço de email com este domínio, o campo Aprovador é utilizado para determinar se e por quem aprovação adicional é necessária.',
'If it is a URL leading to HTML, then this will downloaded.': 'Se for uma URL levando a HTML, então este será baixado.',
'If neither are defined, then the Default Marker is used.': 'Se nem são definidos, então o Marcador Padrão é utilizado.',
'If no marker defined then the system default marker is used': 'Se nenhum marcador definido, o marcador padrão do sistema é utilizada',
'If no, specify why': 'Se não, especifique por que',
'If none are selected, then all are searched.': 'Se nenhuma for selecionada, então todos são procurados.',
"If selected, then this Asset's Location will be updated whenever the Person's Location is updated.": 'Se selecionado, esta localização do ativo será atualizado sempre que a localização da pessoa é atualizada.',
'If the location is a geographic area, then state at what level here.': 'Se o local é uma área geográfica, então defina em que nível aqui.',
'If the request is for %s, please enter the details on the next screen.': 'If the request is for %s, please enter the details on the next screen.',
'If the request is for type "Other", you should enter a summary of the request here.': 'Se o pedido for para o tipo "Outro", você deve digitar um resumo do pedido aqui.',
'If the request type is "Other", please enter request details here.': 'Se o tipo de pedido é "other", por favor, digite aqui detalhes do pedido.',
"If this configuration represents a region for the Regions menu, give it a name to use in the menu. The name for a personal map configuration will be set to the user's name.": 'Se esta configuração representa uma região para o menu regiões, dê-lhe um nome a ser utilizado no menu. O nome de uma configuração pessoal do mapa será configurado para o nome do usuário.',
"If this field is populated then a user who specifies this Organization when signing up will be assigned as a Staff of this Organization unless their domain doesn't match the domain field.": 'Se esse campo for Preenchido, então, um usuário que especificar esta organização quando se registrar será designado como um agente desta organização a menos que seu domínio não corresponde ao campo de domínio.',
'If this field is populated then a user with the Domain specified will automatically be assigned as a Staff of this Organization': 'Se esse campo for preenchido, o usuário de um específico Domain será automaticamente registrado como funcionário desta organização.',
'If this is set to True then mails will be deleted from the server after downloading.': 'Se isso for ajustado para “True”, as correspondências serão deletadas do servidor depois que o downloading for feito.',
"If this is ticked, then this will become the user's Base Location & hence where the user is shown on the Map": 'Se isso for ticado, se tornará a base geográfica do usuário e, consequentemente onde este aparece no mapa.',
'If this record should be restricted then select which role is required to access the record here.': 'Se esse registro deve ser restrito, selecione qual regra é necessária para acessar o record aqui.',
'If this record should be restricted then select which role(s) are permitted to access the record here.': 'Se esse registro deve ser restrito, selectione qual (is) regra (s) serão permitidas para assessá-lo aqui.',
'If yes, specify what and by whom': 'Se SIM, Especifique o quê e por quem',
'If yes, which and how': 'Se sim, quais e como',
'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': 'Se você não inserir um documento de referência, seu e-mail será exibido para permitir que esses dados sejam verificados.',
"If you don't see the Hospital in the list, you can add a new one by clicking link 'Add Hospital'.": "Se você não vê o Hospital na lista, você pode incluir um novo clicando no link 'incluir Hospital'.",
"If you don't see the Office in the list, you can add a new one by clicking link 'Add Office'.": "Se você não vê o escritório na lista, você pode incluir um novo clicando no link 'incluir escritório'.",
"If you don't see the Organization in the list, you can add a new one by clicking link 'Add Organization'.": 'Se voce não vê a Organização na lista, voce poderá adicionar uma nova clicando no link "Incluir Organização"',
'If you know what the Geonames ID of this location is then you can enter it here.': 'Se voce conhecer o Geonames ID desta localização então voce poderá inserí-lo aqui.',
'If you know what the OSM ID of this location is then you can enter it here.': 'Se voce conhecer o OSM ID desta localização, então voce pode inserí-lo aqui.',
'If you need to add a new document then you can click here to attach one.': 'Se houver necessidade de incluir um novo documento então voce poderá clicar aqui para anexá-lo.',
'If you want several values, then separate with': 'Se voce deseja varios valores, separe com',
'If you would like to help, then please': 'Se você gostaria de ajudar, então por favor',
'Illegal Immigrant': 'Imigrante Ilegal',
'Image': 'Imagem',
'Image Details': 'Detalhes da Imagem',
'Image File(s), one image per page': 'Image File(s), one image per page',
'Image Tags': 'Imagem Tags',
'Image Type': 'Tipo de Imagem',
'Image Upload': 'Envio de Imagem',
'Image added': 'Imagem Adicionada',
'Image deleted': 'Imagem excluída',
'Image updated': 'Imagem atualizada',
'Imagery': 'Imagens',
'Images': 'Imagens',
'Impact Assessments': 'Avaliações de impacto',
'Impact Details': 'Detalhes de impacto',
'Impact Type': 'Tipo de impacto',
'Impact Type Details': 'Detalhes dos tipos de impacto',
'Impact Type added': 'Tipo de impacto incluído',
'Impact Type deleted': 'Tipo de impacto excluído',
'Impact Type updated': 'Atualização dos tipos de impacto',
'Impact Types': 'Tipos de impactos',
'Impact added': 'Impacto incluído',
'Impact deleted': 'Impacto excluído',
'Impact updated': 'Atualização de impacto',
'Impacts': 'Impactos',
'Import': 'Importação',
'Import & Export Data': 'Importar & Exportar Dados',
'Import Data': 'Importar Dados',
'Import File': 'Import File',
'Import File Details': 'Import File Details',
'Import File deleted': 'Import File deleted',
'Import Files': 'Import Files',
'Import Job Count': 'Import Job Count',
'Import Jobs': 'Importar Tarefas',
'Import New File': 'Import New File',
'Import and Export': 'Importação e Exportação',
'Import from Ushahidi Instance': 'Importação da Instância Ushahidi',
'Import if Master': 'Importar se Mestre',
'Import multiple tables as CSV': 'Importar tabelas multiplas como CSV',
'Import/Export': 'Importar/Exportar',
'Important': 'Importante',
'Importantly where there are no aid services being provided': 'Importante onde não há serviços de apoio a ser prestado',
'Imported': 'Imported',
'Importing data from spreadsheets': 'Importar dados de planilhas',
'Improper decontamination': 'Descontaminação Imprópria',
'Improper handling of dead bodies': 'Manipulação inadequada de cadáveres',
'In Catalogs': 'Em Catálogos',
'In Inventories': 'Em Inventários',
'In Process': 'Em Processo',
'In Progress': 'Em Progresso',
'In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Maximize o ajuste da janela para preenche-la toda, desta forma não será necessário configurar para uso de fonte grande.',
'Inbound Mail Settings': 'Definições de correio de entrada',
'Incident': 'Incidente',
'Incident Categories': 'Categorias Incidente',
'Incident Report': 'Relatório de Incidente',
'Incident Report Details': 'Detalhes do relatório de incidentes',
'Incident Report added': 'Relatório de Incidente incluído',
'Incident Report deleted': 'Relatório de Incidente excluído',
'Incident Report updated': 'Relatório de incidente atualizado',
'Incident Reporting': 'Relatório de incidentes',
'Incident Reporting System': 'Sistema de relatórios de incidentes',
'Incident Reports': 'Relatório de incidentes',
'Incidents': 'incidentes',
'Include any special requirements such as equipment which they need to bring.': 'Include any special requirements such as equipment which they need to bring.',
'Incoming': 'Entrada',
'Incoming Shipment canceled': 'Chegada da encomenda cancelada',
'Incoming Shipment updated': 'Chegada de encomenda actualizada.',
'Incomplete': 'Incompleto',
'Individuals': 'Individuais',
'Industrial': 'Industrial',
'Industrial Crime': 'Crime Industrial',
'Industry Fire': 'Indústria Fogo',
'Infant (0-1)': 'Criança (0-1)',
'Infectious Disease': 'Doença INFECCIOSA',
'Infectious Disease (Hazardous Material)': 'Doenças infecciosas (Material perigoso)',
'Infectious Diseases': 'Doenças infecciosas',
'Infestation': 'Infestação',
'Informal Leader': 'Líder Informal',
'Informal camp': 'Acampamento Informal',
'Information gaps': 'problemas de informação',
'Infusion catheters available': 'Cateteres de infusão disponível',
'Infusion catheters need per 24h': 'Cateteres infusão necessário por 24 H',
'Infusion catheters needed per 24h': 'Cateteres de infusão necessários por 24h',
'Infusions available': 'Infusões disponíveis',
'Infusions needed per 24h': 'Infusões necessário por 24H',
'Inspected': 'Inspecionado',
'Inspection Date': 'Data de Inspeção',
'Inspection date and time': 'Data e hora de inspeção',
'Inspection time': 'Hora da inspeção',
'Inspector ID': 'ID do Inspetor',
'Instant Porridge': 'Mingau Instantâneo',
"Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.": 'Em vez de sincronizar automaticamente com outros pares pela rede, voce também pode sincronizar com arquivos, o que é necessário quando não há rede. Você pode utilizar esta página para importar dados de sincronização de arquivos e também exportar dados para arquivos de Sincronização. Clique no link à direita para ir para esta página.',
'Institution': 'Instituição',
'Insufficient': 'insuficiente',
'Insufficient privileges': 'Insufficient privileges',
'Insufficient vars: Need module, resource, jresource, instance': 'Variaveis insuficientes: necessario modulo, recurso, jrecurso, instância',
'Insurance Renewal Due': 'Insurance Renewal Due',
'Intake Items': 'Itens de admissão',
'Intergovernmental Organization': 'Organização Intergovernamental',
'Interior walls, partitions': 'Do Interior das paredes, partições',
'Internal State': 'Estado Interno',
'International NGO': 'ONG internacional',
'International Organization': 'Organização Internacional',
'Interview taking place at': 'Entrevista em',
'Invalid': 'Inválido',
'Invalid Query': 'Consulta inválida',
'Invalid email': 'Invalid email',
'Invalid phone number': 'Invalid phone number',
'Invalid phone number!': 'Invalid phone number!',
'Invalid request!': 'Pedido inválido!',
'Invalid ticket': 'Bilhete Inválido',
'Inventories': 'Inventários.',
'Inventory': 'Inventário',
'Inventory Item': 'Item do inventário',
'Inventory Item Details': 'Detalhes do Item de inventário',
'Inventory Item added': 'Item incluído no inventário',
'Inventory Item deleted': 'Item do inventário excluído',
'Inventory Item updated': 'Item de Inventário atualizado',
'Inventory Items': 'Itens do Inventário',
'Inventory Items Available for Request Item': 'Itens de inventário disponíveis para Pedir um Item',
'Inventory Items include both consumable supplies & those which will get turned into Assets at their destination.': 'Itens de invenrário incluem ambos suprimentos consumíveis & aqueles que se transformarão em Ativos no seu destino.',
'Inventory Management': 'Gerenciamento de Inventário',
'Inventory Stock Position': 'Inventory Stock Position',
'Inventory functionality is available for:': 'Inventário de funcionalidades esta disponível para:',
'Inventory of Effects': 'Inventário de Efeitos',
'Is editing level L%d locations allowed?': 'É permitido editar o nível dos locais L%d?',
'Is it safe to collect water?': 'É seguro coletar água?',
'Is this a strict hierarchy?': 'Esta é uma hierarquia rigorosa?',
'Issuing Authority': 'Autoridade emissora',
'It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'Ele captura não apenas os locais onde elas estão ativas, mas também captura informações sobre o conjunto de projetos que está fornecendo em cada região.',
'Italian': 'Italian',
'Item': 'Item',
'Item Added to Shipment': 'Item Incluído para Embarque',
'Item Catalog Details': 'Detalhes do item do catálogo',
'Item Categories': 'Categorias do Item',
'Item Category': 'Categoria do Item',
'Item Category Details': 'Detalhes da categoria de item',
'Item Category added': 'Categoria de item incluída',
'Item Category deleted': 'Categoria de item excluída',
'Item Category updated': 'Atualização da categoria de item',
'Item Details': 'Detalhes do item',
'Item Pack Details': 'Detalhes do pacote de itens',
'Item Pack added': 'Pacote de itens',
'Item Pack deleted': 'Pacote de itens excluído',
'Item Pack updated': 'Itens de Pacote atualizados',
'Item Packs': 'Item de Pacotes',
'Item added': 'Item incluído',
'Item added to Inventory': 'Itens adicionados ao Inventário',
'Item added to shipment': 'Item incluído para embarque',
'Item already in Bundle!': 'Item já no pacote configurável!',
'Item already in Kit!': 'Item já no Kit!',
'Item already in budget!': 'Item já no Orçamento!',
'Item deleted': 'Item Excluído',
'Item removed from Inventory': 'Item removido do Inventário',
'Item updated': 'Item atualizado',
'Items': 'Itens',
'Items in Category can be Assets': 'itens na categoria podem ser ativos',
'Japanese': 'japonês',
'Jerry can': 'Galão',
'Jew': 'Judeu',
'Job Market': 'Mercado de trabalho',
'Job Role': 'Função de trabalho',
'Job Role Catalog': 'Catalogo de Funçao de trabalho',
'Job Role Details': 'Detalhes da Função',
'Job Role added': 'funçao de trabalho inclusa',
'Job Role deleted': 'Funçao de trabalho excluida',
'Job Role updated': 'Função actualizada',
'Job Roles': 'Funções',
'Job Title': 'Título do Cargo',
'Jobs': 'Tarefas',
'Journal': 'Diário',
'Journal Entry Details': 'Detalhes da Entrada de Diário',
'Journal entry added': 'Entrada de diário incluída',
'Journal entry deleted': 'Entrada de diário removida',
'Journal entry updated': 'Entrada de diário atualizado',
'Key': 'Chave',
'Key Details': 'Detalhes da Chave',
'Key added': 'Chave adicionada',
'Key deleted': 'Chave removida',
'Key updated': 'Chave actualizada',
'Keys': 'Chaves',
'Kit': 'kit',
'Kit Contents': 'Conteúdo Kit',
'Kit Details': 'Detalhes do Kit',
'Kit Updated': 'Kit Atualizado',
'Kit added': 'Pacote adicionado',
'Kit deleted': 'Kit excluído',
'Kit updated': 'Kit atualizado',
'Kits': 'Kits',
'Known Identities': 'Identidades conhecido',
'Known incidents of violence against women/girls': 'Incidentes de violência conhecidos contra mulheres/garotas',
'Known incidents of violence since disaster': 'Incidentes de violência conhecidos desde o desastre',
'Korean': 'Korean',
'LICENSE': 'LICENÇA',
'Lack of material': 'Falta de material',
'Lack of school uniform': 'Falta de uniforme escolar',
'Lack of supplies at school': 'Falta de suprimentos na escola',
'Lack of transport to school': 'Falta de transporte escolar',
'Lactating women': 'Mulheres lactantes',
'Lahar': 'Lahar',
'Landslide': 'Deslizamento',
'Language': 'Linguagem',
'Last Name': 'sobrenome',
'Last known location': 'Último local conhecido',
'Last name': 'Last name',
'Last synchronization time': 'Horário da última sincronização',
'Last updated': 'Última atualização',
'Last updated ': 'Last updated ',
'Last updated by': 'Última atualização por',
'Last updated on': 'Última Atualização em',
'Latitude': 'Latitude',
'Latitude & Longitude': 'Latitude & Longitude',
'Latitude is North-South (Up-Down).': 'Latitude é sentido norte-sul (emcima-embaixo).',
'Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': 'Latitude é zero na linha do Equador, positiva no hemisfério norte e negativa no hemisfério sul.',
'Latitude of Map Center': 'Latitude DO MAPA Centro',
'Latitude of far northern end of the region of interest.': 'Latitude do extremo Norte longe do Região de interesse.',
'Latitude of far southern end of the region of interest.': 'Latitude da extremidade sul longe do Região de interesse.',
'Latitude should be between': 'Latitude deve estar entre',
'Latrines': 'Privadas',
'Law enforcement, military, homeland and local/private security': 'Execução da lei militar, interna e segurança local/privada',
'Layer': 'Camada',
'Layer Details': 'Detalhes de Camada',
'Layer ID': 'Layer ID',
'Layer Name': 'Layer Name',
'Layer Type': 'Layer Type',
'Layer added': 'Camada incluída',
'Layer deleted': 'Camada excluída',
'Layer has been Disabled': 'Layer has been Disabled',
'Layer has been Enabled': 'Layer has been Enabled',
'Layer updated': 'Camada atualizada',
'Layers': 'Camadas',
'Layers updated': 'Camadas atualizadas',
'Layout': 'Modelo',
'Leader': 'Líder',
'Leave blank to request an unskilled person': 'Leave blank to request an unskilled person',
'Legend Format': 'Formato da Legenda',
'Length (m)': 'Comprimento (m)',
'Level': 'Nível',
'Level 1': 'Nível 1',
'Level 1 Assessment Details': 'Detalhes da Avaliação Nível 1',
'Level 1 Assessment added': 'Avaliação Nível 1 incluído',
'Level 1 Assessment deleted': 'Avaliação Nível 1 excluído',
'Level 1 Assessment updated': 'Avaliação Nível 1 atualizada',
'Level 1 Assessments': 'Avaliações Nível 1',
'Level 2': 'nível 2',
'Level 2 Assessment Details': 'Nível 2 de avaliação Detalhado',
'Level 2 Assessment added': 'Nível 2 avaliação incluído',
'Level 2 Assessment deleted': 'Nível 2 de avaliação excluído',
'Level 2 Assessment updated': 'Nível 2 de avaliação atualizada',
'Level 2 Assessments': 'Nível 2 de Avaliações',
'Level 2 or detailed engineering evaluation recommended': 'Nível 2 ou engenharia detalhada de avaliação recomendado',
"Level is higher than parent's": 'Nível superior ao dos pais',
'Library support not available for OpenID': 'Apoio de biblioteca não está disponível para OpenID',
'License Number': 'License Number',
'License Plate': 'License Plate',
'LineString': 'cadeia-de-linhas',
'List': 'Listar',
'List / Add Baseline Types': 'Lista / Incluir Linha de Tipos',
'List / Add Impact Types': 'Lista / Incluir Tipos de Impacto',
'List / Add Services': 'Lista / Incluir Serviços',
'List / Add Types': 'Lista / Incluir Tipos',
'List Activities': 'listar atividades',
'List All': 'Mostrar Tudo',
'List All Assets': 'Lista todos os ativos',
'List All Catalog Items': 'Lista todos os Itens Do Catálogo',
'List All Commitments': 'Lista todos os compromissos',
'List All Entries': 'Listar todas as entradas',
'List All Item Categories': 'Lista todos os itens Categorias',
'List All Memberships': 'Listar Todas As Associações',
'List All Received Shipments': 'Lista todas as transferências Recebidas',
'List All Records': 'Lista todos os registros',
'List All Reports': 'Listar todos os Relatórios',
'List All Requested Items': 'Lista Todos Os itens solicitados',
'List All Requested Skills': 'List All Requested Skills',
'List All Requests': 'Lista Todos Os Pedidos',
'List All Sent Shipments': 'Listar todos os embarques enviados',
'List All Vehicles': 'List All Vehicles',
'List Alternative Items': 'Listar Itens Alternativos',
'List Assessment Summaries': 'Listar Resumos das Avaliações',
'List Assessments': 'Listar as Avaliações',
'List Asset Assignments': 'Listar Atribuições de Ativos',
'List Assets': 'Listar Ativos',
'List Availability': 'Listar Disponibilidade',
'List Baseline Types': 'Lista de Tipos De Linha',
'List Baselines': 'Lista de Linhas',
'List Brands': 'Lista de Marcas',
'List Budgets': 'Listar Orçamentos',
'List Bundles': 'Listar Pacotes',
'List Camp Services': 'Listar Serviços de Acampamento',
'List Camp Types': 'Listar Tipos de Acampamentos',
'List Camps': 'Listar Acampamentos',
'List Catalog Items': 'Lista de Itens Do Catálogo',
'List Catalogs': 'Listar catálogos',
'List Certificates': 'Listar certificados',
'List Certifications': 'Listar certificações',
'List Checklists': 'Lista Listas de Verificação.',
'List Cluster Subsectors': 'Lista Subsetores de Cluster',
'List Clusters': 'Lista Clusters',
'List Commitment Items': 'Lista Itens de Compromisso',
'List Commitments': 'Lista Compromissos',
'List Committed People': 'List Committed People',
'List Competencies': 'Listar competencias',
'List Competency Ratings': 'Listar classificações de competencias',
'List Conflicts': 'Lista Conflitos',
'List Contact Information': 'Listar informações do contato',
'List Contacts': 'Listar contatos',
'List Course Certificates': 'Listar certificados de cursos',
'List Courses': 'Listar Cursos',
'List Credentials': 'Listar credenciais',
'List Current': 'Lista Atual',
'List Documents': 'Listar documentos',
'List Donors': 'Listar doadores',
'List Events': 'Lista de Eventos',
'List Facilities': 'Lista de Facilidades',
'List Feature Classes': 'Listar Classes De Recursos',
'List Feature Layers': 'LISTAr Camadas DE RECURSOS',
'List Flood Reports': 'Listar Relatórios de Inundações',
'List GPS data': 'List GPS data',
'List Groups': 'Listar grupos',
'List Groups/View Members': 'Listar Grupos/visualizar membros',
'List Homes': 'List Homes',
'List Hospitals': 'Listar de Hospitais',
'List Human Resources': 'Lista de Recursos Humanos',
'List Identities': 'Lista de Identidades',
'List Images': 'Lista de Imagens',
'List Impact Assessments': 'Lista de Avaliações De Impacto',
'List Impact Types': 'Lista de Tipos De Impacto',
'List Impacts': 'Lista de impactos',
'List Import Files': 'List Import Files',
'List Incident Reports': 'Lista de relatórios de incidentes',
'List Inventory Items': 'Listar ítens de inventário',
'List Item Categories': 'Listar categorias de ítens',
'List Item Packs': 'Lista pacotes de itens',
'List Items': 'Listar itens',
'List Items in Inventory': 'Lista de Itens no inventário',
'List Job Roles': 'Listar cargos',
'List Keys': 'Listar Chaves',
'List Kits': 'LISTAR Kits',
'List Layers': 'Listar Camadas',
'List Level 1 Assessments': 'Listar avaliações nível 1',
'List Level 1 assessments': 'Listar avaliação nível 1',
'List Level 2 Assessments': 'Listar avaliações nível 2',
'List Level 2 assessments': 'Listar avaliações nível 2',
'List Locations': 'Listar Localizações',
'List Log Entries': 'Listar as entradas de log',
'List Map Configurations': 'Listar configurações de mapa',
'List Markers': 'Listar marcadores',
'List Members': 'Lista de membros',
'List Memberships': 'Lista de associados',
'List Messages': 'Listar Mensagens',
'List Missing Persons': 'Lista de pessoas desaparecidas',
'List Missions': 'Listar Missões',
'List Need Types': 'Listar tipos de necessidades',
'List Needs': 'Lista de Necessidades',
'List Notes': 'Lista de Notas',
'List Offices': 'Lista de Escritórios',
'List Organizations': 'Listar Organizações',
'List Patients': 'List Patients',
'List Peers': 'LISTA DE PARES',
'List Personal Effects': 'Lista de objetos pessoais',
'List Persons': 'LISTA DE PESSOAS',
'List Photos': 'Lista de Fotos',
'List Population Statistics': 'Lista das Estatisticas da População',
'List Positions': 'Lista de Posições',
'List Problems': 'Lista de Problemas',
'List Projections': 'Lista de Projeções',
'List Projects': 'Listar Projectos',
'List Rapid Assessments': 'Listar Avaliações Rápidas',
'List Received Items': 'Listar Elementos Recebidos',
'List Received Shipments': 'Listar Carga Recebida',
'List Records': 'Listar Registros',
'List Registrations': 'Listar Registrações',
'List Relatives': 'List Relatives',
'List Reports': 'Relatórios de Listas',
'List Request Items': 'Pedido de Itens de lista',
'List Requested Skills': 'List Requested Skills',
'List Requests': 'LISTA DE PEDIDOS',
'List Resources': 'Listar Recursos',
'List Rivers': 'Lista de Rios',
'List Roles': 'Listar Funções',
'List Rooms': 'Listar Salas',
'List Scenarios': 'Listar cenários',
'List Sections': 'lista de Seções',
'List Sectors': 'Lista de Sectores',
'List Sent Items': 'Os itens da lista Enviada',
'List Sent Shipments': 'Embarques lista Enviada',
'List Service Profiles': 'Lista de serviços Perfis',
'List Settings': 'Lista de configurações',
'List Shelter Services': 'Lista de serviços de abrigo',
'List Shelter Types': 'Lista de Tipos De Abrigo',
'List Shelters': 'Lista de Abrigos',
'List Skill Equivalences': 'LISTA DE HABILIDADE Equivalências',
'List Skill Provisions': 'Listar suprimento de habilidades',
'List Skill Types': 'Lista de Tipos De Habilidade',
'List Skills': 'LISTA DE HABILIDADES',
'List Solutions': 'Listar Soluções',
'List Staff': 'Listar Pessoal',
'List Staff Members': 'Listar funcionários',
'List Staff Types': 'Listar Tipos De Equipe',
'List Status': 'Listar Status',
'List Subscriptions': 'Lista de Assinaturas',
'List Subsectors': 'Listar Subsetores',
'List Support Requests': 'Listar Pedidos de Suporte',
'List Survey Answers': 'Listar Respostas de Pesquisa',
'List Survey Questions': 'Listar Perguntas da Pesquisa',
'List Survey Sections': 'Listar Seções da Pesquisa',
'List Survey Series': 'Listar Séries de Pesquisa',
'List Survey Templates': 'Listar Modelos de Pesquisa',
'List Tasks': 'Lista de Tarefas',
'List Teams': 'Lista de Equipes',
'List Themes': 'Lista de Temas',
'List Tickets': 'lista de Bilhetes',
'List Tracks': 'Rastreia lista',
'List Trainings': 'Listar Treinamentos',
'List Units': 'Lista de Unidades',
'List Users': 'Mostrar usuários',
'List Vehicle Details': 'List Vehicle Details',
'List Vehicles': 'List Vehicles',
'List Volunteers': 'Mostrar Voluntários',
'List Warehouses': 'Mostrar Depósitos',
'List all': 'Mostrar tudo',
'List available Scenarios': 'Listar Cenários Disponíveis',
'List of CSV files': 'List of CSV files',
'List of CSV files uploaded': 'List of CSV files uploaded',
'List of Items': 'Lista de Itens',
'List of Missing Persons': 'Lista de pessoas desaparecidas',
'List of Peers': 'Lista de pares',
'List of Reports': 'Lista de Relatórios',
'List of Requests': 'Lista de Pedidos',
'List of Spreadsheets': 'Lista de Folhas de Cálculo',
'List of Spreadsheets uploaded': 'Lista de Folhas de Cálculo transferidas',
'List of Volunteers': 'Lista de Voluntários',
'List of Volunteers for this skill set': 'Lista de Voluntários para este conjunto de competências',
'List of addresses': 'Lista de endereços',
'List unidentified': 'Lista não identificada',
'List/Add': 'Lista/incluir',
'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': 'Lista "quem está fazendo o que & aonde". Permite a agências humanitárias coordenar suas atividades',
'Live Help': 'Ajuda ao vivo',
'Livelihood': 'Subsistência',
'Load Cleaned Data into Database': 'Carregue Informações Claras no Banco de Dados',
'Load Raw File into Grid': 'Carregamento de arquivo bruto na Grid',
'Loading': 'Carregando',
'Local Name': 'Nome local',
'Local Names': 'Nomes locais',
'Location': 'Localização',
'Location 1': 'Local 1',
'Location 2': 'Local 2',
'Location Details': 'Detalhes da Localização',
'Location Hierarchy Level 0 Name': 'Nivel Local de hierarquia 0 nome',
'Location Hierarchy Level 1 Name': 'Nivel local de hierarquia 1 nome',
'Location Hierarchy Level 2 Name': 'Nivel local de hierarquia 2 nome',
'Location Hierarchy Level 3 Name': 'Hierarquia local Nível 3 Nome',
'Location Hierarchy Level 4 Name': 'Hierarquia local Nível 4 Nome',
'Location Hierarchy Level 5 Name': 'Hierarquia local Nível 5 Nome',
'Location added': 'Local incluído',
'Location cannot be converted into a group.': 'Local não pode ser convertido em um grupo.',
'Location deleted': 'Localidade excluída',
'Location details': 'Detalhes do Local',
'Location group cannot be a parent.': 'Localização de grupo não pode ser um pai.',
'Location group cannot have a parent.': 'Localização de grupo não tem um pai.',
'Location groups can be used in the Regions menu.': 'Grupos local pode ser utilizado no menu Regiões.',
'Location groups may be used to filter what is shown on the map and in search results to only entities covered by locations in the group.': 'Grupos locais podem ser utilizados para filtrar o que é mostrado no mapa e nos resultados da procura apenas as entidades locais abrangidas no grupo.',
'Location updated': 'Local atualizado',
'Location:': 'Localização:',
'Location: ': 'Location: ',
'Locations': 'Localizações',
'Locations of this level need to have a parent of level': 'Locais de esse nível precisa ter um pai de nível',
'Lockdown': 'BLOQUEIO',
'Log': 'registro',
'Log Entry Details': 'detalhes da entrada de registro',
'Log entry added': 'Entrada de Log incluída',
'Log entry deleted': 'Entrada de Log Excluída',
'Log entry updated': 'Entrada de Log de atualização',
'Login': 'login',
'Logistics': 'Logística',
'Logistics Management System': 'Sistema de Gestão de Logística',
'Logo': 'Logotipo',
'Logo file %s missing!': 'Arquivo de logotipo %s ausente!',
'Logout': 'Deslogar',
'Long Text': 'Texto Longo',
'Longitude': 'Longitude',
'Longitude is West - East (sideways).': 'Longitude é Oeste - Leste (lateral).',
'Longitude is West-East (sideways).': 'Longitude é leste-oeste (direções).',
'Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Longitude é zero no primeiro meridiano (Greenwich Mean Time) e é positivo para o leste, em toda a Europa e Ásia. Longitude é negativo para o Ocidente, no outro lado do Atlântico e nas Américas.',
'Longitude is zero on the prime meridian (through Greenwich, United Kingdom) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Longitude é zero no primeiro meridiano (por meio de Greenwich, Reino Unido) e é positivo para o leste, em toda a Europa e Ásia. Longitude é negativo para o Ocidente, no outro lado do Atlântico e nas Américas.',
'Longitude of Map Center': 'Longitude do Centro do Mapa',
'Longitude of far eastern end of the region of interest.': 'Longitude longe do Oeste no final da região de interesse.',
'Longitude of far western end of the region of interest.': 'Longitude de oeste longínquo no final da Região de interesse.',
'Longitude should be between': 'Longitude deve estar entre',
'Looting': 'Saques',
'Lost': 'Perdido',
'Lost Password': 'Senha Perdida',
'Low': 'Baixo',
'Magnetic Storm': 'Tempestade magnética',
'Major Damage': 'Grandes danos',
'Major expenses': 'Despesas principais',
'Major outward damage': 'Danos exteriores principais',
'Make Commitment': 'Ter obrigação',
'Make New Commitment': 'Fazer Novo Compromisso',
'Make Request': 'Fazer Pedido',
'Make preparations per the <instruction>': 'Fazer Preparações por',
'Male': 'masculino',
'Manage': 'Gerenciar',
'Manage Events': 'Manage Events',
'Manage Relief Item Catalogue': 'Gerenciar Catálogo de Item de Alívio',
'Manage Users & Roles': 'GERENCIAR Usuários & Funções',
'Manage Vehicles': 'Manage Vehicles',
'Manage Warehouses/Sites': 'GERENCIAR Armazéns/Sites',
'Manage Your Facilities': 'Gerenciar suas instalações',
'Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.': 'Gerenciar pedidos de suprimentos, patrimônio, pessoal ou outros recursos. Corresponde aos estoques onde os suprimentos são solicitados.',
'Manage requests of hospitals for assistance.': 'GERENCIAR Pedidos de hospitais para obter assistência.',
'Manage volunteers by capturing their skills, availability and allocation': 'GERENCIAR voluntários por captura sua capacidade, Alocação e disponibilidade',
'Manager': 'Gerente',
'Managing Office': 'Gerenciando Office',
'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'Obrigatório. Em GeoServer, este é o nome Da Camada. No getCapabilities WFS, este é o nome da parte FeatureType após os dois pontos (:).',
'Mandatory. The URL to access the service.': 'Obrigatório. A URL para acessar o serviço.',
'Manual': 'Manual',
'Manual Synchronization': 'Sincronização Manual',
'Many': 'Muitos',
'Map': 'Mapa',
'Map Center Latitude': 'Latitude do Centro do Mapa',
'Map Center Longitude': 'Longitude do centro do mapa',
'Map Configuration': 'Configuração de Mapa',
'Map Configuration Details': 'Detalhes de configuração de mapa',
'Map Configuration added': 'Configuração de mapa incluído',
'Map Configuration deleted': 'Configuração de mapa excluído',
'Map Configuration removed': 'Configuração de mapa removido',
'Map Configuration updated': 'Configuração de mapa atualizada',
'Map Configurations': 'Configuracões de mapa',
'Map Height': 'Altura do Mapa',
'Map Service Catalogue': 'Catálogo do serviço de mapas',
'Map Settings': 'Configurações do Mapa',
'Map Viewing Client': 'Cliente de visualização do mapa',
'Map Width': 'Largura do mapa',
'Map Zoom': 'Zoom do mapa',
'Map of Hospitals': 'Mapa de Hospitais',
'MapMaker Hybrid Layer': 'MapMaker Hybrid Layer',
'MapMaker Layer': 'MapMaker Layer',
'Maps': 'Maps',
'Marine Security': 'Segurança da marina',
'Marital Status': 'Estado Civil',
'Marker': 'Marcador',
'Marker Details': 'Detalhes do Marcador',
'Marker added': 'Marcador incluído',
'Marker deleted': 'Marcador removido',
'Marker updated': 'Marcador atualizado',
'Markers': 'Marcadores',
'Master': 'Master',
'Master Message Log': 'Mensagem de Log principal',
'Master Message Log to process incoming reports & requests': 'Log de Mensagem Principal para processar relatórios de entrada e pedidos',
'Match Percentage': 'Porcentagem de correspondência',
'Match Requests': 'Corresponder Pedidos',
'Match percentage indicates the % match between these two records': 'Porcentagem idêntica indica a % idêntica entre estes dois registros.',
'Match?': 'Combina?',
'Matching Catalog Items': 'Catálogo de itens correspondentes',
'Matching Items': 'Itens correspondentes',
'Matching Records': 'Registros de correspondência',
'Matrix of Choices (Multiple Answers)': 'Matrix de Opções (Respostas Múltiplas)',
'Matrix of Choices (Only one answer)': 'Matrix de Opções (Apenas uma resposta)',
'Matrix of Text Fields': 'Matriz de campos de texto',
'Max Persons per Dwelling': 'Máx. Pessoas por Habitação',
'Maximum Location Latitude': 'Latitude máxima local',
'Maximum Location Longitude': 'Longitude máxima local',
'Medical and public health': 'Saúde Médica e Pública',
'Medium': 'Médio',
'Megabytes per Month': 'Megabytes por mês',
'Members': 'membros',
'Membership': 'Membresia',
'Membership Details': 'Detalhes de Associação',
'Membership added': 'Associação incluído',
'Membership deleted': 'Associação Excluída',
'Membership updated': 'Associação ATUALIZADO',
'Memberships': 'Parcelas',
'Message': 'message',
'Message Details': 'deatlhes de mesagens',
'Message Variable': 'Mensagem variável',
'Message added': 'Mensagem incluída',
'Message deleted': 'Mensagem Excluída',
'Message field is required!': 'Campo mensagem é obrigatório!',
'Message updated': 'Mensagem atualizada',
'Message variable': 'Mensagem variável',
'Messages': 'mensagens.',
'Messaging': 'sistema de mensagens',
'Messaging settings updated': 'Configurações de mensagens atualizadas',
'Meteorite': 'Meteorito',
'Meteorological (inc. flood)': 'Meteorológico (inc. Enchente)',
'Method used': 'Método utilizado',
'Middle Name': 'Nome do meio',
'Migrants or ethnic minorities': 'Imigrantes ou minorias étnicas',
'Mileage': 'Mileage',
'Military': 'Militares',
'Minimum Bounding Box': 'Caixa Delimitadora Mínima',
'Minimum Location Latitude': 'Mínimo Latitude de Localidade',
'Minimum Location Longitude': 'Longitude de Localização Mínima',
'Minimum shift time is 6 hours': 'tempo mínimo de Shift é de 6 horas',
'Minor Damage': 'Dano secundário',
'Minor/None': 'Secundária/Nenhum',
'Minorities participating in coping activities': 'Minorias participando em atividades de cópia',
'Minute': 'Minuto',
'Minutes must be a number between 0 and 60': 'Minutos devem ser um número entre 0 e 60',
'Minutes per Month': 'Minutos por Mês',
'Minutes should be a number greater than 0 and less than 60': 'Minutos devem ser um número maior que 0 e menor que 60',
'Miscellaneous': 'Variados',
'Missing': 'Perdido',
'Missing Person': 'Pessoa desaparecida',
'Missing Person Details': 'Detalhes da pessoa perdida',
'Missing Person Registry': 'Faltando Registro da Pessoa',
'Missing Person Reports': 'Relatórios da pessoa desaparecida',
'Missing Persons': 'Pessoas desaparecidas',
'Missing Persons Registry': 'Registro de pessoas desaparecidas',
'Missing Persons Report': 'Relatório de pessoas desaparecidas',
'Missing Report': 'Relatório de desaparecimento',
'Missing Senior Citizen': 'Cidadão sênior desaparecido',
'Missing Vulnerable Person': 'Pessoa vulnerável desaparecida',
'Mission Details': 'Detalhes da Missão',
'Mission Record': 'Registro da Missão',
'Mission added': 'Missão incluída',
'Mission deleted': 'Missão excluída',
'Mission updated': 'Missão atualizada',
'Missions': 'Missões',
'Mobile': 'telefone celular',
'Mobile Basic Assessment': 'Taxação básica móvel',
'Mobile Phone': 'Telefone celular',
'Mode': 'modo',
'Model/Type': 'Modelo/Tipo',
'Modem': 'Modem',
'Modem Settings': 'Configurações do Modem',
'Modem settings updated': 'Configurações de modem atualizadas',
'Moderate': 'moderate',
'Moderator': 'moderator',
'Modify Information on groups and individuals': 'Modificar Informações sobre grupos e pessoas',
'Modifying data in spreadsheet before importing it to the database': 'Modificando dados na planilha antes de importá-los para o banco de dados',
'Module': 'Módulo',
'Module disabled!': 'Módulo desativado!',
'Module provides access to information on current Flood Levels.': 'Módulo fornece acesso a informações na atual Onda níveis.',
'Monday': 'segunda-feira',
'Monthly Cost': 'Custo mensal',
'Monthly Salary': 'Salário mensal',
'Months': 'meses',
'Morgue': 'Morgue',
'Morgue Details': 'Morgue Details',
'Morgue Status': 'Situação do necrotério',
'Morgue Units Available': 'Unidades disponíveis no necrotério',
'Morgues': 'Morgues',
'Mosque': 'Mesquita',
'Motorcycle': 'Motocicleta',
'Moustache': 'Bigode',
'MultiPolygon': 'multipolygon',
'Multiple': 'Múltiplos',
'Multiple Choice (Multiple Answers)': 'Múltipla escolha (Várias Respostas)',
'Multiple Choice (Only One Answer)': 'Múltipla Escolha (Apenas uma resposta)',
'Multiple Matches': 'Múltiplas Correspondências',
'Multiple Text Fields': 'Vários campos de texto',
'Muslim': 'Muçulmano',
'Must a location have a parent location?': 'Um local deve ter uma posição pai?',
'My Current function': 'Minha função Atual',
'My Details': 'My Details',
'My Tasks': 'Minhas tarefas',
'My Volunteering': 'My Volunteering',
'N/A': 'n/d',
'NO': 'no',
'NZSEE Level 1': 'NZSEE Nível 1',
'NZSEE Level 2': 'NZSEE Nível 2',
'Name': 'nome',
'Name and/or ID': 'Nome E/OU ID',
'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'O nome do arquivo (& sub OPCIONAL-path) localizado no estáticamente que deve ser utilizado para o segundo plano do Cabeçalho.',
'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': 'Nome do arquivo (e sub-caminho opcional) localizado estático que deveria ser utilizado para a imagem superior esquerda.',
'Name of the file (& optional sub-path) located in views which should be used for footer.': 'Nome do arquivo (e sub-caminho opcional) localizado nas visualizações que deve ser utilizado no rodapé.',
'Name of the person in local language and script (optional).': 'Nome da pessoa no idioma local e script local (opcional).',
'Name or Job Title': 'Nome ou cargo',
'Name, Org and/or ID': 'Nome, organização e/ou ID.',
'Name/Model/Type': 'Nome/Modelo/Tipo',
'Names can be added in multiple languages': 'Nomes podem ser adicionados em múltiplos idiomas',
'National': 'Nacional',
'National ID Card': 'Cartão de ID Nacional',
'National NGO': 'Nacional ONG',
'Nationality': 'Nacionalidade',
'Nationality of the person.': 'Nacionalidade da pessoa.',
'Nautical Accident': 'Acidente Náutico',
'Nautical Hijacking': 'Sequestro Náutico',
'Need Type': 'Precisa de Tipo',
'Need Type Details': 'Tipo precisa de Detalhes',
'Need Type added': 'Precisa de tipo incluído',
'Need Type deleted': 'Precisa de Tipo excluído',
'Need Type updated': 'Tipo de necessidade atualizada',
'Need Types': 'Tipos de necessidade',
"Need a 'url' argument!": "Precisa de um argumento ' url!",
'Need added': 'Necessidade incluída',
'Need deleted': 'Necessidade excluída',
'Need to be logged-in to be able to submit assessments': 'Precisa estar conectado ao programa para conseguir submeter avaliações',
'Need to configure Twitter Authentication': 'Precisa configurar a autenticação do Twitter',
'Need to specify a Budget!': 'É necessário especificar um orçamento!',
'Need to specify a Kit!': 'É necessário especificar um Kit!',
'Need to specify a Resource!': 'É necessário especificar um recurso!',
'Need to specify a bundle!': 'É necessário especificar um pacote!',
'Need to specify a group!': 'É necessário especificar um grupo!',
'Need to specify a location to search for.': 'É necessário especificar um local para procurar.',
'Need to specify a role!': 'Será necessário especificar um papel!',
'Need to specify a table!': 'Será necessário especificar uma tabela!',
'Need to specify a user!': 'Será necessário especificar um usuário!',
'Need updated': 'Precisa de atualização',
'Needs': 'necessidades',
'Needs Details': 'detalhes necessarios',
'Needs Maintenance': 'Necessita Manutenção',
'Needs to reduce vulnerability to violence': 'Necessidade de reduzir a vulnerabilidade à violência.',
'Negative Flow Isolation': 'NEGATIVO Fluxo ISOLAMENTO',
'Neighborhood': 'Bairro',
'Neighbouring building hazard': 'Risco de construção vizinhos',
'Neonatal ICU': 'Neonatal ICU',
'Neonatology': 'Neonatologia',
'Network': 'rede',
'Neurology': 'Neurologia',
'New': 'Novo(a)',
'New Assessment reported from': 'Nova Avaliação relatada a partir de',
'New Certificate': 'Novo Certificado',
'New Checklist': 'Nova Verificação',
'New Entry': 'Nova Entrada',
'New Event': 'Novo Evento',
'New Home': 'New Home',
'New Item Category': 'Nova Categoria de Ítem',
'New Job Role': 'Novo Papel',
'New Location': 'Novo Local',
'New Location Group': 'Novo Grupo de Locais',
'New Patient': 'New Patient',
'New Peer': 'Novo Par',
'New Record': 'Novo Registro',
'New Relative': 'New Relative',
'New Request': 'Nova Requisição',
'New Scenario': 'Novo Cenário',
'New Skill': 'Nova Habilidade',
'New Solution Choice': 'Escolha nova solução',
'New Staff Member': 'Novo membro da equipe',
'New Support Request': 'Novo pedido de suporte',
'New Synchronization Peer': 'Novo par de sincronização',
'New Team': 'Nova equipe',
'New Ticket': 'New Ticket',
'New Training Course': 'Novo Curso de Treinamento',
'New Volunteer': 'Novo Voluntário',
'New cases in the past 24h': 'Novos casos nas últimas 24H',
'News': 'Notícias',
'Next': 'Seguinte',
'No': 'no',
'No Activities Found': 'Não há actividades',
'No Activities currently registered in this event': 'No Activities currently registered in this event',
'No Alternative Items currently registered': 'Nenhum item alternativo atualmente registrado',
'No Assessment Summaries currently registered': 'Nenhum Sumário De Avaliação actualmente registrado',
'No Assessments currently registered': 'Nenhuma Avaliação actualmente registrada',
'No Asset Assignments currently registered': 'Nenhum ativo designado encontra-se atualmente registrado',
'No Assets currently registered': 'Sem Ativos registrados atualmente',
'No Assets currently registered in this event': 'Sem ativos atualmente registrados neste evento',
'No Assets currently registered in this scenario': 'Sem ativos atualmente registrados neste cenário',
'No Baseline Types currently registered': 'Nenhum tipo de base line registrado atualmente',
'No Baselines currently registered': 'Nenhuma linha base registrada atualmente',
'No Brands currently registered': 'Sem Marcas atualmente registrado',
'No Budgets currently registered': 'Nenhum Dos Orçamentos registrados atualmente',
'No Bundles currently registered': 'Nenhum pacote atualmente registrado',
'No Camp Services currently registered': 'Nenhum serviço de acampamento atualmente registrado',
'No Camp Types currently registered': 'Nenhum tipo de acampamento atualmente registrado',
'No Camps currently registered': 'Sem Acampamentos atualmente registrados',
'No Catalog Items currently registered': 'Nenhum itens do catálogo registrado atualmente',
'No Catalogs currently registered': 'Nenhum catálogo atualmente registrado',
'No Checklist available': 'Checklist não disponível',
'No Cluster Subsectors currently registered': 'Nenhum sub-setor de cluster registrado atualmente',
'No Clusters currently registered': 'Nenhum Cluster registrado atualmente',
'No Commitment Items currently registered': 'Nenhum Item de Compromisso registrado atualmente',
'No Commitments': 'Sem Compromissos',
'No Credentials currently set': 'Nenhuma credencial atualmente configurada',
'No Details currently registered': 'Nenhum detalhes registrado atualmente',
'No Documents currently attached to this request': 'No Documents currently attached to this request',
'No Documents found': 'Nenhum Documento encontrado',
'No Donors currently registered': 'Sem doadores registrados atualmente',
'No Events currently registered': 'Não há eventos atualmente registrados',
'No Facilities currently registered in this event': 'Não há Recursos atualmente registrado nesse evento',
'No Facilities currently registered in this scenario': 'Não há recursos atualmente registrados neste cenário',
'No Feature Classes currently defined': 'Nenhuma Classe de Componentes atualmente definidos',
'No Feature Layers currently defined': 'Nenhuma Camada de Componentes atualmente definidos',
'No Flood Reports currently registered': 'Nenhum relatório de Inundação atualmente registrado',
'No GPS data currently registered': 'No GPS data currently registered',
'No Groups currently defined': 'Não há Grupos definidos atualmente',
'No Groups currently registered': 'Nenhum Grupo atualmente registrado',
'No Homes currently registered': 'No Homes currently registered',
'No Hospitals currently registered': 'Nenhum hospital atualmente registrado',
'No Human Resources currently registered in this event': 'Nao há recursos humanos atualmente registrados nesse evento',
'No Human Resources currently registered in this scenario': 'Sem recursos humanos atualmente registrados neste cenário',
'No Identification Report Available': 'Nenhum Relatório de Identificação Disponível',
'No Identities currently registered': 'Nenhuma Identidade atualmente registrada',
'No Image': 'Nenhuma Imagem',
'No Images currently registered': 'Nenhuma Imagem atualmente registrada',
'No Impact Types currently registered': 'Nenhum tipo de impacto atualmente registrado',
'No Impacts currently registered': 'Nenhum Impacto atualmente registrado',
'No Import Files currently uploaded': 'No Import Files currently uploaded',
'No Incident Reports currently registered': 'Nenhum relatório de incidente registrado atualmente',
'No Incoming Shipments': 'Nenhum Embarque de Entrada',
'No Inventories currently have suitable alternative items in stock': 'No Inventories currently have suitable alternative items in stock',
'No Inventories currently have this item in stock': 'No Inventories currently have this item in stock',
'No Inventory Items currently registered': 'Nenhum Item de Inventário registrado atualmente',
'No Item Categories currently registered': 'Nenhuma Categoria de Item atualmente registrada',
'No Item Packs currently registered': 'Nenhum Pacote de Itens atualmente registrado',
'No Items currently registered': 'Nenhum item registrado atualmente',
'No Items currently registered in this Inventory': 'Sem itens registrados atualmente neste inventário',
'No Keys currently defined': 'Nenhuma chave definida no momento',
'No Kits currently registered': 'Nenhum kit registrado no momento',
'No Level 1 Assessments currently registered': 'Nenhuma avaliação nível 1 registrada no momento',
'No Level 2 Assessments currently registered': 'Nenhum nível 2 Avaliações atualmente registrado',
'No Locations currently available': 'Locais Não disponíveis atualmente',
'No Locations currently registered': 'Locais Não registrados atualmente',
'No Map Configurations currently defined': 'Nenhuma configuração de Mapa estão atualmente definidos',
'No Map Configurations currently registered in this event': 'Nenhuma configuração de Mapa esta atualmente registrado nesse evento',
'No Map Configurations currently registered in this scenario': 'Nenhuma configuração de Mapa está atualmente registrado neste cenário',
'No Markers currently available': 'Não há marcadores atualmente disponíveis',
'No Match': 'Sem correspondência',
'No Matching Catalog Items': 'Nenhum Item de Catálogo Correspondente',
'No Matching Items': 'Sem itens correspondentes',
'No Matching Records': 'Sem registros correspondentes',
'No Members currently registered': 'Sem membros registrados atualmente',
'No Memberships currently defined': 'Sem Associações definidas atualmente',
'No Memberships currently registered': 'Sem Associações registradas atualmente',
'No Messages currently in Outbox': 'Nenhuma mensagem na Caixa de saída',
'No Need Types currently registered': 'Sem necessidade, Tipos atualmente registrados',
'No Needs currently registered': 'Sem necessidade, atualmente registrado',
'No Offices currently registered': 'Nenhum Escritório registrado atualmente',
'No Offices found!': 'Menhum Escritório localizado!',
'No Organizations currently registered': 'Número de Organizações atualmente registradas',
'No Packs for Item': 'No Packs for Item',
'No Patients currently registered': 'No Patients currently registered',
'No People currently committed': 'No People currently committed',
'No People currently registered in this camp': 'Nenhuma pessoa registrada atualmente neste campo',
'No People currently registered in this shelter': 'Nenhuma pessoa registrada atualmente neste abrigo',
'No Persons currently registered': 'Nenhuma pessoa atualmente registrada',
'No Persons currently reported missing': 'nenhuma pessoa reportada atualmente como perdida',
'No Persons found': 'Nenhuma pessoa localizada',
'No Photos found': 'Nenhuma Foto localizada',
'No Picture': 'Nenhuma imagem',
'No Population Statistics currently registered': 'Nenhuma estatística populacional atualmente registrada',
'No Presence Log Entries currently registered': 'Nenhuma entrada no log Presença atualmente registrado',
'No Problems currently defined': 'Nenhum Problema atualmente definido',
'No Projections currently defined': 'Nenhuma projeção atualmente definida',
'No Projects currently registered': 'Nenhum projeto atualmente registrado',
'No Rapid Assessments currently registered': 'Nenhuma Tributação Rápida atualmente registrada',
'No Ratings for Skill Type': 'No Ratings for Skill Type',
'No Received Items currently registered': 'Nenhum item recebido atualmente registrado',
'No Received Shipments': 'Entregas/Despachos não recebidos',
'No Records currently available': 'Registros atualmente não disponíveis',
'No Relatives currently registered': 'No Relatives currently registered',
'No Request Items currently registered': 'Não há items de Pedidos registados',
'No Requests': 'Não há pedidos',
'No Rivers currently registered': 'Não Rios atualmente registrado',
'No Roles currently defined': 'Nenhumas funções atualmente definidas',
'No Rooms currently registered': 'Nenhuma sala atualmente registrada',
'No Scenarios currently registered': 'Nenhum cenário atualmente registrado',
'No Sections currently registered': 'Sem seções atualmente registradas',
'No Sectors currently registered': 'setores nao atualmente registrados',
'No Sent Items currently registered': 'Nenhum item Enviado atualmente registrado',
'No Sent Shipments': 'Nenhum carregamento enviado',
'No Settings currently defined': 'configuraçoes atualmente nao definida',
'No Shelter Services currently registered': 'nenhum serviço de abrigo atualmente registrado',
'No Shelter Types currently registered': 'Nenhum tipo de abrigo registrado atualmente',
'No Shelters currently registered': 'abrigos atualmente nao registrados',
'No Skills currently requested': 'No Skills currently requested',
'No Solutions currently defined': 'Sem Soluções actualmente definidas',
'No Staff Types currently registered': 'Sem Tipos de Funcionários actualmente registrados',
'No Staff currently registered': 'Sem Funcionários actualmente registrados',
'No Subscription available': 'Nenhuma assinatura disponível',
'No Subsectors currently registered': 'Nenhum sub setor atualmente registrado',
'No Support Requests currently registered': 'Nenhum suporte a pedido atualmente registrado',
'No Survey Answers currently entered.': 'Nenhuma resposta de pesquisa atualmente inscrita.',
'No Survey Answers currently registered': 'Nenhuma resposta a pesquisa atualmente registrada',
'No Survey Questions currently registered': 'Nenhuma pergunta de pesquisa atualmente registrada',
'No Survey Sections currently registered': 'Nenhuma seção de pesquisa atualmente registrada',
'No Survey Series currently registered': 'Nenhuma série de pesquisa atualmente registrada',
'No Survey Template currently registered': 'Nenhum Modelo de Pesquisa atualmente registrado',
'No Tasks currently registered in this event': 'No Tasks currently registered in this event',
'No Tasks currently registered in this scenario': 'No Tasks currently registered in this scenario',
'No Tasks with Location Data': 'Nenhuma tarefa com local de dados',
'No Teams currently registered': 'Nenhuma equipe atualmente registrada',
'No Themes currently defined': 'Nenhum Tema atualmente definido',
'No Tickets currently registered': 'Sem ingressos atualmente registrados',
'No Tracks currently available': 'nenhum rastreamento atualmente disponível',
'No Users currently registered': 'Nenhum Usuário actualmente registrado',
'No Vehicle Details currently defined': 'No Vehicle Details currently defined',
'No Vehicles currently registered': 'No Vehicles currently registered',
'No Volunteers currently registered': 'Nenhum Voluntário actualmente registrado',
'No Warehouses currently registered': 'Nenhum Armazém actualmente registrado',
'No access at all': 'Nenhum acesso',
'No access to this record!': 'Não há acesso a esta entrada!',
'No action recommended': 'Nenhuma acção recomendada',
'No conflicts logged': 'Nenhum conflito registrado',
'No contact information available': 'Nenhuma informações de contato disponível',
'No contact method found': 'No contact method found',
'No contacts currently registered': 'Nenhum contato atualmente registrado',
'No data in this table - cannot create PDF!': 'Nenhum dado nesta tabela- PDF não pode ser criado!',
'No databases in this application': 'Nenhum banco de dados neste aplicativo',
'No dead body reports available': 'Nenhum relatório de óbito disponível',
'No entries found': 'Nenhum artigo encontrado',
'No entries matching the query': 'Nenhuma entrada correspondente a consulta',
'No entry available': 'Nenhuma entrada disponível',
'No forms to the corresponding resource have been downloaded yet.': 'No forms to the corresponding resource have been downloaded yet.',
'No location known for this person': 'Nenhum local conhecido para essa pessoa',
'No locations found for members of this team': 'Locais não localizado para membros deste equipe',
'No log entries matching the query': 'Nenhuma entrada de log correspondente a consulta',
'No match': 'No match',
'No matching records found': 'No matching records found',
'No messages in the system': 'Nenhuma mensagem no sistema',
'No notes available': 'Notas não disponíveis',
'No peers currently registered': 'Não há pares registrados atualmente',
'No pending registrations found': 'Não foram encontrados registros pendentes',
'No pending registrations matching the query': 'Não foram encontrados registros pendentes correspondentes à consulta efetuada',
'No person record found for current user.': 'Nenhum registro de pessoa localizado para o usuário atual.',
'No problem group defined yet': 'Nenhum grupo problema definido ainda',
'No records matching the query': 'Sem registros correspondentes a consulta',
'No report available.': 'Nenhum Relatório disponível.',
'No reports available.': 'Não há relatórios disponíveis.',
'No reports currently available': 'Não há relatórios disponíveis actualmente',
'No requests found': 'Não foram encontrados pedidos',
'No resources currently reported': 'Recursos não reportados actualmente',
'No service profile available': 'Nenhum perfil de serviço disponível',
'No skills currently set': 'Não há habilidades atualmente configuradas',
'No staff members currently registered': 'Nenhum membro da equipe atualmente registrado',
'No staff or volunteers currently registered': 'Nenhum funcionário ou voluntário atualmente registrado',
'No status information available': 'Nenhuma informação de status disponível',
'No synchronization': 'Sem Sincronização',
'No tasks currently assigned': 'No tasks currently assigned',
'No tasks currently registered': 'Nenhuma tarefa atualmente registrada',
'No template found!': 'Nenhum modelo localizado!',
'No units currently registered': 'Nenhuma unidade actualmente registrada',
'No volunteer availability registered': 'Sem disponibilidade de voluntário registrada',
'No volunteers currently registered': 'Nenhum Voluntário actualmente registrado',
'Non-structural Hazards': 'Riscos não-estruturais',
'None': 'Nenhum',
'None (no such record)': 'Nenhum (sem registro )',
'Noodles': 'Macarrão',
'Normal': 'Normal',
'Not Applicable': 'Não se aplica',
'Not Authorised!': 'Não Autorizado!',
'Not Possible': 'Impossível',
'Not Set': 'não configurado',
'Not authorised!': 'Não autorizado!',
'Not installed or incorrectly configured.': 'Não instalado ou Configurado Incorretamente.',
'Note': 'Nota',
'Note Details': 'Detalhes da Nota',
'Note Status': 'Status da Nota',
'Note Type': 'Tipo de nota',
'Note added': 'Nota Incluída',
'Note deleted': 'NOTA Excluída',
'Note that this list only shows active volunteers. To see all people registered in the system, search from this screen instead': 'Observer que essa lista mostra apenas voluntários ativos. Para ver todas as pessoas registradas no sistema, procure a partir deste ecrã em vez de',
'Note updated': 'Nota atualizada',
'Notes': 'Observações',
'Notice to Airmen': 'Aviso ao piloto',
'Number': 'número',
'Number of Columns': 'Número de colunas',
'Number of Patients': 'Número de Pacientes',
'Number of People Required': 'Number of People Required',
'Number of Rows': 'Número de Linhas',
'Number of additional beds of that type expected to become available in this unit within the next 24 hours.': 'Número de camas adicionais de tipo esperado tornar disponível nesta unidade nas próximas 24 horas.',
'Number of alternative places for studying': 'Número de locais alternativos para estudar',
'Number of available/vacant beds of that type in this unit at the time of reporting.': 'Número de camas disponíveis/livre desse tipo nesta unidade no momento do relatório.',
'Number of bodies found': 'Number of bodies found',
'Number of deaths during the past 24 hours.': 'Número de mortes durante as últimas 24 horas.',
'Number of discharged patients during the past 24 hours.': 'Número de pacientes Descarregados durante as últimas 24 horas.',
'Number of doctors': 'Número de médicos',
'Number of in-patients at the time of reporting.': 'Número de pacientes internos na hora do relatório.',
'Number of newly admitted patients during the past 24 hours.': 'Número de pacientes admitidos durante as últimas 24 horas.',
'Number of non-medical staff': 'Número de funcionários não-médico',
'Number of nurses': 'Número de enfermeiras',
'Number of private schools': 'Número de escolas privadas',
'Number of public schools': 'Número de escolas públicas',
'Number of religious schools': 'Número de escolas religiosas',
'Number of residential units': 'Número de unidades residenciais',
'Number of residential units not habitable': 'Unidades de número residencial não habitáveis',
'Number of vacant/available beds in this hospital. Automatically updated from daily reports.': 'Número de leitos vagos/disponíveis nesse hospital. Atualizado automaticamente a partir de relatórios diários.',
'Number of vacant/available units to which victims can be transported immediately.': 'Número de unidades vagas/disponíveis em que vítimas podem ser transportadas imediatamente.',
'Number or Label on the identification tag this person is wearing (if any).': 'Número ou código na etiqueta de identificação que a pessoa está usando (se houver).',
'Number or code used to mark the place of find, e.g. flag code, grid coordinates, site reference number or similar (if available)': 'Número ou código utilizado para marcar o local de localização, por exemplo, código de bandeira, grade de coordenadas, número de referência do site ou similar (se disponível)',
'Number/Percentage of affected population that is Female & Aged 0-5': 'Número/percentagem da população afetada que é uma mulher entre 0 e 5 anos',
'Number/Percentage of affected population that is Female & Aged 13-17': 'Número/percentagem da população afetadas do sexo feminino entre 13 e 17 anos',
'Number/Percentage of affected population that is Female & Aged 18-25': 'Número/percentagem da população afetada que é Mulher com 18-25 anos',
'Number/Percentage of affected population that is Female & Aged 26-60': 'Número/percentagem da população afetada que é Mulher com 26-60 anos',
'Number/Percentage of affected population that is Female & Aged 6-12': 'Número/percentagem da população afetada que é Mulher com 6-12 anos',
'Number/Percentage of affected population that is Female & Aged 61+': 'Número/percentagem da população afetada que é Mulher > 61 anos',
'Number/Percentage of affected population that is Male & Aged 0-5': 'Número/percentagem da população afetada que é Homem com 0-5 anos',
'Number/Percentage of affected population that is Male & Aged 13-17': 'Número/percentagem da população afetada que é Homem com 13-17 anos',
'Number/Percentage of affected population that is Male & Aged 18-25': 'Número/percentagem da população afetada que é Homem com 18-25 anos',
'Number/Percentage of affected population that is Male & Aged 26-60': 'Número/percentagem de população afetada que é do sexo masculino & Idade 26-60',
'Number/Percentage of affected population that is Male & Aged 6-12': 'Número/percentagem de população afectada que é do sexo masculino & Idade 6-12',
'Number/Percentage of affected population that is Male & Aged 61+': 'Número/percentagem da população afetada que é do sexo masculino & Idade 61+',
'Nursery Beds': 'Camas de berçario',
'Nutrition': 'Nutrição',
'Nutrition problems': 'Problemas nutricionais',
'OK': 'OK',
'OR Reason': 'Ou Razão',
'OR Status': 'Ou Status',
'OR Status Reason': 'Ou razão do status',
'OR a site OR a location': 'OU um site OU um local',
'Observer': 'observador',
'Obsolete': 'Obsoleto',
'Obstetrics/Gynecology': 'Obstetrícia/Ginecologia',
'Office': 'escritório',
'Office Address': 'Endereço do escritório',
'Office Details': 'Detalhes do Escritório.',
'Office Phone': 'Telefone do escritório',
'Office added': 'Escritório incluído',
'Office deleted': 'Escritório excluído',
'Office updated': 'Escritório atualizado',
'Offices': 'Escritórios',
'Offices & Warehouses': 'Escritórios & Armazéns',
'Offline Sync': 'Sincronização desconectada.',
'Offline Sync (from USB/File Backup)': 'Off-line (Sync a partir do USB/arquivo de Backup)',
'Older people as primary caregivers of children': 'Pessoas mais velhas como responsáveis primárias de crianças',
'Older people in care homes': 'Pessoas mais velhas em casas de cuidados',
'Older people participating in coping activities': 'Pessoas mais antigos participantes em lidar atividades',
'Older person (>60 yrs)': 'Idosos (>60 anos)',
'On by default?': 'Por padrão?',
'On by default? (only applicable to Overlays)': 'Por padrão? (apenas aplicável para Sobreposições)',
'One Time Cost': 'Custo Único',
'One time cost': 'Custo único',
'One-time': 'Único',
'One-time costs': 'Custos únicos',
'Oops! Something went wrong...': 'Oops! Algo deu errado...',
'Oops! something went wrong on our side.': 'Oops! algo deu errado do nosso lado.',
'Opacity (1 for opaque, 0 for fully-transparent)': 'Opacidade (1 para opaco, 0 para totalmente transparente)',
'Open': 'Abrir',
'Open area': 'Abrir área',
'Open recent': 'Abrir recente',
'Operating Rooms': 'Salas operacionais',
'Optional': 'Optional',
'Optional Subject to put into Email - can be used as a Security Password by the service provider': 'Optional Subject to put into Email - can be used as a Security Password by the service provider',
'Optional link to an Incident which this Assessment was triggered by.': 'Link opcional para um incidente que esta avaliação foi desencadeada por.',
'Optional selection of a MapServer map.': 'Optional selection of a MapServer map.',
'Optional selection of a background color.': 'Optional selection of a background color.',
'Optional selection of an alternate style.': 'Optional selection of an alternate style.',
'Optional. If you wish to style the features based on values of an attribute, select the attribute to use here.': 'opcional Se você desejar apresenta o estilo com base nos valores de um atributo, Selecione o atributo a ser utilizado aqui.',
'Optional. In GeoServer, this is the Workspace Namespace URI (not the name!). Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'opcional Em GeoServer, esta é a área de trabalho Namespace URI (não o nome!). Dentro do getCapabilities WFS, este é parte do nome FeatureType antes dos dois pontos (:).',
'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'optional. Em GeoServer, este é o espaço de Nomes URI. No getCapabilities WFS, este é o nome da parte FeatureType antes de os dois pontos (:).',
'Optional. The name of an element whose contents should be a URL of an Image file put into Popups.': 'opcional O nome de um elemento cujo conteúdo deve ser uma URL de um arquivo de imagem para Popups.',
'Optional. The name of an element whose contents should be put into Popups.': 'opcional O nome de um elemento cujo conteúdo deve ser adicionado em Popups.',
"Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": "opcional O nome da coluna de geometria. Em PostGIS padroniza para 'the_geom'.",
'Optional. The name of the schema. In Geoserver this has the form http://host_name/geoserver/wfs/DescribeFeatureType?version=1.1.0&;typename=workspace_name:layer_name.': 'opcional O nome do esquema. Em Geoserver isto tem o formato http://host_name/geoserver/wfs/DescribeFeatureType?version=1.1.0&;typename=workspace_name:layer_name.',
'Options': 'opções',
'Organisation': 'Organização',
'Organization': 'Organização',
'Organization Details': 'Detalhes da Organização',
'Organization Registry': 'Registro de Organização',
'Organization added': 'Organização incluída',
'Organization deleted': 'Organização excluída',
'Organization updated': 'Organização atualizada',
'Organizations': 'Organizações',
'Origin': 'Origem',
'Origin of the separated children': 'Origem das crianças separadas',
'Other': 'outro',
'Other (describe)': 'Outros (descreva)',
'Other (specify)': 'Outros motivos (especifique)',
'Other Evidence': 'outras evidencias',
'Other Faucet/Piped Water': 'Outras Torneiras /Agua Encanada',
'Other Isolation': 'Outro Isolamento',
'Other Name': 'outro nome',
'Other activities of boys 13-17yrs': 'Outras atividades de garotos 13-17anos',
'Other activities of boys 13-17yrs before disaster': 'Outras atividades de garotos 17-13anos antes do desastre',
'Other activities of boys <12yrs': 'Outras atividades de garotos <12 anos',
'Other activities of boys <12yrs before disaster': 'Outras atividades de garotos <12anos antes do desastre',
'Other activities of girls 13-17yrs': 'Outras atividades de meninas 13-17anos',
'Other activities of girls 13-17yrs before disaster': 'Outras atividades de meninas 13-17anos antes do desastre',
'Other activities of girls<12yrs': 'Outras atividades de garotas<12anos',
'Other activities of girls<12yrs before disaster': 'Outras atividades de garotas<12anos antes do desastre',
'Other alternative infant nutrition in use': 'Nutrição infantil alternativa em uso',
'Other alternative places for study': 'Outros locais alternativos para estudo',
'Other assistance needed': 'Outra assistência necessária',
'Other assistance, Rank': 'Outra assistência, Número',
'Other current health problems, adults': 'Outros problemas actuais de saúde, adultos',
'Other current health problems, children': 'Outros problemas actuais de saúde, crianças',
'Other events': 'outros eventos',
'Other factors affecting school attendance': 'Outros fatores que afetam a frequencia escolar',
'Other major expenses': 'outras despesas importantes',
'Other non-food items': 'Outros itens não alimentícios',
'Other recommendations': 'Outras recomendações',
'Other residential': 'Outros residentes',
'Other school assistance received': 'Assistência de outra escola recebida',
'Other school assistance, details': 'Assistência de outra escola, detalhes',
'Other school assistance, source': 'Assistência de outra escola, origem',
'Other settings can only be set by editing a file on the server': 'Outras configurações só podem ser definidas editando um arquivo no servidor',
'Other side dishes in stock': 'Pratos outro lado em ações',
'Other types of water storage containers': 'Outros tipos de recipientes de armazenamento de água',
'Other ways to obtain food': 'Outras maneiras de obter alimentos',
'Outbound Mail settings are configured in models/000_config.py.': 'Definições de correio de saída são configurados em modelos/000_config..py',
'Outbox': 'Caixa de Saída',
'Outgoing SMS Handler': 'Saída do Manipulador SMS',
'Outgoing SMS handler': 'Manipulador de SMS de saída',
'Overall Hazards': 'Riscos gerais',
'Overhead falling hazard': 'Risco de queda sobrecarga',
'Overland Flow Flood': 'Inundação por Escoamento Superficial',
'Owned Resources': 'Recursos Próprios',
'PAHO UID': 'OPS UID',
'PDAM': 'PDAM',
'PDF File': 'PDF File',
'PIN': 'PIN',
'PIN number': 'Número do PIN',
'PIN number ': 'PIN number ',
'PL Women': 'Mulheres PL',
'Pack': 'Pacote',
'Packs': 'Pacotes',
'Page': 'Page',
'Parameters': 'Parâmetros de Monitoramento',
'Parapets, ornamentation': 'Passarelas, ornamentação',
'Parent': 'parent',
'Parent Office': 'Escritório Principal',
"Parent level should be higher than this record's level. Parent level is": 'Nível dos pais deve ser maior que o nível do registro. Nível do Pai é',
'Parent needs to be of the correct level': 'Pai precisa ser do nível correto',
'Parent needs to be set': 'Principal precisa ser configurado',
'Parent needs to be set for locations of level': 'Principal precisa ser configurado para locais de nível',
'Parents/Caregivers missing children': 'Pais/cuidadores de crianças desaparecidas',
'Parking Area': 'Parking Area',
'Partial': 'Parcial',
'Participant': 'Participante',
'Pashto': 'Pachto',
'Pass': 'Passou',
'Passport': 'passaporte',
'Password': 'senha',
"Password fields don't match": 'Os campos de senha não são iguais.',
'Path': 'Caminho',
'Pathology': 'Patologia',
'Patient': 'Patient',
'Patient Details': 'Patient Details',
'Patient Tracking': 'Patient Tracking',
'Patient added': 'Patient added',
'Patient deleted': 'Patient deleted',
'Patient updated': 'Patient updated',
'Patients': 'Pacientes',
'Pediatric ICU': 'UTI Pediatrica',
'Pediatric Psychiatric': 'Psiquiátrico Pediátra',
'Pediatrics': 'Pediatria',
'Peer': 'Membro',
'Peer Details': 'Detalhes do Membro',
'Peer Registration': 'Registro de par',
'Peer Registration Details': 'Detalhes de Registro do Par',
'Peer Registration Request': 'Requerido Registro do Par',
'Peer Type': 'Por Tipo',
'Peer UID': 'Por UID',
'Peer added': 'Membro adicionado',
'Peer deleted': 'Membro excluído',
'Peer not allowed to push': 'Peer não permitido para envio',
'Peer registration request added': 'Registro Requerido do Par adicionado',
'Peer registration request deleted': 'Registro requerido do par excluído',
'Peer registration request updated': 'Registro requerido do par atualizado',
'Peer updated': 'PAR ATUALIZADO',
'Peers': 'Pares',
'Pending': 'pendente',
'Pending Requests': 'PEDIDOS PENDENTES',
'People': 'pessoas',
'People Needing Food': 'Pessoas precisando de alimento',
'People Needing Shelter': 'Pessoas precisando de abrigo',
'People Needing Water': 'Pessoas precisando de água',
'People Trapped': 'Pessoas presas',
'Performance Rating': 'Classificação da Performance',
'Person': 'pessoa',
'Person 1': 'Pessoa 1',
'Person 1, Person 2 are the potentially duplicate records': 'Pessoa 1, Pessoa 2 são os registros potencialmente duplicados',
'Person 2': 'Pessoa 2',
'Person De-duplicator': 'Anti-duplicador de Pessoas',
'Person Details': 'Detalhes Pessoais',
'Person Finder': 'Buscador de pessoas',
'Person Registry': 'Registro De Pessoa',
'Person added': 'Pessoa Incluída',
'Person added to Commitment': 'Person added to Commitment',
'Person deleted': 'Pessoa removida',
'Person details updated': 'Detalhes pessoais actualizados',
'Person interviewed': 'Pessoa entrevistada',
'Person missing': 'Pessoa perdida',
'Person must be specified!': 'Person must be specified!',
'Person removed from Commitment': 'Person removed from Commitment',
'Person reporting': 'Pessoa relatando',
'Person who has actually seen the person/group.': 'Pessoa que tenha realmente visto a pessoa/Grupo.',
'Person/Group': 'Pessoa/Grupo',
'Personal': 'Pessoal',
'Personal Data': 'Dados pessoais',
'Personal Effects': 'Efeitos pessoal',
'Personal Effects Details': 'Detalhes dos Efeitos Pessoais',
'Personal Map': 'Mapa De Pessoal',
'Personal Profile': 'Perfil pessoal',
'Personal impact of disaster': 'Impacto de desastre pessoal',
'Persons': 'Pessoas',
'Persons in institutions': 'Pessoas em instituições',
'Persons with disability (mental)': 'Pessoas com deficiência (mental)',
'Persons with disability (physical)': 'Pessoas com deficiência (física)',
'Phone': 'telefone',
'Phone 1': 'Telefone 1',
'Phone 2': 'Telefone 2',
"Phone number to donate to this organization's relief efforts.": 'Número de telefone para doar ao serviço de assistência social desta organização',
'Phone/Business': 'Telefone comercial',
'Phone/Emergency': 'Telefone de emergência',
'Phone/Exchange': 'Telefone/Exchange',
'Phone/Exchange (Switchboard)': 'Telefone/Câmbio (Central)',
'Photo': 'foto',
'Photo Details': 'Foto com detalhes',
'Photo Taken?': 'Foto tomada?',
'Photo added': 'Foto incluída',
'Photo deleted': 'Foto excluída',
'Photo updated': 'Foto atualizada',
'Photograph': 'Fotografia',
'Photos': 'Fotos',
'Physical Description': 'Descrição física',
'Physical Safety': 'Segurança Física',
'Picture': 'Imagem',
'Picture upload and finger print upload facility': 'Fazer upload de imagem e impressão dedo upload facility',
'Place': 'Local',
'Place of Recovery': 'Local de recuperação',
'Places for defecation': 'Locais para a defecação',
'Places the children have been sent to': 'Lugares que as crianças foram enviadas para',
'Planner': 'Planejador',
'Playing': 'Reproduzindo',
"Please come back after sometime if that doesn't help.": 'Por favor, volte após algum tempo se isso não ajuda.',
'Please correct all errors.': 'Por favor CORRIJA todos os erros.',
'Please enter a First Name': 'Por favor insira um primeiro nome',
'Please enter a first name': 'Por favor insira um primeiro nome',
'Please enter a number only': 'Please enter a number only',
'Please enter a person': 'Insira uma pessoa',
'Please enter a site OR a location': 'Por favor digite um site ou um local',
'Please enter a valid email address': 'Please enter a valid email address',
'Please enter the first few letters of the Person/Group for the autocomplete.': 'Por favor Digite as primeiras letras do Pessoa/Grupo para o AutoCompletar.',
'Please enter the recipient': 'Por favor Digite o destinatário',
'Please fill this!': 'Por favor preencha isso!',
'Please give an estimated figure about how many bodies have been found.': 'Please give an estimated figure about how many bodies have been found.',
'Please provide the URL of the page you are referring to, a description of what you expected to happen & what actually happened.': 'Por favor Forneça a URL da página que você está fazendo referência à, uma descrição do que você esperava que acontecesse & O que realmente aconteceu.',
'Please provide the URL of the page you are referring to, a description of what you expected to happen & what actually happened. If a ticket was issued then please provide the Ticket ID.': 'Por favor Forneça a URL da página que você está fazendo referência à, uma descrição do que você esperava que acontecesse & O que realmente aconteceu. Se um bilhete foi emitido então por favor forneça o ID do bilhete.',
'Please report here where you are:': 'Por favor informe aqui onde você está:',
'Please select': 'Por favor Selecione',
'Please select another level': 'Por favor selecione outro nível',
'Please sign-up with your Cell Phone as this allows us to send you Text messages. Please include full Area code.': 'Por favor inscrever-se com seu celular como isso nos permite lhe enviar mensagens de texto. Por favor inclua código de Área total.',
'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.': 'Por favor especifique quaisquer problemas e obstáculos com a manipulação correcta da doença, em detalhes (em números, se for o caso). Pode também dar sugestões - a situação pode ser melhorada.',
'Please use this field to record any additional information, including a history of the record if it is updated.': 'Por favor utilize esse campo para registrar quaisquer informações adicionais, incluindo um histórico do registro se ele estiver sendo atualizado.',
'Please use this field to record any additional information, including any Special Needs.': 'Por favor utilize esse campo para registrar quaisquer informações adicionais, incluindo quaisquer necessidades especiais.',
'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.': 'Por favor utilize esse campo para registrar quaisquer informações adicionais, como IDs de instância Ushahidi. Incluir o histórico do registo se este fôr actualizado.',
'Pledge Support': 'Suporte da promessa',
'Point': 'Ponto',
'Poisoning': 'Envenenamento',
'Poisonous Gas': 'Gás venenoso',
'Police': 'Polícia',
'Pollution and other environmental': 'Poluição ambiental e outras',
'Polygon': 'Polígono',
'Polygon reference of the rating unit': 'Polígono de referência da unidade de classificação',
'Poor': 'Pobre',
'Population': 'População',
'Population Statistic Details': 'Detalhes da Estatística de População',
'Population Statistic added': 'Estatística de População incluída',
'Population Statistic deleted': 'Estatística de População excluída',
'Population Statistic updated': 'Estatística de População atualizada',
'Population Statistics': 'Estatísticas De população',
'Population and number of households': 'população e número de residentes',
'Popup Fields': 'Pop-up Campos',
'Popup Label': 'Rótulo do pop-up',
'Porridge': 'mingau',
'Port': 'porta',
'Port Closure': 'Fechamento do Porto',
'Portuguese': 'Português',
'Portuguese (Brazil)': 'Português (Brasil)',
'Position': 'Posição',
'Position Catalog': 'Catálogo de posições',
'Position Details': 'detalhamento do cargo',
'Position added': 'Cargo inserido',
'Position deleted': 'Cargo excluído',
'Position updated': 'Posição atualizada',
'Positions': 'cargos',
'Postcode': 'Código Postal',
'Poultry': 'Aves',
'Poultry restocking, Rank': 'Reabastecimento de aves domésticas, posição',
'Pounds': 'Libras',
'Power Failure': 'Falha de Energia',
'Powered by Sahana Eden': 'Desenvolvido pela Sahana Eden',
'Pre-cast connections': 'Conexões-cast pré',
'Preferred Name': 'Nome Preferido',
'Pregnant women': 'Mulheres grávidas',
'Preliminary': 'Preliminar',
'Presence': 'Presença',
'Presence Condition': 'Condição de Presença',
'Presence Log': 'Log de Presença',
'Previous': 'Anterior',
'Primary Name': 'Nome Principal',
'Primary Occupancy': 'Principal Ocupação',
'Priority': 'priority',
'Priority from 1 to 9. 1 is most preferred.': 'Prioridade de 1 a 9. 1 é preferível',
'Private': 'Privado',
'Problem': 'Problema do',
'Problem Administration': 'Gestão de problema',
'Problem Details': 'Detalhes do Problema',
'Problem Group': 'Grupo do Problema',
'Problem Title': 'Título do Problema',
'Problem added': 'Problema incluído',
'Problem connecting to twitter.com - please refresh': 'Problema ao conectar-se ao twitter.com, tente novamente',
'Problem deleted': 'Problema Excluído',
'Problem updated': 'Problema Atualizado',
'Problems': 'Problemas',
'Procedure': 'Procedimento',
'Process Received Shipment': 'Processo recebeu embarque',
'Process Shipment to Send': 'Processar remessa a enviar',
'Profile': 'profile',
'Project': 'projeto',
'Project Details': 'Detalhes do Projeto',
'Project Status': 'Status do Projeto',
'Project Tracking': 'Acompanhamento do Projeto',
'Project added': 'Projeto incluído',
'Project deleted': 'Projeto Excluído',
'Project has no Lat/Lon': 'Projeto não possui Latitude/Longitude',
'Project updated': 'Projeto ATUALIZADO',
'Projection': 'Projeção',
'Projection Details': 'Detalhes da Projeção',
'Projection Type': 'Projection Type',
'Projection added': 'Projeção incluída',
'Projection deleted': 'Projeção excluída',
'Projection updated': 'Projecção atualizada',
'Projections': 'projeções',
'Projects': 'projetos',
'Property reference in the council system': 'Referência de propriedade no sistema do conselho',
'Protected resource': 'Recurso protegido',
'Protection': 'Protecção',
'Provide Metadata for your media files': 'Fornecer Metadados para os seus ficheiros media',
'Provide a password': 'Provide a password',
'Provide an optional sketch of the entire building or damage points. Indicate damage points.': 'Fornecer um retrato opcional de todo o edifício ou áreas danificadas. Pontos danos indicar.',
'Proxy-server': 'Servidor Proxy',
'Psychiatrics/Adult': 'Psiquiatras/Adulto',
'Psychiatrics/Pediatric': 'Psiquiatras/Pediátrica',
'Public': 'Público',
'Public Event': 'Evento público',
'Public and private transportation': 'Transporte Público e Privado',
'Public assembly': 'Assembléia Pública',
'Pull tickets from external feed': 'Obter tickets de feed externo',
'Punjabi': 'Punjabi',
'Purchase Date': 'Data de aquisição',
'Push tickets to external system': 'Enviar tickets para sistema externo',
'Pyroclastic Flow': 'Fluxo Piroclástico',
'Pyroclastic Surge': 'Onda Piroclástica',
'Python Serial module not available within the running Python - this needs installing to activate the Modem': 'Módulo Serial Python não disponíveis no a execução Python-isto tem de instalar para ativar o Modem',
'Quantity': 'Quantidade',
'Quantity Committed': 'Quantidade Comprometida',
'Quantity Fulfilled': 'Quantidade Preenchida',
"Quantity in %s's Inventory": 'Quantidade de %s do Inventário',
'Quantity in Transit': 'Quantidade em Trânsito',
'Quarantine': 'Quarentena',
'Queries': 'Buscas',
'Query': 'Busca',
'Queryable?': 'Consultável?',
'RC frame with masonry infill': 'Quadro de RC com aterros de alvenaria',
'RECORD A': 'Registro A',
'RECORD B': 'REGISTRO B',
'Race': 'Raça',
'Radio': 'Radio',
'Radio Callsign': 'Rádio Chamada',
'Radio Details': 'Radio Details',
'Radiological Hazard': 'Risco Radiológico',
'Radiology': 'Radiologia',
'Railway Accident': 'Acidente Ferroviário',
'Railway Hijacking': 'Sequestro Ferroviário',
'Rain Fall': 'Queda de Chuva',
'Rapid Assessment': 'Avaliação Rápida',
'Rapid Assessment Details': 'Detalhes da Avaliação Rápida',
'Rapid Assessment added': 'Avaliação Rápida incluída',
'Rapid Assessment deleted': 'Avaliação Rápida excluída',
'Rapid Assessment updated': 'Avaliação Rápida atualizada',
'Rapid Assessments': 'Avaliações Rápidas',
'Rapid Assessments & Flexible Impact Assessments': 'Avaliações Rápidas & Avaliações de Impacto Flexíveis',
'Rapid Close Lead': 'Fechamento Lead rápido',
'Rapid Data Entry': 'Entrada de dados rápida',
'Rating Scale': 'Escala de avaliação',
'Raw Database access': 'Acesso bruto a Base de dados',
'Read-Only': 'somente para leitura',
'Read-only': 'somente para leitura',
'Receive': 'Receber',
'Receive Items': 'Aceitar itens',
'Receive New Shipment': 'Receber Novos Embarques',
'Receive Shipment': 'Receber carregamento',
'Receive this shipment?': 'Receber esse embarque?',
'Received': 'Recebido',
'Received By': 'Recebido Por',
'Received By Person': 'Recebido Por Pessoa',
'Received Item Details': 'Detalhes do item recebido',
'Received Item deleted': 'Recebido item excluído',
'Received Item updated': 'Item recebido atualizado',
'Received Shipment Details': 'Lista de remessa de mercadorias/produtos',
'Received Shipment canceled': 'Remessa de produtos cancelada',
'Received Shipment canceled and items removed from Inventory': 'Recebido carregamento cancelado e itens removidos do inventário',
'Received Shipment updated': 'Carregamento Recebido Atualizado',
'Received Shipments': 'Carregamento de produtos recebido',
'Receiving and Sending Items': 'Receber e enviar Itens',
'Recipient': 'destinatário',
'Recipients': 'destinatários',
'Recommendations for Repair and Reconstruction or Demolition': 'Recomendações para reparo e reconstrução ou demolição',
'Record': 'registro',
'Record Details': 'Detalhes do Registro',
'Record Saved': 'Registro Gravado',
'Record added': 'Registro incluído',
'Record any restriction on use or entry': 'Registro de qualquer restrição à utilização ou entrada',
'Record deleted': 'Registro excluído',
'Record last updated': 'Último registro atualizado',
'Record not found': 'Registro não encontrado',
'Record not found!': 'Registro não encontrado!',
'Record updated': 'registro atualizado',
'Recording and Assigning Assets': 'Ativos de Gravação e Designação',
'Records': 'Registros',
'Recovery': 'recuperação',
'Recovery Request': 'pedido de recuperação',
'Recovery Request added': 'Pedido de recuperação adicionado',
'Recovery Request deleted': 'Pedido de recuperação apagado',
'Recovery Request updated': 'Pedido de recuperação atualizado',
'Recovery Requests': 'Pedidos de recuperação',
'Recruitment': 'Recrutamento',
'Recurring': 'Recorrente',
'Recurring Cost': 'Custo recorrente',
'Recurring cost': 'Custo recorrente',
'Recurring costs': 'Custos recorrentes',
'Red': 'vermelho',
'Red Cross / Red Crescent': 'Cruz Vermelha / Crescente Vermelho',
'Reference Document': 'Documento de referência',
'Refresh Rate (seconds)': 'Taxa de Atualização (Segundos)',
'Region Location': 'Localizaçao da regiao',
'Regional': 'regional',
'Regions': 'Regiões',
'Register': 'registro',
'Register Person': 'REGISTRAR PESSOA',
'Register Person into this Camp': 'Registrar Pessoa neste Acampamento',
'Register Person into this Shelter': 'REGISTRAR PESSOA PARA ESTE Abrigo',
'Register them as a volunteer': 'Registrá-los como voluntários',
'Registered People': 'Pessoas Registradas',
'Registered users can': 'Os usuários registrados podem',
'Registration': 'Inscrição',
'Registration Details': 'Detalhes da Inscrição',
'Registration added': 'Inscrição adicionada',
'Registration entry deleted': 'Inscrição excluída',
'Registration is still pending approval from Approver (%s) - please wait until confirmation received.': 'Registro ainda está pendente de aprovação do Aprovador (%s) - Por favor, aguarde até a confirmação recebida.',
'Registration key': 'Registration key',
'Registration updated': 'Inscrição atualizada',
'Rehabilitation/Long Term Care': 'Reabilitação/Cuidados de Longo Termo',
'Reinforced masonry': 'Alvenaria reforçada',
'Rejected': 'rejeitado',
'Relative Details': 'Relative Details',
'Relative added': 'Relative added',
'Relative deleted': 'Relative deleted',
'Relative updated': 'Relative updated',
'Relatives': 'Relatives',
'Relief': 'Alivio',
'Relief Team': 'Equipe de socorro',
'Religion': 'Religião',
'Religious': 'Religiosas',
'Religious Leader': 'Líder religioso',
'Relocate as instructed in the <instruction>': 'Relocalizar conforme instruído no',
'Remove': 'remover',
'Remove Activity from this event': 'Remove Activity from this event',
'Remove Asset from this event': 'Remover ativo deste evento',
'Remove Asset from this scenario': 'Remover ativo deste cenário',
'Remove Document from this request': 'Remove Document from this request',
'Remove Facility from this event': 'Remover recurso deste evento',
'Remove Facility from this scenario': 'Remover recurso deste cenário',
'Remove Human Resource from this event': 'REMOVER RECURSOS HUMANOS A partir deste evento',
'Remove Human Resource from this scenario': 'REMOVER RECURSOS HUMANOS A partir deste cenário',
'Remove Item from Inventory': 'Remover Item do Inventário',
'Remove Map Configuration from this event': 'REMOVER Mapa de configuração a partir deste evento',
'Remove Map Configuration from this scenario': 'REMOVER Mapa de configuração a partir deste cenário',
'Remove Person from Commitment': 'Remove Person from Commitment',
'Remove Skill': 'Remove Skill',
'Remove Skill from Request': 'Remove Skill from Request',
'Remove Task from this event': 'Remove Task from this event',
'Remove Task from this scenario': 'Remove Task from this scenario',
'Remove this asset from this event': 'REMOVER este recurso a partir deste evento',
'Remove this asset from this scenario': 'Remover este recurso deste cenário',
'Remove this facility from this event': 'Remove this facility from this event',
'Remove this facility from this scenario': 'Remove this facility from this scenario',
'Remove this human resource from this event': 'Remove this human resource from this event',
'Remove this human resource from this scenario': 'Remove this human resource from this scenario',
'Remove this task from this event': 'Remove this task from this event',
'Remove this task from this scenario': 'Remove this task from this scenario',
'Repair': 'REPARO',
'Repaired': 'Reparado',
'Repeat your password': 'Repita sua senha',
'Replace': 'TROCAR',
'Replace if Master': 'Substituir se Principal',
'Replace if Newer': 'Substituir se o Mais Recente',
'Report': 'Relatório',
'Report Another Assessment...': 'Adicionar Outro Relatório De Avaliação....',
'Report Details': 'Detalhes do Relatório',
'Report Resource': 'Reportar Recursos',
'Report Types Include': 'Tipos de relatório incluem',
'Report added': 'Relatório incluído',
'Report deleted': 'Relatório removido',
'Report my location': 'Relate meu local',
'Report the contributing factors for the current EMS status.': 'Reportar os factores que contribuem para a situação EMS actual.',
'Report the contributing factors for the current OR status.': 'Reportar os factores que contribuem para a situação OR actual.',
'Report them as found': 'Reportar como encontrados',
'Report them missing': 'Reportar como perdidos',
'Report updated': 'Relatório atualizado',
'ReportLab module not available within the running Python - this needs installing for PDF output!': 'O módulo de ReportLab não disponíveis na execução Python - isto requer a instalação para a entrega em PDF!',
'ReportLab not installed': 'ReportLab não instalado',
'Reporter': 'Relator',
'Reporter Name': 'Nome do Relator',
'Reporting on the projects in the region': 'Relatórios sobre os projetos na região',
'Reports': 'Relatórios',
'Request': 'Pedido',
'Request Added': 'Pedido Incluído',
'Request Canceled': 'Pedido Cancelado',
'Request Details': 'Detalhes do Pedido',
'Request From': 'Pedido De',
'Request Item': 'Item de pedido',
'Request Item Details': 'Detalhes do item de pedido',
'Request Item added': 'Item incluído no pedido',
'Request Item deleted': 'Item de pedido excluído',
'Request Item from Available Inventory': 'PEDIDO DE Item de Inventário Disponível',
'Request Item updated': 'Pedido actualizado',
'Request Items': 'Itens de pedido',
'Request New People': 'Request New People',
'Request Status': 'Status do Pedido',
'Request Type': 'Tipo de Pedido',
'Request Updated': 'Solicitação atualizada',
'Request added': 'Pedido adicionado',
'Request deleted': 'Solicitação excluída',
'Request for Role Upgrade': 'Pedido de upgrade de função',
'Request updated': 'Pedido actualizado',
'Request, Response & Session': 'Pedido, Resposta & Sessão',
'Requested': 'solicitado',
'Requested By': 'Solicitado Por',
'Requested By Facility': 'Solicitado Pela Instalação',
'Requested By Site': 'Solicitado Por Site',
'Requested From': 'Solicitada a Partir de',
'Requested Items': 'Itens solicitados',
'Requested Skill': 'Requested Skill',
'Requested Skill Details': 'Requested Skill Details',
'Requested Skill updated': 'Requested Skill updated',
'Requested Skills': 'Requested Skills',
'Requested by': 'Solicitado Por',
'Requested on': 'Em solicitada',
'Requester': 'Solicitante',
'Requests': 'Pedidos',
'Requests Management': 'Gerenciamento de Pedidos',
'Required Skill': 'Required Skill',
'Requires Login!': 'É necessário fazer login!',
'Rescue and recovery': 'Resgate e recuperação',
'Reset': 'Restaurar',
'Reset Password': 'restabelecer senha',
'Resolve': 'Resolver',
'Resolve Conflict': 'Resolver Conflito',
'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.': 'Resolva link que levará até uma nova tela que ajudará a resolver esses registros duplicados e atualizar o banco de dados.',
'Resource': 'Recurso',
'Resource Details': 'Detalhes do recurso',
'Resource added': 'Recurso incluído',
'Resource deleted': 'Recurso Excluído',
'Resource updated': 'Recurso atualizado',
'Resources': 'Recursos',
'Respiratory Infections': 'Infecções respiratórias',
'Response': 'Resposta',
'Restricted Access': 'Acesso Restrito',
'Restricted Use': 'Uso restrito',
'Results': 'Resultados',
'Retail Crime': 'Crime a varejo',
'Retrieve Password': 'Recuperar Senha',
'Return': 'Retorno',
'Return to Request': 'Retornar ao pedido',
'Returned': 'Retornado',
'Returned From': 'Retornado a partir de',
'Returned Status': 'Retornado Status',
'Review Incoming Shipment to Receive': 'Revisão da Remessa de Entrada para Receber',
'Rice': 'Arroz',
'Riot': 'Motim',
'River': 'Rio',
'River Details': 'Detalhes do Rio',
'River added': 'Rio adicionado',
'River deleted': 'Rio deletado',
'River updated': 'Rio atualizado',
'Rivers': 'Rios',
'Road Accident': 'Acidente na rua/estrada',
'Road Closed': 'Rua/Estrada fechada',
'Road Conditions': 'Condições da Estrada',
'Road Delay': 'Atraso de Estrada',
'Road Hijacking': 'Sequestro de Estrada',
'Road Usage Condition': 'Condição de Uso de Estrada',
'Roads Layer': 'Roads Layer',
'Role': 'Função',
'Role Details': 'Detalhes da Função',
'Role Required': 'Função requerida',
'Role Updated': 'Funções atualizadas',
'Role added': 'Regra incluída',
'Role deleted': 'Função excluída',
'Role updated': 'Funções atualizadas',
'Role-based': 'Baseada em regra',
'Roles': 'Funções',
'Roles Permitted': 'Funções Permitidas',
'Roof tile': 'Telhado lado a lado',
'Roofs, floors (vertical load)': 'Telhados, pisos (carga vertical)',
'Room': 'Sala',
'Room Details': 'Detalhes da sala',
'Room added': 'Sala incluída',
'Room deleted': 'Sala excluída',
'Room updated': 'Sala atualizada',
'Rooms': 'Salas',
'Roster': 'Lista',
'Row Choices (One Per Line)': 'Opções da linha (Um por linha)',
'Rows in table': 'Linhas na tabela',
'Rows selected': 'Linhas Selecionadas',
'Run Functional Tests': 'Executar testes funcionais',
'Run Interval': 'Intervalo de execução',
'Running Cost': 'Custo corrente',
'Russian': 'Russo',
'SMS Modems (Inbound & Outbound)': 'SMS Modems (Inbound & Outbound)',
'SMS Outbound': 'SMS Outbound',
'SMS Settings': 'SMS Settings',
'SMS settings updated': 'SMS settings updated',
'SMTP to SMS settings updated': 'SMTP to SMS settings updated',
'Safe environment for vulnerable groups': 'Ambiente seguro para grupos vulneráveis',
'Safety Assessment Form': 'Formulário de avaliação de segurança',
'Safety of children and women affected by disaster?': 'Segurança das crianças e mulheres afetadas pela catástrofe?',
'Sahana Administrator': 'Administrador Sahana',
'Sahana Agasti': 'Sahana Agasti',
'Sahana Blue': 'Sahana Azul',
'Sahana Community Chat': 'Sahana COMUNIDADE de BATE-PAPO',
'Sahana Eden': 'Sahana Eden',
'Sahana Eden <=> Other': 'Sahana Eden <=> Outros',
'Sahana Eden <=> Sahana Eden': 'Sahana Éden <=> Sahana Éden',
'Sahana Eden Humanitarian Management Platform': 'plataforma de gerenciamento humanitário Sahana Éden',
'Sahana Eden Website': 'SITE Sahana Éden',
'Sahana Green': 'Sahana Verde',
'Sahana Steel': 'Sahana Steel',
'Sahana access granted': 'Acesso Sahana CONCEDIDO',
'Salted Fish': 'Peixe Salgado',
'Sanitation problems': 'Problemas de saneamento',
'Satellite': 'satélite',
'Satellite Layer': 'Satellite Layer',
'Satellite Office': 'Escritório experimental',
'Saturday': 'Sábado',
'Save': 'armazenar',
'Saved.': 'armazenado.',
'Saving...': 'Guardando...',
'Scale of Results': 'Nível de Resultados',
'Scanned Copy': 'Scanned Copy',
'Scanned Forms Upload': 'Scanned Forms Upload',
'Scenario': 'Cenário',
'Scenario Details': 'Detalhes do Cenário',
'Scenario added': 'Cenário incluído',
'Scenario deleted': 'Cenário excluído',
'Scenario updated': 'Cenário atualizado',
'Scenarios': 'Cenários',
'Schedule': 'Horário',
'Schema': 'Esquema',
'School': 'Escola',
'School Closure': 'Encerramento Escolar',
'School Lockdown': 'Bloqueio escolar',
'School Teacher': 'Professor de escola',
'School activities': 'Actividades escolares',
'School assistance': 'Assistência escolar',
'School attendance': 'Presença escolar',
'School destroyed': 'Escola Destruída',
'School heavily damaged': 'Escola fortemente danificada',
'School tents received': 'Tendas da escola recebidas',
'School tents, source': 'Tendas de escola, origem',
'School used for other purpose': 'Escola utilizada para outros fins',
'School/studying': 'Escola/estudando',
'Schools': 'Escolas',
'Search': 'Pesquisar',
'Search Activities': 'procurar atividades',
'Search Activity Report': 'Relatório de pesquisa de atividades',
'Search Addresses': 'procurar endereços',
'Search Alternative Items': 'Procurar itens alternativos',
'Search Assessment Summaries': 'Procura De Avaliação De RESUMOS',
'Search Assessments': 'Avaliações de procura',
'Search Asset Assignments': 'Procurar ATIVO Designações',
'Search Asset Log': 'Procurar log de ativo',
'Search Assets': 'Procurar Recursos',
'Search Baseline Type': 'Procurar Typo de Base',
'Search Baselines': 'Procurar Bases',
'Search Brands': 'Procurar Marcas',
'Search Budgets': 'Procura Orçamentos',
'Search Bundles': 'PACOTES Configuráveis de procura',
'Search Camp Services': 'Procurar Serviços de Acampamento',
'Search Camp Types': 'Procurar Tipos De Acampamento',
'Search Camps': 'Procurar acampamentos',
'Search Catalog Items': 'Itens de procura De Catálogo',
'Search Catalogs': 'Procurar nos Catálogos',
'Search Certificates': 'Procurar Certificados',
'Search Certifications': 'Procurar Certificações',
'Search Checklists': 'Listas De procura',
'Search Cluster Subsectors': 'Procura De Cluster Subsectores',
'Search Clusters': 'Clusters de procura',
'Search Commitment Items': 'Itens de procura Compromisso',
'Search Commitments': 'Compromissos de procura',
'Search Committed People': 'Search Committed People',
'Search Competencies': 'Procurar Competências',
'Search Competency Ratings': 'Procurar Indices de Competência',
'Search Contact Information': 'Procurar informações de contato',
'Search Contacts': 'Buscar contatos',
'Search Course Certificates': 'procura Certificados de Curso',
'Search Courses': 'Procurar Cursos',
'Search Credentials': 'Credenciais de busca',
'Search Documents': 'Pesquisar documentos',
'Search Donors': 'Procura de Doadores',
'Search Entries': 'Pesquisar Entradas',
'Search Events': 'Pesquisar Eventos',
'Search Facilities': 'Pesquisar Instalações',
'Search Feature Class': 'Pesquisar classe de dispositivos',
'Search Feature Layers': 'Pesquisar camadas do dispositivo',
'Search Flood Reports': 'Pesquisar relatórios de inundação',
'Search GPS data': 'Search GPS data',
'Search Groups': 'Buscar Grupos',
'Search Homes': 'Search Homes',
'Search Human Resources': 'Pesquise recursos humanos.',
'Search Identity': 'Buscar Identidade',
'Search Images': 'Procurar Imagens',
'Search Impact Type': 'Procurar Tipo de Impacto',
'Search Impacts': 'Procurar Impactos',
'Search Import Files': 'Search Import Files',
'Search Incident Reports': 'Procurar Relatórios de Incidentes',
'Search Inventory Items': 'Procurar Entradas De Inventário',
'Search Inventory items': 'Procurar Entradas De Inventário',
'Search Item Categories': 'Buscar categorias de Item',
'Search Item Packs': 'Buscar pocotes de itens',
'Search Items': 'Buscar Itens',
'Search Job Roles': 'Pesquise papéis de trabalho',
'Search Keys': 'Procurar chaves',
'Search Kits': 'Procurar kits',
'Search Layers': 'Procurar camadas',
'Search Level': 'Search Level',
'Search Level 1 Assessments': 'Procurar Avaliações Nível 1',
'Search Level 2 Assessments': 'Procurar Avaliações Nível 2',
'Search Locations': 'Procurar Localidades',
'Search Log Entry': 'Procura de entrada de Log',
'Search Map Configurations': 'Pesquise mapa de configurações.',
'Search Markers': 'Marcadores De procura',
'Search Member': 'Procurar Membro',
'Search Membership': 'Procurar filiação',
'Search Memberships': 'Pesquisar Associações',
'Search Missions': 'Procurar Missões',
'Search Need Type': 'Procura Precisa De Tipo',
'Search Needs': 'Procura precisa',
'Search Notes': 'Notes procura',
'Search Offices': 'Escritórios de procura',
'Search Organizations': 'Pesquisar Organizações',
'Search Patients': 'Search Patients',
'Search Peer': 'PROCURA Par',
'Search Personal Effects': 'Procura objetos pessoais',
'Search Persons': 'Buscar Membros',
'Search Photos': 'Procura Fotos',
'Search Population Statistics': 'Procurar Estatística de População',
'Search Positions': 'Procura de Posições',
'Search Problems': 'Procura de Problemas',
'Search Projections': 'Projeções de procura',
'Search Projects': 'Procura de Projetos',
'Search Rapid Assessments': 'Procura de Avaliações Rápidas',
'Search Received Items': 'Procura de Itens Recebidos',
'Search Received Shipments': 'Embarques de procura Recebidos',
'Search Records': 'registros de procura',
'Search Registations': 'Procurar Registros',
'Search Registration Request': 'Pedido de registro de procura',
'Search Relatives': 'Search Relatives',
'Search Report': 'Procurar Relatório',
'Search Reports': 'Procurar Relatórios',
'Search Request': 'pedido de pesquisa',
'Search Request Items': 'Pedido de procura de Itens',
'Search Requested Items': 'Procura de itens solicitados',
'Search Requested Skills': 'Search Requested Skills',
'Search Requests': 'Procura de solicitações',
'Search Resources': 'Pesquisa de recursos',
'Search Rivers': 'Rios procura',
'Search Roles': 'Pesquisa de papéis',
'Search Rooms': 'Procurar Salas',
'Search Scenarios': 'Procurar cenários',
'Search Sections': 'As Seções de procura',
'Search Sectors': 'Procurar Setores',
'Search Sent Items': 'Procurar Itens Enviados',
'Search Sent Shipments': 'Procurar Despachos Enviados',
'Search Service Profiles': 'Serviço de procura Perfis',
'Search Settings': 'Definições de Pesquisa',
'Search Shelter Services': 'Procura Abrigo de serviços',
'Search Shelter Types': 'Procura tipos de Abrigo',
'Search Shelters': 'Procurar Abrigos',
'Search Skill Equivalences': 'Procurar equivalencias de habilidades',
'Search Skill Provisions': 'Procurar Disposições de habilidade',
'Search Skill Types': 'Pesquisar Tipos de Habilidades',
'Search Skills': 'Pesquisar Habilidades',
'Search Solutions': 'Pesquisar Soluções',
'Search Staff': 'Busca de pessoal',
'Search Staff Types': 'Busca de tipo de pessoal',
'Search Staff or Volunteer': 'Procurar Funcionário ou Voluntário',
'Search Status': 'Busca de status',
'Search Subscriptions': 'Busca de assinaturas',
'Search Subsectors': 'Buscar subsetores',
'Search Support Requests': 'Pedidos de suporte a pesquisa',
'Search Tasks': 'Tarefa de Pesquisa',
'Search Teams': 'Times de pesquisa',
'Search Themes': 'Temas de pesquisa',
'Search Tickets': 'Buscar Bilhetes',
'Search Tracks': 'Procurar Trilhas',
'Search Trainings': 'Buscar Treinamentos',
'Search Twitter Tags': 'Procurar Twitter Tags',
'Search Units': 'Procura Unidades',
'Search Users': 'Procurar Usuários',
'Search Vehicle Details': 'Search Vehicle Details',
'Search Vehicles': 'Search Vehicles',
'Search Volunteer Availability': 'Buscar Disponibilidade para Voluntáriado',
'Search Volunteers': 'Procura Voluntários',
'Search Warehouses': 'procura Warehouses',
'Search and Edit Group': 'Procurar e editar GRUPO',
'Search and Edit Individual': 'Procurar e Editar Individual',
'Search for Staff or Volunteers': 'Pesquise por funcionários ou voluntários',
'Search for a Location by name, including local names.': 'Pesquisar local por nome, incluindo nomes locais.',
'Search for a Person': 'Procurar Pessoa',
'Search for a Project': 'Procurar Projecto',
'Search for a shipment by looking for text in any field.': 'Procurar carga fazendo uma pesquisa de texto em qualquer campo.',
'Search for a shipment received between these dates': 'Procurar carga recebida entre estas datas',
'Search for a vehicle by text.': 'Search for a vehicle by text.',
'Search for an Organization by name or acronym': 'Procurar por uma Organização por nome ou iniciais',
'Search for an Organization by name or acronym.': 'Procurar por uma organização por nome ou iniciais.',
'Search for an asset by text.': 'Pesquisar um recurso por texto.',
'Search for an item by category.': 'Procurar por categoria.',
'Search for an item by Year of Manufacture.': 'Search for an item by Year of Manufacture.',
'Search for an item by brand.': 'Search for an item by brand.',
'Search for an item by catalog.': 'Search for an item by catalog.',
'Search for an item by category.': 'Search for an item by category.',
'Search for an item by its code, name, model and/or comment.': 'Search for an item by its code, name, model and/or comment.',
'Search for an item by text.': 'Procurar por texto.',
'Search for asset by country.': 'Procurar bens por país.',
'Search for asset by location.': 'Search for asset by location.',
'Search for office by country.': 'Procurar escritórios por país.',
'Search for office by location.': 'Search for office by location.',
'Search for office by organization.': 'Procurar escritórios por organização.',
'Search for office by text.': 'Procura por texto do gabinete.',
'Search for vehicle by location.': 'Search for vehicle by location.',
'Search for warehouse by country.': 'Pesquise por depósito por país.',
'Search for warehouse by location.': 'Search for warehouse by location.',
'Search for warehouse by organization.': 'Pesquise por depósito por organização.',
'Search for warehouse by text.': 'Pesquise por depósito via campo-texto.',
'Search here for a person record in order to:': 'Buscar aqui por um registro de pessoa a fim de:',
'Search messages': 'Mensagens de Procura',
'Searching for different groups and individuals': 'Procurar diferentes grupos e indivíduos',
'Secondary Server (Optional)': 'Servidor secundário (opcional)',
'Seconds must be a number between 0 and 60': 'Segundos deve ser um número entre 0 e 60',
'Section': 'Section',
'Section Details': 'Seção Detalhes',
'Section deleted': 'Seção excluído',
'Section updated': 'Seção atualizada',
'Sections': 'Seções',
'Sections that are part of this template': 'Sections that are part of this template',
'Sections that can be selected': 'Sections that can be selected',
'Sector': 'setor',
'Sector Details': 'Detalhes do Setor',
'Sector added': 'Sector incluído',
'Sector deleted': 'Sector apagado',
'Sector updated': 'Setor atualizado',
'Sector(s)': 'Setor(es)',
'Sectors': 'Setores',
'Security Status': 'Status de Segurança',
'Security problems': 'Problemas de Segurança',
'See All Entries': 'Ver todas as entradas',
'See all': 'Ver tudo',
'See unassigned recovery requests': 'Consulte Pedidos de recuperação designado',
'Seen': 'Visto',
'Select': 'Selecionar',
'Select Items from the Request': 'Selecionar itens do pedido',
'Select Items from this Inventory': 'Selecionar itens a partir deste Inventário',
'Select Organization': 'Selecionar Organização',
'Select Skills from the Request': 'Select Skills from the Request',
"Select a Room from the list or click 'Add Room'": "Escolha uma sala da lista ou clique 'Incluir sala'",
'Select a location': 'Selecionar um local',
"Select a manager for status 'assigned'": "Select a manager for status 'assigned'",
"Select a person in charge for status 'assigned'": "Selecione uma pessoa responsável para status 'DESIGNADO'",
'Select a question from the list': 'Selecione uma pergunta a partir da lista',
'Select a range for the number of total beds': 'Selecione um intervalo para o número de camas total',
'Select all that apply': 'Selecione todas as que se applicam',
'Select an Organization to see a list of offices': 'Selecione uma organização para ver uma lista de escritórios',
'Select the overlays for Assessments and Activities relating to each Need to identify the gap.': 'Selecione as sobreposições de avaliação e actividades relacionadas com cada necessidade para identificar as lacunas.',
'Select the person assigned to this role for this project.': 'Selecione a pessoa designada para essa função neste projeto.',
"Select this if all specific locations need a parent at the deepest level of the location hierarchy. For example, if 'district' is the smallest division in the hierarchy, then all specific locations would be required to have a district as a parent.": "Selecione isto se todas as localidades especificas precisarem de um pai no nível mais alto da hierarquia. Por exemplo, se 'distrito' é a menor divisão na hierarquia e, em seguida, todos os locais específicos seriam obrigados a ter um distrito como um pai.",
"Select this if all specific locations need a parent location in the location hierarchy. This can assist in setting up a 'region' representing an affected area.": 'Selecione isto se todos os locais específicos de uma posição pai na hierarquia do local. Isso pode ajudar na configuração de uma "região" representando uma área afetada.',
'Select to show this configuration in the Regions menu.': 'Selecione para mostrar essa configuração no menu regiões.',
'Select to show this configuration in the menu.': 'Select to show this configuration in the menu.',
'Selected Jobs': 'Selected Jobs',
'Selects what type of gateway to use for outbound SMS': 'Selects what type of gateway to use for outbound SMS',
'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS': 'Selecione se vau utilizar um Modem, Tropo ou outro Gateway para enviar SMS',
'Send': 'Envie',
'Send Alerts using Email &/or SMS': 'Envio de alertas usando e-mail e/ou SMS',
'Send Commitment as Shipment': 'Enviar compromisso como carregamento',
'Send New Shipment': 'Enviar nova remessa',
'Send Notification': 'Enviar notificação',
'Send Shipment': 'Enviar Carregamento',
'Send a message to this person': 'Enviar uma mensagem para esta pessoa',
'Send a message to this team': 'Enviar uma mensagem para essa equipe',
'Send from %s': 'Enviar de %s',
'Send message': 'Enviar mensagem',
'Send new message': 'Enviar nova mensagem',
'Sends & Receives Alerts via Email & SMS': 'Envia & Recebe Alertas via E-Mail & SMS',
'Senior (50+)': 'Sênior (50+)',
'Sent': 'Enviadas',
'Sent By': 'Enviado Por',
'Sent By Person': 'Enviado Por Pessoa',
'Sent Item Details': 'Detalhes do Item enviado',
'Sent Item deleted': 'Enviado Item excluído',
'Sent Item updated': 'Enviado Item atualizado',
'Sent Shipment Details': 'Enviado Detalhes de Embarque',
'Sent Shipment canceled': 'Enviado Carregamento cancelado',
'Sent Shipment canceled and items returned to Inventory': 'Enviado Carregamento cancelado e itens retornado ao Inventário',
'Sent Shipment updated': 'Enviado Embarque atualizado',
'Sent Shipments': 'Remessas Enviadas',
'Separated children, caregiving arrangements': 'Crianças separados, disposições caregiving',
'Serial Number': 'Numero de série',
'Series': 'serie',
'Server': 'servidor',
'Service': 'serviço',
'Service Catalogue': 'Catálogo de Serviços',
'Service Due': 'Service Due',
'Service or Facility': 'Serviço ou facilidade',
'Service profile added': 'Perfil de serviço adicionado',
'Service profile deleted': 'Perfil de serviço Excluído',
'Service profile updated': 'Perfil de serviço atualizado',
'Services': 'Serviços',
'Services Available': 'Serviços Disponíveis',
'Set Base Site': 'Definir base de dados do site',
'Set By': 'Definido por',
'Set True to allow editing this level of the location hierarchy by users who are not MapAdmins.': 'Configure como True para permitir que este nível da hierarquia do local possa ser editado por usuários que não sejam administradores.',
'Setting Details': 'Detalhes de ajuste',
'Setting added': 'Configuração adicionada',
'Setting deleted': 'Configuração Excluída',
'Setting updated': 'Configuração atualizada',
'Settings': 'Ajustes',
'Settings updated': 'Ajustes atualizados',
'Settings were reset because authenticating with Twitter failed': 'As configurações foram redefinidas porque a autenticação com Twitter falhou',
'Settings which can be configured through the web interface are available here.': 'As configurações que podem ser definidas através da interface da web estão disponíveis aqui.',
'Severe': 'Severo',
'Severity': 'Gravidade',
'Share a common Marker (unless over-ridden at the Feature level)': 'Compartilhar um marcador comum (a não ser que abaixo-assinado ao nível de Componente)',
'Shelter': 'Abrigo',
'Shelter & Essential NFIs': 'Abrigo & NFIs Essenciais',
'Shelter Details': 'Detalhes de Abrigo',
'Shelter Name': 'Nome de Abrigo',
'Shelter Registry': 'Registro de Abrigo',
'Shelter Service': 'Serviço de Abrigo',
'Shelter Service Details': 'Detalhes do serviço de abrigo',
'Shelter Service added': 'Serviço de Abrigo incluído',
'Shelter Service deleted': 'Serviço de Abrigo excluído',
'Shelter Service updated': 'Atualização de serviços de abrigo',
'Shelter Services': 'Serviços de abrigo',
'Shelter Type': 'Tipo de abrigo',
'Shelter Type Details': 'Detalhes do tiipo de abrigo',
'Shelter Type added': 'Tipo de abrigo incluído',
'Shelter Type deleted': 'Tipo de abrigo excluído',
'Shelter Type updated': 'Abrigos Tipo De atualização',
'Shelter Types': 'Tipos De abrigo',
'Shelter Types and Services': 'Abrigo Tipos e serviços',
'Shelter added': 'Abrigo incluído',
'Shelter deleted': 'Abrigo excluído',
'Shelter updated': 'Abrigo atualizado',
'Shelter/NFI Assistance': 'Abrigo/ Assistência NFI',
'Shelters': 'Abrigos',
'Shipment Created': 'Embarque Criado',
'Shipment Items': 'Itens de Carregamento',
'Shipment Items received by Inventory': 'Itens de Remessa recebidos pelo Inventário',
'Shipment Items sent from Inventory': 'Itens de Remessa enviados pelo Inventário',
'Shipment to Send': 'Carga para Enviar',
'Shipments': 'Remessas',
'Shipments To': 'Remessas Para',
'Shooting': 'Tiroteio',
'Short Assessment': 'Curta Avaliação',
'Short Description': 'Breve Descrição',
'Show Checklist': 'Mostrar Lista De Verificação',
'Show Details': 'Mostrar detalhes',
'Show Map': 'Mostrar Mapa',
'Show Region in Menu?': 'Mostrar Região no Menu?',
'Show in Menu?': 'Show in Menu?',
'Show on Map': 'Mostrar no mapa',
'Show on map': 'Mostrar no mapa',
'Sign-up as a volunteer': 'Inscrever-se como um voluntário',
'Sign-up for Account': 'Inscrever-se para conta',
'Sign-up succesful - you should hear from us soon!': 'Sua inscriçao foi feita com sucesso - aguarde notícias em breve!',
'Sindhi': 'Sindi',
'Single PDF File': 'Single PDF File',
'Site': 'site',
'Site Administration': 'Administração do site',
'Site or Location': 'Sítio ou Local',
'Sites': 'sites',
'Situation': 'Situação',
'Situation Awareness & Geospatial Analysis': 'Situação Reconhecimento & Geoespaciais Análise',
'Sketch': 'Esboço',
'Skill': 'QUALIFICAÇÃO',
'Skill Catalog': 'Catálogo de Conhecimentos',
'Skill Details': 'Detalhes das habilidades',
'Skill Equivalence': 'Equivalência de Conhecimentos',
'Skill Equivalence Details': 'Detalhes da Equivalência de Habilidade',
'Skill Equivalence added': 'Equivalência de Habilidade incluída',
'Skill Equivalence deleted': 'Equivalência de Habilidade excluída',
'Skill Equivalence updated': 'Equivalência de Habilidade atualizada',
'Skill Equivalences': 'Equivalências de habilidade',
'Skill Provision': 'Provisão de Habilidade',
'Skill Provision Catalog': 'Catálogo de habilidades disponível',
'Skill Provision Details': 'Detalhes de habilidades disponível',
'Skill Provision added': 'Provisão de Habilidade incluída',
'Skill Provision deleted': 'Catalogo de habilidades excluído',
'Skill Provision updated': 'Catálogo de habilidades atualizado',
'Skill Provisions': 'Habilidades disponíveis',
'Skill Status': 'Status da Habilidade',
'Skill TYpe': 'Tipo de habilidade',
'Skill Type': 'Skill Type',
'Skill Type Catalog': 'Catálogo de tipos de habilidades',
'Skill Type Details': 'Detalhes do tipo de habilidade',
'Skill Type added': 'Tipo de habilidade incluído',
'Skill Type deleted': 'Tipo de habilidade excluído',
'Skill Type updated': 'Tipo de habilidade atualizado',
'Skill Types': 'Tipos de habilidade',
'Skill added': 'Habilidade incluída',
'Skill added to Request': 'Skill added to Request',
'Skill deleted': 'Habilidade Excluída',
'Skill removed': 'Skill removed',
'Skill removed from Request': 'Skill removed from Request',
'Skill updated': 'Habilidade atualizada',
'Skill/Training': 'Habilidades/Treinamento',
'Skills': 'Habilidades',
'Skills Catalog': 'Catálogo de habilidades',
'Skills Management': 'Gerenciamento das Habilidades',
'Skype': 'Skype',
'Skype ID': 'ID do Skype',
'Slightly Damaged': 'Ligeiramente Danificado',
'Slope failure, debris': 'falha de inclinação, destroços',
'Small Trade': 'Pequeno Comércio',
'Smoke': 'Fumaça',
'Snapshot': 'snapshot',
'Snapshot Report': 'Relatório de snapshot',
'Snow Fall': 'Queda de neve , nevasca',
'Snow Squall': 'Rajada de neve',
'Soil bulging, liquefaction': 'abaulamento do solo, liquefação',
'Solid waste': 'Resíduos sólidos',
'Solution': 'Solução',
'Solution Details': 'Detalhes da Solução',
'Solution Item': 'Item de Solução',
'Solution added': 'Solução adicionada',
'Solution deleted': 'Solução excluída',
'Solution updated': 'Solução atualizada',
'Solutions': 'Soluções',
'Some': 'Algum',
'Sorry - the server has a problem, please try again later.': 'Sorry - the server has a problem, please try again later.',
'Sorry that location appears to be outside the area of the Parent.': 'Desculpe ! Essa localização está fora da área do Pai.',
'Sorry that location appears to be outside the area supported by this deployment.': 'Desculpe ! Essa localização parece estar fora da área suportada por esta implementação.',
'Sorry, I could not understand your request': 'Desculpe, eu não pude entender o seu pedido',
'Sorry, only users with the MapAdmin role are allowed to create location groups.': 'Desculpe, apenas usuários com o perfil MapAdmin tem permissão para criar locais dos grupos.',
'Sorry, only users with the MapAdmin role are allowed to edit these locations': 'Desculpe, apenas usuários com o perfil MapAdmin tem permissão para editar estes locais',
'Sorry, something went wrong.': 'Desculpe, algo deu errado.',
'Sorry, that page is forbidden for some reason.': 'Desculpe ! Esta página tem acesso restrito por alguma razão.',
'Sorry, that service is temporary unavailable.': 'Desculpe ! Este serviço está indisponível temporariamente.',
'Sorry, there are no addresses to display': 'Desculpe ! Não há endereços para visualizar.',
"Sorry, things didn't get done on time.": 'Desculpe ! As tarefas não foram concluídas em tempo útil.',
"Sorry, we couldn't find that page.": 'Desculpe, não foi possível localizar essa página.',
'Source': 'source',
'Source ID': 'ID de origem',
'Source Time': 'Origem do tempo',
'Sources of income': 'Fontes de rendimento',
'Space Debris': 'Destroços Espaciais',
'Spanish': 'espanhol',
'Special Ice': 'Gelo Especial',
'Special Marine': 'Marinha especial',
'Specialized Hospital': 'Hospital especializado.',
'Specific Area (e.g. Building/Room) within the Location that this Person/Group is seen.': 'Área específica (exemplo: edifício/quarto) com a localização de onde essa pessoa/grupo é visto.',
'Specific locations need to have a parent of level': 'Locais específicos precisam ter um nível paterno.',
'Specify a descriptive title for the image.': 'Especifique um título descritivo para a imagem.',
'Specify the bed type of this unit.': 'Especifique o tipo de cama dessa unidade.',
'Specify the number of available sets': 'Especificar o número de conjuntos disponíveis',
'Specify the number of available units (adult doses)': 'Especifique o número de unidades disponíveis (doses para adultos)',
'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions': 'Especificar o número de unidades disponíveis (litros) de Ringer-Lactato ou soluções equivalentes',
'Specify the number of sets needed per 24h': 'Especificar o número de conjuntos necessários por 24h',
'Specify the number of units (adult doses) needed per 24h': 'Especificar o número de unidades (doses para adultos) necessário por 24h',
'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h': 'Especificar o número de unidades (litros) de Ringer-Lactato ou soluções equivalentes necessárias para 24h',
'Speed': 'Speed',
'Spherical Mercator?': 'Mapa Mercator Esférico?',
'Spreadsheet Importer': 'Importador de planilhas',
'Spreadsheet uploaded': 'Planilha transferida por upload',
'Spring': 'Primavera',
'Squall': 'Rajada',
'Staff': 'Equipe',
'Staff & Volunteers': 'Colaboradores & Voluntários',
'Staff 2': 'Equipe 2',
'Staff Details': 'Equipe Detalhes',
'Staff ID': 'ID da equipe',
'Staff List': 'Lista de pessoal',
'Staff Member Details': 'Detalhes de membro da equipe',
'Staff Members': 'Membros da equipe',
'Staff Record': 'Registro de pessoal',
'Staff Type Details': 'Detalhes do tipo de equipe',
'Staff Type added': 'Equipe tipo incluído',
'Staff Type deleted': 'Tipo De equipe excluído',
'Staff Type updated': 'Tipo de equipe atualizado',
'Staff Types': 'Tipos de equipe',
'Staff added': 'Equipe incluída',
'Staff and Volunteers': 'Funcionários e Voluntários',
'Staff deleted': 'Equipe excluída',
'Staff member added': 'Membro da equipe incluído',
'Staff member updated': 'Membro da equipe atualizado',
'Staff present and caring for residents': 'Equipe presente e cuidando de moradores',
'Staff updated': 'Equipe atualizada',
'Staff2': 'staff2',
'Staffing': 'Equipe',
'Stairs': 'Escadas',
'Start Date': 'Data do início',
'Start date': 'Data Inicial',
'Start of Period': 'Início do Período',
'State': 'Status',
'Stationery': 'Papel de Carta',
'Status': 'Status',
'Status Report': 'Relatório de status',
'Status Updated': 'Status atualizado',
'Status added': 'Estado adicionado',
'Status deleted': 'Estado excluído',
'Status of clinical operation of the facility.': 'Estado da operação clínica da instalação.',
'Status of general operation of the facility.': 'Estado da operação geral da instalação.',
'Status of morgue capacity.': 'Estado da capacidade da morgue.',
'Status of operations of the emergency department of this hospital.': 'Estado das operações do Departamento de Emergência deste hospital.',
'Status of security procedures/access restrictions in the hospital.': 'Estado dos procedimentos de segurança/Restrições de Acesso no hospital.',
'Status of the operating rooms of this hospital.': 'Status das salas de operação deste hospital.',
'Status updated': 'Status atualizado',
'Steel frame': 'Estrutura de aço',
'Stolen': 'Roubado',
'Store spreadsheets in the Eden database': 'Arquivar as planilhas no banco de dados Eden',
'Storeys at and above ground level': 'Andares e no nível do solo acima',
'Storm Force Wind': 'Tempestade Força Vento',
'Storm Surge': 'ressaca',
'Stowaway': 'Penetra',
'Street Address': 'Endereço residencial',
'Streetview Enabled?': 'Streetview Enabled?',
'Strong Wind': 'vento forte',
'Structural': 'estrutural',
'Structural Hazards': 'riscos estruturais',
'Style': 'Style',
'Style Field': 'Estilo do Campo',
'Style Values': 'Estilo dos Valores',
'Sub-type': 'Subtipo',
'Subject': 'assunto',
'Submission successful - please wait': 'envio bem sucedido - por favor aguarde',
'Submission successful - please wait...': 'envio bem sucedido - por favor aguarde...',
'Submit New': 'Submeter Novamente',
'Submit New (full form)': 'Submeter Novo (formulário completo)',
'Submit New (triage)': 'Submeter novo (triagem)',
'Submit a request for recovery': 'envie um pedido de recuperação',
'Submit new Level 1 assessment (full form)': 'Submeter novo nível 1 de avaliação (formulário completo)',
'Submit new Level 1 assessment (triage)': 'Submeter novo nível 1 de avaliação (triagem)',
'Submit new Level 2 assessment': 'Submeter novo nível 2 de avaliação',
'Subscription Details': 'Detalhes da Assinatura',
'Subscription added': 'Assinatura Incluída',
'Subscription deleted': 'Assinatura Excluída',
'Subscription updated': 'Assinatura ATUALIZADO',
'Subscriptions': 'assinaturas',
'Subsector': 'Subsetor',
'Subsector Details': 'Detalhes de subsetor',
'Subsector added': 'Subsetor incluído',
'Subsector deleted': 'Subsetor excluído',
'Subsector updated': 'Subsetor atualizado',
'Subsectors': 'Subsetores',
'Subsistence Cost': 'custo de subsistencia',
'Suburb': 'Subúrbio',
'Suggest not changing this field unless you know what you are doing.': 'Sugerimos não alterar esse campo a menos que você saiba o que está fazendo.',
'Summary': 'Sumário',
'Summary by Administration Level': 'Resumo por Nível de Administração',
'Sunday': 'Domingo',
'Supervisor': 'Supervisor',
'Supplies': 'Suprimentos',
'Supply Chain Management': 'Supply Chain Management',
'Supply Item Categories': 'Supply Item Categories',
'Support Request': 'Pedido de Suporte',
'Support Requests': 'Pedidos de Suporte',
'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.': 'Suporta a tomada de decisão de grandes grupos de Especialistas em Gestão de Crises ajudando os grupos a criar listas de classificados.',
'Sure you want to delete this object?': 'Tem certeza que você quer excluir este objeto?',
'Surgery': 'Cirurgia',
'Survey Answer': 'Resposta da Pesquisa',
'Survey Answer Details': 'Detalhes da Resposta da Pesquisa',
'Survey Answer added': 'Incluído Resposta da Pesquisa',
'Survey Answer deleted': 'Excluído a Resposta da Pesquisa',
'Survey Answer updated': 'Resposta da Pesquisa atualizada',
'Survey Module': 'Módulo de Pesquisa',
'Survey Name': 'Nome da Pesquisa',
'Survey Question': 'Questão de Pesquisa de Opinião',
'Survey Question Details': 'Detalhes da Pergunta de Pesquisa',
'Survey Question Display Name': 'Nome da pergunta de pesquisa',
'Survey Question added': 'Pergunta de pesquisa incluída',
'Survey Question deleted': 'Pergunta de pesquisa excluída',
'Survey Question updated': 'Pergunta de pesquisa atualizada',
'Survey Section': 'Seção da Pesquisa de Opinião',
'Survey Section Details': 'Detalhes de Seção de Pesquisa',
'Survey Section Display Name': 'Seção de pesquisa do nome de exibição',
'Survey Section added': 'Seção de Pesquisa incluída',
'Survey Section deleted': 'Seção de Pesquisa excluída',
'Survey Section updated': 'Seção de pesquisa atualizada',
'Survey Series': 'Série de Pesquisa',
'Survey Series Details': 'Série de Pesquisa Detalhes',
'Survey Series Name': 'Nome de Série de Pesquisa',
'Survey Series added': 'Série de Pesquisa incluída',
'Survey Series deleted': 'Série de Pesquisa excluída',
'Survey Series updated': 'Série de Pesquisa atualizada',
'Survey Template': 'Modelo de Pesquisa de Opinião',
'Survey Template Details': 'Definir detalhes do formulário',
'Survey Template added': 'Modelo de Pesquisa incluído',
'Survey Template deleted': 'Modelo de Pesquisa excluído',
'Survey Template updated': 'Definição de formulário actualizada',
'Survey Templates': 'Definir formulários',
'Symbology': 'Simbologia',
'Sync Conflicts': 'Conflitos de Sincronização',
'Sync History': 'Histórico de Sincronização',
'Sync Now': 'Sincronizar Agora',
'Sync Partners': 'Sincronizar parceiros',
'Sync Partners are instances or peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) that you want to sync information with. Click on the link on the right to go the page where you can add sync partners, search for sync partners and modify them.': 'Parceiros de sincronização são instâncias ou pares (SahanaEden, SahanaAgasti, Ushahidi, etc.) com os quais você deseja sincronizar informações. Clique no link à direita para ir à página onde você pode incluir parceiros de sincronização, procurá-los e modificá-los.',
'Sync Pools': 'Conjuntos de Sincronização',
'Sync Schedule': 'Planejamento de Sincronização',
'Sync Settings': 'Configurações de Sincronização',
'Sync process already started on': 'Processo de Sincronização já iniciado em',
'Sync process already started on ': 'Sync process already started on ',
'Synchronisation': 'Sincronização',
'Synchronization': 'Sincronização',
'Synchronization Conflicts': 'Conflitos de Sincronização',
'Synchronization Details': 'Detalhes de Sincronização',
'Synchronization History': 'Histórico de Sincronização',
'Synchronization Peers': 'Parceiros de Sincronização',
'Synchronization Settings': 'Configurações de sincronização',
'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden': 'Sincronização permite compartilhar dados que você tenha com outros e Atualizar seu próprio banco de dados com informações recentes de outros parceiros. Esta página fornece informações sobre como utilizar os recursos de sincronização de Sahana Éden',
'Synchronization not configured.': 'Sincronização não Configurada.',
'Synchronization settings updated': 'Configurações de sincronização atualizadas',
'Syncronisation History': 'Histórico De Sincronização',
"System's Twitter account updated": 'DO SISTEMA Chilreiam conta ATUALIZADO',
'Tags': 'Tags',
'Take shelter in place or per <instruction>': 'Abrigue-se no local ou por',
'Task': 'Task',
'Task Details': 'Detalhes da Tarefa',
'Task List': 'Lista de tarefas',
'Task Status': 'Status da tarefa',
'Task added': 'Task Inclusa',
'Task deleted': 'Tarefa excluída',
'Task removed': 'Task removed',
'Task updated': 'Tarefa atualizada',
'Tasks': 'Tarefas',
'Team': 'Equipe',
'Team Description': 'Descrição da Equipe',
'Team Details': 'Detalhes da Equipe',
'Team ID': 'ID da Equipe',
'Team Id': 'Id da Equipe',
'Team Leader': 'Líder de Equipe',
'Team Member added': 'Membro da equipe incluído',
'Team Members': 'Membros da equipe',
'Team Name': 'Nome da equipe',
'Team Type': 'Tipo de equipe',
'Team added': 'Equipe incluída',
'Team deleted': 'Equipe excluída',
'Team updated': 'Equipa actualizada',
'Teams': 'Equipes',
'Technical testing only, all recipients disregard': 'Apenas teste técnico, todos os recipientes ignorem',
'Telecommunications': 'Telecomunicações',
'Telephone': 'Telefone',
'Telephone Details': 'Telephone Details',
'Telephony': 'Telefonia',
'Tells GeoServer to do MetaTiling which reduces the number of duplicate labels.': 'Tells GeoServer to do MetaTiling which reduces the number of duplicate labels.',
'Temp folder %s not writable - unable to apply theme!': 'Pasta temporária %s não gravável - impossível aplicar o tema!',
'Template Name': 'Template Name',
'Template file %s not readable - unable to apply theme!': 'Arquivo de modelo %s não legível - impossível aplicar o tema!',
'Templates': 'modelos',
'Term for the fifth-level within-country administrative division (e.g. a voting or postcode subdivision). This level is not often used.': 'Termo para o 5º nível de divisão administrativa nacional (por exemplo, uma subdivisão de código postal ou de zona de votação). Este nível não é frequentemente utilizado.',
'Term for the fourth-level within-country administrative division (e.g. Village, Neighborhood or Precinct).': 'Termo para o 4º nível de divisão administrativa nacional(por exemplo, vila, bairro ou distrito).',
'Term for the primary within-country administrative division (e.g. State or Province).': 'Prazo para a principal divisão administrativa dentro do país (i.e. Estado ou Distrito).',
'Term for the secondary within-country administrative division (e.g. District or County).': 'Prazo para a Secundária divisão administrativa dentro do país (por exemplo, Bairro ou Município).',
'Term for the secondary within-country administrative division (e.g. District).': 'Prazo para a Secundária divisão administrativa dentro do país (i.e. Bairro).',
'Term for the third-level within-country administrative division (e.g. City or Town).': 'Prazo para o 3ᵉʳ nível de divisão administrativa dentro do país (por exemplo, Cidade ou Municipio).',
'Term for the top-level administrative division (i.e. Country).': 'Prazo para a divisão administrativa de nível superior (por exemplo País).',
'Term for the top-level administrative division (typically Country).': 'Prazo para a divisão administrativa de nível superior (geralmente País).',
'Terms of Service\n\nYou have to be eighteen or over to register as a volunteer.': 'Terms of Service\n\nYou have to be eighteen or over to register as a volunteer.',
'Terms of Service:': 'Terms of Service:',
'Territorial Authority': 'Autoridade territoriais',
'Terrorism': 'Terrorismo',
'Tertiary Server (Optional)': 'Servidor terciário (opcional)',
'Text': 'texto',
'Text Color for Text blocks': 'Cor de texto para os blocos de texto',
'Text before each Text Field (One per line)': 'Texto antes de cada campo de texto (um por linha)',
'Thank you for validating your email. Your user account is still pending for approval by the system administator (%s).You will get a notification by email when your account is activated.': 'Obrigado para validar seu e-mail. Sua conta de usuário ainda está pendente para aprovação pelo administrador do Sistema (%s). você receberá uma notificação por e-mail quando sua conta esteja ativada.',
'Thanks for your assistance': 'Obrigado por sua ajuda',
'The': 'O',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.': 'O "query" é uma condição como "db.table1.field1==\'value\'". Algo como "db.table1.field1 == db.table2.field2" resulta em uma junção SQL.',
'The Area which this Site is located within.': 'A área que este Site está localizado',
'The Assessments module allows field workers to send in assessments.': 'O Modulo Avaliações permite aos trabalhadores de campo que enviem avaliações.',
'The Author of this Document (optional)': 'O autor deste documento (opcional)',
'The Building Asssesments module allows building safety to be assessed, e.g. after an Earthquake.': 'O módulo avaliações De Construção permite a segurança edifício a ser avaliada, por exemplo, depois de um terremoto.',
'The Camp this Request is from': 'O Alojamento neste pedido é de',
'The Camp this person is checking into.': 'O Alojamento que esta pessoa está se registrando.',
'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'O local atual do Usuário/Grupo, que pode ser geral (para relatórios) ou precisa (para exibir em um mapa). Digite alguns caracteres para procurar nos locais disponíveis.',
"The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.": "O doador(s) para este projeto. Vários valores podem ser selecionados ao manter pressionado a chave 'control'",
'The Email Address to which approval requests are sent (normally this would be a Group mail rather than an individual). If the field is blank then requests are approved automatically if the domain matches.': 'O endereço de e-mail para onde os pedidos de aprovação são enviados (normalmente seria um correio de Grupo ao invés de um individual). Se o campo estiver em branco, os pedidos são aprovados automaticamente se o domínio corresponder.',
'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.': 'O Sistema de Comunicação de Incidentes permite o Público em Geral reportar incidentes & ter esses rastreados.',
'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'A Localização da Pessoa vem do, que pode ser geral (para relatórios) ou precisa (para exibir em um mapa). Digite alguns caracteres para procurar nos locais disponíveis.',
'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'O local que a pessoa vai, que pode ser genérico (para Relatórios) ou preciso (para exibir em um mapa). Digite alguns caracteres para procurar nos locais disponíveis.',
'The Media Library provides a catalog of digital media.': 'A Biblioteca de mídias fornece um catálogo de mídia digital.',
'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'O módulo de mensagens é o hub de comunicação principal do sistema Sahana. É utilizado para enviar alertas e/ou mensagens utilizando o SMS & e-mail para diferentes grupos e indivíduos antes, durante e após um desastre.',
'The Organization Registry keeps track of all the relief organizations working in the area.': 'O registro Da Organização mantém controle de todos as organizações de apoio que trabalham na área.',
'The Organization Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'O registro da Organização mantém controle de todas organizações de ajuda trabalhando numa região de desastre. Ele captura não apenas os locais onde elas estão ativas, mas também captura informações sobre o conjunto de projetos que está fornecendo em cada região.',
'The Patient Tracking system keeps track of all the evacuated patients & their relatives.': 'The Patient Tracking system keeps track of all the evacuated patients & their relatives.',
'The Person currently filling this Role.': 'A pessoa atualmente preenchendo esta função.',
'The Project Tracking module allows the creation of Activities to meet Gaps in Needs Assessments.': 'O módulo acompanhamento do projeto permite a criação de atividades para preencher Lacunas nas avaliações de necessidades.',
'The Requests Management System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': 'O sistema De Gerenciamento De Pedidos é um repositório online central em todas as organizações de ajuda, trabalhadores de assistência, agentes do governo e sites de acampamento para a equipe de refugiados pode coordenar o fornecimento da ajuda com seu pedido. Ela permite que usuários aloquem os recursos disponíveis para suprir as demandas de forma efetiva e eficiente.',
'The Role this person plays within this hospital.': 'A Função desta pessoa neste hospital.',
'The Role to which this Role reports.': 'A função à qual essa função responde.',
'The Shelter Registry tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.': 'O registro do Abrigo rastreia todos os detalhes básicos abrigos e armazena sobre eles. Ele colabora com outros módulos para rastrear as pessoas associadas com um abrigo, os serviços disponíveis etc.',
'The Shelter this Request is from': 'O pedido deste abrigo é de',
'The Shelter this Request is from (optional).': 'O pedido este Abrigo é de (opcional).',
'The Shelter this person is checking into.': 'O abrigo esta pessoa está verificando no.',
'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.': 'A URL para o GetCapabilities de um serviço WMS cujas camadas você deseja acessíveis através do mapa.',
'The URL for the GetCapabilities page of a Web Map Service (WMS) whose layers you want available via the Browser panel on the Map.': 'A URL para a página do GetCapabilities de um Web Map Service (WMS), cujas camadas que você deseja disponíveis através do painel do navegador no Mapa.',
"The URL of the image file. If you don't upload an image file, then you must specify its location here.": 'A URL do arquivo de imagem. Se voce não fizer o upload de um arquivo de imagem, então voce deverá especificar sua localização aqui.',
'The URL of your web gateway without the post parameters': 'A URL de seu gateway da web sem os parâmetros post',
'The URL to access the service.': 'A URL para acessar o serviço.',
'The Unique Identifier (UUID) as assigned to this facility by the government.': 'O Idenfificador Único (UUID) conforme designado pelo governo para esta filial.',
'The asset must be assigned to a site OR location.': 'O ativo deve ser assinalado para um site ou local.',
'The attribute which is used for the title of popups.': 'O atributo que é usado para o título de popups.',
'The attribute within the KML which is used for the title of popups.': 'O Atributo dentro do KML que é utilizado para o título dos pop-ups.',
'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)': 'O Atributo(s) no KML que são utilizados para o corpo dos pop-ups. ( utilizar um espaço entre atributos )',
'The body height (crown to heel) in cm.': 'A altura do corpo (cabeça até o calcanhar) em cm.',
'The contact person for this organization.': 'A pessoa de contato nessa organização.',
'The country the person usually lives in.': 'O país que a pessoa vive habitualmente',
'The default Facility for which this person is acting.': 'The default Facility for which this person is acting.',
'The default Facility for which you are acting.': 'The default Facility for which you are acting.',
'The default Organization for whom this person is acting.': 'A Organização padrão para quem esta pessoa está atuando.',
'The default Organization for whom you are acting.': 'A Organização padrão para quem você está atuando.',
'The duplicate record will be deleted': 'O registro duplicado será excluído',
'The first or only name of the person (mandatory).': 'O primeiro nome ou único nome da pessoa (obrigatório).',
'The form of the URL is http://your/web/map/service?service=WMS&request=GetCapabilities where your/web/map/service stands for the URL path to the WMS.': 'O formulário da URL é http://your/web/map/service?service=WMS&request=GetCapabilities where your/web/map/service que representa o caminho da URL para o WMS.',
'The language you wish the site to be displayed in.': 'O idioma que você deseja que o site seja exibido.',
'The last known location of the missing person before disappearance.': 'A última localização conhecida da pessoa desaparecida antes do desaparecimento.',
'The level at which Searches are filtered.': 'The level at which Searches are filtered.',
'The list of Brands are maintained by the Administrators.': 'A lista de Marcas serão mantidas pelos administradores.',
'The list of Catalogs are maintained by the Administrators.': 'A lista de catálogos é mantida pelos administradores.',
'The list of Item categories are maintained by the Administrators.': 'A lista de categorias dos itens são mantidas pelos administradores.',
'The map will be displayed initially with this latitude at the center.': 'O mapa será exibido inicialmente com esta latitude no centro.',
'The map will be displayed initially with this longitude at the center.': 'O mapa será exibido inicialmente com esta longitude no centro.',
'The minimum number of features to form a cluster.': 'O número mínimo de recursos para formar um cluster.',
'The name to be used when calling for or directly addressing the person (optional).': 'O nome a ser usado ao chamar por ou diretamente endereçar a pessoa (opcional).',
'The next screen will allow you to detail the number of people here & their needs.': 'A próxima tela permitirá que você detalhe o número de pessoas aqui e as suas necessidades.',
'The number of Units of Measure of the Alternative Items which is equal to One Unit of Measure of the Item': 'O número de unidades de medida dos Itens alternativos é igual a uma unidade de medida do Item',
'The number of pixels apart that features need to be before they are clustered.': 'O número de separado de pixels de funcionalidades tem que ser antes que eles sejam agrupados.',
'The number of tiles around the visible map to download. Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.': 'O número de títulos em torno do mapa visível para fazer download. Zero significa que a primeira página carrega mais rápido, números maiores que zero significam que as paginas seguintes são mais rápida.',
'The person at the location who is reporting this incident (optional)': 'A pessoa no local que está relatando este incidenten (opcional)',
'The person reporting the missing person.': 'A pessoa reportando o desaparecimento de alguem',
'The post variable containing the phone number': 'A variavel post contendo o numero de telefone',
'The post variable on the URL used for sending messages': 'A variável post no URL é utilizada para enviar mensagens',
'The post variables other than the ones containing the message and the phone number': 'As variáveis post diferentes das que contém a mensagem e o número de telefone',
'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows': 'A porta serial no qual o modem está conectado-/dev/ttyUSB0, etc. No linux e com1, com2, etc. No Windows',
'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.': 'O servidor não receber uma resposta oportuna de outro servidor que ele estava acessando para preencher o pedido pelo navegador.',
'The server received an incorrect response from another server that it was accessing to fill the request by the browser.': 'O servidor recebeu uma resposta incorreta a partir de outro servidor que ele estava acessando para preencher o pedido pelo navegador.',
'The site where this position is based.': 'O local onde esta posição se baseia.',
'The staff responsibile for Facilities can make Requests for assistance. Commitments can be made against these Requests however the requests remain open until the requestor confirms that the request is complete.': 'O pessoal responsável pelas Instalações podem fazer pedidos de assistência. Compromissos podem ser feitas em relação a esses pedidos no entanto os pedidos permanecem abertas até o SOLICITANTE confirma que o pedido foi concluído.',
'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>': 'O acontecimento já não representa uma ameaça ou preocupação e a ação a ser tomada é descrita em<instruction>',
'The time at which the Event started.': 'O momento em que o evento começou.',
'The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones.': 'The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones.',
'The title of the WMS Browser panel in the Tools panel.': 'O título do painel do navegador WMS em ferramentas.',
'The token associated with this application on': 'O token associado a este aplicativo em',
'The unique identifier which identifies this instance to other instances.': 'O indentificador único diferencia esta instância de outras.',
'The way in which an item is normally distributed': 'O modo em que um item é normalmente distribuído',
'The weight in kg.': 'O peso em quilogramas.',
'Theme': 'Tema',
'Theme Details': 'Detalhes do Tema',
'Theme added': 'Tema incluído',
'Theme deleted': 'Tema excluído',
'Theme updated': 'Tema atualizado',
'Themes': 'Temas',
'There are errors': 'Há erros',
'There are insufficient items in the Inventory to send this shipment': 'Não há itens suficientes no armazém para o envio desse carregamento',
'There are multiple records at this location': 'Há vários registros neste local',
'There are not sufficient items in the Inventory to send this shipment': 'Não há itens suficientes no inventário para enviar esse carregamento',
'There is no address for this person yet. Add new address.': 'Não há endereço para esta pessoa ainda. Adicionar novo endereço.',
'There was a problem, sorry, please try again later.': 'There was a problem, sorry, please try again later.',
'These are settings for Inbound Mail.': 'Estas são as configurações para Correio de entrada.',
'These are the Incident Categories visible to normal End-Users': 'Estes são as Categorias de incidentes visíveis para usuários finais normais.',
'These need to be added in Decimal Degrees.': 'estas precisam ser incluídas em graus decimais.',
'They': 'Eles',
'This appears to be a duplicate of': 'Isto parece ser duplicado de',
'This appears to be a duplicate of ': 'This appears to be a duplicate of ',
'This email address is already in use': 'This email address is already in use',
'This file already exists on the server as': 'Este arquivo já existe como no servidor',
'This is appropriate if this level is under construction. To prevent accidental modification after this level is complete, this can be set to False.': 'Isso é apropriado se esse nível estiver em construção. Para evitar modificação acidental após esse nível estar concluído, pode ser configurado como False.',
'This is the way to transfer data between machines as it maintains referential integrity.': 'Este é o caminho para a transferência de dados entre máquinas que mantém a integridade referencial.',
'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!': 'Este é o caminho para a transferência de dados entre máquinas que mantém a integridade referencial...duplicado dados devem ser removidos manualmente 1ᵉʳ!',
'This level is not open for editing.': 'Este nível não é aberto para edição.',
'This might be due to a temporary overloading or maintenance of the server.': 'Isso pode ser devido a uma sobrecarga temporária ou manutenção do servidor.',
'This module allows Inventory Items to be Requested & Shipped between the Inventories of Facilities.': 'Este módulo permite que itens de inventário sejam Solicitados & Enviados entre os Inventários das instalações.',
'This module allows you to manage Events - whether pre-planned (e.g. exercises) or Live Incidents. You can allocate appropriate Resources (Human, Assets & Facilities) so that these can be mobilized easily.': 'This module allows you to manage Events - whether pre-planned (e.g. exercises) or Live Incidents. You can allocate appropriate Resources (Human, Assets & Facilities) so that these can be mobilized easily.',
'This module allows you to plan scenarios for both Exercises & Events. You can allocate appropriate Resources (Human, Assets & Facilities) so that these can be mobilized easily.': 'Este módulo permite que você planeje cenários para os Exercícios & Eventos. Você pode alocar apropriado recursos (humanos, Ativos e Recursos) para que estes possam ser mobilizados facilmente.',
'This page shows you logs of past syncs. Click on the link below to go to this page.': 'Esta página mostra as logs das sincronizações passadas. Clique no link abaixo para ir para essa página.',
'This screen allows you to upload a collection of photos to the server.': 'Esta tela permite que você faça upload de um conjunto de fotografias para o servidor.',
'This setting can only be controlled by the Administrator.': 'Esta definicão só pode ser controlado pelo administrador.',
'This shipment has already been received.': 'Este carregamento já foi recebido.',
'This shipment has already been sent.': 'Este carregamento já foi enviado.',
'This shipment has not been received - it has NOT been canceled because can still be edited.': 'Este carregamento não foi recebido - ele NÃO foi cancelado porque ainda pode ser editado.',
'This shipment has not been sent - it has NOT been canceled because can still be edited.': 'Este carregamento não foi enviado - ele NÃO foi cancelado porque ainda pode ser editado.',
'This shipment will be confirmed as received.': 'Este carregamento será confirmado como recebido.',
'This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.': 'Esse valor inclui um pequeno valor de distância fora dos pontos. Sem isto, os pontos mais afastados estariam na caixa delimitadora, e podem não estar visíveis.',
'This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': 'Este valor fornece uma largura e altura minimas em graus para a região mostrada. Sem isto, um mapa que mostre um ponto único não mostraria nenhuma extensão ao redor desse ponto. Depois que o mapa for exibido, pode ser ampliado, conforme desejado.',
'Thunderstorm': 'Trovoada',
'Thursday': 'Quinta-feira',
'Ticket': 'Bilhete',
'Ticket Details': 'Detalhes do bilhete',
'Ticket ID': 'ID do Bilhete',
'Ticket added': 'Bilhete incluído',
'Ticket deleted': 'Bilhete removido',
'Ticket updated': 'Bilhete atualizado',
'Ticketing Module': 'Módulo de bilhetes',
'Tickets': 'Bilhetes',
'Tiled': 'Tiled',
'Tilt-up concrete': 'Concreto tilt-up',
'Timber frame': 'Quadro de madeira',
'Timeline': 'Linha do tempo',
'Timeline Report': 'Relatório de períodos de tempo',
'Timestamp': 'Timestamp',
'Timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'Timestamps can be correlated with the timestamps on the photos to locate them on the map.',
'Title': 'título',
'Title to show for the Web Map Service panel in the Tools panel.': 'Título para mostrar o painel de serviço de Mapa da Web no painel de Ferramentas.',
'To': 'para',
'To Location': 'Localidade de destino',
'To Person': 'Para Pessoa',
'To begin the sync process, click the button on the right =>': 'Para iniciar o processo de Sincronização, clique no botão à direita.',
'To begin the sync process, click the button on the right => ': 'To begin the sync process, click the button on the right => ',
'To begin the sync process, click this button =>': 'Para iniciar o processo de Sincronização, clique neste botão.',
'To begin the sync process, click this button => ': 'To begin the sync process, click this button => ',
'To create a personal map configuration, click': 'Para criar uma configuração do mapa pessoal, clique',
'To create a personal map configuration, click ': 'To create a personal map configuration, click ',
'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py': 'Para editar OpenStreetMap, você precisa editar as configurações do OpenStreetMap em models/000_config.py',
'To search by job title, enter any portion of the title. You may use % as wildcard.': 'Para pesquisar por título, digite qualquer parte do título. Pode utilizar o % como um substituto para qualquer caracter.',
"To search by person name, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": "Para pesquisar por nome, digite qualquer do primeiro, meio ou últimos nomes, separados por espaços. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todas as pessoas.",
"To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": "Para procurar um corpo, digite o número da ID do corpo. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todos os organismos.",
"To search for a hospital, enter any of the names or IDs of the hospital, or the organization name or acronym, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "Para procurar um hospital, digite qualquer um dos nomes ou IDs do hospital, ou o nome da organização ou Acrônimo, separados por espaços. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todos os hospitais.",
"To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "Para procurar um hospital, digite qualquer um dos nomes ou IDs do hospital, separados por espaços. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todos os hospitais.",
"To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.": "Para procurar um local, digite o nome. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todos os locais.",
"To search for a patient, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all patients.": "To search for a patient, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all patients.",
"To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": "Para procurar por uma pessoa, digite qualquer do primeiro, meio ou últimos nomes e/ou um número de ID de uma pessoa, separados por espaços. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todas as pessoas.",
"To search for a person, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": "Para procurar por uma pessoa, digite ou o primeiro nome, ou o nome do meio ou sobrenome, separados por espaços. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todas as pessoas.",
"To search for an assessment, enter any portion the ticket number of the assessment. You may use % as wildcard. Press 'Search' without input to list all assessments.": "Para procurar por uma avaliação, digite qualquer parte o número da permissão da avaliação. Pode utilizar o % como um substituto para qualquer caracter. PRESSIONE ' Search' sem entrada para listar todas as avaliações.",
'To variable': 'Para variável',
'Tools': 'ferramentas',
'Tornado': 'tornado',
'Total': 'Total',
'Total # of Target Beneficiaries': 'Nº Total de Beneficiários De Destino',
'Total # of households of site visited': 'Nº Total de famílias de site Visitado',
'Total Beds': 'Total de Camas',
'Total Beneficiaries': 'Total de Beneficiários',
'Total Cost per Megabyte': 'Custo Total por Megabyte',
'Total Cost per Minute': 'Custo Total por Minuto',
'Total Monthly': 'Total Mensal',
'Total Monthly Cost': 'Custo Total mensal',
'Total Monthly Cost:': 'Custo Total mensal:',
'Total Monthly Cost: ': 'Total Monthly Cost: ',
'Total One-time Costs': 'Total Um tempo de Custos',
'Total Persons': 'Totalizar Pessoas',
'Total Recurring Costs': 'Totalizar Custos Recorrentes',
'Total Unit Cost': 'Total do custo unitário',
'Total Unit Cost:': 'Custo Unitário Total:',
'Total Unit Cost: ': 'Total Unit Cost: ',
'Total Units': 'Total de unidades',
'Total gross floor area (square meters)': 'Total de área bruta (metros quadrados)',
'Total number of beds in this hospital. Automatically updated from daily reports.': 'Número Total de leitos neste hospital. Atualizado automaticamente a partir de relatórios diários.',
'Total number of houses in the area': 'Número Total de casas na área',
'Total number of schools in affected area': 'Número Total de escolas em área afetada',
'Total population of site visited': 'Totalizar População do site Visitado',
'Totals for Budget:': 'Total para Orçamento',
'Totals for Bundle:': 'Total do Pacote',
'Totals for Kit:': 'Totais para Kit',
'Tourist Group': 'Grupo turístico',
'Town': 'Urbano',
'Traces internally displaced people (IDPs) and their needs': 'Rastreia pessoas deslocadas internamente (PDI) e suas necessidades',
'Tracing': 'Rastreio',
'Track': 'Rastrear',
'Track Details': 'Detalhes do restraio',
'Track deleted': 'Rastreio excluído',
'Track updated': 'Rastreamento atualizado',
'Track uploaded': 'Rastreamento enviado',
'Track with this Person?': 'RASTREAR com esta pessoa?',
'Tracking of Patients': 'Tracking of Patients',
'Tracking of Projects, Activities and Tasks': 'Rastreamento de projetos, atividades e tarefas',
'Tracking of basic information on the location, facilities and size of the Shelters': 'Rastreamento de informações básicas sobre a localização, instalações e tamanho dos abrigos',
'Tracks': 'Tracks',
'Tracks the location, distibution, capacity and breakdown of victims in Shelters': 'Rastreia o local, distribuição, capacidade e discriminação da vítima em Abrigos',
'Traffic Report': 'Relatório de tráfego',
'Training': 'Treinamento',
'Training Course Catalog': 'Catálogo de cursos de treinamento',
'Training Details': 'Detalhes do treinamento',
'Training added': 'Treinamento incluído',
'Training deleted': 'Treinamento excluído',
'Training updated': 'Treinamento atualizado',
'Trainings': 'Treinamentos',
'Transit': 'Trânsito',
'Transit Status': 'Status do Transito',
'Transition Effect': 'Efeito de Transição',
'Transparent?': 'TRANSPARENTE?',
'Transportation assistance, Rank': 'Assistência de transporte, Classificação',
'Trauma Center': 'Centro de traumas',
'Travel Cost': 'Custo da Viagem',
'Tropical Storm': 'Tempestade Tropical',
'Tropo': 'Tropo',
'Tropo Messaging Token': 'Token de Mensagens do Tropo',
'Tropo Settings': 'Configurações do Tropo',
'Tropo Voice Token': 'Token de Voz do Tropo',
'Tropo settings updated': 'Configurações do Tropo atualizadas',
'Truck': 'Caminhão',
'Try checking the URL for errors, maybe it was mistyped.': 'Tente verificar se existem erros na URL, talvez tenha sido um erro de digitação',
'Try hitting refresh/reload button or trying the URL from the address bar again.': 'Tente apertar o botão atualizar/recarregar ou tente a URL a partir da barra de endereços novamente',
'Try refreshing the page or hitting the back button on your browser.': 'Tente atualizar a página ou apertar o botão voltar em seu navegador.',
'Tsunami': 'Tsunami',
'Tuesday': 'Terça-feira',
'Twitter': 'Twitter',
'Twitter ID or #hashtag': 'ID Twitter ou #hashtag',
'Twitter Settings': 'Configurações do Twitter',
'Type': 'Tipo',
'Type of Construction': 'Tipo de Construção',
'Type of water source before the disaster': 'Tipo de fonte de água antes do desastre',
"Type the first few characters of one of the Person's names.": 'Digite os primeiros caracteres de um dos nomes da pessoa.',
'UID': 'uid',
'UN': 'ONU',
'URL': 'Localizador-Padrão de Recursos',
'UTC Offset': 'UTC Offset',
'Un-Repairable': 'Irreparável',
'Unable to parse CSV file!': 'Não é possível analisar Arquivo CSV!',
'Understaffed': 'Com falta de pessoal',
'Unidentified': 'Não identificado',
'Unit Cost': 'Custo por unidade',
'Unit added': 'Unidade incluída',
'Unit deleted': 'Unidade Excluída',
'Unit of Measure': 'Unidade de medida',
'Unit updated': 'Unidade Atualizados',
'Units': 'Unidades',
'Unknown': 'Desconhecido',
'Unknown Peer': 'Peer desconhecido',
'Unknown type of facility': 'Tipo desconhecido de instalação',
'Unreinforced masonry': 'Alvenaria não reforçada',
'Unresolved Conflicts': 'Conflitos não resolvidos',
'Unsafe': 'Inseguro',
'Unselect to disable the modem': 'Desmarcar para desativar o modem',
'Unselect to disable this API service': 'Unselect to disable this API service',
'Unselect to disable this SMTP service': 'Unselect to disable this SMTP service',
'Unsent': 'não enviado',
'Unsupported data format!': 'Formato de dados não Suportado!',
'Unsupported method!': 'Método não Suportado!',
'Update': 'atualização',
'Update Activity Report': 'Atualizar Relatório de atividade',
'Update Cholera Treatment Capability Information': 'Atualizar informações de capacidade de tratamento de Cólera',
'Update Request': 'Atualizar Pedido',
'Update Service Profile': 'Atualizar Perfil de Serviço',
'Update Status': 'Status da Atualização',
'Update Task Status': 'Atualizar Status da Tarefa',
'Update Unit': 'Atualizar Unidade',
'Update if Master': 'Atualizar se for o principal',
'Update if Newer': 'Atualizar se Mais Recente',
'Update your current ordered list': 'ATUALIZE a seu atual lista ordenada',
'Updated By': 'Atualizado por',
'Upload Comma Separated Value File': 'Upload Comma Separated Value File',
'Upload Format': 'Upload Format',
'Upload OCR Form': 'Upload OCR Form',
'Upload Photos': 'Fazer Upload de Fotos',
'Upload Spreadsheet': 'Fazer atualizacao de Planilha',
'Upload Track': 'Pista de carregamento',
'Upload a CSV file': 'Upload a CSV file',
'Upload a CSV file formatted according to the Template.': 'Upload a CSV file formatted according to the Template.',
'Upload a Spreadsheet': 'Fazer Upload de uma planilha',
'Upload an image file (bmp, gif, jpeg or png), max. 300x300 pixels!': 'Fazer Upload de um arquivo de imagem (bmp, gif, jpeg ou png), máx. 300x300 pixels!',
'Upload an image file here.': 'Fazer atualizacao de um arquivo de imagem aqui.',
"Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.": 'Fazer atualizacao de um arquivo de imagem aqui. Se voce não fizer o upload de um arquivo de imagem, então voce deverá especificar sua localização no campo URL',
'Upload an image, such as a photo': 'Fazer Upload de uma imagem, como uma foto',
'Uploaded': 'Uploaded',
'Urban Fire': 'Incêndio urbano',
'Urban area': 'Zona Urbana',
'Urdu': 'Urdu',
'Urgent': 'Urgente',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilize (...)&(...) para e, (...)|(...) ou para, e ~(...) para não para construir consultas mais complexas.',
'Use Geocoder for address lookups?': 'Utiliza Geocodificador para consultas de endereços?',
'Use default': 'usar o padrão',
'Use these links to download data that is currently in the database.': 'Use estes links para fazer o download de dados actualmente na base de dados.',
'Use this to set the starting location for the Location Selector.': 'Use this to set the starting location for the Location Selector.',
'Used by IRS & Assess': 'Utilizado pela Receita Federal & Avaliar',
'Used in onHover Tooltip & Cluster Popups to differentiate between types.': 'Utilizado em onHover De Dicas & Cluster Popups para diferenciar entre tipos.',
'Used to build onHover Tooltip & 1st field also used in Cluster Popups to differentiate between records.': 'Utilizado para construir onHover Dicas & primeiro campo também utilizado no Popups Cluster para diferenciar entre os registros.',
'Used to check that latitude of entered locations is reasonable. May be used to filter lists of resources that have locations.': 'Usado para verificar latitude de locais inseridos é razoável. Pode ser utilizado para filtrar listas de recursos que possuem locais.',
'Used to check that longitude of entered locations is reasonable. May be used to filter lists of resources that have locations.': 'Usado para verificar que longitude de locais inserido é razoável. Pode ser utilizado para filtrar listas de recursos que possuem locais.',
'Used to import data from spreadsheets into the database': 'Para importar dados utilizada a partir de planilhas no banco de dados',
'Used within Inventory Management, Request Management and Asset Management': 'Utilizado no gerenciamento de inventário, gerenciamento de Pedido e gerenciamento de ativos',
'User': 'usuário',
'User Account has been Disabled': 'Conta de Usuário foi Desativado',
'User Details': 'Detalhes do Usuário',
'User ID': 'User ID',
'User Management': 'gerenciamento do usuário',
'User Profile': 'Perfil do Utilizador',
'User Requests': 'Pedidos do Utilizador',
'User Updated': 'Utilizador actualizado',
'User added': 'Usuário Incluído',
'User already has this role': 'Usuário já tem essa função',
'User deleted': 'Usuário Excluído',
'User updated': 'Utilizador actualizado',
'Username': 'Nome de usuário',
'Users': 'usuários',
'Users removed': 'Utilizadores removidos',
'Uses the REST Query Format defined in': 'Utiliza o formato de consulta REST definido em',
'Ushahidi': 'Ushahidi',
'Utilities': 'Serviços Públicos',
'Utility, telecommunication, other non-transport infrastructure': 'Serviços Públicos, telecomunicações, outra infra-estrutura não-transporte',
'Vacancies': 'Vagas',
'Value': 'Valor',
'Various Reporting functionalities': 'Diversas funcionalidades de relatório',
'Vehicle': 'veículo',
'Vehicle Crime': 'Roubo/Furto de veículo',
'Vehicle Details': 'Vehicle Details',
'Vehicle Details added': 'Vehicle Details added',
'Vehicle Details deleted': 'Vehicle Details deleted',
'Vehicle Details updated': 'Vehicle Details updated',
'Vehicle Management': 'Vehicle Management',
'Vehicle Types': 'Tipos de veículo',
'Vehicle added': 'Vehicle added',
'Vehicle deleted': 'Vehicle deleted',
'Vehicle updated': 'Vehicle updated',
'Vehicles': 'Vehicles',
'Vehicles are assets with some extra details.': 'Vehicles are assets with some extra details.',
'Verification Status': 'Status de verificação',
'Verified?': 'Verificado?',
'Verify password': 'Verificar senha',
'Version': 'Version',
'Very Good': 'Muito bom',
'Very High': 'muito alto',
'View Alerts received using either Email or SMS': 'Visualizar alertas utilizando quer o correio electrónico quer SMS.',
'View All': 'Visualizar todos',
'View All Tickets': 'View All Tickets',
'View Error Tickets': 'Ver bilhetes de erro',
'View Fullscreen Map': 'Visualização Inteira Mapa',
'View Image': 'Visualizar imagem',
'View Items': 'Ver itens',
'View On Map': 'Visualizar no mapa',
'View Outbox': 'Visualização Outbox',
'View Picture': 'Visualização de imagem',
'View Results of completed and/or partially completed assessments': 'View Results of completed and/or partially completed assessments',
'View Settings': 'Ver Configurações',
'View Tickets': 'Visualizar Bilhetes',
'View and/or update their details': 'Visualizar e/ou actualizar os seus detalhes',
'View or update the status of a hospital.': 'VISUALIZAR ou atualizar o status de um hospital.',
'View pending requests and pledge support.': 'Visualizar pedidos pendentes e suporte promessa.',
'View the hospitals on a map.': 'Visualizar os hospitais em um mapa.',
'View/Edit the Database directly': 'Visualizar/Editar o banco de dados diretamente',
"View/Edit the Database directly (caution: doesn't respect the framework rules!)": 'Visualizar/Alterar a base de dados directamente ( cuidado : não cumpre com as regras da infraestrutura ! ) ).',
'Village': 'Vila',
'Village Leader': 'Líder da Aldeia',
'Visible?': 'Visível?',
'Visual Recognition': 'Reconhecimento visual',
'Volcanic Ash Cloud': 'Nuvem de cinzas vulcânicas',
'Volcanic Event': 'Evento vulcânico',
'Volume (m3)': 'Volume (m3)',
'Volunteer Availability': 'Disponibilidade de Voluntário',
'Volunteer Details': 'Detalhes do voluntário',
'Volunteer Information': 'Voluntário Informações',
'Volunteer Management': 'Gestão de voluntário',
'Volunteer Project': 'Projeto voluntário',
'Volunteer Record': 'Voluntário Registro',
'Volunteer Request': 'Pedido voluntário',
'Volunteer added': 'Voluntário incluído',
'Volunteer availability added': 'Disponibilidade de voluntário incluída',
'Volunteer availability deleted': 'Disponibilidade de voluntário excluída',
'Volunteer availability updated': 'Disponibilidade de voluntário atualizada',
'Volunteer deleted': 'Voluntário excluído',
'Volunteer details updated': 'Atualização dos detalhes de voluntários',
'Volunteer updated': 'Voluntário atualizado',
'Volunteers': 'Voluntários',
'Volunteers List': 'Voluntários Lista',
'Volunteers were notified!': 'Voluntários foram notificados!',
'Vote': 'voto',
'Votes': 'votos',
'WASH': 'WASH',
'WMS Browser Name': 'WMS Nome do Navegador',
'WMS Browser URL': 'WMS Navegador URL',
'Walking Only': 'Apenas andando',
'Wall or other structural damage': 'Parede ou outros danos estruturais',
'Warehouse': 'Depósito',
'Warehouse Details': 'Detalhes do Armazém',
'Warehouse added': 'Warehouse incluído',
'Warehouse deleted': 'Deposito apagado',
'Warehouse updated': 'Warehouse ATUALIZADO',
'Warehouses': 'Armazéns',
'WatSan': 'WatSan',
'Water Sanitation Hygiene': 'Saneamento de água',
'Water collection': 'Coleta de água',
'Water gallon': 'Galão de água',
'Water storage containers in households': 'Recipientes de armazenamento de água nos domicílios',
'Water supply': 'Abastecimento de água',
'Waterspout': 'Waterspout',
'We have tried': 'We have tried',
'Web API settings updated': 'Web API settings updated',
'Web Map Service Browser Name': 'Nome do mapa da Web navegador de serviços',
'Web Map Service Browser URL': 'Web Mapa Do navegador de Serviços URL',
'Website': 'WebSite',
'Wednesday': 'Quarta-feira',
'Weight': 'peso',
'Weight (kg)': 'peso (kg)',
'Welcome to the Sahana Portal at': 'Bem-vindo ao Portal Sahana em',
'Well-Known Text': 'Texto bem conhecido',
'What order to be contacted in.': 'What order to be contacted in.',
'Wheat': 'Trigo',
'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points.': 'Quando o mapa é que exibido incide sobre um conjunto de pontos, o mapa é aproximado para mostrar apenas a região delimitadora dos pontos.',
'When reports were entered': 'Quando os relatórios foram Digitados',
"When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.": 'Quando Sincronizando dados com outros, os conflitos acontecem em casos onde dois (ou mais) grupos desejam sincronizar informações que os dois tenham modificado, ou seja, informações conflitantes. Módulo de sincronização tenta resolver esses conflitos automaticamente mas em alguns casos isso não consegue. Nesses casos, cabe a si resolver esses conflitos manualmente, clique no link à direita para ir para esta página.',
'Whiskers': 'Bigodes',
'Who is doing what and where': 'Quem está a fazer o quê e onde',
'Who usually collects water for the family?': 'Quem habitualmente colecta água para a família ?',
'Width': 'Largura',
'Width (m)': 'Largura (m)',
'Wild Fire': 'Fogo Selvagem',
'Wind Chill': 'Vento Frio',
'Window frame': 'Esquadria de janela',
'Winter Storm': 'Tempestade de inverno',
'Women of Child Bearing Age': 'Mulheres em idade fértil',
'Women participating in coping activities': 'Mulheres participando de atividades de enfrentamento',
'Women who are Pregnant or in Labour': 'Mulheres grávidas ou em trabalho de parto',
'Womens Focus Groups': 'Grupos focais de mulheres',
'Wooden plank': 'Tábua de madeira',
'Wooden poles': 'Postes de madeira',
'Working hours end': 'Horas de trabalho final',
'Working hours start': 'Horas de trabalho iniciar',
'Working or other to provide money/food': 'Trabalhando para outros para prover dinheiro / alimentos',
'X-Ray': 'Raio-X',
'XMPP': 'XMPP',
'YES': 'SIM',
"Yahoo Layers cannot be displayed if there isn't a valid API Key": "Yahoo Layers cannot be displayed if there isn't a valid API Key",
'Year': 'Year',
'Year built': 'Ano de construção',
'Year of Manufacture': 'Ano de fabricação',
'Yellow': 'amarelo',
'Yes': 'Sim',
'You are a recovery team?': 'Você é uma equipe de recuperação?',
'You are attempting to delete your own account - are you sure you want to proceed?': 'Você está tentando excluir sua própria conta-Tem certeza de que deseja continuar?',
'You are currently reported missing!': 'Você está atualmente desaparecido!',
'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.': 'Você pode alterar a configuração do Módulo de Sincronização na seção configurações. Essa configuração inclui o seu UUID (número de identificação exclusivo), Planejamentos de Sincronização, serviço Farol e assim por diante. Clique no link a seguir para ir para a página Configurações de Sincronização.',
'You can click on the map below to select the Lat/Lon fields': 'Você pode clicar no mapa abaixo para selecionar os campos Lat/Lon',
'You can select the Draw tool': 'Pode selecionar a ferramenta Desenho',
'You can set the modem settings for SMS here.': 'Pode definir a configuração do modem SMS aqui.',
'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.': 'Você pode utilizar a ferramenta de conversão para converter coordenadas de GPS ou graus/minutos/segundos.',
'You do no have permission to cancel this received shipment.': 'Você não tem permissão para cancelar o recebimento deste carregamento.',
'You do no have permission to cancel this sent shipment.': 'Você não tem permissão para cancelar o envio desse carregamento.',
'You do no have permission to make this commitment.': 'Você não tem permissão de fazer este compromisso.',
'You do no have permission to receive this shipment.': 'Você não tem permissão para receber este carregamento.',
'You do no have permission to send this shipment.': 'Você não tem permissão para enviar este carregamento.',
'You do not have permission for any facility to make a commitment.': 'Você não tem permissão em qualquer instalação para estabelecer um compromisso.',
'You do not have permission for any facility to make a request.': 'Você não tem permissão em qualquer instalação para fazer um pedido.',
'You do not have permission for any facility to receive a shipment.': 'You do not have permission for any facility to receive a shipment.',
'You do not have permission for any facility to send a shipment.': 'You do not have permission for any facility to send a shipment.',
'You do not have permission for any site to add an inventory item.': 'Você não tem permissão em qualquer site para incluir um item de inventário.',
'You do not have permission for any site to make a commitment.': 'Você não tem permissão em qualquer site para assumir um compromisso.',
'You do not have permission for any site to make a request.': 'Você não tem permissão em qualquer site para fazer um pedido.',
'You do not have permission for any site to perform this action.': 'Você não tem permissão em qualquer site para executar esta ação.',
'You do not have permission for any site to receive a shipment.': 'Você não tem permissão para qualquer site para receber um carregamento.',
'You do not have permission for any site to send a shipment.': 'Você não tem permissão em qualquer site para enviar um carregamento.',
'You do not have permission to cancel this received shipment.': 'Você não tem permissão para cancelar este carregamento recebido.',
'You do not have permission to cancel this sent shipment.': 'Você não tem permissão para cancelar essa remessa enviada.',
'You do not have permission to make this commitment.': 'Você não tem permissão para assumir este compromisso.',
'You do not have permission to receive this shipment.': 'Você não tem permissão para receber esta remessa.',
'You do not have permission to send a shipment from this site.': 'Você não tem permissão para enviar um carregamento a partir deste site.',
'You do not have permission to send this shipment.': 'Você não tem permissão para enviar este carregamento.',
'You have a personal map configuration. To change your personal configuration, click': 'Você tem uma configuração de mapa pessoal. Para alterar a sua configuração pessoal, clique',
'You have a personal map configuration. To change your personal configuration, click ': 'You have a personal map configuration. To change your personal configuration, click ',
'You have found a dead body?': 'Descobriu um cadáver ?',
"You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.": "You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.",
"You haven't made any calculations": 'Não fez quaisquer cálculos.',
'You must be logged in to register volunteers.': 'Você deve estar com login efetuado para registrar voluntários.',
'You must be logged in to report persons missing or found.': 'Você deve estar registrado para informar pessoas desaparecidas ou localizadas.',
'You must provide a series id to proceed.': 'Você deve fornecer um número de série para continuar.',
'You should edit Twitter settings in models/000_config.py': 'Você deve editar as definições do Twitter em modelos/000_config.py',
'Your current ordered list of solution items is shown below. You can change it by voting again.': 'Seu lista de itens de solução pedidos aparece abaixo. Você pode alterá-lo ao votar novamente.',
'Your post was added successfully.': 'O post foi incluído com êxito.',
'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. You can also see other settings on that page.': 'Uma identificação exclusiva (UUID) foi designada para o seu sistema e poderá ser usada por outros computadores ao seu redor para identificá-lo. Para visualizar o seu UUID, você pode ir para Sincronização -> configurações Sync. Você também pode ver outras configurações nesta página.',
'ZIP Code': 'ZIP Code',
'Zero Hour': 'Hora Zero',
'Zinc roof': 'Telhado de Zinco',
'Zoom': 'Zoom',
'Zoom Levels': 'Níveis de Zoom',
'active': 'ativo',
'added': 'incluído',
'all records': 'todos os registros',
'allows a budget to be developed based on staff & equipment costs, including any admin overheads.': 'Permite que um orçamento seja desenvolvido com base em despesas com o pessoal e equipamento, incluindo quaisquer despesas gerais administrativas.',
'allows for creation and management of assessments.': 'allows for creation and management of assessments.',
'allows for creation and management of surveys to assess the damage following a natural disaster.': 'permite a criação e gerenciamento de pesquisas para avaliar os danos após um desastre natural.',
'an individual/team to do in 1-2 days': 'Uma pessoa/Equipe para fazer em 1 Dias-2',
'assigned': 'designado',
'average': 'Na média',
'black': 'Preto',
'blond': 'Loiro',
'blue': 'azul',
'brown': 'Marrom',
'business_damaged': 'business_damaged',
'by': 'por',
'c/o Name': 'c/o Nome',
'can be used to extract data from spreadsheets and put them into database tables.': 'Pode ser utilizado para extrair dados de planilhas e colocá-los em tabelas de dados.',
'cancelled': 'CANCELADO',
'caucasoid': 'Caucasoid',
'check all': 'Verificar Tudo',
'click for more details': 'Clique para mais detalhes',
'click here': 'click here',
'completed': 'Concluído',
'confirmed': 'Confirmado',
'consider': 'considerar',
"couldn't be parsed so NetworkLinks not followed.": 'Não pôde ser analisado então o NetworkLinks não seguiu.',
'curly': 'Encaracolado',
'currently registered': 'Atualmente registrados',
'daily': 'Diariamente',
'dark': 'Escuro',
'data uploaded': 'dados carregados',
'database': 'DATABASE',
'database %s select': '% de dados s SELECIONE',
'db': 'dB',
'deceased': 'Falecido',
'delete all checked': 'excluir todos marcados',
'deleted': 'excluídos',
'design': 'projecto',
'diseased': 'Doentes',
'displaced': 'Deslocadas',
'divorced': 'Divorciado',
'done!': 'Pronto!',
'duplicate': 'duplicar',
'edit': 'Editar',
'eg. gas, electricity, water': 'Exemplo: Gás, eletricidade, água',
'embedded': 'integrado',
'enclosed area': 'Área anexada',
'enter a number between %(min)g and %(max)g': 'enter a number between %(min)g and %(max)g',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'export as csv file': 'Exportar como arquivo cvs.',
'fat': 'Gordura',
'feedback': 'Retorno',
'female': 'Sexo Feminino',
'flush latrine with septic tank': 'esvaziar latrina com tanque séptico',
'food_sources': 'fuentes de alimento',
'forehead': 'testa',
'form data': 'form data',
'found': 'Localizado',
'from Twitter': 'do Twitter',
'getting': 'getting',
'green': 'verde',
'grey': 'cinza',
'here': 'aqui',
'high': 'Alta',
'hourly': 'Por hora',
'households': 'Membros da família',
'identified': 'identificado',
'ignore': 'Ignore',
'in Deg Min Sec format': 'GRAUS Celsius no formato Mín. Segundo',
'in GPS format': 'GPS no formato',
'in Inv.': 'in Inv.',
'inactive': 'inativo',
"includes a GroundOverlay or ScreenOverlay which aren't supported in OpenLayers yet, so it may not work properly.": 'Inclui um GroundOverlay ou ScreenOverlay que não são ainda suportados em OpenLayuers, portanto poderá não funcionar na totalidade.',
'injured': 'Feridos',
'insert new': 'inserir novo',
'insert new %s': 'inserir novo %s',
'invalid': 'inválido',
'invalid request': 'PEDIDO INVÁLIDO',
'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.': 'É um repositório central de informações em tempo real onde vítimas de desastres e seus familiares, especialmente casos isolados, refugiados e pessoas deslocadas podem ser abrigados. Informações como nome, idade, Contate o número de Bilhete de Identidade número, localização Deslocadas, e outros detalhes são capturados. Detalhes de impressão Imagem e Dedo de as pessoas possam ser transferidos por upload para o sistema. As pessoas podem também ser capturados pelo grupo por eficiência e conveniência.',
'is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities': 'tem como visão ser composto de vários sub-módulos que interagem juntos a fim de fornecer funcionalidade complexa para o gerenciamento de itens de ajuda e projeto de uma organização. Isso inclui um sistema de admissão, um sistema de gestão de depósitos, rastreamento de mercadorias, gestão da cadeia de fornecimentos, de gestão da frota, aquisições, recursos de rastreamento financeiro de ativos e outros e gerenciamento de recursos',
'keeps track of all incoming tickets allowing them to be categorised & routed to the appropriate place for actioning.': 'Mantém controle de todos os bilhetes de entrada permitindo que sejam classificados & direcionados ao local apropriado para atuação.',
'latrines': 'privadas',
'leave empty to detach account': 'deixar em branco para desconectar a conta',
'legend URL': 'Legenda URL',
'light': 'luz',
'login': 'login',
'long': 'Longo',
'long>12cm': 'comprimento>12cm',
'low': 'baixo',
'male': 'masculino',
'manual': 'Manual',
'married': 'casado',
'medium': 'médio.',
'medium<12cm': 'médio<12cm',
'meters': 'metros',
'missing': 'ausente',
'module allows the site administrator to configure various options.': 'Módulo permite que o administrador do site configure várias opções.',
'module helps monitoring the status of hospitals.': 'Módulo ajuda monitorando o status de hospitais.',
'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).': 'Módulo fornece um mecanismo para colaboração fornecem uma visão geral do desastre de desenvolvimento, utilização de mapeamento online (SIG).',
'mongoloid': 'Mongolóide',
'more': 'Mais',
'n/a': 'n/d',
'negroid': 'Negróide',
'never': 'Nunca',
'new': 'Novo(a)',
'new record inserted': 'Novo registro inserido',
'next 100 rows': 'próximas 100 linhas',
'no': 'no',
'none': 'nenhum',
'normal': 'normal',
'not accessible - no cached version available!': 'Não acessível-nenhuma versão em cache disponível!',
'not accessible - using cached version from': 'Não acessível-Utilizando versão em Cache',
'not specified': 'não especificado',
'num Zoom Levels': 'Num níveis de Zoom',
'obsolete': 'Obsoleto',
'on': 'Ligar',
'once': 'uma vez',
'open defecation': 'Abrir evacuação',
'optional': 'Optional',
'or import from csv file': 'ou importar a partir do arquivo csv',
'other': 'outros',
'over one hour': 'Mais de uma hora',
'people': 'pessoas',
'piece': 'parte',
'pit': 'cova',
'pit latrine': 'cova de latrina',
'postponed': 'Adiado',
'preliminary template or draft, not actionable in its current form': 'Modelo ou rascunho preliminar, não acionável em sua forma atual',
'previous 100 rows': '100 linhas anteriores',
'record does not exist': 'Registro não existe',
'record id': 'ID do Registro',
'red': 'vermelho',
'reported': 'relatado',
'reports successfully imported.': 'relatórios importados com êxito.',
'representation of the Polygon/Line.': 'Representação do polígono /Linha.',
'retired': 'Aposentado',
'retry': 'retry',
'river': 'Rio',
'see comment': 'Veja o comentário',
'selected': 'Selecionado',
'separated': 'Separado',
'separated from family': 'Separados da família',
'shaved': 'raspado',
'short': 'pequeno',
'short<6cm': 'pequeno<6cm',
'sides': 'lados',
'sign-up now': 'Inscreva-se agora',
'single': 'único',
'slim': 'estreito',
'specify': 'Especifique.',
'staff': 'equipe',
'staff members': 'Membros da equipe',
'state': 'Estado',
'state location': 'Localização do Estado',
'straight': 'reto',
'suffered financial losses': 'Sofreram perdas financeiras',
'table': 'table',
'tall': 'Altura',
'this': 'isto',
'times and it is still not working. We give in. Sorry.': 'times and it is still not working. We give in. Sorry.',
'to access the system': 'Para acessar o sistema',
'to download a OCR Form.': 'to download a OCR Form.',
'to reset your password': 'Para Reconfigurar sua senha',
'to verify your email': 'Para verificar seu e-mail',
'tonsure': 'tonsura',
'total': 'Total',
'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!': 'Módulo tweepy não disponível com a execução Python-isto necessita da instalação para suporte a tropo Twitter!',
'unable to parse csv file': 'Não é possível analisar arquivo csv',
'uncheck all': 'Desmarcar Tudo',
'unidentified': 'IDENTIFICADO',
'unknown': 'unknown',
'unspecified': 'UNSPECIFIED',
'unverified': 'Não Verificado',
'updated': 'Atualizado',
'updates only': 'Apenas atualizações',
'verified': 'Verificado',
'volunteer': 'voluntário',
'volunteers': 'Voluntários',
'wavy': 'Serpentina',
'weekly': 'Semanalmente',
'white': 'branco',
'wider area, longer term, usually contain multiple Activities': 'maior área, maior prazo, contém usualmente múltiplas actividades',
'widowed': 'Viúvo',
'window': 'janela',
'within human habitat': 'Dentro do habitat humano',
'xlwt module not available within the running Python - this needs installing for XLS output!': 'Módulo Xlwt não disponível no módulo Python sendo executado - isto necessita ser instalado para saída XLS!',
'yes': 'YES',
}
| {
"content_hash": "74341fc5f944f974e0fbd66225fe4ba1",
"timestamp": "",
"source": "github",
"line_count": 4817,
"max_line_length": 994,
"avg_line_length": 60.760224205937305,
"alnum_prop": 0.753760053573503,
"repo_name": "flavour/cert",
"id": "af84e70a79837dea70740b6de0d9ac4743b47425",
"size": "297093",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "languages/pt-br.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "13068308"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "21061411"
},
{
"name": "Shell",
"bytes": "1645"
}
],
"symlink_target": ""
} |
"""Illustrates a generic association which persists association
objects within individual tables, each one generated to persist
those objects on behalf of a particular parent class.
This configuration has the advantage that each type of parent
maintains its "Address" rows separately, so that collection
size for one type of parent will have no impact on other types
of parent. Navigation between parent and "Address" is simple,
direct, and bidirectional.
This recipe is the most efficient (speed wise and storage wise)
and simple of all of them.
The creation of many related tables may seem at first like an issue,
but there really isn't one -- the management and targeting of these tables
is completely automated.
"""
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.ext.declarative import as_declarative
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
@as_declarative()
class Base:
    """Declarative base that derives each table name from the class name
    and gives every mapped subclass an integer surrogate primary key.
    """

    @declared_attr
    def __tablename__(cls):
        # Table name is simply the lowercased class name.
        name = cls.__name__
        return name.lower()

    id = Column(Integer, primary_key=True)
class Address:
    """Mixin declaring the columns shared by every generated per-parent
    'Address' table.

    This is a plain declarative mixin: only simple columns belong here.
    Anything needing per-class customization should be declared with
    @declared_attr instead.
    """

    street = Column(String)
    city = Column(String)
    zip = Column(String)

    def __repr__(self):
        cls_name = self.__class__.__name__
        return (
            f"{cls_name}(street={self.street!r}, "
            f"city={self.city!r}, zip={self.zip!r})"
        )
class HasAddresses:
    """Mixin that manufactures a dedicated Address subclass (and therefore
    a dedicated table) for each parent class that uses it, exposing it as
    ``cls.Address`` plus an ``addresses`` relationship.
    """

    @declared_attr
    def addresses(cls):
        # Build the per-parent Address class dynamically; its table is
        # named "<parent_table>_address" and carries a FK to the parent.
        members = dict(
            __tablename__="%s_address" % cls.__tablename__,
            parent_id=Column(
                Integer, ForeignKey("%s.id" % cls.__tablename__)
            ),
            parent=relationship(cls),
        )
        cls.Address = type("%sAddress" % cls.__name__, (Address, Base), members)
        return relationship(cls.Address)
class Customer(HasAddresses, Base):
    # Example parent type; HasAddresses generates a dedicated
    # CustomerAddress class mapped to the "customer_address" table.
    name = Column(String)
class Supplier(HasAddresses, Base):
    # Second parent type; gets its own SupplierAddress class mapped to
    # the "supplier_address" table, independent of Customer's.
    company_name = Column(String)
# --- Demo ----------------------------------------------------------------
# Create the schema in an in-memory SQLite database and show that each
# parent class persists its addresses in its own generated table.
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)

session = Session(engine)

customer = Customer(
    name="customer 1",
    addresses=[
        Customer.Address(
            street="123 anywhere street", city="New York", zip="10110"
        ),
        Customer.Address(
            street="40 main street", city="San Francisco", zip="95732"
        ),
    ],
)
supplier = Supplier(
    company_name="Ace Hammers",
    addresses=[
        Supplier.Address(street="2569 west elm", city="Detroit", zip="56785")
    ],
)
session.add_all([customer, supplier])
session.commit()

# Navigation works in both directions: parent -> addresses -> parent.
for cust in session.query(Customer):
    for addr in cust.addresses:
        print(addr)
        print(addr.parent)
| {
"content_hash": "55d9d28642416e5ed7aec9223acc0d47",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 78,
"avg_line_length": 26.221374045801525,
"alnum_prop": 0.6197962154294032,
"repo_name": "zzzeek/sqlalchemy",
"id": "5b83e6e68f36fcc6d8d584d9f8440e30e617d1ce",
"size": "3435",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "examples/generic_associations/table_per_related.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "21698"
},
{
"name": "Python",
"bytes": "16838583"
}
],
"symlink_target": ""
} |
import collections
from contextlib import closing
import errno
import logging
import os
import re
import socket
import ssl
import sys
import typing # noqa: F401
from tornado.escape import to_unicode, utf8
from tornado import gen, version
from tornado.httpclient import AsyncHTTPClient
from tornado.httputil import HTTPHeaders, ResponseStartLine
from tornado.ioloop import IOLoop
from tornado.iostream import UnsatisfiableReadError
from tornado.locks import Event
from tornado.log import gen_log
from tornado.netutil import Resolver, bind_sockets
from tornado.simple_httpclient import (
SimpleAsyncHTTPClient,
HTTPStreamClosedError,
HTTPTimeoutError,
)
from tornado.test.httpclient_test import (
ChunkHandler,
CountdownHandler,
HelloWorldHandler,
RedirectHandler,
UserAgentHandler,
)
from tornado.test import httpclient_test
from tornado.testing import (
AsyncHTTPTestCase,
AsyncHTTPSTestCase,
AsyncTestCase,
ExpectLog,
gen_test,
)
from tornado.test.util import skipOnTravis, skipIfNoIPv6, refusing_port
from tornado.web import RequestHandler, Application, url, stream_request_body
class SimpleHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
    """Runs the shared HTTP client test suite against SimpleAsyncHTTPClient."""

    def get_http_client(self):
        # force_instance so the shared tests get a fresh, uncached client.
        instance = SimpleAsyncHTTPClient(force_instance=True)
        self.assertTrue(isinstance(instance, SimpleAsyncHTTPClient))
        return instance
class TriggerHandler(RequestHandler):
    """Handler that hangs until an external caller fires the callback it
    appended to ``queue``; used to hold connections open in the tests.
    """

    def initialize(self, queue, wake_callback):
        self.queue = queue
        self.wake_callback = wake_callback

    @gen.coroutine
    def get(self):
        logging.debug("queuing trigger")
        release = Event()
        # Publish the release callback before waking the test, so the
        # test can always find a pending trigger.
        self.queue.append(release.set)
        should_wake = self.get_argument("wake", "true") == "true"
        if should_wake:
            self.wake_callback()
        yield release.wait()
class ContentLengthHandler(RequestHandler):
    """Writes a raw HTTP/1.0 response whose Content-Length header value is
    taken verbatim from the ``value`` query argument (which the tests make
    deliberately malformed).
    """

    def get(self):
        # Take over the raw connection so arbitrary header bytes can be sent.
        self.stream = self.detach()
        IOLoop.current().spawn_callback(self.write_response)

    @gen.coroutine
    def write_response(self):
        raw = "HTTP/1.0 200 OK\r\nContent-Length: %s\r\n\r\nok" % self.get_argument(
            "value"
        )
        yield self.stream.write(utf8(raw))
        self.stream.close()
class HeadHandler(RequestHandler):
    # HEAD response that advertises a body length without sending a body.
    def head(self):
        self.set_header("Content-Length", "7")
class OptionsHandler(RequestHandler):
    # Minimal OPTIONS responder with a CORS header and a two-byte body.
    def options(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.write("ok")
class NoContentHandler(RequestHandler):
    # Responds 204 No Content; the test asserts no Content-Length is sent.
    def get(self):
        self.set_status(204)
        self.finish()
class SeeOtherPostHandler(RequestHandler):
    """Redirects a POST to /see_other_get using the status code (302 or 303)
    supplied in the request body.
    """

    def post(self):
        status = int(self.request.body)
        assert status in (302, 303), "unexpected body %r" % self.request.body
        self.set_status(status)
        self.set_header("Location", "/see_other_get")
class SeeOtherGetHandler(RequestHandler):
    # Target of the see-other redirect; the redirected GET must carry no body.
    def get(self):
        if self.request.body:
            raise Exception("unexpected body %r" % self.request.body)
        self.write("ok")
class HostEchoHandler(RequestHandler):
    # Echoes the Host header back so tests can inspect what the client sent.
    def get(self):
        self.write(self.request.headers["Host"])
class NoContentLengthHandler(RequestHandler):
    """Replies with a body but no Content-Length header, emulating the old
    HTTP/1.0 connection-close framing. Only meaningful over HTTP/1.x.
    """

    def get(self):
        if not self.request.version.startswith("HTTP/1"):
            self.finish("HTTP/1 required")
            return
        # Tornado handles content-length at the framework level, so go
        # around it by writing to the raw stream directly.
        stream = self.detach()
        stream.write(b"HTTP/1.0 200 OK\r\n\r\nhello")
        stream.close()
class EchoPostHandler(RequestHandler):
    # Echoes the POST body back verbatim.
    def post(self):
        self.write(self.request.body)
@stream_request_body
class RespondInPrepareHandler(RequestHandler):
    # Finishes with 403 during prepare(), before any request body is read;
    # used to test early responses to expect_100_continue requests.
    def prepare(self):
        self.set_status(403)
        self.finish("forbidden")
class SimpleHTTPClientTestMixin(object):
    """Test cases shared by the plain-HTTP and HTTPS client test classes.

    Concrete subclasses provide create_client() (and the AsyncHTTP(S)TestCase
    machinery: fetch, get_url, io_loop, etc.). Tests here exercise connection
    limits, timeouts, redirects, body producers and header handling against
    the handlers registered in get_app().
    """
    def create_client(self, **kwargs):
        # Implemented by subclasses to build a client for the scheme under test.
        raise NotImplementedError()
    def get_app(self: typing.Any):
        # callable objects to finish pending /trigger requests
        self.triggers = (
            collections.deque()
        )  # type: typing.Deque[typing.Callable[[], None]]
        return Application(
            [
                url(
                    "/trigger",
                    TriggerHandler,
                    dict(queue=self.triggers, wake_callback=self.stop),
                ),
                url("/chunk", ChunkHandler),
                url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
                url("/hello", HelloWorldHandler),
                url("/content_length", ContentLengthHandler),
                url("/head", HeadHandler),
                url("/options", OptionsHandler),
                url("/no_content", NoContentHandler),
                url("/see_other_post", SeeOtherPostHandler),
                url("/see_other_get", SeeOtherGetHandler),
                url("/host_echo", HostEchoHandler),
                url("/no_content_length", NoContentLengthHandler),
                url("/echo_post", EchoPostHandler),
                url("/respond_in_prepare", RespondInPrepareHandler),
                url("/redirect", RedirectHandler),
                url("/user_agent", UserAgentHandler),
            ],
            gzip=True,
        )
    def test_singleton(self: typing.Any):
        # Class "constructor" reuses objects on the same IOLoop
        self.assertTrue(SimpleAsyncHTTPClient() is SimpleAsyncHTTPClient())
        # unless force_instance is used
        self.assertTrue(
            SimpleAsyncHTTPClient() is not SimpleAsyncHTTPClient(force_instance=True)
        )
        # different IOLoops use different objects
        with closing(IOLoop()) as io_loop2:
            async def make_client():
                await gen.sleep(0)
                return SimpleAsyncHTTPClient()
            client1 = self.io_loop.run_sync(make_client)
            client2 = io_loop2.run_sync(make_client)
            self.assertTrue(client1 is not client2)
    def test_connection_limit(self: typing.Any):
        with closing(self.create_client(max_clients=2)) as client:
            self.assertEqual(client.max_clients, 2)
            seen = []
            # Send 4 requests. Two can be sent immediately, while the others
            # will be queued
            for i in range(4):
                def cb(fut, i=i):
                    seen.append(i)
                    self.stop()
                client.fetch(self.get_url("/trigger")).add_done_callback(cb)
            self.wait(condition=lambda: len(self.triggers) == 2)
            self.assertEqual(len(client.queue), 2)
            # Finish the first two requests and let the next two through
            self.triggers.popleft()()
            self.triggers.popleft()()
            self.wait(condition=lambda: (len(self.triggers) == 2 and len(seen) == 2))
            self.assertEqual(set(seen), set([0, 1]))
            self.assertEqual(len(client.queue), 0)
            # Finish all the pending requests
            self.triggers.popleft()()
            self.triggers.popleft()()
            self.wait(condition=lambda: len(seen) == 4)
            self.assertEqual(set(seen), set([0, 1, 2, 3]))
            self.assertEqual(len(self.triggers), 0)
    @gen_test
    def test_redirect_connection_limit(self: typing.Any):
        # following redirects should not consume additional connections
        with closing(self.create_client(max_clients=1)) as client:
            response = yield client.fetch(self.get_url("/countdown/3"), max_redirects=3)
            response.rethrow()
    def test_max_redirects(self: typing.Any):
        response = self.fetch("/countdown/5", max_redirects=3)
        self.assertEqual(302, response.code)
        # We requested 5, followed three redirects for 4, 3, 2, then the last
        # unfollowed redirect is to 1.
        self.assertTrue(response.request.url.endswith("/countdown/5"))
        self.assertTrue(response.effective_url.endswith("/countdown/2"))
        self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
    def test_header_reuse(self: typing.Any):
        # Apps may reuse a headers object if they are only passing in constant
        # headers like user-agent. The header object should not be modified.
        headers = HTTPHeaders({"User-Agent": "Foo"})
        self.fetch("/hello", headers=headers)
        self.assertEqual(list(headers.get_all()), [("User-Agent", "Foo")])
    def test_default_user_agent(self: typing.Any):
        response = self.fetch("/user_agent", method="GET")
        self.assertEqual(200, response.code)
        self.assertEqual(response.body.decode(), "Tornado/{}".format(version))
    def test_see_other_redirect(self: typing.Any):
        for code in (302, 303):
            response = self.fetch("/see_other_post", method="POST", body="%d" % code)
            self.assertEqual(200, response.code)
            self.assertTrue(response.request.url.endswith("/see_other_post"))
            self.assertTrue(response.effective_url.endswith("/see_other_get"))
            # request is the original request, is a POST still
            self.assertEqual("POST", response.request.method)
    @skipOnTravis
    @gen_test
    def test_connect_timeout(self: typing.Any):
        timeout = 0.1
        cleanup_event = Event()
        test = self
        # Resolver that blocks until cleanup_event fires, so the connect
        # attempt cannot complete before the connect_timeout expires.
        class TimeoutResolver(Resolver):
            async def resolve(self, *args, **kwargs):
                await cleanup_event.wait()
                # Return something valid so the test doesn't raise during shutdown.
                return [(socket.AF_INET, ("127.0.0.1", test.get_http_port()))]
        with closing(self.create_client(resolver=TimeoutResolver())) as client:
            with self.assertRaises(HTTPTimeoutError):
                yield client.fetch(
                    self.get_url("/hello"),
                    connect_timeout=timeout,
                    request_timeout=3600,
                    raise_error=True,
                )
            # Let the hanging coroutine clean up after itself. We need to
            # wait more than a single IOLoop iteration for the SSL case,
            # which logs errors on unexpected EOF.
            cleanup_event.set()
            yield gen.sleep(0.2)
    @skipOnTravis
    def test_request_timeout(self: typing.Any):
        timeout = 0.1
        if os.name == "nt":
            timeout = 0.5
        with self.assertRaises(HTTPTimeoutError):
            self.fetch("/trigger?wake=false", request_timeout=timeout, raise_error=True)
        # trigger the hanging request to let it clean up after itself
        self.triggers.popleft()()
        self.io_loop.run_sync(lambda: gen.sleep(0))
    @skipIfNoIPv6
    def test_ipv6(self: typing.Any):
        [sock] = bind_sockets(0, "::1", family=socket.AF_INET6)
        port = sock.getsockname()[1]
        self.http_server.add_socket(sock)
        url = "%s://[::1]:%d/hello" % (self.get_protocol(), port)
        # ipv6 is currently enabled by default but can be disabled
        with self.assertRaises(Exception):
            self.fetch(url, allow_ipv6=False, raise_error=True)
        response = self.fetch(url)
        self.assertEqual(response.body, b"Hello world!")
    def test_multiple_content_length_accepted(self: typing.Any):
        # Equal repeated Content-Lengths are tolerated; unequal ones close
        # the stream.
        response = self.fetch("/content_length?value=2,2")
        self.assertEqual(response.body, b"ok")
        response = self.fetch("/content_length?value=2,%202,2")
        self.assertEqual(response.body, b"ok")
        with ExpectLog(
            gen_log, ".*Multiple unequal Content-Lengths", level=logging.INFO
        ):
            with self.assertRaises(HTTPStreamClosedError):
                self.fetch("/content_length?value=2,4", raise_error=True)
            with self.assertRaises(HTTPStreamClosedError):
                self.fetch("/content_length?value=2,%202,3", raise_error=True)
    def test_head_request(self: typing.Any):
        response = self.fetch("/head", method="HEAD")
        self.assertEqual(response.code, 200)
        self.assertEqual(response.headers["content-length"], "7")
        self.assertFalse(response.body)
    def test_options_request(self: typing.Any):
        response = self.fetch("/options", method="OPTIONS")
        self.assertEqual(response.code, 200)
        self.assertEqual(response.headers["content-length"], "2")
        self.assertEqual(response.headers["access-control-allow-origin"], "*")
        self.assertEqual(response.body, b"ok")
    def test_no_content(self: typing.Any):
        response = self.fetch("/no_content")
        self.assertEqual(response.code, 204)
        # 204 status shouldn't have a content-length
        #
        # Tests with a content-length header are included below
        # in HTTP204NoContentTestCase.
        self.assertNotIn("Content-Length", response.headers)
    def test_host_header(self: typing.Any):
        host_re = re.compile(b"^127.0.0.1:[0-9]+$")
        response = self.fetch("/host_echo")
        self.assertTrue(host_re.match(response.body))
        # Userinfo in the URL must not leak into the Host header.
        url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
        response = self.fetch(url)
        self.assertTrue(host_re.match(response.body), response.body)
    def test_connection_refused(self: typing.Any):
        cleanup_func, port = refusing_port()
        self.addCleanup(cleanup_func)
        with ExpectLog(gen_log, ".*", required=False):
            with self.assertRaises(socket.error) as cm:
                self.fetch("http://127.0.0.1:%d/" % port, raise_error=True)
        if sys.platform != "cygwin":
            # cygwin returns EPERM instead of ECONNREFUSED here
            contains_errno = str(errno.ECONNREFUSED) in str(cm.exception)
            if not contains_errno and hasattr(errno, "WSAECONNREFUSED"):
                contains_errno = str(errno.WSAECONNREFUSED) in str(  # type: ignore
                    cm.exception
                )
            self.assertTrue(contains_errno, cm.exception)
            # This is usually "Connection refused".
            # On windows, strerror is broken and returns "Unknown error".
            expected_message = os.strerror(errno.ECONNREFUSED)
            self.assertTrue(expected_message in str(cm.exception), cm.exception)
    def test_queue_timeout(self: typing.Any):
        with closing(self.create_client(max_clients=1)) as client:
            # Wait for the trigger request to block, not complete.
            fut1 = client.fetch(self.get_url("/trigger"), request_timeout=10)
            self.wait()
            with self.assertRaises(HTTPTimeoutError) as cm:
                self.io_loop.run_sync(
                    lambda: client.fetch(
                        self.get_url("/hello"), connect_timeout=0.1, raise_error=True
                    )
                )
            self.assertEqual(str(cm.exception), "Timeout in request queue")
            self.triggers.popleft()()
            self.io_loop.run_sync(lambda: fut1)
    def test_no_content_length(self: typing.Any):
        response = self.fetch("/no_content_length")
        if response.body == b"HTTP/1 required":
            self.skipTest("requires HTTP/1.x")
        else:
            self.assertEqual(b"hello", response.body)
    # Body-producer helpers used by the chunked/content-length tests below.
    def sync_body_producer(self, write):
        write(b"1234")
        write(b"5678")
    @gen.coroutine
    def async_body_producer(self, write):
        yield write(b"1234")
        yield gen.moment
        yield write(b"5678")
    def test_sync_body_producer_chunked(self: typing.Any):
        response = self.fetch(
            "/echo_post", method="POST", body_producer=self.sync_body_producer
        )
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_sync_body_producer_content_length(self: typing.Any):
        response = self.fetch(
            "/echo_post",
            method="POST",
            body_producer=self.sync_body_producer,
            headers={"Content-Length": "8"},
        )
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_async_body_producer_chunked(self: typing.Any):
        response = self.fetch(
            "/echo_post", method="POST", body_producer=self.async_body_producer
        )
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_async_body_producer_content_length(self: typing.Any):
        response = self.fetch(
            "/echo_post",
            method="POST",
            body_producer=self.async_body_producer,
            headers={"Content-Length": "8"},
        )
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_native_body_producer_chunked(self: typing.Any):
        async def body_producer(write):
            await write(b"1234")
            import asyncio
            await asyncio.sleep(0)
            await write(b"5678")
        response = self.fetch("/echo_post", method="POST", body_producer=body_producer)
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_native_body_producer_content_length(self: typing.Any):
        async def body_producer(write):
            await write(b"1234")
            import asyncio
            await asyncio.sleep(0)
            await write(b"5678")
        response = self.fetch(
            "/echo_post",
            method="POST",
            body_producer=body_producer,
            headers={"Content-Length": "8"},
        )
        response.rethrow()
        self.assertEqual(response.body, b"12345678")
    def test_100_continue(self: typing.Any):
        response = self.fetch(
            "/echo_post", method="POST", body=b"1234", expect_100_continue=True
        )
        self.assertEqual(response.body, b"1234")
    def test_100_continue_early_response(self: typing.Any):
        # Server responds in prepare(), so the body producer must never run.
        def body_producer(write):
            raise Exception("should not be called")
        response = self.fetch(
            "/respond_in_prepare",
            method="POST",
            body_producer=body_producer,
            expect_100_continue=True,
        )
        self.assertEqual(response.code, 403)
    def test_streaming_follow_redirects(self: typing.Any):
        # When following redirects, header and streaming callbacks
        # should only be called for the final result.
        # TODO(bdarnell): this test belongs in httpclient_test instead of
        # simple_httpclient_test, but it fails with the version of libcurl
        # available on travis-ci. Move it when that has been upgraded
        # or we have a better framework to skip tests based on curl version.
        headers = []  # type: typing.List[str]
        chunk_bytes = []  # type: typing.List[bytes]
        self.fetch(
            "/redirect?url=/hello",
            header_callback=headers.append,
            streaming_callback=chunk_bytes.append,
        )
        chunks = list(map(to_unicode, chunk_bytes))
        self.assertEqual(chunks, ["Hello world!"])
        # Make sure we only got one set of headers.
        num_start_lines = len([h for h in headers if h.startswith("HTTP/")])
        self.assertEqual(num_start_lines, 1)
class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
    """Plain-HTTP variant of the shared client test suite."""

    def create_client(self, **kwargs):
        return SimpleAsyncHTTPClient(force_instance=True, **kwargs)

    def setUp(self):
        super().setUp()
        self.http_client = self.create_client()
class SimpleHTTPSClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPSTestCase):
    """HTTPS variant of the shared client test suite plus SSL-specific cases.

    Certificate validation is disabled by default because the test server
    presents a self-signed certificate.
    """

    def setUp(self):
        super().setUp()
        self.http_client = self.create_client()

    def create_client(self, **kwargs):
        return SimpleAsyncHTTPClient(
            force_instance=True, defaults=dict(validate_cert=False), **kwargs
        )

    def test_ssl_options(self):
        # An empty ssl_options dict must still yield a working connection.
        body = self.fetch("/hello", ssl_options={}).body
        self.assertEqual(body, b"Hello world!")

    def test_ssl_context(self):
        # Passing a full SSLContext must work as well as an options dict.
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        body = self.fetch("/hello", ssl_options=context).body
        self.assertEqual(body, b"Hello world!")

    def test_ssl_options_handshake_fail(self):
        # Requiring cert validation against the self-signed cert must fail.
        with ExpectLog(gen_log, "SSL Error|Uncaught exception", required=False):
            with self.assertRaises(ssl.SSLError):
                self.fetch(
                    "/hello",
                    ssl_options=dict(cert_reqs=ssl.CERT_REQUIRED),
                    raise_error=True,
                )

    def test_ssl_context_handshake_fail(self):
        with ExpectLog(gen_log, "SSL Error|Uncaught exception"):
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            context.verify_mode = ssl.CERT_REQUIRED
            with self.assertRaises(ssl.SSLError):
                self.fetch("/hello", ssl_options=context, raise_error=True)

    def test_error_logging(self):
        # No stack traces are logged for SSL errors (in this case,
        # failure to validate the testing self-signed cert).
        # The SSLError is exposed through ssl.SSLError.
        with ExpectLog(gen_log, ".*") as expect_log:
            with self.assertRaises(ssl.SSLError):
                self.fetch("/", validate_cert=True, raise_error=True)
        self.assertFalse(expect_log.logged_stack)
class CreateAsyncHTTPClientTestCase(AsyncTestCase):
    # max_clients precedence: a per-instance keyword beats the configure()
    # default, which beats the built-in default (asserted as 10 below).
    def setUp(self):
        super().setUp()
        # Snapshot the global AsyncHTTPClient configuration so this test
        # cannot leak its configure() calls into other tests.
        self.saved = AsyncHTTPClient._save_configuration()
    def tearDown(self):
        AsyncHTTPClient._restore_configuration(self.saved)
        super().tearDown()
    def test_max_clients(self):
        AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
        with closing(AsyncHTTPClient(force_instance=True)) as client:
            self.assertEqual(client.max_clients, 10)  # type: ignore
        with closing(AsyncHTTPClient(max_clients=11, force_instance=True)) as client:
            self.assertEqual(client.max_clients, 11)  # type: ignore
        # Now configure max_clients statically and try overriding it
        # with each way max_clients can be passed
        AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
        with closing(AsyncHTTPClient(force_instance=True)) as client:
            self.assertEqual(client.max_clients, 12)  # type: ignore
        with closing(AsyncHTTPClient(max_clients=13, force_instance=True)) as client:
            self.assertEqual(client.max_clients, 13)  # type: ignore
        with closing(AsyncHTTPClient(max_clients=14, force_instance=True)) as client:
            self.assertEqual(client.max_clients, 14)  # type: ignore
class HTTP100ContinueTestCase(AsyncHTTPTestCase):
    # Server callback that speaks raw HTTP/1.x on the stream: first an
    # informational "100 CONTINUE", then a real 200 response.
    def respond_100(self, request):
        self.http1 = request.version.startswith("HTTP/1.")
        if not self.http1:
            # The raw stream writes below only make sense for HTTP/1.x;
            # finish cleanly so test_100_continue can skip itself.
            request.connection.write_headers(
                ResponseStartLine("", 200, "OK"), HTTPHeaders()
            )
            request.connection.finish()
            return
        self.request = request
        fut = self.request.connection.stream.write(b"HTTP/1.1 100 CONTINUE\r\n\r\n")
        fut.add_done_callback(self.respond_200)
    def respond_200(self, fut):
        # result() re-raises any error from the 100 CONTINUE write.
        fut.result()
        fut = self.request.connection.stream.write(
            b"HTTP/1.1 200 OK\r\nContent-Length: 1\r\n\r\nA"
        )
        fut.add_done_callback(lambda f: self.request.connection.stream.close())
    def get_app(self):
        # Not a full Application, but works as an HTTPServer callback
        return self.respond_100
    def test_100_continue(self):
        res = self.fetch("/")
        if not self.http1:
            self.skipTest("requires HTTP/1.x")
        self.assertEqual(res.body, b"A")
class HTTP204NoContentTestCase(AsyncHTTPTestCase):
    # Exercises client handling of 204 responses written at the raw stream
    # level (bypassing the normal HTTP connection machinery).
    def respond_204(self, request):
        self.http1 = request.version.startswith("HTTP/1.")
        if not self.http1:
            # Close the request cleanly in HTTP/2; it will be skipped anyway.
            request.connection.write_headers(
                ResponseStartLine("", 200, "OK"), HTTPHeaders()
            )
            request.connection.finish()
            return
        # A 204 response never has a body, even if doesn't have a content-length
        # (which would otherwise mean read-until-close). We simulate here a
        # server that sends no content length and does not close the connection.
        #
        # Tests of a 204 response with no Content-Length header are included
        # in SimpleHTTPClientTestMixin.
        stream = request.connection.detach()
        stream.write(b"HTTP/1.1 204 No content\r\n")
        if request.arguments.get("error", [False])[-1]:
            # ?error=1 sends a bogus non-zero Content-Length with no body.
            stream.write(b"Content-Length: 5\r\n")
        else:
            stream.write(b"Content-Length: 0\r\n")
        stream.write(b"\r\n")
        stream.close()
    def get_app(self):
        return self.respond_204
    def test_204_no_content(self):
        resp = self.fetch("/")
        if not self.http1:
            self.skipTest("requires HTTP/1.x")
        self.assertEqual(resp.code, 204)
        self.assertEqual(resp.body, b"")
    def test_204_invalid_content_length(self):
        # 204 status with non-zero content length is malformed
        with ExpectLog(
            gen_log, ".*Response with code 204 should not have body", level=logging.INFO
        ):
            with self.assertRaises(HTTPStreamClosedError):
                self.fetch("/?error=1", raise_error=True)
        if not self.http1:
            self.skipTest("requires HTTP/1.x")
        if self.http_client.configured_class != SimpleAsyncHTTPClient:
            self.skipTest("curl client accepts invalid headers")
class HostnameMappingTestCase(AsyncHTTPTestCase):
    """Exercises SimpleAsyncHTTPClient's hostname_mapping overrides,
    both the plain-host form and the (host, port) form."""

    def setUp(self):
        super().setUp()
        mapping = {
            "www.example.com": "127.0.0.1",
            ("foo.example.com", 8000): ("127.0.0.1", self.get_http_port()),
        }
        self.http_client = SimpleAsyncHTTPClient(hostname_mapping=mapping)

    def get_app(self):
        return Application([url("/hello", HelloWorldHandler)])

    def test_hostname_mapping(self):
        # Host-only mapping: the port is taken from the URL unchanged.
        port = self.get_http_port()
        resp = self.fetch("http://www.example.com:%d/hello" % port)
        resp.rethrow()
        self.assertEqual(resp.body, b"Hello world!")

    def test_port_mapping(self):
        # (host, port) mapping: both host and port are rewritten.
        resp = self.fetch("http://foo.example.com:8000/hello")
        resp.rethrow()
        self.assertEqual(resp.body, b"Hello world!")
class ResolveTimeoutTestCase(AsyncHTTPTestCase):
    # connect_timeout must also bound DNS resolution: with a resolver
    # that never completes, the fetch should raise HTTPTimeoutError.
    def setUp(self):
        self.cleanup_event = Event()
        test = self
        # Dummy Resolver subclass that never finishes.
        class BadResolver(Resolver):
            @gen.coroutine
            def resolve(self, *args, **kwargs):
                yield test.cleanup_event.wait()
                # Return something valid so the test doesn't raise during cleanup.
                return [(socket.AF_INET, ("127.0.0.1", test.get_http_port()))]
        super().setUp()
        self.http_client = SimpleAsyncHTTPClient(resolver=BadResolver())
    def get_app(self):
        return Application([url("/hello", HelloWorldHandler)])
    def test_resolve_timeout(self):
        with self.assertRaises(HTTPTimeoutError):
            self.fetch("/hello", connect_timeout=0.1, raise_error=True)
        # Let the hanging coroutine clean up after itself
        self.cleanup_event.set()
        self.io_loop.run_sync(lambda: gen.sleep(0))
class MaxHeaderSizeTest(AsyncHTTPTestCase):
    """Responses must respect the client's max_header_size limit."""

    def get_app(self):
        def filler_handler(filler_len):
            # Handler whose response carries an X-Filler header of the
            # requested length.
            class Handler(RequestHandler):
                def get(self):
                    self.set_header("X-Filler", "a" * filler_len)
                    self.write("ok")

            return Handler

        return Application(
            [("/small", filler_handler(100)), ("/large", filler_handler(1000))]
        )

    def get_http_client(self):
        # 1 KiB header limit: /small fits under it, /large does not.
        return SimpleAsyncHTTPClient(max_header_size=1024)

    def test_small_headers(self):
        resp = self.fetch("/small")
        resp.rethrow()
        self.assertEqual(resp.body, b"ok")

    def test_large_headers(self):
        with ExpectLog(gen_log, "Unsatisfiable read", level=logging.INFO), self.assertRaises(
            UnsatisfiableReadError
        ):
            self.fetch("/large", raise_error=True)
class MaxBodySizeTest(AsyncHTTPTestCase):
    """Responses must respect the client's max_body_size limit."""

    def get_app(self):
        def body_handler(body_len):
            # Handler returning a body of exactly body_len 'a' characters.
            class Handler(RequestHandler):
                def get(self):
                    self.write("a" * body_len)

            return Handler

        return Application(
            [
                ("/small", body_handler(1024 * 64)),
                ("/large", body_handler(1024 * 100)),
            ]
        )

    def get_http_client(self):
        # 64 KiB limit: /small is exactly at it, /large exceeds it.
        return SimpleAsyncHTTPClient(max_body_size=1024 * 64)

    def test_small_body(self):
        resp = self.fetch("/small")
        resp.rethrow()
        self.assertEqual(resp.body, b"a" * 1024 * 64)

    def test_large_body(self):
        with ExpectLog(
            gen_log,
            "Malformed HTTP message from None: Content-Length too long",
            level=logging.INFO,
        ), self.assertRaises(HTTPStreamClosedError):
            self.fetch("/large", raise_error=True)
class MaxBufferSizeTest(AsyncHTTPTestCase):
    # max_body_size larger than max_buffer_size: the full body must still
    # arrive intact (presumably streamed through the smaller buffer --
    # confirm against the client internals).
    def get_app(self):
        class LargeBody(RequestHandler):
            def get(self):
                self.write("a" * 1024 * 100)
        return Application([("/large", LargeBody)])
    def get_http_client(self):
        # 100KB body with 64KB buffer
        return SimpleAsyncHTTPClient(
            max_body_size=1024 * 100, max_buffer_size=1024 * 64
        )
    def test_large_body(self):
        response = self.fetch("/large")
        response.rethrow()
        self.assertEqual(response.body, b"a" * 1024 * 100)
class ChunkedWithContentLengthTest(AsyncHTTPTestCase):
    # A response carrying both Transfer-Encoding: chunked and
    # Content-Length is malformed; the simple client must reject it.
    def get_app(self):
        class ChunkedWithContentLength(RequestHandler):
            def get(self):
                # Add an invalid Transfer-Encoding to the response
                self.set_header("Transfer-Encoding", "chunked")
                self.write("Hello world")
        return Application([("/chunkwithcl", ChunkedWithContentLength)])
    def get_http_client(self):
        return SimpleAsyncHTTPClient()
    def test_chunked_with_content_length(self):
        # Make sure the invalid headers are detected
        with ExpectLog(
            gen_log,
            (
                "Malformed HTTP message from None: Response "
                "with both Transfer-Encoding and Content-Length"
            ),
            level=logging.INFO,
        ):
            with self.assertRaises(HTTPStreamClosedError):
                self.fetch("/chunkwithcl", raise_error=True)
| {
"content_hash": "bb9a67c01703248ad43562b04d55e09b",
"timestamp": "",
"source": "github",
"line_count": 834,
"max_line_length": 88,
"avg_line_length": 37.10191846522782,
"alnum_prop": 0.6125133309633843,
"repo_name": "TeamSPoon/logicmoo_workspace",
"id": "eadd4ed3034cb0b0cbeeb540e7770df7a187fd31",
"size": "30943",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packs_web/butterfly/lib/python3.7/site-packages/tornado/test/simple_httpclient_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "342"
},
{
"name": "C",
"bytes": "1"
},
{
"name": "C++",
"bytes": "1"
},
{
"name": "CSS",
"bytes": "126627"
},
{
"name": "HTML",
"bytes": "839172"
},
{
"name": "Java",
"bytes": "11116"
},
{
"name": "JavaScript",
"bytes": "238700"
},
{
"name": "PHP",
"bytes": "42253"
},
{
"name": "Perl 6",
"bytes": "23"
},
{
"name": "Prolog",
"bytes": "440882"
},
{
"name": "PureBasic",
"bytes": "1334"
},
{
"name": "Rich Text Format",
"bytes": "3436542"
},
{
"name": "Roff",
"bytes": "42"
},
{
"name": "Shell",
"bytes": "61603"
},
{
"name": "TeX",
"bytes": "99504"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from mayavi.filters.metadata import *
| {
"content_hash": "f627474d66ed74673a7aa1a45c01a802",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 38,
"avg_line_length": 38.5,
"alnum_prop": 0.7922077922077922,
"repo_name": "enthought/etsproxy",
"id": "821154455e8342cf8f08350dac88cb6979093d5f",
"size": "92",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/mayavi/filters/metadata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "363714"
}
],
"symlink_target": ""
} |
from ._config_loader import config, logger
from ._utils import app_path, camel2under, root_urlconf_path
import os
import re
import shutil
# Keys of the view-creation params dict that drive scaffolding (template
# selection, URLconf editing, naming) rather than becoming attributes of the
# generated class-based view; generate_cbv_view skips them.
RESERVED_PARAMS = (
    'class_name',
    'template_create_from',
    'template_dir_choice',
    'urls_to_edit',
    'url_pattern',
    'url_name',
)
class BaseViewAdder(object):
    """Template-method base class for adding a Django view to an app.

    Subclasses implement the generate/save/template/url/test hooks below;
    the ``add_view`` entry point drives them in a fixed order.
    """

    def __init__(self, app_name=None, view_type=None, params=None):
        # NOTE: asserts are stripped under -O; kept for backward-compatible
        # failure behavior (AssertionError on missing arguments).
        assert(app_name is not None)
        assert(view_type is not None)
        assert(params is not None)
        self.app_name = app_name
        self.view_type = view_type
        self.params = params

    def add_view(self):
        """Generate the view code and wire it into the project."""
        logger.debug('ADD VIEW:')
        logger.debug(self.view_type)
        logger.debug(self.params)
        if self.view_type == 'function_view':
            self.add_function_view()
        else:
            self.add_cbv_view()

    def add_function_view(self):
        """Add a function-based view (rendered via django.shortcuts.render)."""
        self._install_view(self.generate_function_view(),
                           'django.shortcuts', 'render')

    def add_cbv_view(self):
        """Add a class-based view derived from the chosen generic view."""
        self._install_view(self.generate_cbv_view(),
                           'django.views.generic', self.view_type)

    def _install_view(self, code, import_from, import_name):
        """Shared pipeline previously duplicated in the two add_* methods:
        save the code, fix imports, create template, register URL, add test."""
        self.save_view(code)
        self.update_view_imports(import_from, import_name)
        self.create_template()
        self.update_urls()
        code = self.generate_test()
        self.add_test(code)

    # --- hooks to be provided by concrete adders -------------------------

    def generate_cbv_view(self):
        """Return the source of a class-based view."""
        raise NotImplementedError()

    def generate_function_view(self):
        """Return the source of a function-based view."""
        raise NotImplementedError()

    def save_view(self, code):
        """Persist the generated view code."""
        raise NotImplementedError()

    def create_template(self):
        """Create the template file for the view."""
        raise NotImplementedError()

    def update_urls(self):
        """Register the view in a URLconf."""
        raise NotImplementedError()

    def generate_test(self):
        """Return the source of a test for the view."""
        raise NotImplementedError()

    def add_test(self, code):
        """Persist the generated test code."""
        raise NotImplementedError()
class DefaultViewAdder(BaseViewAdder):
    """File-system implementation of :class:`BaseViewAdder`.

    Appends generated view code to ``<app>/views.py``, keeps that file's
    imports up to date, creates the HTML template (optionally copied from an
    existing one) and registers a ``url(...)`` entry in the local or global
    URLconf.
    """

    indent = '    '  # indentation unit used in all generated code

    def generate_function_view(self):
        """Return source for a function-based view rendering its template."""
        code = "def {function_name}(request):\n".format(
            function_name=self.params['function_name']
        )
        code += "{indent}return render(request, '{tpl}')\n".format(
            indent=self.indent,
            tpl=self.select_template_name()
        )
        return code

    def generate_cbv_view(self):
        """Return source for a class-based view.

        Every non-reserved, non-empty ``params`` entry becomes a class
        attribute; an attribute-less class body is padded with ``pass``.
        """
        code = "class {class_name}({view_type}):\n".format(
            class_name=self.params['class_name'],
            view_type=self.view_type
        )
        wrote_attribute = False
        # items() instead of the Python-2-only iteritems(); identical result.
        for param_name, param_value in sorted(self.params.items()):
            if param_name in RESERVED_PARAMS or param_value == '':
                continue
            wrote_attribute = True
            code += "{indent}{param_name} = {param_value}\n".format(
                indent=self.indent,
                param_name=param_name,
                param_value=param_value
            )
        if not wrote_attribute:
            code += "{indent}pass\n".format(indent=self.indent)
        code += '\n'
        return code

    def save_view(self, code):
        """Append *code* to the app's views.py (IO errors logged, not raised)."""
        views_path = os.path.join(app_path(self.app_name), 'views.py')
        try:
            # "with" guarantees the handle is closed even if write() fails.
            with open(views_path, 'a') as view_file:
                view_file.write('\n' + code)
        except IOError:
            logger.error('Couldn\'t open {0}. View code not added'.format(
                views_path
            ))

    def update_view_imports(self, _from, _import):
        """Insert ``from <_from> import <_import>`` into views.py.

        Also imports the model named in ``params['model']``, if any.  New
        imports are placed directly after the last existing import line.
        """
        views_path = os.path.join(app_path(self.app_name), 'views.py')
        try:
            with open(views_path, 'r') as view_file:
                view_content = view_file.read()
        except IOError:
            logger.error('Couldn\'t open {0}. Imports not added'.format(
                views_path
            ))
            return
        lines = view_content.split('\n')
        self._insert_import(lines, _from, _import)
        if self.params.get('model', None):
            self._insert_import(
                lines,
                _from='{0}.models'.format(self.app_name),
                _import=self.params['model']
            )
        try:
            with open(views_path, 'w') as view_file:
                view_file.write('\n'.join(lines))
        except IOError:
            logger.error(
                'Couldn\'t open {0} for writing. Imports not added'.format(
                    views_path
                )
            )

    def create_template(self):
        """Create the view's template file unless creation wasn't requested.

        ``params['template_create_from']``: ``None`` means "don't create",
        empty string means "create empty", anything else names an existing
        template to copy.
        """
        create_from = self.params.get('template_create_from', None)
        if create_from is None:
            return
        tpl_dir = self._select_template_dir()
        if not tpl_dir:
            logger.error(
                'No tpl dir set:{0}. Template not created.'.format(tpl_dir)
            )
            return
        if not os.path.isdir(tpl_dir):
            try:
                os.mkdir(tpl_dir)
            except OSError:  # narrowed from a bare "except:"
                logger.error(
                    "Couldn't create dir: {0}."
                    " Template not created".format(tpl_dir)
                )
        tpl_path = self.select_template_name()
        self._create_dirs_on_path(tpl_dir, tpl_path)
        # Bug fix: the original compared with "create_from is ''", which only
        # worked through CPython string interning; compare by value instead.
        if create_from == '':
            open(os.path.join(tpl_dir, tpl_path), 'a').close()
        else:
            shutil.copy(
                os.path.join(tpl_dir, create_from),
                os.path.join(tpl_dir, tpl_path)
            )

    def _select_template_dir(self):
        """Return the global or per-app template directory from config."""
        tpl_dir_choice = self.params.get('template_dir_choice', 'local')
        if tpl_dir_choice == 'global':
            tpl_dir = config['global_template_dir']
        else:
            tpl_dir = config['local_template_dir'].format(
                app_path=app_path(self.app_name),
                app_name=self.app_name
            )
        return tpl_dir

    def select_template_name(self):
        """Return the template path, deriving one from the view name if the
        'template_name' param is unset (quotes/backticks are stripped)."""
        tpl_path = self.params.get('template_name', '').strip("`'\"")
        if not tpl_path:
            tpl_suffix = self.params.get('template_name_suffix', '')
            tpl_suffix = tpl_suffix.strip("`'\"")
            class_name = self.params.get('class_name', None)
            function_name = self.params.get('function_name', None)
            if class_name:
                file_name = camel2under(class_name) + tpl_suffix + '.html'
            elif function_name:
                file_name = camel2under(function_name) + '.html'
            else:
                logger.error("No file_name nor class_name provided")
                assert False, "It shoudln't happen"
            tpl_path = '{0}/{1}'.format(self.app_name, file_name)
        return tpl_path

    def _create_dirs_on_path(self, main_dir, path):
        """Create every intermediate directory of *path* under *main_dir*."""
        inner_dirs = path.split('/')[:-1]
        for i, _ in enumerate(inner_dirs):
            current_dir = os.path.join(main_dir, *inner_dirs[:i + 1])
            if not os.path.isdir(current_dir):
                os.mkdir(current_dir)

    def generate_test(self):
        """Hook: test generation is not implemented for the default adder."""
        pass

    def add_test(self, code):
        """Hook: test insertion is not implemented for the default adder."""
        pass

    def update_urls(self):
        """Add a ``url(...)`` entry for the new view to the chosen URLconf."""
        urls_to_edit = self.params.get('urls_to_edit', None)
        if urls_to_edit == 'global':
            urls_path = os.path.join(root_urlconf_path(), 'urls.py')
        elif urls_to_edit == 'local':
            urls_path = os.path.join(app_path(self.app_name), 'urls.py')
        else:
            return
        logger.debug(urls_path)
        try:
            with open(urls_path, 'r') as f:
                urls_content = f.read()
        except IOError:
            logger.error(
                'Couldn\'t open file {0} for read. '
                'No entry added to URLconf'.format(urls_path)
            )
            return
        urls_lines = urls_content.split('\n')
        self._insert_import(
            urls_lines,
            '{0}.views'.format(self.app_name),
            self.params.get('class_name', None) or
            self.params.get('function_name')
        )
        urls_content = "\n".join(urls_lines)
        urls_content = self._add_pattern(urls_content)
        try:
            with open(urls_path, 'w') as f:
                f.write(urls_content)
        except IOError:
            logger.error(
                'Couldn\'t open file {0} for write. '
                'No entry added to URLconf'.format(urls_path)
            )
            return

    def _insert_import(self, lines, _from, _import):
        """Insert an import line into *lines* unless already present."""
        import_text = 'from {0} import {1}'.format(
            _from,
            _import
        )
        if self._is_imported('\n'.join(lines), _from, _import):
            return
        last_import_line = self._find_last_import(lines)
        lines.insert(last_import_line + 1, import_text)

    def _is_imported(self, view_content, _from, _import):
        """True when *_import* (or a star import of *_from*) already exists."""
        regs = [
            r'import\s*{0}'.format(re.escape(_import)),
            r'from {0}\s*import\s*\*'.format(re.escape(_from))
        ]
        return any(re.search(reg, view_content) for reg in regs)

    def _find_last_import(self, lines):
        """Return the index of the last non-comment import line, or -1."""
        last_import_line = -1
        for i, line in enumerate(lines):
            if re.match('[^#]*import.*', line):
                last_import_line = i
        return last_import_line

    def _add_pattern(self, urls_content):
        """Append the new url() entry inside ``urlpatterns = patterns(...)``."""
        m = re.match(
            r'.*urlpatterns\s*=\s*patterns\((?P<params>.*)\)',
            urls_content,
            re.DOTALL
        )
        params = m.group('params').strip()
        if params[-1] != ',':
            params += ','
        params += '\n{indent}'.format(indent=self.indent)
        if self.params.get('class_name', None):
            appendix = '.as_view()'
        else:
            appendix = ''
        if self.params.get('url_name', None):
            tpl = ('url({regexp}, {name}{appendix}'
                   ', name={url_name}),\n')
        else:
            tpl = 'url({regexp}, {name}{appendix}),\n'
        params += \
            tpl.format(
                regexp=self.params.get('url_pattern', ''),
                name=self.params.get('class_name', None) or
                self.params.get('function_name'),
                url_name=self.params.get('url_name', ''),
                appendix=appendix,
            )
        return re.sub(
            r'(.*urlpatterns\s*=\s*patterns\()(.*)(\))',
            r'\1{0}\3'.format(params),
            urls_content,
            flags=re.DOTALL
        )
| {
"content_hash": "b1de2a0a0cb179e6afd3e2150e55528e",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 75,
"avg_line_length": 30.367688022284124,
"alnum_prop": 0.5059622087690332,
"repo_name": "yakxxx/django-addview",
"id": "90e90ccb0bc592e3e69fcdfe28f346493c43bc5a",
"size": "10902",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_addview/management/commands/_adder.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "52576"
}
],
"symlink_target": ""
} |
def grade(tid, answer):
    """Grade a submitted flag for this challenge.

    Accepts any answer containing "baconont" (case-insensitive); *tid* is
    unused but kept for the grader interface.  Returns a dict with
    ``correct`` (bool) and ``message`` (str).
    """
    # "in" replaces the non-idiomatic .find(...) != -1 check.
    if "baconont" in answer.lower():
        return { "correct": True, "message": "It's a cool song. If you disagree I'm docking 6000 ponts >:)" }
    return { "correct": False, "message": "Man, this article is so long . . ." }
"content_hash": "c19d5e0667feca71b01b7aa187054c61",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 103,
"avg_line_length": 62,
"alnum_prop": 0.6330645161290323,
"repo_name": "EasyCTF/easyctf-2015",
"id": "c68006f5af7922862f2e72ea65786f870f767b57",
"size": "248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/problems/forensics/accel/accel_grader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "192"
},
{
"name": "C",
"bytes": "2022"
},
{
"name": "CSS",
"bytes": "38771"
},
{
"name": "CoffeeScript",
"bytes": "34877"
},
{
"name": "HTML",
"bytes": "92818"
},
{
"name": "Java",
"bytes": "7203"
},
{
"name": "JavaScript",
"bytes": "15750"
},
{
"name": "PHP",
"bytes": "17928"
},
{
"name": "Python",
"bytes": "139452"
},
{
"name": "Shell",
"bytes": "1939"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Point Django at the example settings module (unless already set),
    # then dispatch to the standard management command line.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_dropin.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| {
"content_hash": "a8ed1092916c5719c8431c5382ca3d57",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 78,
"avg_line_length": 26.11111111111111,
"alnum_prop": 0.7148936170212766,
"repo_name": "adamziel/django_translate",
"id": "8798b02c3ca9bc6c9d02b418e8cafd698fa051a0",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/example_dropin/manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35920"
}
],
"symlink_target": ""
} |
"""Validate the source attributes in the downloaded checklists.
Validation Tests:
Source:
1. the checklist is a dict.
SourceSubmittedBy:
1. submitted_by is a string.
2. submitted_by is set.
3. submitted_by does not have leading/trailing whitespace.
SourceName:
1. source is a string.
2. source is set.
3. source does not have leading/trailing whitespace.
"""
from checklists_scrapers.tests.validation import checklists, ValidationTestCase
class Source(ValidationTestCase):
    """Validate the checklist."""

    def test_checklist_type(self):
        """Verify the checklist is a dict."""
        for checklist in checklists:
            source = checklist['source']
            self.assertIsInstance(source, dict, msg=source)
class SourceSubmittedBy(ValidationTestCase):
    """Validate the source submitter in the downloaded checklists."""

    def test_submitted_by_type(self):
        """Verify the checklist submitter is a unicode string."""
        for checklist in checklists:
            source = checklist['source']
            self.assertIsInstance(source['submitted_by'], unicode, msg=source)

    def test_submitted_by_set(self):
        """Verify the checklist submitter is set."""
        for checklist in checklists:
            source = checklist['source']
            self.assertTrue(source['submitted_by'], msg=source)

    def test_submitted_by_stripped(self):
        """Verify the checklist submitter has no extra whitespace."""
        for checklist in checklists:
            source = checklist['source']
            self.assertStripped(source['submitted_by'], msg=source)

    def test_submitted_by_is_an_observer(self):
        """Verify the checklist submitter is also listed as an observer."""
        for checklist in checklists:
            source = checklist['source']
            self.assertTrue(
                source['submitted_by'] in checklist['observers']['names'],
                msg=source)
class SourceName(ValidationTestCase):
    """Validate the checklist source name in the downloaded checklists."""

    def test_source_type(self):
        """Verify the checklist source is a unicode string."""
        for checklist in checklists:
            source = checklist['source']
            self.assertIsInstance(source['name'], unicode, msg=source)

    def test_source_set(self):
        """Verify the checklist source name is set."""
        for checklist in checklists:
            source = checklist['source']
            self.assertTrue(source['name'], msg=source)

    def test_source_stripped(self):
        """Verify the source has no extra whitespace."""
        for checklist in checklists:
            source = checklist['source']
            self.assertStripped(source['name'], msg=source)
| {
"content_hash": "58d9a364a8c41d4177126f9d253b37c7",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 37.12658227848101,
"alnum_prop": 0.6065461984316399,
"repo_name": "StuartMacKay/checklists_scrapers",
"id": "60e166ce41d524262138a9b87c285960dfb0eca3",
"size": "2933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "checklists_scrapers/tests/validation/test_source.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "173637"
},
{
"name": "Shell",
"bytes": "6728"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import copy
import json
import os.path
import yaml
from lib.util import Util
import logging
from projecthandler.models import Project
import lib.nemo.nemo_external_parser as Parser
from lib.nemo.nemo_parser import NemoParser
from lib.nemo.nemo_rdcl_graph import NemoRdclGraph
logging.basicConfig(level=logging.DEBUG)  # NOTE(review): configures the root logger at import time (side effect)
log = logging.getLogger('NemoModel.py')
# Locations of the JSON schemas and descriptor templates shipped with the tool.
PATH_TO_SCHEMAS = 'lib/nemo/schemas/'
PATH_TO_DESCRIPTORS_TEMPLATES = 'lib/nemo/descriptor_template'
DESCRIPTOR_TEMPLATE_SUFFIX = '.json'
# YAML graph model for the topology view, and the bundled example projects.
GRAPH_MODEL_FULL_NAME = 'lib/TopologyModels/nemo/nemo.yaml'
EXAMPLES_FOLDER = 'usecases/NEMO/'
class NemoProject(Project):
    """Project subclass for the NEMO data model.

    The project data is a dict keyed by descriptor type (e.g. ``intent``,
    ``nodemodel``) mapping descriptor ids to descriptor contents.
    NOTE(review): this module uses Python 2 print statements.
    """
    @classmethod
    def data_project_from_files(cls, request):
        # Build project data from files uploaded with the Django request.
        file_dict = {}
        for my_key in request.FILES.keys():
            file_dict[my_key] = request.FILES.getlist(my_key)
        log.debug(file_dict)
        data_project = NemoParser.importprojectfiles(file_dict)
        return data_project
    @classmethod
    def data_project_from_example(cls, request):
        # Load a bundled example project selected via POST 'example-nemo-id'.
        nemo_id = request.POST.get('example-nemo-id', '')
        print 'nemo_id', nemo_id
        data_project = NemoParser.importprojectdir(EXAMPLES_FOLDER + nemo_id, 'nemo')
        return data_project
    @classmethod
    def get_example_list(cls):
        """Returns a list of directories, in each directory there is a project nemo"""
        path = EXAMPLES_FOLDER
        #print "example path ", path
        dirs = [d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))]
        #print "dirs ", dirs
        return {'nemo': dirs}
    @classmethod
    def get_new_descriptor(cls, descriptor_type, request_id):
        # request_id is unused here; kept for interface compatibility.
        json_template = cls.get_descriptor_template(descriptor_type)
        return json_template
    @classmethod
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type"""
        try:
            schema = Util.loadjsonfile(os.path.join(PATH_TO_DESCRIPTORS_TEMPLATES, type_descriptor + DESCRIPTOR_TEMPLATE_SUFFIX))
            return schema
        except Exception as e:
            log.exception(e)
            return False
    @classmethod
    def get_json_schema_by_type(cls, type_descriptor):
        # Path of the JSON schema file for the given descriptor type.
        schema = PATH_TO_SCHEMAS + type_descriptor + ".json"
        return schema
    @classmethod
    def get_clone_descriptor(cls, descriptor, type_descriptor, new_descriptor_id):
        # Plain deep copy; type/id parameters are unused but kept for the API.
        new_descriptor = copy.deepcopy(descriptor)
        return new_descriptor
    def get_type(self):
        # Project type discriminator used across the project handlers.
        return "nemo"
    def __str__(self):
        return self.name
    def get_overview_data(self):
        # Summary dict used by the project overview page; counts descriptors
        # per type in the stored JSON data.
        current_data = json.loads(self.data_project)
        result = {
            'owner': self.owner.__str__(),
            'name': self.name,
            'updated_date': self.updated_date.__str__(),
            'info': self.info,
            'type': 'nemo',
            'intent': len(current_data['intent'].keys()) if 'intent' in current_data else 0,
            'nodemodel': len(current_data['nodemodel'].keys()) if 'nodemodel' in current_data else 0,
            'validated': self.validated
        }
        return result
    def get_graph_data_json_topology(self, descriptor_id):
        # Build the topology graph JSON for the UI from the project data.
        rdcl_graph = NemoRdclGraph()
        project = self.get_dataproject()
        topology = rdcl_graph.build_graph_from_project(project,
                                                       model=self.get_graph_model(GRAPH_MODEL_FULL_NAME))
        print "topology ", topology['vertices']
        return json.dumps(topology)
    def create_descriptor(self, descriptor_name, type_descriptor, new_data, data_type):
        """Creates a descriptor of a given type from a json or yaml representation
        Returns the descriptor id or False
        """
        try:
            current_data = json.loads(self.data_project)
            if data_type == 'nemo':
                new_descriptor = new_data
            else:
                log.debug('Create descriptor: Unknown data type ' + data_type)
                return False
            # schema = cls.loadjsonfile("lib/nemo/schemas/"+type_descriptor+".json")
            #reference_schema = self.get_json_schema_by_type(type_descriptor)
            # validate = Util.validate_json_schema(reference_schema, new_descriptor)
            # NOTE(review): schema validation is disabled; descriptors are
            # stored unvalidated and the project is marked validated=False.
            validate = False
            new_descriptor_id = descriptor_name
            if not type_descriptor in current_data:
                current_data[type_descriptor] = {}
            current_data[type_descriptor][new_descriptor_id] = new_descriptor
            self.data_project = current_data
            self.validated = validate
            self.update()
            result = new_descriptor_id
        except Exception as e:
            log.exception(e)
            result = False
        return result
    def set_validated(self, value):
        # Coerce any input to a strict boolean.
        self.validated = True if value is not None and value == True else False
    def get_add_element(self, request):
        # Graph editing is not supported for NEMO projects; always False.
        result = False
        return result
    def get_remove_element(self, request):
        # Graph editing is not supported for NEMO projects; always False.
        result = False
        return result
    def get_add_link(self, request):
        # Graph editing is not supported for NEMO projects; always False.
        result = False
        return result
    def get_remove_link(self, request):
        # Graph editing is not supported for NEMO projects; always False.
        result = False
        return result
    def get_available_nodes(self, args):
        """Returns all available node """
        log.debug('get_available_nodes')
        try:
            result = []
            #current_data = json.loads(self.data_project)
            model_graph = self.get_graph_model(GRAPH_MODEL_FULL_NAME)
            # One entry per node type of the requested layer, shaped for the UI.
            for node in model_graph['layer'][args['layer']]['nodes']:
                current_data = {
                    "id": node,
                    "category_name": model_graph['nodes'][node]['label'],
                    "types": [
                        {
                            "name": "generic",
                            "id": node
                        }
                    ]
                }
                result.append(current_data)
            #result = current_data[type_descriptor][descriptor_id]
        except Exception as e:
            log.debug(e)
            result = []
        return result
| {
"content_hash": "7cc931acc26739b33ed287435864d9f5",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 129,
"avg_line_length": 31.068292682926828,
"alnum_prop": 0.5906735751295337,
"repo_name": "superfluidity/RDCL3D",
"id": "8748494ce8d980c583b7d9b8688b7dc666a089d4",
"size": "7021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/projecthandler/nemo_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "487443"
},
{
"name": "Click",
"bytes": "50000"
},
{
"name": "Dockerfile",
"bytes": "615"
},
{
"name": "HTML",
"bytes": "4167573"
},
{
"name": "JavaScript",
"bytes": "2692373"
},
{
"name": "PHP",
"bytes": "3916"
},
{
"name": "Python",
"bytes": "1236264"
},
{
"name": "Shell",
"bytes": "29452"
}
],
"symlink_target": ""
} |
from fred.clients.categories import CategoriesClient
from fred.clients.releases import ReleasesClient
from fred.clients.tags import TagsClient
from fred.clients.sources import SourcesClient
from fred.clients.eseries import ESeriesClient
import fred.config as c
import weakref
## Establish Federal Reserve Economic Data (Fred) wrapper for Python
class Fred(object):
    """Thin client for the FRED (Federal Reserve Economic Data) REST API.

    Exposes one sub-client per API area as instance attributes:
    ``category``, ``release``, ``series``, ``tag`` and ``source``
    (instances of CategoriesClient, ReleasesClient, ESeriesClient,
    TagsClient and SourcesClient respectively).  This is the preferred
    (and only supported) way to access those classes.

    :arg str api_key: 32 character alpha-numeric lowercase string. Required.
    :arg str response_type: desired response format (from configuration).
    :arg bool ssl_verify: To verify HTTPs.
    """
    def __init__(self,api_key=c.api_key,response_type=c.response_type, ssl_verify=c.ssl_verify):
        ## Root URL of the FRED REST API
        self.url_root = 'https://api.stlouisfed.org/fred'
        ## Normalize falsy settings to None
        self.api_key = api_key or None
        self.response_type = response_type or None
        self.ssl_verify = ssl_verify
        ## Shared constructor arguments for every sub-client; the weak proxy
        ## avoids a reference cycle between Fred and its clients.
        me = weakref.proxy(self)
        shared = (self.api_key, self.url_root, self.response_type, self.ssl_verify)
        self.category = CategoriesClient(me, *shared)
        self.release = ReleasesClient(me, *shared)
        self.series = ESeriesClient(me, *shared)
        self.tag = TagsClient(me, *shared)
        self.source = SourcesClient(me, *shared)
| {
"content_hash": "bce19a735f3aaff6320d3faaecc2acf9",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 123,
"avg_line_length": 54.976190476190474,
"alnum_prop": 0.7223906453009961,
"repo_name": "avelkoski/FRB",
"id": "e0faa36a5bc4f0d3ececf12b7e3caba7b64530bc",
"size": "2309",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fred/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62863"
}
],
"symlink_target": ""
} |
"""
This is an implementation of decimal floating point arithmetic based on
the General Decimal Arithmetic Specification:
http://speleotrove.com/decimal/decarith.html
and IEEE standard 854-1987:
http://en.wikipedia.org/wiki/IEEE_854-1987
Decimal floating point has finite precision with arbitrarily large bounds.
The purpose of this module is to support arithmetic using familiar
"schoolhouse" rules and to avoid some of the tricky representation
issues associated with binary floating point. The package is especially
useful for financial applications or for contexts where users have
expectations that are at odds with binary floating point (for instance,
in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead
of 0.0; Decimal('1.00') % Decimal('0.1') returns the expected
Decimal('0.00')).
Here are some examples of using the decimal module:
>>> from decimal import *
>>> setcontext(ExtendedContext)
>>> Decimal(0)
Decimal('0')
>>> Decimal('1')
Decimal('1')
>>> Decimal('-.0123')
Decimal('-0.0123')
>>> Decimal(123456)
Decimal('123456')
>>> Decimal('123.45e12345678')
Decimal('1.2345E+12345680')
>>> Decimal('1.33') + Decimal('1.27')
Decimal('2.60')
>>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41')
Decimal('-2.20')
>>> dig = Decimal(1)
>>> print(dig / Decimal(3))
0.333333333
>>> getcontext().prec = 18
>>> print(dig / Decimal(3))
0.333333333333333333
>>> print(dig.sqrt())
1
>>> print(Decimal(3).sqrt())
1.73205080756887729
>>> print(Decimal(3) ** 123)
4.85192780976896427E+58
>>> inf = Decimal(1) / Decimal(0)
>>> print(inf)
Infinity
>>> neginf = Decimal(-1) / Decimal(0)
>>> print(neginf)
-Infinity
>>> print(neginf + inf)
NaN
>>> print(neginf * inf)
-Infinity
>>> print(dig / 0)
Infinity
>>> getcontext().traps[DivisionByZero] = 1
>>> print(dig / 0)
Traceback (most recent call last):
...
...
...
decimal.DivisionByZero: x / 0
>>> c = Context()
>>> c.traps[InvalidOperation] = 0
>>> print(c.flags[InvalidOperation])
0
>>> c.divide(Decimal(0), Decimal(0))
Decimal('NaN')
>>> c.traps[InvalidOperation] = 1
>>> print(c.flags[InvalidOperation])
1
>>> c.flags[InvalidOperation] = 0
>>> print(c.flags[InvalidOperation])
0
>>> print(c.divide(Decimal(0), Decimal(0)))
Traceback (most recent call last):
...
...
...
decimal.InvalidOperation: 0 / 0
>>> print(c.flags[InvalidOperation])
1
>>> c.flags[InvalidOperation] = 0
>>> c.traps[InvalidOperation] = 0
>>> print(c.divide(Decimal(0), Decimal(0)))
NaN
>>> print(c.flags[InvalidOperation])
1
>>>
"""
__all__ = [
    # Two major classes
    'Decimal', 'Context',
    # Named tuple representation
    'DecimalTuple',
    # Contexts
    'DefaultContext', 'BasicContext', 'ExtendedContext',
    # Exceptions
    'DecimalException', 'Clamped', 'InvalidOperation', 'DivisionByZero',
    'Inexact', 'Rounded', 'Subnormal', 'Overflow', 'Underflow',
    'FloatOperation',
    # Exceptional conditions that trigger InvalidOperation
    'DivisionImpossible', 'InvalidContext', 'ConversionSyntax', 'DivisionUndefined',
    # Constants for use in setting up contexts
    'ROUND_DOWN', 'ROUND_HALF_UP', 'ROUND_HALF_EVEN', 'ROUND_CEILING',
    'ROUND_FLOOR', 'ROUND_UP', 'ROUND_HALF_DOWN', 'ROUND_05UP',
    # Functions for manipulating contexts
    'setcontext', 'getcontext', 'localcontext',
    # Limits for the C version for compatibility
    'MAX_PREC', 'MAX_EMAX', 'MIN_EMIN', 'MIN_ETINY',
    # C version: compile time choice that enables the thread local context
    'HAVE_THREADS'
]
__xname__ = __name__ # sys.modules lookup (--without-threads)
__name__ = 'decimal' # For pickling
__version__ = '1.70' # Highest version of the spec this complies with
# See http://speleotrove.com/decimal/
__libmpdec_version__ = "2.4.2" # compatible libmpdec version
import math as _math
import numbers as _numbers
import sys
try:
    # Prefer a named tuple so as_tuple() results have readable fields.
    from collections import namedtuple as _namedtuple
    DecimalTuple = _namedtuple('DecimalTuple', 'sign digits exponent')
except ImportError:
    # Degrade gracefully: as_tuple() then returns a plain tuple.
    DecimalTuple = lambda *args: args
# Rounding mode constants (plain strings; validated against _rounding_modes)
ROUND_DOWN = 'ROUND_DOWN'
ROUND_HALF_UP = 'ROUND_HALF_UP'
ROUND_HALF_EVEN = 'ROUND_HALF_EVEN'
ROUND_CEILING = 'ROUND_CEILING'
ROUND_FLOOR = 'ROUND_FLOOR'
ROUND_UP = 'ROUND_UP'
ROUND_HALF_DOWN = 'ROUND_HALF_DOWN'
ROUND_05UP = 'ROUND_05UP'
# Compatibility with the C version
HAVE_THREADS = True
if sys.maxsize == 2**63-1:
    # 64-bit build: match the C implementation's wide limits.
    MAX_PREC = 999999999999999999
    MAX_EMAX = 999999999999999999
    MIN_EMIN = -999999999999999999
else:
    # 32-bit build: narrower limits, same as the C implementation.
    MAX_PREC = 425000000
    MAX_EMAX = 425000000
    MIN_EMIN = -425000000
MIN_ETINY = MIN_EMIN - (MAX_PREC-1)
# Errors
class DecimalException(ArithmeticError):
    """Root of the decimal signal hierarchy.

    Every signal used by this module derives from this class.  A signal
    that also derives from other signals (for example Underflow, which
    derives from Inexact, Rounded and Subnormal) is only raised when those
    other conditions hold as well; nothing relies on this, it is purely
    informational.

    handle -- invoked by context._raise_error when the corresponding trap
        enabler is not set.  It receives the context, followed by any
        extra positional arguments given to _raise_error (for example,
        context._raise_error(NewError, '(-x)!', self._sign) ends up
        calling NewError().handle(context, self._sign)), and returns the
        value to use as the operation's result.

    A new signal need only subclass DecimalException.
    """

    def handle(self, context, *args):
        # Default: no replacement result; concrete signals override this.
        pass
class Clamped(DecimalException):
    """Exponent of a 0 changed to fit bounds.

    Raised/recorded when a result's exponent has been altered to fit the
    constraints of a concrete representation: either a zero result whose
    exponent would fall outside the representable bounds, or a large
    normal number whose encoded exponent cannot be represented.  In the
    latter case the exponent is reduced to fit and the coefficient is
    padded with the corresponding number of zero digits ("fold-down").
    """
class InvalidOperation(DecimalException):
    """An invalid operation was performed.

    Triggered by, among others:

    * creation of a signaling NaN
    * -INF + INF
    * 0 * (+-)INF
    * (+-)INF / (+-)INF
    * x % 0 and (+-)INF % x
    * x._rescale(non-integer)
    * sqrt(-x), x > 0
    * 0 ** 0, x ** non-integer, x ** (+-)INF
    * an invalid operand

    The result is a quiet positive NaN, except when the cause is a
    signaling NaN: then the result is a quiet NaN keeping the sNaN's
    original sign and (optional) diagnostic payload.
    """

    def handle(self, context, *args):
        if args:
            # Turn the offending sNaN into a quiet NaN, preserving its
            # sign and diagnostic digits, clamped to the context's limits.
            quiet = _dec_from_triple(args[0]._sign, args[0]._int, 'n', True)
            return quiet._fix_nan(context)
        return _NaN
class ConversionSyntax(InvalidOperation):
    """Trying to convert a badly formed string.

    Raised/recorded (as invalid-operation) when a string being converted
    to a number does not conform to the numeric string syntax.  The
    result is [0,qNaN].
    """

    def handle(self, context, *args):
        return _NaN
class DivisionByZero(DecimalException, ZeroDivisionError):
    """Division by 0.

    Raised/recorded when a finite, nonzero dividend is divided by zero
    during a divide or divide-integer operation, or during a power
    operation with a negative right-hand operand.

    The result of the operation is [sign,inf]: for divide, sign is the
    exclusive or of the operands' signs; for power it is 1 for an odd
    power of -0.
    """

    def handle(self, context, sign, *args):
        # Deliver an infinity carrying the sign worked out by the caller.
        return _SignedInfinity[sign]
class DivisionImpossible(InvalidOperation):
    """Cannot perform the division adequately.

    Raised/recorded (as invalid-operation) when the integer result of a
    divide-integer or remainder operation would need more digits than the
    current precision allows.  The result is [0,qNaN].
    """

    def handle(self, context, *args):
        return _NaN
class DivisionUndefined(InvalidOperation, ZeroDivisionError):
    """Undefined result of division.

    Raised/recorded (as invalid-operation) when both the dividend and
    the divisor of a divide-integer, divide, or remainder operation are
    zero.  The result is [0,qNaN].
    """

    def handle(self, context, *args):
        return _NaN
class Inexact(DecimalException):
    """Had to round, losing information.

    Recorded whenever an operation's result is not exact (rounding
    discarded nonzero digits), or whenever overflow or underflow occurs.
    The result itself is left unchanged.

    Test or trap this signal to determine whether a given operation (or
    sequence of operations) was inexact.
    """
class InvalidContext(InvalidOperation):
    """Invalid context.  Unknown rounding, for example.

    Raised/recorded (as invalid-operation) when an invalid context is
    detected during an operation: for instance, a precision that exceeds
    the capability of the underlying representation, or an unknown or
    unsupported rounding mode.  These aspects of the context are checked
    only when their values are actually needed.  The result is [0,qNaN].
    """

    def handle(self, context, *args):
        return _NaN
class Rounded(DecimalException):
    """Number got rounded (not necessarily changed during rounding).

    Recorded whenever rounding discards coefficient digits (zero or
    nonzero), or whenever overflow or underflow occurs.  The result
    itself is left unchanged.

    Test or trap this signal to determine whether a given operation (or
    sequence of operations) caused digits to be dropped.
    """
class Subnormal(DecimalException):
    """Exponent < Emin before rounding.

    Recorded whenever a conversion or operation yields a subnormal
    result, i.e. one whose adjusted exponent is less than Emin before any
    rounding.  The result itself is left unchanged.

    Test or trap this signal to detect subnormal results.
    """
class Overflow(Inexact, Rounded):
    """Numerical overflow.

    Raised/recorded when, after rounding, the adjusted exponent of a
    result (from a conversion, or from any operation other than an
    attempted division by zero) would exceed Emax, the largest exponent
    the implementation can handle.

    The delivered result depends on the rounding mode: the half-way modes
    and round-up give [sign,inf] with the intermediate result's sign;
    round-down gives the largest finite number representable at the
    current precision with that sign; round-ceiling behaves like
    round-down for negative results and gives [0,inf] otherwise;
    round-floor behaves like round-down for positive results and gives
    [1,inf] otherwise.  Inexact and Rounded are raised in all cases.
    """

    def handle(self, context, sign, *args):
        rounding = context.rounding
        # Half-way and away-from-zero modes always overflow to infinity.
        if rounding in (ROUND_HALF_UP, ROUND_HALF_EVEN,
                        ROUND_HALF_DOWN, ROUND_UP):
            return _SignedInfinity[sign]
        # Directed modes give infinity only when rounding away from zero
        # (ceiling for positive results, floor for negative ones); sign is
        # always 0 or 1 here.
        away_from_zero = ROUND_CEILING if sign == 0 else ROUND_FLOOR
        if rounding == away_from_zero:
            return _SignedInfinity[sign]
        # Otherwise clamp to the largest finite magnitude: precision
        # nines at the highest representable exponent.
        return _dec_from_triple(sign, '9'*context.prec,
                                context.Emax-context.prec+1)
class Underflow(Inexact, Rounded, Subnormal):
    """Numerical underflow with result rounded to 0.

    Raised/recorded when a result is both inexact and subnormal: its
    adjusted exponent would be smaller (more negative) than Emin, the
    smallest the implementation can handle.

    The delivered result is the subnormal number rounded, if necessary,
    so that its exponent is not less than Etiny; this may leave a zero
    with the intermediate result's sign and an exponent of Etiny.
    Inexact, Rounded and Subnormal are raised in all cases.
    """
class FloatOperation(DecimalException, TypeError):
    """Enable stricter semantics for mixing floats and Decimals.

    Untrapped (the default): floats may be mixed with Decimals in the
    Decimal() constructor, context.create_decimal() and all comparison
    operators.  Conversion and comparisons are exact, and every such
    mixed operation silently sets FloatOperation in the context flags.
    Explicit conversions with Decimal.from_float() or
    context.create_decimal_from_float() do not set the flag.

    Trapped: only equality comparisons and explicit conversions remain
    silent; all other mixed operations raise FloatOperation.
    """
# List of public traps and flags
_signals = [Clamped, DivisionByZero, Inexact, Overflow, Rounded,
            Underflow, InvalidOperation, Subnormal, FloatOperation]
# Map conditions (per the spec) to signals: these InvalidOperation
# subclasses are raised internally but surface as InvalidOperation in a
# context's traps/flags dictionaries.
_condition_map = {ConversionSyntax:InvalidOperation,
                  DivisionImpossible:InvalidOperation,
                  DivisionUndefined:InvalidOperation,
                  InvalidContext:InvalidOperation}
# Valid rounding modes (used to validate Context.rounding assignments)
_rounding_modes = (ROUND_DOWN, ROUND_HALF_UP, ROUND_HALF_EVEN, ROUND_CEILING,
                   ROUND_FLOOR, ROUND_UP, ROUND_HALF_DOWN, ROUND_05UP)
##### Context Functions ##################################################
# The getcontext() and setcontext() function manage access to a thread-local
# current context. Py2.4 offers direct support for thread locals. If that
# is not available, use threading.current_thread() which is slower but will
# work for older Pythons. If threads are not part of the build, create a
# mock threading object with threading.local() returning the module namespace.
try:
    import threading
except ImportError:
    # Python was compiled without threads; create a mock object instead
    class MockThreading(object):
        def local(self, sys=sys):
            # Stand in for threading.local(): use the module itself as the
            # single "thread-local" namespace.
            return sys.modules[__xname__]
    threading = MockThreading()
    del MockThreading
try:
    threading.local
except AttributeError:
    # threading.local is unavailable: fall back to storing the context as
    # an attribute on the current thread object.
    # To fix reloading, force it to create a new context
    # Old contexts have different exceptions in their dicts, making problems.
    if hasattr(threading.current_thread(), '__decimal_context__'):
        del threading.current_thread().__decimal_context__
    def setcontext(context):
        """Set this thread's context to context."""
        if context in (DefaultContext, BasicContext, ExtendedContext):
            # Never install the shared template contexts directly; give
            # each thread a private, clean copy.
            context = context.copy()
            context.clear_flags()
        threading.current_thread().__decimal_context__ = context
    def getcontext():
        """Returns this thread's context.

        If this thread does not yet have a context, returns
        a new context and sets this thread's context.
        New contexts are copies of DefaultContext.
        """
        try:
            return threading.current_thread().__decimal_context__
        except AttributeError:
            context = Context()
            threading.current_thread().__decimal_context__ = context
            return context
else:
    # Normal case: keep the current context in a threading.local slot.
    local = threading.local()
    if hasattr(local, '__decimal_context__'):
        del local.__decimal_context__
    def getcontext(_local=local):
        """Returns this thread's context.

        If this thread does not yet have a context, returns
        a new context and sets this thread's context.
        New contexts are copies of DefaultContext.
        """
        try:
            return _local.__decimal_context__
        except AttributeError:
            context = Context()
            _local.__decimal_context__ = context
            return context
    def setcontext(context, _local=local):
        """Set this thread's context to context."""
        if context in (DefaultContext, BasicContext, ExtendedContext):
            # Never install the shared template contexts directly; give
            # each thread a private, clean copy.
            context = context.copy()
            context.clear_flags()
        _local.__decimal_context__ = context
    del threading, local # Don't contaminate the namespace
def localcontext(ctx=None):
    """Return a context manager for a copy of the supplied context.

    When ctx is None, a copy of the current context is used.  The
    returned object installs a working copy on entry to a with statement
    and restores the previous context on exit:

        def sin(x):
            with localcontext() as ctx:
                ctx.prec += 2
                # Rest of sin calculation algorithm
                # uses a precision 2 greater than normal
            return +s # Convert result to normal precision

        def sin(x):
            with localcontext(ExtendedContext):
                # Rest of sin calculation algorithm
                # uses the Extended Context from the
                # General Decimal Arithmetic Specification
            return +s # Convert result to normal context

    >>> setcontext(DefaultContext)
    >>> print(getcontext().prec)
    28
    >>> with localcontext():
    ...     ctx = getcontext()
    ...     ctx.prec += 2
    ...     print(ctx.prec)
    ...
    30
    >>> with localcontext(ExtendedContext):
    ...     print(getcontext().prec)
    ...
    9
    >>> print(getcontext().prec)
    28
    """
    return _ContextManager(getcontext() if ctx is None else ctx)
##### Decimal class #######################################################
# Do not subclass Decimal from numbers.Real and do not register it as such
# (because Decimals are not interoperable with floats). See the notes in
# numbers.py for more detail.
class Decimal(object):
"""Floating point class for decimal arithmetic."""
__slots__ = ('_exp','_int','_sign', '_is_special')
# Generally, the value of the Decimal instance is given by
# (-1)**_sign * _int * 10**_exp
# Special values are signified by _is_special == True
# We're immutable, so use __new__ not __init__
    def __new__(cls, value="0", context=None):
        """Create a decimal point instance.

        value may be a string, int, float, another Decimal, a
        (sign, digit_tuple, exponent) triple, or an internal _WorkRep.
        context is only consulted to signal errors (bad strings, float
        inputs when FloatOperation is trapped); it defaults to the
        current thread's context.

        >>> Decimal('3.14') # string input
        Decimal('3.14')
        >>> Decimal((0, (3, 1, 4), -2)) # tuple (sign, digit_tuple, exponent)
        Decimal('3.14')
        >>> Decimal(314) # int
        Decimal('314')
        >>> Decimal(Decimal(314)) # another decimal instance
        Decimal('314')
        >>> Decimal(' 3.14 \\n') # leading and trailing whitespace okay
        Decimal('3.14')
        """
        # Note that the coefficient, self._int, is actually stored as
        # a string rather than as a tuple of digits. This speeds up
        # the "digits to integer" and "integer to digits" conversions
        # that are used in almost every arithmetic operation on
        # Decimals. This is an internal detail: the as_tuple function
        # and the Decimal constructor still deal with tuples of
        # digits.
        self = object.__new__(cls)
        # From a string
        # REs insist on real strings, so we can too.
        if isinstance(value, str):
            # Underscores are allowed as digit separators (PEP 515 style)
            # and are stripped before parsing.
            m = _parser(value.strip().replace("_", ""))
            if m is None:
                if context is None:
                    context = getcontext()
                return context._raise_error(ConversionSyntax,
                                "Invalid literal for Decimal: %r" % value)
            if m.group('sign') == "-":
                self._sign = 1
            else:
                self._sign = 0
            intpart = m.group('int')
            if intpart is not None:
                # finite number
                fracpart = m.group('frac') or ''
                exp = int(m.group('exp') or '0')
                # int() strips leading zeros from the coefficient.
                self._int = str(int(intpart+fracpart))
                self._exp = exp - len(fracpart)
                self._is_special = False
            else:
                diag = m.group('diag')
                if diag is not None:
                    # NaN
                    self._int = str(int(diag or '0')).lstrip('0')
                    if m.group('signal'):
                        self._exp = 'N'
                    else:
                        self._exp = 'n'
                else:
                    # infinity
                    self._int = '0'
                    self._exp = 'F'
                self._is_special = True
            return self
        # From an integer
        if isinstance(value, int):
            if value >= 0:
                self._sign = 0
            else:
                self._sign = 1
            self._exp = 0
            self._int = str(abs(value))
            self._is_special = False
            return self
        # From another decimal
        if isinstance(value, Decimal):
            self._exp = value._exp
            self._sign = value._sign
            self._int = value._int
            self._is_special = value._is_special
            return self
        # From an internal working value
        if isinstance(value, _WorkRep):
            self._sign = value.sign
            self._int = str(value.int)
            self._exp = int(value.exp)
            self._is_special = False
            return self
        # tuple/list conversion (possibly from as_tuple())
        if isinstance(value, (list,tuple)):
            if len(value) != 3:
                raise ValueError('Invalid tuple size in creation of Decimal '
                                 'from list or tuple. The list or tuple '
                                 'should have exactly three elements.')
            # process sign. The isinstance test rejects floats
            if not (isinstance(value[0], int) and value[0] in (0,1)):
                raise ValueError("Invalid sign. The first value in the tuple "
                                 "should be an integer; either 0 for a "
                                 "positive number or 1 for a negative number.")
            self._sign = value[0]
            if value[2] == 'F':
                # infinity: value[1] is ignored
                self._int = '0'
                self._exp = value[2]
                self._is_special = True
            else:
                # process and validate the digits in value[1]
                digits = []
                for digit in value[1]:
                    if isinstance(digit, int) and 0 <= digit <= 9:
                        # skip leading zeros
                        if digits or digit != 0:
                            digits.append(digit)
                    else:
                        raise ValueError("The second value in the tuple must "
                                         "be composed of integers in the range "
                                         "0 through 9.")
                if value[2] in ('n', 'N'):
                    # NaN: digits form the diagnostic
                    self._int = ''.join(map(str, digits))
                    self._exp = value[2]
                    self._is_special = True
                elif isinstance(value[2], int):
                    # finite number: digits give the coefficient
                    self._int = ''.join(map(str, digits or [0]))
                    self._exp = value[2]
                    self._is_special = False
                else:
                    raise ValueError("The third value in the tuple must "
                                     "be an integer, or one of the "
                                     "strings 'F', 'n', 'N'.")
            return self
        if isinstance(value, float):
            if context is None:
                context = getcontext()
            # Records (or raises, if trapped) FloatOperation, then
            # converts the float exactly.
            context._raise_error(FloatOperation,
                "strict semantics for mixing floats and Decimals are "
                "enabled")
            value = Decimal.from_float(value)
            self._exp = value._exp
            self._sign = value._sign
            self._int = value._int
            self._is_special = value._is_special
            return self
        raise TypeError("Cannot convert %r to Decimal" % value)
@classmethod
def from_float(cls, f):
"""Converts a float to a decimal number, exactly.
Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
Since 0.1 is not exactly representable in binary floating point, the
value is stored as the nearest representable value which is
0x1.999999999999ap-4. The exact equivalent of the value in decimal
is 0.1000000000000000055511151231257827021181583404541015625.
>>> Decimal.from_float(0.1)
Decimal('0.1000000000000000055511151231257827021181583404541015625')
>>> Decimal.from_float(float('nan'))
Decimal('NaN')
>>> Decimal.from_float(float('inf'))
Decimal('Infinity')
>>> Decimal.from_float(-float('inf'))
Decimal('-Infinity')
>>> Decimal.from_float(-0.0)
Decimal('-0')
"""
if isinstance(f, int): # handle integer inputs
return cls(f)
if not isinstance(f, float):
raise TypeError("argument must be int or float.")
if _math.isinf(f) or _math.isnan(f):
return cls(repr(f))
if _math.copysign(1.0, f) == 1.0:
sign = 0
else:
sign = 1
n, d = abs(f).as_integer_ratio()
k = d.bit_length() - 1
result = _dec_from_triple(sign, str(n*5**k), -k)
if cls is Decimal:
return result
else:
return cls(result)
def _isnan(self):
"""Returns whether the number is not actually one.
0 if a number
1 if NaN
2 if sNaN
"""
if self._is_special:
exp = self._exp
if exp == 'n':
return 1
elif exp == 'N':
return 2
return 0
def _isinfinity(self):
"""Returns whether the number is infinite
0 if finite or not a number
1 if +INF
-1 if -INF
"""
if self._exp == 'F':
if self._sign:
return -1
return 1
return 0
def _check_nans(self, other=None, context=None):
"""Returns whether the number is not actually one.
if self, other are sNaN, signal
if self, other are NaN return nan
return 0
Done before operations.
"""
self_is_nan = self._isnan()
if other is None:
other_is_nan = False
else:
other_is_nan = other._isnan()
if self_is_nan or other_is_nan:
if context is None:
context = getcontext()
if self_is_nan == 2:
return context._raise_error(InvalidOperation, 'sNaN',
self)
if other_is_nan == 2:
return context._raise_error(InvalidOperation, 'sNaN',
other)
if self_is_nan:
return self._fix_nan(context)
return other._fix_nan(context)
return 0
def _compare_check_nans(self, other, context):
"""Version of _check_nans used for the signaling comparisons
compare_signal, __le__, __lt__, __ge__, __gt__.
Signal InvalidOperation if either self or other is a (quiet
or signaling) NaN. Signaling NaNs take precedence over quiet
NaNs.
Return 0 if neither operand is a NaN.
"""
if context is None:
context = getcontext()
if self._is_special or other._is_special:
if self.is_snan():
return context._raise_error(InvalidOperation,
'comparison involving sNaN',
self)
elif other.is_snan():
return context._raise_error(InvalidOperation,
'comparison involving sNaN',
other)
elif self.is_qnan():
return context._raise_error(InvalidOperation,
'comparison involving NaN',
self)
elif other.is_qnan():
return context._raise_error(InvalidOperation,
'comparison involving NaN',
other)
return 0
def __bool__(self):
"""Return True if self is nonzero; otherwise return False.
NaNs and infinities are considered nonzero.
"""
return self._is_special or self._int != '0'
    def _cmp(self, other):
        """Compare the two non-NaN decimal instances self and other.

        Returns -1 if self < other, 0 if self == other and 1
        if self > other. This routine is for internal use only."""

        if self._is_special or other._is_special:
            # NaNs were already excluded by the caller, so the only
            # specials here are infinities.  _isinfinity() returns -1/0/1,
            # which already orders -Inf < finite < +Inf, so comparing the
            # two codes gives the answer directly.
            self_inf = self._isinfinity()
            other_inf = other._isinfinity()
            if self_inf == other_inf:
                return 0
            elif self_inf < other_inf:
                return -1
            else:
                return 1
        # check for zeros; Decimal('0') == Decimal('-0')
        if not self:
            if not other:
                return 0
            else:
                # self is zero: result is the opposite of other's sign.
                return -((-1)**other._sign)
        if not other:
            return (-1)**self._sign
        # If different signs, neg one is less
        if other._sign < self._sign:
            return -1
        if self._sign < other._sign:
            return 1
        self_adjusted = self.adjusted()
        other_adjusted = other.adjusted()
        if self_adjusted == other_adjusted:
            # Same adjusted exponent: pad the coefficients to a common
            # exponent (exactly one of the pads is non-empty) and compare
            # the digit strings lexicographically, which matches numeric
            # order for equal-length digit strings.
            self_padded = self._int + '0'*(self._exp - other._exp)
            other_padded = other._int + '0'*(other._exp - self._exp)
            if self_padded == other_padded:
                return 0
            elif self_padded < other_padded:
                # Flip the result when both operands are negative.
                return -(-1)**self._sign
            else:
                return (-1)**self._sign
        elif self_adjusted > other_adjusted:
            return (-1)**self._sign
        else: # self_adjusted < other_adjusted
            return -((-1)**self._sign)
# Note: The Decimal standard doesn't cover rich comparisons for
# Decimals. In particular, the specification is silent on the
# subject of what should happen for a comparison involving a NaN.
# We take the following approach:
#
# == comparisons involving a quiet NaN always return False
# != comparisons involving a quiet NaN always return True
# == or != comparisons involving a signaling NaN signal
# InvalidOperation, and return False or True as above if the
# InvalidOperation is not trapped.
# <, >, <= and >= comparisons involving a (quiet or signaling)
# NaN signal InvalidOperation, and return False if the
# InvalidOperation is not trapped.
#
# This behavior is designed to conform as closely as possible to
# that specified by IEEE 754.
def __eq__(self, other, context=None):
self, other = _convert_for_comparison(self, other, equality_op=True)
if other is NotImplemented:
return other
if self._check_nans(other, context):
return False
return self._cmp(other) == 0
def __lt__(self, other, context=None):
self, other = _convert_for_comparison(self, other)
if other is NotImplemented:
return other
ans = self._compare_check_nans(other, context)
if ans:
return False
return self._cmp(other) < 0
def __le__(self, other, context=None):
self, other = _convert_for_comparison(self, other)
if other is NotImplemented:
return other
ans = self._compare_check_nans(other, context)
if ans:
return False
return self._cmp(other) <= 0
def __gt__(self, other, context=None):
self, other = _convert_for_comparison(self, other)
if other is NotImplemented:
return other
ans = self._compare_check_nans(other, context)
if ans:
return False
return self._cmp(other) > 0
def __ge__(self, other, context=None):
self, other = _convert_for_comparison(self, other)
if other is NotImplemented:
return other
ans = self._compare_check_nans(other, context)
if ans:
return False
return self._cmp(other) >= 0
def compare(self, other, context=None):
"""Compare self to other. Return a decimal value:
a or b is a NaN ==> Decimal('NaN')
a < b ==> Decimal('-1')
a == b ==> Decimal('0')
a > b ==> Decimal('1')
"""
other = _convert_other(other, raiseit=True)
# Compare(NaN, NaN) = NaN
if (self._is_special or other and other._is_special):
ans = self._check_nans(other, context)
if ans:
return ans
return Decimal(self._cmp(other))
    def __hash__(self):
        """x.__hash__() <==> hash(x)"""

        # In order to make sure that the hash of a Decimal instance
        # agrees with the hash of a numerically equal integer, float
        # or Fraction, we follow the rules for numeric hashes outlined
        # in the documentation. (See library docs, 'Built-in Types').
        if self._is_special:
            if self.is_snan():
                raise TypeError('Cannot hash a signaling NaN value.')
            elif self.is_nan():
                return _PyHASH_NAN
            else:
                if self._sign:
                    return -_PyHASH_INF
                else:
                    return _PyHASH_INF
        # hash(self) == int(coefficient) * 10**exponent mod the prime
        # _PyHASH_MODULUS, with the result's sign applied afterwards.
        if self._exp >= 0:
            exp_hash = pow(10, self._exp, _PyHASH_MODULUS)
        else:
            # Negative exponent: multiply by the modular inverse of 10
            # instead (that is what _PyHASH_10INV holds, per its name —
            # it is set up elsewhere in this module).
            exp_hash = pow(_PyHASH_10INV, -self._exp, _PyHASH_MODULUS)
        hash_ = int(self._int) * exp_hash % _PyHASH_MODULUS
        ans = hash_ if self >= 0 else -hash_
        # CPython reserves -1 as an error marker for __hash__, so map it
        # to -2 (as int and float do).
        return -2 if ans == -1 else ans
def as_tuple(self):
"""Represents the number as a triple tuple.
To show the internals exactly as they are.
"""
return DecimalTuple(self._sign, tuple(map(int, self._int)), self._exp)
def as_integer_ratio(self):
"""Express a finite Decimal instance in the form n / d.
Returns a pair (n, d) of integers. When called on an infinity
or NaN, raises OverflowError or ValueError respectively.
>>> Decimal('3.14').as_integer_ratio()
(157, 50)
>>> Decimal('-123e5').as_integer_ratio()
(-12300000, 1)
>>> Decimal('0.00').as_integer_ratio()
(0, 1)
"""
if self._is_special:
if self.is_nan():
raise ValueError("cannot convert NaN to integer ratio")
else:
raise OverflowError("cannot convert Infinity to integer ratio")
if not self:
return 0, 1
# Find n, d in lowest terms such that abs(self) == n / d;
# we'll deal with the sign later.
n = int(self._int)
if self._exp >= 0:
# self is an integer.
n, d = n * 10**self._exp, 1
else:
# Find d2, d5 such that abs(self) = n / (2**d2 * 5**d5).
d5 = -self._exp
while d5 > 0 and n % 5 == 0:
n //= 5
d5 -= 1
# (n & -n).bit_length() - 1 counts trailing zeros in binary
# representation of n (provided n is nonzero).
d2 = -self._exp
shift2 = min((n & -n).bit_length() - 1, d2)
if shift2:
n >>= shift2
d2 -= shift2
d = 5**d5 << d2
if self._sign:
n = -n
return n, d
def __repr__(self):
"""Represents the number as an instance of Decimal."""
# Invariant: eval(repr(d)) == d
return "Decimal('%s')" % str(self)
    def __str__(self, eng=False, context=None):
        """Return string representation of the number in scientific notation.

        Captures all of the information in the underlying representation.
        With eng=True, uses engineering notation (exponent a multiple of
        3).  context is consulted only for the capitals setting ('e' vs
        'E') and only when an exponent is actually printed.
        """

        sign = ['', '-'][self._sign]
        if self._is_special:
            if self._exp == 'F':
                return sign + 'Infinity'
            elif self._exp == 'n':
                # A quiet NaN may carry a diagnostic payload in _int.
                return sign + 'NaN' + self._int
            else: # self._exp == 'N'
                return sign + 'sNaN' + self._int

        # number of digits of self._int to left of decimal point
        leftdigits = self._exp + len(self._int)

        # dotplace is number of digits of self._int to the left of the
        # decimal point in the mantissa of the output string (that is,
        # after adjusting the exponent)
        if self._exp <= 0 and leftdigits > -6:
            # no exponent required
            dotplace = leftdigits
        elif not eng:
            # usual scientific notation: 1 digit on left of the point
            dotplace = 1
        elif self._int == '0':
            # engineering notation, zero
            dotplace = (leftdigits + 1) % 3 - 1
        else:
            # engineering notation, nonzero
            dotplace = (leftdigits - 1) % 3 + 1

        # Split the coefficient around the decimal point, zero-padding
        # whichever side falls short.
        if dotplace <= 0:
            intpart = '0'
            fracpart = '.' + '0'*(-dotplace) + self._int
        elif dotplace >= len(self._int):
            intpart = self._int+'0'*(dotplace-len(self._int))
            fracpart = ''
        else:
            intpart = self._int[:dotplace]
            fracpart = '.' + self._int[dotplace:]
        if leftdigits == dotplace:
            exp = ''
        else:
            if context is None:
                context = getcontext()
            exp = ['e', 'E'][context.capitals] + "%+d" % (leftdigits-dotplace)

        return sign + intpart + fracpart + exp
def to_eng_string(self, context=None):
    """Convert to a string, using engineering notation if an exponent is needed.

    Engineering notation has an exponent which is a multiple of 3.  This
    can leave up to 3 digits to the left of the decimal place and may
    require the addition of either one or two trailing zeros.
    """
    # Same machinery as str(); only the eng flag differs.
    return self.__str__(eng=True, context=context)
def __neg__(self, context=None):
    """Return a rounded copy of self with the sign flipped.

    A signaling NaN raises InvalidOperation via _check_nans.
    """
    if self._is_special:
        nan = self._check_nans(context=context)
        if nan:
            return nan

    ctx = getcontext() if context is None else context
    # -Decimal('0') is Decimal('0'), not Decimal('-0'), except in
    # ROUND_FLOOR mode where the negative sign is preserved.
    if not self and ctx.rounding != ROUND_FLOOR:
        flipped = self.copy_abs()
    else:
        flipped = self.copy_negate()
    return flipped._fix(ctx)
def __pos__(self, context=None):
    """Return a copy of self rounded to the current context.

    Signaling NaNs are converted to quiet NaNs (with a signal).
    """
    if self._is_special:
        nan = self._check_nans(context=context)
        if nan:
            return nan

    ctx = getcontext() if context is None else context
    # +(-0) is +0, except in ROUND_FLOOR mode where the sign survives.
    if not self and ctx.rounding != ROUND_FLOOR:
        result = self.copy_abs()
    else:
        result = Decimal(self)
    return result._fix(ctx)
def __abs__(self, round=True, context=None):
    """Return the absolute value of self.

    If the keyword argument 'round' is false, do not round.  The
    expression self.__abs__(round=False) is equivalent to
    self.copy_abs().
    """
    if not round:
        return self.copy_abs()

    if self._is_special:
        nan = self._check_nans(context=context)
        if nan:
            return nan

    # Delegate to unary minus/plus so rounding and signals match them.
    if self._sign:
        return self.__neg__(context=context)
    return self.__pos__(context=context)
def __add__(self, other, context=None):
    """Returns self + other.

    -INF + INF (or the reverse) cause InvalidOperation errors.
    """
    other = _convert_other(other)
    if other is NotImplemented:
        return other

    if context is None:
        context = getcontext()

    if self._is_special or other._is_special:
        ans = self._check_nans(other, context)
        if ans:
            return ans

        if self._isinfinity():
            # If both INF, same sign => same as both, opposite => error.
            if self._sign != other._sign and other._isinfinity():
                return context._raise_error(InvalidOperation, '-INF + INF')
            return Decimal(self)
        if other._isinfinity():
            return Decimal(other)  # Can't both be infinity here

    # exp is the tentative exponent of the (unrounded) result.
    exp = min(self._exp, other._exp)
    negativezero = 0
    if context.rounding == ROUND_FLOOR and self._sign != other._sign:
        # If the answer is 0, the sign should be negative, in this case.
        negativezero = 1

    if not self and not other:
        sign = min(self._sign, other._sign)
        if negativezero:
            sign = 1
        ans = _dec_from_triple(sign, '0', exp)
        ans = ans._fix(context)
        return ans
    if not self:
        # Only rescale the nonzero operand far enough to round correctly
        # at context.prec digits (hence the prec+1 guard digit).
        exp = max(exp, other._exp - context.prec-1)
        ans = other._rescale(exp, context.rounding)
        ans = ans._fix(context)
        return ans
    if not other:
        exp = max(exp, self._exp - context.prec-1)
        ans = self._rescale(exp, context.rounding)
        ans = ans._fix(context)
        return ans

    op1 = _WorkRep(self)
    op2 = _WorkRep(other)
    op1, op2 = _normalize(op1, op2, context.prec)

    result = _WorkRep()
    if op1.sign != op2.sign:
        # Equal and opposite
        if op1.int == op2.int:
            ans = _dec_from_triple(negativezero, '0', exp)
            ans = ans._fix(context)
            return ans
        if op1.int < op2.int:
            op1, op2 = op2, op1
            # OK, now abs(op1) > abs(op2)
        if op1.sign == 1:
            result.sign = 1
            op1.sign, op2.sign = op2.sign, op1.sign
        else:
            result.sign = 0
            # So we know the sign, and op1 > 0.
    elif op1.sign == 1:
        result.sign = 1
        op1.sign, op2.sign = (0, 0)
    else:
        result.sign = 0
        # Now, op1 > abs(op2) > 0

    if op2.sign == 0:
        result.int = op1.int + op2.int
    else:
        result.int = op1.int - op2.int

    result.exp = op1.exp
    ans = Decimal(result)
    ans = ans._fix(context)
    return ans
# Addition is commutative, so the reflected operation reuses __add__.
__radd__ = __add__
def __sub__(self, other, context=None):
    """Return self - other"""
    rhs = _convert_other(other)
    if rhs is NotImplemented:
        return rhs

    if self._is_special or rhs._is_special:
        nan = self._check_nans(rhs, context=context)
        if nan:
            return nan

    # Subtraction is addition of the exactly-negated operand;
    # copy_negate performs no rounding.
    return self.__add__(rhs.copy_negate(), context=context)
def __rsub__(self, other, context=None):
    """Return other - self"""
    lhs = _convert_other(other)
    if lhs is NotImplemented:
        return lhs
    # Swap the operands and reuse the forward implementation.
    return lhs.__sub__(self, context=context)
def __mul__(self, other, context=None):
    """Return self * other.

    (+-) INF * 0 (or its reverse) raise InvalidOperation.
    """
    other = _convert_other(other)
    if other is NotImplemented:
        return other

    if context is None:
        context = getcontext()

    # Sign of a product is the XOR of the operand signs.
    resultsign = self._sign ^ other._sign

    if self._is_special or other._is_special:
        ans = self._check_nans(other, context)
        if ans:
            return ans

        if self._isinfinity():
            if not other:
                return context._raise_error(InvalidOperation, '(+-)INF * 0')
            return _SignedInfinity[resultsign]

        if other._isinfinity():
            if not self:
                return context._raise_error(InvalidOperation, '0 * (+-)INF')
            return _SignedInfinity[resultsign]

    resultexp = self._exp + other._exp

    # Special case for multiplying by zero
    if not self or not other:
        ans = _dec_from_triple(resultsign, '0', resultexp)
        # Fixing in case the exponent is out of bounds
        ans = ans._fix(context)
        return ans

    # Special case for multiplying by power of 10: avoids the
    # integer multiplication entirely.
    if self._int == '1':
        ans = _dec_from_triple(resultsign, other._int, resultexp)
        ans = ans._fix(context)
        return ans
    if other._int == '1':
        ans = _dec_from_triple(resultsign, self._int, resultexp)
        ans = ans._fix(context)
        return ans

    op1 = _WorkRep(self)
    op2 = _WorkRep(other)

    ans = _dec_from_triple(resultsign, str(op1.int * op2.int), resultexp)
    ans = ans._fix(context)

    return ans
# Multiplication is commutative, so the reflected operation reuses __mul__.
__rmul__ = __mul__
def __truediv__(self, other, context=None):
    """Return self / other."""
    other = _convert_other(other)
    if other is NotImplemented:
        return NotImplemented

    if context is None:
        context = getcontext()

    sign = self._sign ^ other._sign

    if self._is_special or other._is_special:
        ans = self._check_nans(other, context)
        if ans:
            return ans

        if self._isinfinity() and other._isinfinity():
            return context._raise_error(InvalidOperation, '(+-)INF/(+-)INF')

        if self._isinfinity():
            return _SignedInfinity[sign]

        if other._isinfinity():
            context._raise_error(Clamped, 'Division by infinity')
            return _dec_from_triple(sign, '0', context.Etiny())

    # Special cases for zeroes
    if not other:
        if not self:
            return context._raise_error(DivisionUndefined, '0 / 0')
        return context._raise_error(DivisionByZero, 'x / 0', sign)

    if not self:
        exp = self._exp - other._exp
        coeff = 0
    else:
        # OK, so neither = 0, INF or NaN
        # Shift so the quotient has prec+1 digits: one guard digit
        # beyond the context precision.
        shift = len(other._int) - len(self._int) + context.prec + 1
        exp = self._exp - other._exp - shift
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        if shift >= 0:
            coeff, remainder = divmod(op1.int * 10**shift, op2.int)
        else:
            coeff, remainder = divmod(op1.int, op2.int * 10**-shift)
        if remainder:
            # result is not exact; adjust to ensure correct rounding.
            # Forcing the last digit odd (not a multiple of 5) makes the
            # subsequent round-to-prec land on the correct side of a half.
            if coeff % 5 == 0:
                coeff += 1
        else:
            # result is exact; get as close to ideal exponent as possible
            ideal_exp = self._exp - other._exp
            while exp < ideal_exp and coeff % 10 == 0:
                coeff //= 10
                exp += 1

    ans = _dec_from_triple(sign, str(coeff), exp)
    return ans._fix(context)
def _divide(self, other, context):
    """Return (self // other, self % other), to context.prec precision.

    Assumes that neither self nor other is a NaN, that self is not
    infinite and that other is nonzero.
    """
    sign = self._sign ^ other._sign
    if other._isinfinity():
        ideal_exp = self._exp
    else:
        ideal_exp = min(self._exp, other._exp)

    expdiff = self.adjusted() - other.adjusted()
    # Quotient magnitude < 1 (or dividend zero/divisor infinite):
    # quotient is 0 and the remainder is self rescaled.
    if not self or other._isinfinity() or expdiff <= -2:
        return (_dec_from_triple(sign, '0', 0),
                self._rescale(ideal_exp, context.rounding))
    if expdiff <= context.prec:
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        # Align both operands to the smaller exponent before dividing.
        if op1.exp >= op2.exp:
            op1.int *= 10**(op1.exp - op2.exp)
        else:
            op2.int *= 10**(op2.exp - op1.exp)
        q, r = divmod(op1.int, op2.int)
        if q < 10**context.prec:
            return (_dec_from_triple(sign, str(q), 0),
                    _dec_from_triple(self._sign, str(r), ideal_exp))

    # Here the quotient is too large to be representable
    ans = context._raise_error(DivisionImpossible,
                               'quotient too large in //, % or divmod')
    return ans, ans
def __rtruediv__(self, other, context=None):
    """Swaps self/other and returns __truediv__."""
    lhs = _convert_other(other)
    if lhs is NotImplemented:
        return lhs
    # Reflected division: evaluate other / self.
    return lhs.__truediv__(self, context=context)
def __divmod__(self, other, context=None):
    """
    Return (self // other, self % other)
    """
    other = _convert_other(other)
    if other is NotImplemented:
        return other

    if context is None:
        context = getcontext()

    ans = self._check_nans(other, context)
    if ans:
        # A NaN result is returned in both slots of the pair.
        return (ans, ans)

    sign = self._sign ^ other._sign
    if self._isinfinity():
        if other._isinfinity():
            ans = context._raise_error(InvalidOperation, 'divmod(INF, INF)')
            return ans, ans
        else:
            # INF // finite is signed infinity; INF % finite is invalid.
            return (_SignedInfinity[sign],
                    context._raise_error(InvalidOperation, 'INF % x'))

    if not other:
        if not self:
            ans = context._raise_error(DivisionUndefined, 'divmod(0, 0)')
            return ans, ans
        else:
            return (context._raise_error(DivisionByZero, 'x // 0', sign),
                    context._raise_error(InvalidOperation, 'x % 0'))

    quotient, remainder = self._divide(other, context)
    remainder = remainder._fix(context)
    return quotient, remainder
def __rdivmod__(self, other, context=None):
    """Swaps self/other and returns __divmod__."""
    lhs = _convert_other(other)
    if lhs is NotImplemented:
        return lhs
    # Reflected divmod: evaluate divmod(other, self).
    return lhs.__divmod__(self, context=context)
def __mod__(self, other, context=None):
    """
    self % other
    """
    divisor = _convert_other(other)
    if divisor is NotImplemented:
        return divisor

    ctx = getcontext() if context is None else context

    nan = self._check_nans(divisor, ctx)
    if nan:
        return nan

    # An infinite dividend has no remainder; a zero divisor is either
    # invalid (nonzero dividend) or undefined (0 % 0).
    if self._isinfinity():
        return ctx._raise_error(InvalidOperation, 'INF % x')
    if not divisor:
        if self:
            return ctx._raise_error(InvalidOperation, 'x % 0')
        return ctx._raise_error(DivisionUndefined, '0 % 0')

    _, rem = self._divide(divisor, ctx)
    return rem._fix(ctx)
def __rmod__(self, other, context=None):
    """Swaps self/other and returns __mod__."""
    lhs = _convert_other(other)
    if lhs is NotImplemented:
        return lhs
    # Reflected modulo: evaluate other % self.
    return lhs.__mod__(self, context=context)
def remainder_near(self, other, context=None):
    """
    Remainder nearest to 0-  abs(remainder-near) <= other/2
    """
    if context is None:
        context = getcontext()

    other = _convert_other(other, raiseit=True)

    ans = self._check_nans(other, context)
    if ans:
        return ans

    # self == +/-infinity -> InvalidOperation
    if self._isinfinity():
        return context._raise_error(InvalidOperation,
                                    'remainder_near(infinity, x)')

    # other == 0 -> either InvalidOperation or DivisionUndefined
    if not other:
        if self:
            return context._raise_error(InvalidOperation,
                                        'remainder_near(x, 0)')
        else:
            return context._raise_error(DivisionUndefined,
                                        'remainder_near(0, 0)')

    # other = +/-infinity -> remainder = self
    if other._isinfinity():
        ans = Decimal(self)
        return ans._fix(context)

    # self = 0 -> remainder = self, with ideal exponent
    ideal_exponent = min(self._exp, other._exp)
    if not self:
        ans = _dec_from_triple(self._sign, '0', ideal_exponent)
        return ans._fix(context)

    # catch most cases of large or small quotient
    expdiff = self.adjusted() - other.adjusted()
    if expdiff >= context.prec + 1:
        # expdiff >= prec+1 => abs(self/other) > 10**prec
        return context._raise_error(DivisionImpossible)
    if expdiff <= -2:
        # expdiff <= -2 => abs(self/other) < 0.1; the nearest
        # multiple of other is 0, so the remainder is just self.
        ans = self._rescale(ideal_exponent, context.rounding)
        return ans._fix(context)

    # adjust both arguments to have the same exponent, then divide
    op1 = _WorkRep(self)
    op2 = _WorkRep(other)
    if op1.exp >= op2.exp:
        op1.int *= 10**(op1.exp - op2.exp)
    else:
        op2.int *= 10**(op2.exp - op1.exp)
    q, r = divmod(op1.int, op2.int)
    # remainder is r*10**ideal_exponent; other is +/-op2.int *
    # 10**ideal_exponent.   Apply correction to ensure that
    # abs(remainder) <= abs(other)/2.  The (q&1) term breaks exact
    # halves towards an even quotient.
    if 2*r + (q&1) > op2.int:
        r -= op2.int
        q += 1

    if q >= 10**context.prec:
        return context._raise_error(DivisionImpossible)

    # result has same sign as self unless r is negative
    sign = self._sign
    if r < 0:
        sign = 1-sign
        r = -r

    ans = _dec_from_triple(sign, str(r), ideal_exponent)
    return ans._fix(context)
def __floordiv__(self, other, context=None):
    """self // other"""
    other = _convert_other(other)
    if other is NotImplemented:
        return other

    if context is None:
        context = getcontext()

    ans = self._check_nans(other, context)
    if ans:
        return ans

    if self._isinfinity():
        if other._isinfinity():
            # INF // INF is undefined.
            return context._raise_error(InvalidOperation, 'INF // INF')
        else:
            return _SignedInfinity[self._sign ^ other._sign]

    if not other:
        if self:
            return context._raise_error(DivisionByZero, 'x // 0',
                                        self._sign ^ other._sign)
        else:
            return context._raise_error(DivisionUndefined, '0 // 0')

    # _divide returns (quotient, remainder); keep only the quotient.
    return self._divide(other, context)[0]
def __rfloordiv__(self, other, context=None):
    """Swaps self/other and returns __floordiv__."""
    lhs = _convert_other(other)
    if lhs is NotImplemented:
        return lhs
    # Reflected floor division: evaluate other // self.
    return lhs.__floordiv__(self, context=context)
def __float__(self):
    """Float representation.

    Quiet NaNs convert to float NaN (the payload is dropped, since
    float() cannot represent it); signaling NaNs raise ValueError.
    """
    if not self._isnan():
        return float(str(self))
    if self.is_snan():
        raise ValueError("Cannot convert signaling NaN to float")
    return float("-nan" if self._sign else "nan")
def __int__(self):
    """Converts self to an int, truncating if necessary."""
    if self._is_special:
        if self._isnan():
            raise ValueError("Cannot convert NaN to integer")
        raise OverflowError("Cannot convert infinity to integer")
    sign = -1 if self._sign else 1
    if self._exp >= 0:
        return sign * int(self._int) * 10**self._exp
    # Negative exponent: slicing off the last -exp digits truncates
    # the fractional part (towards zero); '0' covers an empty slice.
    return sign * int(self._int[:self._exp] or '0')
# int() already truncates towards zero, so math.trunc can share it.
__trunc__ = __int__
@property
def real(self):
    """The real component; a Decimal is its own real part."""
    return self
@property
def imag(self):
    """The imaginary component, always zero for a Decimal."""
    return Decimal(0)
def conjugate(self):
    """Return self; Decimals are real, so conjugation is the identity."""
    return self
def __complex__(self):
    """Return self as a complex number with zero imaginary part."""
    # Route through float conversion; signaling NaNs raise there.
    return complex(self.__float__())
def _fix_nan(self, context):
    """Decapitate the payload of a NaN to fit the context"""
    coeff = self._int
    # The payload may hold at most prec digits when clamp == 0 and
    # prec-1 digits when clamp == 1.
    limit = context.prec - context.clamp
    if len(coeff) <= limit:
        return Decimal(self)
    # Keep only the trailing `limit` digits, dropping leading zeros.
    # (Slice from len-limit rather than -limit: limit may be zero.)
    trimmed = coeff[len(coeff)-limit:].lstrip('0')
    return _dec_from_triple(self._sign, trimmed, self._exp, True)
def _fix(self, context):
    """Round if it is necessary to keep self within prec precision.

    Rounds and fixes the exponent.  Does not raise on a sNaN.

    Arguments:
    self - Decimal instance
    context - context used.
    """

    if self._is_special:
        if self._isnan():
            # decapitate payload if necessary
            return self._fix_nan(context)
        else:
            # self is +/-Infinity; return unaltered
            return Decimal(self)

    # if self is zero then exponent should be between Etiny and
    # Emax if clamp==0, and between Etiny and Etop if clamp==1.
    Etiny = context.Etiny()
    Etop = context.Etop()
    if not self:
        exp_max = [context.Emax, Etop][context.clamp]
        new_exp = min(max(self._exp, Etiny), exp_max)
        if new_exp != self._exp:
            context._raise_error(Clamped)
            return _dec_from_triple(self._sign, '0', new_exp)
        else:
            return Decimal(self)

    # exp_min is the smallest allowable exponent of the result,
    # equal to max(self.adjusted()-context.prec+1, Etiny)
    exp_min = len(self._int) + self._exp - context.prec
    if exp_min > Etop:
        # overflow: exp_min > Etop iff self.adjusted() > Emax
        ans = context._raise_error(Overflow, 'above Emax', self._sign)
        context._raise_error(Inexact)
        context._raise_error(Rounded)
        return ans

    self_is_subnormal = exp_min < Etiny
    if self_is_subnormal:
        exp_min = Etiny

    # round if self has too many digits
    if self._exp < exp_min:
        digits = len(self._int) + self._exp - exp_min
        if digits < 0:
            # All digits are being discarded; substitute a one-digit
            # stand-in so the rounding helpers see a nonzero remainder.
            self = _dec_from_triple(self._sign, '1', exp_min-1)
            digits = 0
        rounding_method = self._pick_rounding_function[context.rounding]
        changed = rounding_method(self, digits)
        coeff = self._int[:digits] or '0'
        if changed > 0:
            coeff = str(int(coeff)+1)
            if len(coeff) > context.prec:
                # Rounding up carried into an extra digit (e.g. 999 -> 1000):
                # drop the trailing zero and bump the exponent instead.
                coeff = coeff[:-1]
                exp_min += 1

        # check whether the rounding pushed the exponent out of range
        if exp_min > Etop:
            ans = context._raise_error(Overflow, 'above Emax', self._sign)
        else:
            ans = _dec_from_triple(self._sign, coeff, exp_min)

        # raise the appropriate signals, taking care to respect
        # the precedence described in the specification
        if changed and self_is_subnormal:
            context._raise_error(Underflow)
        if self_is_subnormal:
            context._raise_error(Subnormal)
        if changed:
            context._raise_error(Inexact)
        context._raise_error(Rounded)
        if not ans:
            # raise Clamped on underflow to 0
            context._raise_error(Clamped)
        return ans

    if self_is_subnormal:
        context._raise_error(Subnormal)

    # fold down if clamp == 1 and self has too few digits
    if context.clamp == 1 and self._exp > Etop:
        context._raise_error(Clamped)
        self_padded = self._int + '0'*(self._exp - Etop)
        return _dec_from_triple(self._sign, self_padded, Etop)

    # here self was representable to begin with; return unchanged
    return Decimal(self)
# for each of the rounding functions below:
# self is a finite, nonzero Decimal
# prec is an integer satisfying 0 <= prec < len(self._int)
#
# each function returns either -1, 0, or 1, as follows:
# 1 indicates that self should be rounded up (away from zero)
# 0 indicates that self should be truncated, and that all the
# digits to be truncated are zeros (so the value is unchanged)
# -1 indicates that there are nonzero digits to be truncated
def _round_down(self, prec):
    """Also known as round-towards-0, truncate."""
    # 0 when the discarded digits are all zero (value unchanged),
    # -1 when nonzero digits would be lost.
    return 0 if _all_zeros(self._int, prec) else -1
def _round_up(self, prec):
    """Rounds away from 0."""
    # Mirror of truncation: exact stays 0, inexact rounds up (+1).
    down = self._round_down(prec)
    return -down
def _round_half_up(self, prec):
    """Rounds 5 up (away from 0)"""
    # First discarded digit decides: >= '5' rounds up.
    if self._int[prec] >= '5':
        return 1
    return 0 if _all_zeros(self._int, prec) else -1
def _round_half_down(self, prec):
    """Round 5 down"""
    # An exact half rounds down; anything else behaves like half-up.
    return -1 if _exact_half(self._int, prec) else self._round_half_up(prec)
def _round_half_even(self, prec):
    """Round 5 to even, rest to nearest."""
    # An exact half with an even digit in the last kept position
    # rounds down; every other case matches half-up.
    is_tie = _exact_half(self._int, prec)
    if is_tie and (prec == 0 or self._int[prec-1] in '02468'):
        return -1
    return self._round_half_up(prec)
def _round_ceiling(self, prec):
    """Rounds up (not away from 0 if negative.)"""
    # Negative values truncate; positive values round away from zero.
    down = self._round_down(prec)
    return down if self._sign else -down
def _round_floor(self, prec):
    """Rounds down (not towards 0 if negative)"""
    # Positive values truncate; negative values round away from zero.
    down = self._round_down(prec)
    return -down if self._sign else down
def _round_05up(self, prec):
    """Round down unless digit prec-1 is 0 or 5."""
    # When the surviving last digit is 0 or 5 (or nothing survives),
    # round away from zero so later reround can fix direction.
    if prec == 0 or self._int[prec-1] in '05':
        return -self._round_down(prec)
    return self._round_down(prec)
# Map each rounding-mode constant (the keyword names match the string
# values of the ROUND_* constants) to its rounding helper; _fix looks
# the context's current mode up here.
_pick_rounding_function = dict(
    ROUND_DOWN = _round_down,
    ROUND_UP = _round_up,
    ROUND_HALF_UP = _round_half_up,
    ROUND_HALF_DOWN = _round_half_down,
    ROUND_HALF_EVEN = _round_half_even,
    ROUND_CEILING = _round_ceiling,
    ROUND_FLOOR = _round_floor,
    ROUND_05UP = _round_05up,
)
def __round__(self, n=None):
    """Round self to the nearest integer, or to a given precision.

    If only one argument is supplied, round a finite Decimal
    instance self to the nearest integer.  If self is infinite or
    a NaN then a Python exception is raised.  If self is finite
    and lies exactly halfway between two integers then it is
    rounded to the integer with even last digit.

    >>> round(Decimal('123.456'))
    123
    >>> round(Decimal('-456.789'))
    -457
    >>> round(Decimal('-3.0'))
    -3
    >>> round(Decimal('2.5'))
    2
    >>> round(Decimal('3.5'))
    4
    >>> round(Decimal('Inf'))
    Traceback (most recent call last):
      ...
    OverflowError: cannot round an infinity
    >>> round(Decimal('NaN'))
    Traceback (most recent call last):
      ...
    ValueError: cannot round a NaN

    If a second argument n is supplied, self is rounded to n
    decimal places using the rounding mode for the current
    context.

    For an integer n, round(self, -n) is exactly equivalent to
    self.quantize(Decimal('1En')).

    >>> round(Decimal('123.456'), 0)
    Decimal('123')
    >>> round(Decimal('123.456'), 2)
    Decimal('123.46')
    >>> round(Decimal('123.456'), -2)
    Decimal('1E+2')
    >>> round(Decimal('-Infinity'), 37)
    Decimal('NaN')
    >>> round(Decimal('sNaN123'), 0)
    Decimal('NaN123')
    """
    if n is not None:
        # two-argument form: use the equivalent quantize call
        if not isinstance(n, int):
            raise TypeError('Second argument to round should be integral')
        exp = _dec_from_triple(0, '1', -n)
        return self.quantize(exp)

    # one-argument form: banker's rounding, independent of the context.
    if self._is_special:
        if self.is_nan():
            raise ValueError("cannot round a NaN")
        else:
            raise OverflowError("cannot round an infinity")
    return int(self._rescale(0, ROUND_HALF_EVEN))
def __floor__(self):
    """Return the floor of self, as an integer.

    For a finite Decimal instance self, return the greatest
    integer n such that n <= self.  If self is infinite or a NaN
    then a Python exception is raised.
    """
    if self._is_special:
        if not self.is_nan():
            raise OverflowError("cannot round an infinity")
        raise ValueError("cannot round a NaN")
    # Rescaling to exponent 0 with ROUND_FLOOR is exactly floor().
    return int(self._rescale(0, ROUND_FLOOR))
def __ceil__(self):
    """Return the ceiling of self, as an integer.

    For a finite Decimal instance self, return the least integer n
    such that n >= self.  If self is infinite or a NaN then a
    Python exception is raised.
    """
    if self._is_special:
        if not self.is_nan():
            raise OverflowError("cannot round an infinity")
        raise ValueError("cannot round a NaN")
    # Rescaling to exponent 0 with ROUND_CEILING is exactly ceil().
    return int(self._rescale(0, ROUND_CEILING))
def fma(self, other, third, context=None):
    """Fused multiply-add.

    Returns self*other+third with no rounding of the intermediate
    product self*other.

    self and other are multiplied together, with no rounding of
    the result.  The third operand is then added to the result,
    and a single final rounding is performed.
    """

    other = _convert_other(other, raiseit=True)
    third = _convert_other(third, raiseit=True)

    # compute product; raise InvalidOperation if either operand is
    # a signaling NaN or if the product is zero times infinity.
    if self._is_special or other._is_special:
        if context is None:
            context = getcontext()
        # _exp encodes special kinds: 'N' sNaN, 'n' quiet NaN, 'F' infinity.
        if self._exp == 'N':
            return context._raise_error(InvalidOperation, 'sNaN', self)
        if other._exp == 'N':
            return context._raise_error(InvalidOperation, 'sNaN', other)
        if self._exp == 'n':
            product = self
        elif other._exp == 'n':
            product = other
        elif self._exp == 'F':
            if not other:
                return context._raise_error(InvalidOperation,
                                            'INF * 0 in fma')
            product = _SignedInfinity[self._sign ^ other._sign]
        elif other._exp == 'F':
            if not self:
                return context._raise_error(InvalidOperation,
                                            '0 * INF in fma')
            product = _SignedInfinity[self._sign ^ other._sign]
    else:
        # Exact (unrounded) product built directly from the integer
        # coefficients; only the final addition rounds.
        product = _dec_from_triple(self._sign ^ other._sign,
                                   str(int(self._int) * int(other._int)),
                                   self._exp + other._exp)

    return product.__add__(third, context)
def _power_modulo(self, other, modulo, context=None):
    """Three argument version of __pow__"""

    other = _convert_other(other)
    if other is NotImplemented:
        return other
    modulo = _convert_other(modulo)
    if modulo is NotImplemented:
        return modulo

    if context is None:
        context = getcontext()

    # deal with NaNs: if there are any sNaNs then first one wins,
    # (i.e. behaviour for NaNs is identical to that of fma)
    # _isnan() returns 2 for a signaling NaN, 1 for a quiet NaN.
    self_is_nan = self._isnan()
    other_is_nan = other._isnan()
    modulo_is_nan = modulo._isnan()
    if self_is_nan or other_is_nan or modulo_is_nan:
        if self_is_nan == 2:
            return context._raise_error(InvalidOperation, 'sNaN',
                                        self)
        if other_is_nan == 2:
            return context._raise_error(InvalidOperation, 'sNaN',
                                        other)
        if modulo_is_nan == 2:
            return context._raise_error(InvalidOperation, 'sNaN',
                                        modulo)
        if self_is_nan:
            return self._fix_nan(context)
        if other_is_nan:
            return other._fix_nan(context)
        return modulo._fix_nan(context)

    # check inputs: we apply same restrictions as Python's pow()
    if not (self._isinteger() and
            other._isinteger() and
            modulo._isinteger()):
        return context._raise_error(InvalidOperation,
                                    'pow() 3rd argument not allowed '
                                    'unless all arguments are integers')
    if other < 0:
        return context._raise_error(InvalidOperation,
                                    'pow() 2nd argument cannot be '
                                    'negative when 3rd argument specified')
    if not modulo:
        return context._raise_error(InvalidOperation,
                                    'pow() 3rd argument cannot be 0')

    # additional restriction for decimal: the modulus must be less
    # than 10**prec in absolute value
    if modulo.adjusted() >= context.prec:
        return context._raise_error(InvalidOperation,
                                    'insufficient precision: pow() 3rd '
                                    'argument must not have more than '
                                    'precision digits')

    # define 0**0 == NaN, for consistency with two-argument pow
    # (even though it hurts!)
    if not other and not self:
        return context._raise_error(InvalidOperation,
                                    'at least one of pow() 1st argument '
                                    'and 2nd argument must be nonzero ;'
                                    '0**0 is not defined')

    # compute sign of result
    if other._iseven():
        sign = 0
    else:
        sign = self._sign

    # convert modulo to a Python integer, and self and other to
    # Decimal integers (i.e. force their exponents to be >= 0)
    modulo = abs(int(modulo))
    base = _WorkRep(self.to_integral_value())
    exponent = _WorkRep(other.to_integral_value())

    # compute result using integer pow(); the exponent's power-of-ten
    # factor is applied by repeated modular exponentiation by 10.
    base = (base.int % modulo * pow(10, base.exp, modulo)) % modulo
    for i in range(exponent.exp):
        base = pow(base, 10, modulo)
    base = pow(base, exponent.int, modulo)

    return _dec_from_triple(sign, str(base), 0)
def _power_exact(self, other, p):
    """Attempt to compute self**other exactly.

    Given Decimals self and other and an integer p, attempt to
    compute an exact result for the power self**other, with p
    digits of precision.  Return None if self**other is not
    exactly representable in p digits.

    Assumes that elimination of special cases has already been
    performed: self and other must both be nonspecial; self must
    be positive and not numerically equal to 1; other must be
    nonzero.  For efficiency, other._exp should not be too large,
    so that 10**abs(other._exp) is a feasible calculation."""

    # In the comments below, we write x for the value of self and y for the
    # value of other.  Write x = xc*10**xe and abs(y) = yc*10**ye, with xc
    # and yc positive integers not divisible by 10.

    # The main purpose of this method is to identify the *failure*
    # of x**y to be exactly representable with as little effort as
    # possible.  So we look for cheap and easy tests that
    # eliminate the possibility of x**y being exact.  Only if all
    # these tests are passed do we go on to actually compute x**y.

    # Here's the main idea.  Express y as a rational number m/n, with m and
    # n relatively prime and n>0.  Then for x**y to be exactly
    # representable (at *any* precision), xc must be the nth power of a
    # positive integer and xe must be divisible by n.  If y is negative
    # then additionally xc must be a power of either 2 or 5, hence a power
    # of 2**n or 5**n.
    #
    # There's a limit to how small |y| can be: if y=m/n as above
    # then:
    #
    #  (1) if xc != 1 then for the result to be representable we
    #      need xc**(1/n) >= 2, and hence also xc**|y| >= 2.  So
    #      if |y| <= 1/nbits(xc) then xc < 2**nbits(xc) <=
    #      2**(1/|y|), hence xc**|y| < 2 and the result is not
    #      representable.
    #
    #  (2) if xe != 0, |xe|*(1/n) >= 1, so |xe|*|y| >= 1.  Hence if
    #      |y| < 1/|xe| then the result is not representable.
    #
    # Note that since x is not equal to 1, at least one of (1) and
    # (2) must apply.  Now |y| < 1/nbits(xc) iff |yc|*nbits(xc) <
    # 10**-ye iff len(str(|yc|*nbits(xc)) <= -ye.
    #
    # There's also a limit to how large y can be, at least if it's
    # positive: the normalized result will have coefficient xc**y,
    # so if it's representable then xc**y < 10**p, and y <
    # p/log10(xc).  Hence if y*log10(xc) >= p then the result is
    # not exactly representable.

    # if len(str(abs(yc*xe)) <= -ye then abs(yc*xe) < 10**-ye,
    # so |y| < 1/xe and the result is not representable.
    # Similarly, len(str(abs(yc)*xc_bits)) <= -ye implies |y|
    # < 1/nbits(xc).

    # Normalize both operands so their coefficients have no factor of 10.
    x = _WorkRep(self)
    xc, xe = x.int, x.exp
    while xc % 10 == 0:
        xc //= 10
        xe += 1

    y = _WorkRep(other)
    yc, ye = y.int, y.exp
    while yc % 10 == 0:
        yc //= 10
        ye += 1

    # case where xc == 1: result is 10**(xe*y), with xe*y
    # required to be an integer
    if xc == 1:
        xe *= yc
        # result is now 10**(xe * 10**ye);  xe * 10**ye must be integral
        while xe % 10 == 0:
            xe //= 10
            ye += 1
        if ye < 0:
            return None
        exponent = xe * 10**ye
        if y.sign == 1:
            exponent = -exponent
        # if other is a nonnegative integer, use ideal exponent
        if other._isinteger() and other._sign == 0:
            ideal_exponent = self._exp*int(other)
            zeros = min(exponent-ideal_exponent, p-1)
        else:
            zeros = 0
        return _dec_from_triple(0, '1' + '0'*zeros, exponent-zeros)

    # case where y is negative: xc must be either a power
    # of 2 or a power of 5.
    if y.sign == 1:
        last_digit = xc % 10
        if last_digit in (2,4,6,8):
            # quick test for power of 2
            if xc & -xc != xc:
                return None
            # now xc is a power of 2; e is its exponent
            e = _nbits(xc)-1

            # We now have:
            #
            #   x = 2**e * 10**xe, e > 0, and y < 0.
            #
            # The exact result is:
            #
            #   x**y = 5**(-e*y) * 10**(e*y + xe*y)
            #
            # provided that both e*y and xe*y are integers.  Note that if
            # 5**(-e*y) >= 10**p, then the result can't be expressed
            # exactly with p digits of precision.
            #
            # Using the above, we can guard against large values of ye.
            # 93/65 is an upper bound for log(10)/log(5), so if
            #
            #   ye >= len(str(93*p//65))
            #
            # then
            #
            #   -e*y >= -y >= 10**ye > 93*p/65 > p*log(10)/log(5),
            #
            # so 5**(-e*y) >= 10**p, and the coefficient of the result
            # can't be expressed in p digits.

            # emax >= largest e such that 5**e < 10**p.
            emax = p*93//65
            if ye >= len(str(emax)):
                return None

            # Find -e*y and -xe*y; both must be integers
            e = _decimal_lshift_exact(e * yc, ye)
            xe = _decimal_lshift_exact(xe * yc, ye)
            if e is None or xe is None:
                return None

            if e > emax:
                return None
            xc = 5**e

        elif last_digit == 5:
            # e >= log_5(xc) if xc is a power of 5; we have
            # equality all the way up to xc=5**2658
            e = _nbits(xc)*28//65
            xc, remainder = divmod(5**e, xc)
            if remainder:
                return None
            while xc % 5 == 0:
                xc //= 5
                e -= 1

            # Guard against large values of ye, using the same logic as in
            # the 'xc is a power of 2' branch.  10/3 is an upper bound for
            # log(10)/log(2).
            emax = p*10//3
            if ye >= len(str(emax)):
                return None

            e = _decimal_lshift_exact(e * yc, ye)
            xe = _decimal_lshift_exact(xe * yc, ye)
            if e is None or xe is None:
                return None

            if e > emax:
                return None
            xc = 2**e
        else:
            return None

        if xc >= 10**p:
            return None
        xe = -e-xe
        return _dec_from_triple(0, str(xc), xe)

    # now y is positive; find m and n such that y = m/n
    if ye >= 0:
        m, n = yc*10**ye, 1
    else:
        if xe != 0 and len(str(abs(yc*xe))) <= -ye:
            return None
        xc_bits = _nbits(xc)
        if xc != 1 and len(str(abs(yc)*xc_bits)) <= -ye:
            return None
        m, n = yc, 10**(-ye)
        # reduce m/n to lowest terms (only factors 2 and 5 can occur,
        # since n is a power of 10 and yc has no factor of 10)
        while m % 2 == n % 2 == 0:
            m //= 2
            n //= 2
        while m % 5 == n % 5 == 0:
            m //= 5
            n //= 5

    # compute nth root of xc*10**xe
    if n > 1:
        # if 1 < xc < 2**n then xc isn't an nth power
        if xc != 1 and xc_bits <= n:
            return None

        xe, rem = divmod(xe, n)
        if rem != 0:
            return None

        # compute nth root of xc using Newton's method
        a = 1 << -(-_nbits(xc)//n) # initial estimate
        while True:
            q, r = divmod(xc, a**(n-1))
            if a <= q:
                break
            else:
                a = (a*(n-1) + q)//n

        # result is exact iff the iteration converged to an exact root
        if not (a == q and r == 0):
            return None
        xc = a

    # now xc*10**xe is the nth root of the original xc*10**xe
    # compute mth power of xc*10**xe

    # if m > p*100//_log10_lb(xc) then m > p/log10(xc), hence xc**m >
    # 10**p and the result is not representable.
    if xc > 1 and m > p*100//_log10_lb(xc):
        return None
    xc = xc**m
    xe *= m
    if xc > 10**p:
        return None

    # by this point the result *is* exactly representable
    # adjust the exponent to get as close as possible to the ideal
    # exponent, if necessary
    str_xc = str(xc)
    if other._isinteger() and other._sign == 0:
        ideal_exponent = self._exp*int(other)
        zeros = min(xe-ideal_exponent, p-len(str_xc))
    else:
        zeros = 0
    return _dec_from_triple(0, str_xc+'0'*zeros, xe-zeros)
    def __pow__(self, other, modulo=None, context=None):
        """Return self ** other [ % modulo].

        With two arguments, compute self**other.

        With three arguments, compute (self**other) % modulo.  For the
        three argument form, the following restrictions on the
        arguments hold:

         - all three arguments must be integral
         - other must be nonnegative
         - either self or other (or both) must be nonzero
         - modulo must be nonzero and must have at most p digits,
           where p is the context precision.

        If any of these restrictions is violated the InvalidOperation
        flag is raised.

        The result of pow(self, other, modulo) is identical to the
        result that would be obtained by computing (self**other) %
        modulo with unbounded precision, but is computed more
        efficiently.  It is always exact.
        """
        # three-argument pow is handled entirely by a separate helper
        if modulo is not None:
            return self._power_modulo(other, modulo, context)

        other = _convert_other(other)
        if other is NotImplemented:
            return other

        if context is None:
            context = getcontext()

        # either argument is a NaN => result is NaN
        ans = self._check_nans(other, context)
        if ans:
            return ans

        # 0**0 = NaN (!), x**0 = 1 for nonzero x (including +/-Infinity)
        if not other:
            if not self:
                return context._raise_error(InvalidOperation, '0 ** 0')
            else:
                return _One

        # result has sign 1 iff self._sign is 1 and other is an odd integer
        result_sign = 0
        if self._sign == 1:
            if other._isinteger():
                if not other._iseven():
                    result_sign = 1
            else:
                # -ve**noninteger = NaN
                # (-0)**noninteger = 0**noninteger
                if self:
                    return context._raise_error(InvalidOperation,
                        'x ** y with x negative and y not an integer')
            # negate self, without doing any unwanted rounding
            self = self.copy_negate()

        # 0**(+ve or Inf)= 0; 0**(-ve or -Inf) = Infinity
        if not self:
            if other._sign == 0:
                return _dec_from_triple(result_sign, '0', 0)
            else:
                return _SignedInfinity[result_sign]

        # Inf**(+ve or Inf) = Inf; Inf**(-ve or -Inf) = 0
        if self._isinfinity():
            if other._sign == 0:
                return _SignedInfinity[result_sign]
            else:
                return _dec_from_triple(result_sign, '0', 0)

        # 1**other = 1, but the choice of exponent and the flags
        # depend on the exponent of self, and on whether other is a
        # positive integer, a negative integer, or neither
        if self == _One:
            if other._isinteger():
                # exp = max(self._exp*max(int(other), 0),
                # 1-context.prec) but evaluating int(other) directly
                # is dangerous until we know other is small (other
                # could be 1e999999999)
                if other._sign == 1:
                    multiplier = 0
                elif other > context.prec:
                    multiplier = context.prec
                else:
                    multiplier = int(other)

                exp = self._exp * multiplier
                if exp < 1-context.prec:
                    exp = 1-context.prec
                    context._raise_error(Rounded)
            else:
                context._raise_error(Inexact)
                context._raise_error(Rounded)
                exp = 1-context.prec

            return _dec_from_triple(result_sign, '1'+'0'*-exp, exp)

        # compute adjusted exponent of self
        self_adj = self.adjusted()

        # self ** infinity is infinity if self > 1, 0 if self < 1
        # self ** -infinity is infinity if self < 1, 0 if self > 1
        if other._isinfinity():
            if (other._sign == 0) == (self_adj < 0):
                return _dec_from_triple(result_sign, '0', 0)
            else:
                return _SignedInfinity[result_sign]

        # from here on, the result always goes through the call
        # to _fix at the end of this function.
        ans = None
        exact = False

        # crude test to catch cases of extreme overflow/underflow.  If
        # log10(self)*other >= 10**bound and bound >= len(str(Emax))
        # then 10**bound >= 10**len(str(Emax)) >= Emax+1 and hence
        # self**other >= 10**(Emax+1), so overflow occurs.  The test
        # for underflow is similar.
        bound = self._log10_exp_bound() + other.adjusted()
        if (self_adj >= 0) == (other._sign == 0):
            # self > 1 and other +ve, or self < 1 and other -ve
            # possibility of overflow
            if bound >= len(str(context.Emax)):
                ans = _dec_from_triple(result_sign, '1', context.Emax+1)
        else:
            # self > 1 and other -ve, or self < 1 and other +ve
            # possibility of underflow to 0
            Etiny = context.Etiny()
            if bound >= len(str(-Etiny)):
                ans = _dec_from_triple(result_sign, '1', Etiny-1)

        # try for an exact result with precision +1
        if ans is None:
            ans = self._power_exact(other, context.prec + 1)
            if ans is not None:
                if result_sign == 1:
                    ans = _dec_from_triple(1, ans._int, ans._exp)
                exact = True

        # usual case: inexact result, x**y computed directly as exp(y*log(x))
        if ans is None:
            p = context.prec
            x = _WorkRep(self)
            xc, xe = x.int, x.exp
            y = _WorkRep(other)
            yc, ye = y.int, y.exp
            if y.sign == 1:
                yc = -yc

            # compute correctly rounded result:  start with precision +3,
            # then increase precision until result is unambiguously roundable
            extra = 3
            while True:
                coeff, exp = _dpower(xc, xe, yc, ye, p+extra)
                # result is unambiguously roundable once the last p+1st
                # digit and beyond are not exactly a half-way pattern
                if coeff % (5*10**(len(str(coeff))-p-1)):
                    break
                extra += 3

            ans = _dec_from_triple(result_sign, str(coeff), exp)

        # unlike exp, ln and log10, the power function respects the
        # rounding mode; no need to switch to ROUND_HALF_EVEN here

        # There's a difficulty here when 'other' is not an integer and
        # the result is exact.  In this case, the specification
        # requires that the Inexact flag be raised (in spite of
        # exactness), but since the result is exact _fix won't do this
        # for us.  (Correspondingly, the Underflow signal should also
        # be raised for subnormal results.)  We can't directly raise
        # these signals either before or after calling _fix, since
        # that would violate the precedence for signals.  So we wrap
        # the ._fix call in a temporary context, and reraise
        # afterwards.
        if exact and not other._isinteger():
            # pad with zeros up to length context.prec+1 if necessary; this
            # ensures that the Rounded signal will be raised.
            if len(ans._int) <= context.prec:
                expdiff = context.prec + 1 - len(ans._int)
                ans = _dec_from_triple(ans._sign, ans._int+'0'*expdiff,
                                       ans._exp-expdiff)

            # create a copy of the current context, with cleared flags/traps
            newcontext = context.copy()
            newcontext.clear_flags()
            for exception in _signals:
                newcontext.traps[exception] = 0

            # round in the new context
            ans = ans._fix(newcontext)

            # raise Inexact, and if necessary, Underflow
            newcontext._raise_error(Inexact)
            if newcontext.flags[Subnormal]:
                newcontext._raise_error(Underflow)

            # propagate signals to the original context; _fix could
            # have raised any of Overflow, Underflow, Subnormal,
            # Inexact, Rounded, Clamped.  Overflow needs the correct
            # arguments.  Note that the order of the exceptions is
            # important here.
            if newcontext.flags[Overflow]:
                context._raise_error(Overflow, 'above Emax', ans._sign)
            for exception in Underflow, Subnormal, Inexact, Rounded, Clamped:
                if newcontext.flags[exception]:
                    context._raise_error(exception)

        else:
            ans = ans._fix(context)

        return ans
def __rpow__(self, other, context=None):
"""Swaps self/other and returns __pow__."""
other = _convert_other(other)
if other is NotImplemented:
return other
return other.__pow__(self, context=context)
def normalize(self, context=None):
"""Normalize- strip trailing 0s, change anything equal to 0 to 0e0"""
if context is None:
context = getcontext()
if self._is_special:
ans = self._check_nans(context=context)
if ans:
return ans
dup = self._fix(context)
if dup._isinfinity():
return dup
if not dup:
return _dec_from_triple(dup._sign, '0', 0)
exp_max = [context.Emax, context.Etop()][context.clamp]
end = len(dup._int)
exp = dup._exp
while dup._int[end-1] == '0' and exp < exp_max:
exp += 1
end -= 1
return _dec_from_triple(dup._sign, dup._int[:end], exp)
    def quantize(self, exp, rounding=None, context=None):
        """Quantize self so its exponent is the same as that of exp.

        Similar to self._rescale(exp._exp) but with error checking:
        out-of-range target exponents, oversized results, and mixed
        infinity operands all signal InvalidOperation.
        """
        exp = _convert_other(exp, raiseit=True)

        if context is None:
            context = getcontext()
        if rounding is None:
            rounding = context.rounding

        if self._is_special or exp._is_special:
            ans = self._check_nans(exp, context)
            if ans:
                return ans

            if exp._isinfinity() or self._isinfinity():
                if exp._isinfinity() and self._isinfinity():
                    return Decimal(self)  # if both are inf, it is OK
                return context._raise_error(InvalidOperation,
                                        'quantize with one INF')

        # exp._exp should be between Etiny and Emax
        if not (context.Etiny() <= exp._exp <= context.Emax):
            return context._raise_error(InvalidOperation,
                   'target exponent out of bounds in quantize')

        if not self:
            ans = _dec_from_triple(self._sign, '0', exp._exp)
            return ans._fix(context)

        # cheap pre-checks before rescaling: the result cannot fit if
        # self's adjusted exponent already exceeds Emax, or if it would
        # need more than prec digits
        self_adjusted = self.adjusted()
        if self_adjusted > context.Emax:
            return context._raise_error(InvalidOperation,
                                        'exponent of quantize result too large for current context')
        if self_adjusted - exp._exp + 1 > context.prec:
            return context._raise_error(InvalidOperation,
                                        'quantize result has too many digits for current context')

        ans = self._rescale(exp._exp, rounding)
        # re-check after rounding: rounding up can carry into an extra
        # digit (e.g. 9.99 -> 10.0), pushing the result out of range
        if ans.adjusted() > context.Emax:
            return context._raise_error(InvalidOperation,
                                        'exponent of quantize result too large for current context')
        if len(ans._int) > context.prec:
            return context._raise_error(InvalidOperation,
                                        'quantize result has too many digits for current context')

        # raise appropriate flags
        if ans and ans.adjusted() < context.Emin:
            context._raise_error(Subnormal)
        if ans._exp > self._exp:
            if ans != self:
                context._raise_error(Inexact)
            context._raise_error(Rounded)

        # call to fix takes care of any necessary folddown, and
        # signals Clamped if necessary
        ans = ans._fix(context)
        return ans
def same_quantum(self, other, context=None):
"""Return True if self and other have the same exponent; otherwise
return False.
If either operand is a special value, the following rules are used:
* return True if both operands are infinities
* return True if both operands are NaNs
* otherwise, return False.
"""
other = _convert_other(other, raiseit=True)
if self._is_special or other._is_special:
return (self.is_nan() and other.is_nan() or
self.is_infinite() and other.is_infinite())
return self._exp == other._exp
    def _rescale(self, exp, rounding):
        """Rescale self so that the exponent is exp, either by padding with zeros
        or by truncating digits, using the given rounding mode.

        Specials are returned without change.  This operation is
        quiet: it raises no flags, and uses no information from the
        context.

        exp = exp to scale to (an integer)
        rounding = rounding mode
        """
        if self._is_special:
            return Decimal(self)
        if not self:
            return _dec_from_triple(self._sign, '0', exp)

        if self._exp >= exp:
            # pad answer with zeros if necessary
            return _dec_from_triple(self._sign,
                                        self._int + '0'*(self._exp - exp), exp)

        # too many digits; round and lose data.  If self.adjusted() <
        # exp-1, replace self by 10**(exp-1) before rounding
        digits = len(self._int) + self._exp - exp
        if digits < 0:
            self = _dec_from_triple(self._sign, '1', exp-1)
            digits = 0
        # dispatch on the rounding mode; the helper reports whether the
        # retained digits must be incremented (1) or left alone
        this_function = self._pick_rounding_function[rounding]
        changed = this_function(self, digits)
        coeff = self._int[:digits] or '0'
        if changed == 1:
            # round away from zero: bump the retained coefficient
            coeff = str(int(coeff)+1)
        return _dec_from_triple(self._sign, coeff, exp)
def _round(self, places, rounding):
"""Round a nonzero, nonspecial Decimal to a fixed number of
significant figures, using the given rounding mode.
Infinities, NaNs and zeros are returned unaltered.
This operation is quiet: it raises no flags, and uses no
information from the context.
"""
if places <= 0:
raise ValueError("argument should be at least 1 in _round")
if self._is_special or not self:
return Decimal(self)
ans = self._rescale(self.adjusted()+1-places, rounding)
# it can happen that the rescale alters the adjusted exponent;
# for example when rounding 99.97 to 3 significant figures.
# When this happens we end up with an extra 0 at the end of
# the number; a second rescale fixes this.
if ans.adjusted() != self.adjusted():
ans = ans._rescale(ans.adjusted()+1-places, rounding)
return ans
def to_integral_exact(self, rounding=None, context=None):
"""Rounds to a nearby integer.
If no rounding mode is specified, take the rounding mode from
the context. This method raises the Rounded and Inexact flags
when appropriate.
See also: to_integral_value, which does exactly the same as
this method except that it doesn't raise Inexact or Rounded.
"""
if self._is_special:
ans = self._check_nans(context=context)
if ans:
return ans
return Decimal(self)
if self._exp >= 0:
return Decimal(self)
if not self:
return _dec_from_triple(self._sign, '0', 0)
if context is None:
context = getcontext()
if rounding is None:
rounding = context.rounding
ans = self._rescale(0, rounding)
if ans != self:
context._raise_error(Inexact)
context._raise_error(Rounded)
return ans
def to_integral_value(self, rounding=None, context=None):
"""Rounds to the nearest integer, without raising inexact, rounded."""
if context is None:
context = getcontext()
if rounding is None:
rounding = context.rounding
if self._is_special:
ans = self._check_nans(context=context)
if ans:
return ans
return Decimal(self)
if self._exp >= 0:
return Decimal(self)
else:
return self._rescale(0, rounding)
# the method name changed, but we provide also the old one, for compatibility
to_integral = to_integral_value
    def sqrt(self, context=None):
        """Return the square root of self, correctly rounded."""
        if context is None:
            context = getcontext()

        if self._is_special:
            ans = self._check_nans(context=context)
            if ans:
                return ans

            if self._isinfinity() and self._sign == 0:
                return Decimal(self)

        if not self:
            # exponent = self._exp // 2.  sqrt(-0) = -0
            ans = _dec_from_triple(self._sign, '0', self._exp // 2)
            return ans._fix(context)

        if self._sign == 1:
            return context._raise_error(InvalidOperation, 'sqrt(-x), x > 0')

        # At this point self represents a positive number.  Let p be
        # the desired precision and express self in the form c*100**e
        # with c a positive real number and e an integer, c and e
        # being chosen so that 100**(p-1) <= c < 100**p.  Then the
        # (exact) square root of self is sqrt(c)*10**e, and 10**(p-1)
        # <= sqrt(c) < 10**p, so the closest representable Decimal at
        # precision p is n*10**e where n = round_half_even(sqrt(c)),
        # the closest integer to sqrt(c) with the even integer chosen
        # in the case of a tie.
        #
        # To ensure correct rounding in all cases, we use the
        # following trick: we compute the square root to an extra
        # place (precision p+1 instead of precision p), rounding down.
        # Then, if the result is inexact and its last digit is 0 or 5,
        # we increase the last digit to 1 or 6 respectively; if it's
        # exact we leave the last digit alone.  Now the final round to
        # p places (or fewer in the case of underflow) will round
        # correctly and raise the appropriate flags.

        # use an extra digit of precision
        prec = context.prec+1

        # write argument in the form c*100**e where e = self._exp//2
        # is the 'ideal' exponent, to be used if the square root is
        # exactly representable.  l is the number of 'digits' of c in
        # base 100, so that 100**(l-1) <= c < 100**l.
        op = _WorkRep(self)
        e = op.exp >> 1
        if op.exp & 1:
            c = op.int * 10
            l = (len(self._int) >> 1) + 1
        else:
            c = op.int
            l = len(self._int)+1 >> 1

        # rescale so that c has exactly prec base 100 'digits'
        shift = prec-l
        if shift >= 0:
            c *= 100**shift
            exact = True
        else:
            c, remainder = divmod(c, 100**-shift)
            exact = not remainder
        e -= shift

        # find n = floor(sqrt(c)) using Newton's method: the iteration
        # n <- (n + c//n)//2 converges downwards to floor(sqrt(c))
        n = 10**prec
        while True:
            q = c//n
            if n <= q:
                break
            else:
                n = n + q >> 1
        exact = exact and n*n == c

        if exact:
            # result is exact; rescale to use ideal exponent e
            if shift >= 0:
                # assert n % 10**shift == 0
                n //= 10**shift
            else:
                n *= 10**-shift
            e += shift
        else:
            # result is not exact; fix last digit as described above
            if n % 5 == 0:
                n += 1

        ans = _dec_from_triple(0, str(n), e)

        # round, and fit to current context
        context = context._shallow_copy()
        rounding = context._set_rounding(ROUND_HALF_EVEN)
        ans = ans._fix(context)
        context.rounding = rounding

        return ans
def max(self, other, context=None):
"""Returns the larger value.
Like max(self, other) except if one is not a number, returns
NaN (and signals if one is sNaN). Also rounds.
"""
other = _convert_other(other, raiseit=True)
if context is None:
context = getcontext()
if self._is_special or other._is_special:
# If one operand is a quiet NaN and the other is number, then the
# number is always returned
sn = self._isnan()
on = other._isnan()
if sn or on:
if on == 1 and sn == 0:
return self._fix(context)
if sn == 1 and on == 0:
return other._fix(context)
return self._check_nans(other, context)
c = self._cmp(other)
if c == 0:
# If both operands are finite and equal in numerical value
# then an ordering is applied:
#
# If the signs differ then max returns the operand with the
# positive sign and min returns the operand with the negative sign
#
# If the signs are the same then the exponent is used to select
# the result. This is exactly the ordering used in compare_total.
c = self.compare_total(other)
if c == -1:
ans = other
else:
ans = self
return ans._fix(context)
def min(self, other, context=None):
"""Returns the smaller value.
Like min(self, other) except if one is not a number, returns
NaN (and signals if one is sNaN). Also rounds.
"""
other = _convert_other(other, raiseit=True)
if context is None:
context = getcontext()
if self._is_special or other._is_special:
# If one operand is a quiet NaN and the other is number, then the
# number is always returned
sn = self._isnan()
on = other._isnan()
if sn or on:
if on == 1 and sn == 0:
return self._fix(context)
if sn == 1 and on == 0:
return other._fix(context)
return self._check_nans(other, context)
c = self._cmp(other)
if c == 0:
c = self.compare_total(other)
if c == -1:
ans = self
else:
ans = other
return ans._fix(context)
def _isinteger(self):
"""Returns whether self is an integer"""
if self._is_special:
return False
if self._exp >= 0:
return True
rest = self._int[self._exp:]
return rest == '0'*len(rest)
def _iseven(self):
"""Returns True if self is even. Assumes self is an integer."""
if not self or self._exp > 0:
return True
return self._int[-1+self._exp] in '02468'
def adjusted(self):
"""Return the adjusted exponent of self"""
try:
return self._exp + len(self._int) - 1
# If NaN or Infinity, self._exp is string
except TypeError:
return 0
    def canonical(self):
        """Returns the same Decimal object.

        As we do not have different encodings for the same number, the
        received object already is in its canonical form.
        """
        # No-op by construction: every Decimal is stored canonically.
        return self
def compare_signal(self, other, context=None):
"""Compares self to the other operand numerically.
It's pretty much like compare(), but all NaNs signal, with signaling
NaNs taking precedence over quiet NaNs.
"""
other = _convert_other(other, raiseit = True)
ans = self._compare_check_nans(other, context)
if ans:
return ans
return self.compare(other, context=context)
    def compare_total(self, other, context=None):
        """Compares self to other using the abstract representations.

        This is not like the standard compare, which use their numerical
        value. Note that a total ordering is defined for all possible abstract
        representations: -NaN < -sNaN < finite negatives < -0 < +0 <
        finite positives < +sNaN < +NaN, with ties between equal values
        broken by the exponent.
        """
        other = _convert_other(other, raiseit=True)

        # if one is negative and the other is positive, it's easy
        if self._sign and not other._sign:
            return _NegativeOne
        if not self._sign and other._sign:
            return _One
        sign = self._sign

        # let's handle both NaN types
        self_nan = self._isnan()
        other_nan = other._isnan()
        if self_nan or other_nan:
            if self_nan == other_nan:
                # compare payloads as though they're integers
                self_key = len(self._int), self._int
                other_key = len(other._int), other._int
                if self_key < other_key:
                    if sign:
                        return _One
                    else:
                        return _NegativeOne
                if self_key > other_key:
                    if sign:
                        return _NegativeOne
                    else:
                        return _One
                return _Zero

            # Different NaN kinds: a (quiet) NaN sorts further from zero
            # than an sNaN.  NOTE(review): relies on _isnan's 1/2 return
            # encoding for quiet/signaling -- confirm against _isnan.
            if sign:
                if self_nan == 1:
                    return _NegativeOne
                if other_nan == 1:
                    return _One
                if self_nan == 2:
                    return _NegativeOne
                if other_nan == 2:
                    return _One
            else:
                if self_nan == 1:
                    return _One
                if other_nan == 1:
                    return _NegativeOne
                if self_nan == 2:
                    return _One
                if other_nan == 2:
                    return _NegativeOne

        # numeric comparison first ...
        if self < other:
            return _NegativeOne
        if self > other:
            return _One

        # ... then, for equal values, the exponent breaks the tie
        # (smaller exponent sorts larger for negatives, smaller for
        # positives).
        if self._exp < other._exp:
            if sign:
                return _One
            else:
                return _NegativeOne
        if self._exp > other._exp:
            if sign:
                return _NegativeOne
            else:
                return _One
        return _Zero
def compare_total_mag(self, other, context=None):
"""Compares self to other using abstract repr., ignoring sign.
Like compare_total, but with operand's sign ignored and assumed to be 0.
"""
other = _convert_other(other, raiseit=True)
s = self.copy_abs()
o = other.copy_abs()
return s.compare_total(o)
    def copy_abs(self):
        """Returns a copy with the sign set to 0. """
        # Quiet operation: applies to NaNs/infinities too, raises no flags.
        return _dec_from_triple(0, self._int, self._exp, self._is_special)
def copy_negate(self):
"""Returns a copy with the sign inverted."""
if self._sign:
return _dec_from_triple(0, self._int, self._exp, self._is_special)
else:
return _dec_from_triple(1, self._int, self._exp, self._is_special)
def copy_sign(self, other, context=None):
"""Returns self with the sign of other."""
other = _convert_other(other, raiseit=True)
return _dec_from_triple(other._sign, self._int,
self._exp, self._is_special)
    def exp(self, context=None):
        """Returns e ** self, correctly rounded."""
        if context is None:
            context = getcontext()

        # exp(NaN) = NaN
        ans = self._check_nans(context=context)
        if ans:
            return ans

        # exp(-Infinity) = 0
        if self._isinfinity() == -1:
            return _Zero

        # exp(0) = 1
        if not self:
            return _One

        # exp(Infinity) = Infinity
        if self._isinfinity() == 1:
            return Decimal(self)

        # the result is now guaranteed to be inexact (the true
        # mathematical result is transcendental). There's no need to
        # raise Rounded and Inexact here---they'll always be raised as
        # a result of the call to _fix.
        p = context.prec
        adj = self.adjusted()

        # we only need to do any computation for quite a small range
        # of adjusted exponents---for example, -29 <= adj <= 10 for
        # the default context.  For smaller exponent the result is
        # indistinguishable from 1 at the given precision, while for
        # larger exponent the result either overflows or underflows.
        if self._sign == 0 and adj > len(str((context.Emax+1)*3)):
            # overflow
            ans = _dec_from_triple(0, '1', context.Emax+1)
        elif self._sign == 1 and adj > len(str((-context.Etiny()+1)*3)):
            # underflow to 0
            ans = _dec_from_triple(0, '1', context.Etiny()-1)
        elif self._sign == 0 and adj < -p:
            # p+1 digits; final round will raise correct flags
            ans = _dec_from_triple(0, '1' + '0'*(p-1) + '1', -p)
        elif self._sign == 1 and adj < -p-1:
            # p+1 digits; final round will raise correct flags
            ans = _dec_from_triple(0, '9'*(p+1), -p-1)
        # general case
        else:
            op = _WorkRep(self)
            c, e = op.int, op.exp
            if op.sign == 1:
                c = -c

            # compute correctly rounded result: increase precision by
            # 3 digits at a time until we get an unambiguously
            # roundable result
            extra = 3
            while True:
                coeff, exp = _dexp(c, e, p+extra)
                # roundable once the digits beyond position p are not a
                # half-way 500...0 pattern
                if coeff % (5*10**(len(str(coeff))-p-1)):
                    break
                extra += 3

            ans = _dec_from_triple(0, str(coeff), exp)

        # at this stage, ans should round correctly with *any*
        # rounding mode, not just with ROUND_HALF_EVEN
        context = context._shallow_copy()
        rounding = context._set_rounding(ROUND_HALF_EVEN)
        ans = ans._fix(context)
        context.rounding = rounding

        return ans
    def is_canonical(self):
        """Return True if self is canonical; otherwise return False.

        Currently, the encoding of a Decimal instance is always
        canonical, so this method returns True for any Decimal.
        """
        return True
    def is_finite(self):
        """Return True if self is finite; otherwise return False.

        A Decimal instance is considered finite if it is neither
        infinite nor a NaN.
        """
        # _is_special is set exactly for infinities and NaNs.
        return not self._is_special
    def is_infinite(self):
        """Return True if self is infinite; otherwise return False."""
        # Infinities are encoded with the string 'F' in the _exp slot.
        return self._exp == 'F'
    def is_nan(self):
        """Return True if self is a qNaN or sNaN; otherwise return False."""
        # 'n' encodes a quiet NaN, 'N' a signaling NaN.
        return self._exp in ('n', 'N')
def is_normal(self, context=None):
"""Return True if self is a normal number; otherwise return False."""
if self._is_special or not self:
return False
if context is None:
context = getcontext()
return context.Emin <= self.adjusted()
    def is_qnan(self):
        """Return True if self is a quiet NaN; otherwise return False."""
        # 'n' in the _exp slot encodes a quiet NaN.
        return self._exp == 'n'
    def is_signed(self):
        """Return True if self is negative; otherwise return False."""
        # _sign is 1 for any negative value, including -0 and -Infinity.
        return self._sign == 1
    def is_snan(self):
        """Return True if self is a signaling NaN; otherwise return False."""
        # 'N' in the _exp slot encodes a signaling NaN.
        return self._exp == 'N'
def is_subnormal(self, context=None):
"""Return True if self is subnormal; otherwise return False."""
if self._is_special or not self:
return False
if context is None:
context = getcontext()
return self.adjusted() < context.Emin
    def is_zero(self):
        """Return True if self is a zero; otherwise return False."""
        # A zero is any finite number whose coefficient is the digit '0'.
        return not self._is_special and self._int == '0'
    def _ln_exp_bound(self):
        """Compute a lower bound for the adjusted exponent of self.ln().
        In other words, compute r such that self.ln() >= 10**r. Assumes
        that self is finite and positive and that self != 1.
        """
        # for 0.1 <= x <= 10 we use the inequalities 1-1/x <= ln(x) <= x-1
        adj = self._exp + len(self._int) - 1
        if adj >= 1:
            # argument >= 10; we use 23/10 = 2.3 as a lower bound for ln(10)
            return len(str(adj*23//10)) - 1
        if adj <= -2:
            # argument <= 0.1
            return len(str((-1-adj)*23//10)) - 1
        op = _WorkRep(self)
        c, e = op.int, op.exp
        if adj == 0:
            # 1 < self < 10; ln(x) >= 1-1/x = (c-10**-e)/c
            num = str(c-10**-e)
            den = str(c)
            # the (num < den) term subtracts 1 when the leading digits of
            # num/den are < 1, giving floor(log10(num/den))
            return len(num) - len(den) - (num < den)
        # adj == -1, 0.1 <= self < 1; |ln(x)| >= 1-x = (10**-e - c)*10**e
        return e + len(str(10**-e - c)) - 1
    def ln(self, context=None):
        """Returns the natural (base e) logarithm of self, correctly rounded."""
        if context is None:
            context = getcontext()

        # ln(NaN) = NaN
        ans = self._check_nans(context=context)
        if ans:
            return ans

        # ln(0.0) == -Infinity
        if not self:
            return _NegativeInfinity

        # ln(Infinity) = Infinity
        if self._isinfinity() == 1:
            return _Infinity

        # ln(1.0) == 0.0
        if self == _One:
            return _Zero

        # ln(negative) raises InvalidOperation
        if self._sign == 1:
            return context._raise_error(InvalidOperation,
                                        'ln of a negative value')

        # result is irrational, so necessarily inexact
        op = _WorkRep(self)
        c, e = op.int, op.exp
        p = context.prec

        # correctly rounded result: repeatedly increase precision by 3
        # until we get an unambiguously roundable result
        places = p - self._ln_exp_bound() + 2 # at least p+3 places
        while True:
            coeff = _dlog(c, e, places)
            # assert len(str(abs(coeff)))-p >= 1
            # roundable once the tail digits are not a half-way pattern
            if coeff % (5*10**(len(str(abs(coeff)))-p-1)):
                break
            places += 3
        ans = _dec_from_triple(int(coeff<0), str(abs(coeff)), -places)

        # round with HALF_EVEN, then restore the caller's rounding mode
        context = context._shallow_copy()
        rounding = context._set_rounding(ROUND_HALF_EVEN)
        ans = ans._fix(context)
        context.rounding = rounding
        return ans
    def _log10_exp_bound(self):
        """Compute a lower bound for the adjusted exponent of self.log10().
        In other words, find r such that self.log10() >= 10**r.
        Assumes that self is finite and positive and that self != 1.
        """
        # For x >= 10 or x < 0.1 we only need a bound on the integer
        # part of log10(self), and this comes directly from the
        # exponent of x.  For 0.1 <= x <= 10 we use the inequalities
        # 1-1/x <= log(x) <= x-1. If x > 1 we have |log10(x)| >
        # (1-1/x)/2.31 > 0.  If x < 1 then |log10(x)| > (1-x)/2.31 > 0
        adj = self._exp + len(self._int) - 1
        if adj >= 1:
            # self >= 10
            return len(str(adj))-1
        if adj <= -2:
            # self < 0.1
            return len(str(-1-adj))-1
        op = _WorkRep(self)
        c, e = op.int, op.exp
        if adj == 0:
            # 1 < self < 10; bound (c - 10**-e)/(231*c), scaled by 100
            # (hence the +2) since 2.31 is used as 231/100
            num = str(c-10**-e)
            den = str(231*c)
            return len(num) - len(den) - (num < den) + 2
        # adj == -1, 0.1 <= self < 1
        num = str(10**-e-c)
        return len(num) + e - (num < "231") - 1
    def log10(self, context=None):
        """Returns the base 10 logarithm of self, correctly rounded."""
        if context is None:
            context = getcontext()

        # log10(NaN) = NaN
        ans = self._check_nans(context=context)
        if ans:
            return ans

        # log10(0.0) == -Infinity
        if not self:
            return _NegativeInfinity

        # log10(Infinity) = Infinity
        if self._isinfinity() == 1:
            return _Infinity

        # log10(negative or -Infinity) raises InvalidOperation
        if self._sign == 1:
            return context._raise_error(InvalidOperation,
                                        'log10 of a negative value')

        # log10(10**n) = n
        if self._int[0] == '1' and self._int[1:] == '0'*(len(self._int) - 1):
            # answer may need rounding
            ans = Decimal(self._exp + len(self._int) - 1)
        else:
            # result is irrational, so necessarily inexact
            op = _WorkRep(self)
            c, e = op.int, op.exp
            p = context.prec

            # correctly rounded result: repeatedly increase precision
            # until result is unambiguously roundable
            places = p-self._log10_exp_bound()+2
            while True:
                coeff = _dlog10(c, e, places)
                # assert len(str(abs(coeff)))-p >= 1
                if coeff % (5*10**(len(str(abs(coeff)))-p-1)):
                    break
                places += 3
            ans = _dec_from_triple(int(coeff<0), str(abs(coeff)), -places)

        # round with HALF_EVEN, then restore the caller's rounding mode
        context = context._shallow_copy()
        rounding = context._set_rounding(ROUND_HALF_EVEN)
        ans = ans._fix(context)
        context.rounding = rounding
        return ans
def logb(self, context=None):
""" Returns the exponent of the magnitude of self's MSD.
The result is the integer which is the exponent of the magnitude
of the most significant digit of self (as though it were truncated
to a single digit while maintaining the value of that digit and
without limiting the resulting exponent).
"""
# logb(NaN) = NaN
ans = self._check_nans(context=context)
if ans:
return ans
if context is None:
context = getcontext()
# logb(+/-Inf) = +Inf
if self._isinfinity():
return _Infinity
# logb(0) = -Inf, DivisionByZero
if not self:
return context._raise_error(DivisionByZero, 'logb(0)', 1)
# otherwise, simply return the adjusted exponent of self, as a
# Decimal. Note that no attempt is made to fit the result
# into the current context.
ans = Decimal(self.adjusted())
return ans._fix(context)
def _islogical(self):
"""Return True if self is a logical operand.
For being logical, it must be a finite number with a sign of 0,
an exponent of 0, and a coefficient whose digits must all be
either 0 or 1.
"""
if self._sign != 0 or self._exp != 0:
return False
for dig in self._int:
if dig not in '01':
return False
return True
def _fill_logical(self, context, opa, opb):
dif = context.prec - len(opa)
if dif > 0:
opa = '0'*dif + opa
elif dif < 0:
opa = opa[-context.prec:]
dif = context.prec - len(opb)
if dif > 0:
opb = '0'*dif + opb
elif dif < 0:
opb = opb[-context.prec:]
return opa, opb
def logical_and(self, other, context=None):
"""Applies an 'and' operation between self and other's digits."""
if context is None:
context = getcontext()
other = _convert_other(other, raiseit=True)
if not self._islogical() or not other._islogical():
return context._raise_error(InvalidOperation)
# fill to context.prec
(opa, opb) = self._fill_logical(context, self._int, other._int)
# make the operation, and clean starting zeroes
result = "".join([str(int(a)&int(b)) for a,b in zip(opa,opb)])
return _dec_from_triple(0, result.lstrip('0') or '0', 0)
def logical_invert(self, context=None):
"""Invert all its digits."""
if context is None:
context = getcontext()
return self.logical_xor(_dec_from_triple(0,'1'*context.prec,0),
context)
def logical_or(self, other, context=None):
"""Applies an 'or' operation between self and other's digits."""
if context is None:
context = getcontext()
other = _convert_other(other, raiseit=True)
if not self._islogical() or not other._islogical():
return context._raise_error(InvalidOperation)
# fill to context.prec
(opa, opb) = self._fill_logical(context, self._int, other._int)
# make the operation, and clean starting zeroes
result = "".join([str(int(a)|int(b)) for a,b in zip(opa,opb)])
return _dec_from_triple(0, result.lstrip('0') or '0', 0)
def logical_xor(self, other, context=None):
"""Applies an 'xor' operation between self and other's digits."""
if context is None:
context = getcontext()
other = _convert_other(other, raiseit=True)
if not self._islogical() or not other._islogical():
return context._raise_error(InvalidOperation)
# fill to context.prec
(opa, opb) = self._fill_logical(context, self._int, other._int)
# make the operation, and clean starting zeroes
result = "".join([str(int(a)^int(b)) for a,b in zip(opa,opb)])
return _dec_from_triple(0, result.lstrip('0') or '0', 0)
    def max_mag(self, other, context=None):
        """Compares the values numerically with their sign ignored.

        Returns the operand with the larger absolute value, rounded to
        the context.  A single quiet NaN loses to a number; any other
        NaN combination is handled by the usual NaN rules.
        """
        other = _convert_other(other, raiseit=True)
        if context is None:
            context = getcontext()
        if self._is_special or other._is_special:
            # If one operand is a quiet NaN and the other is number, then the
            # number is always returned
            sn = self._isnan()
            on = other._isnan()
            if sn or on:
                # _isnan() is 1 for a quiet NaN, 2 for a signaling NaN.
                if on == 1 and sn == 0:
                    return self._fix(context)
                if sn == 1 and on == 0:
                    return other._fix(context)
                # Two NaNs, or a signaling NaN: defer to NaN handling.
                return self._check_nans(other, context)
        c = self.copy_abs()._cmp(other.copy_abs())
        if c == 0:
            # Equal magnitudes: break the tie with the total ordering so
            # the result is deterministic (e.g. max_mag(1, -1)).
            c = self.compare_total(other)
        if c == -1:
            ans = other
        else:
            ans = self
        return ans._fix(context)
    def min_mag(self, other, context=None):
        """Compares the values numerically with their sign ignored.

        Returns the operand with the smaller absolute value, rounded to
        the context; NaN handling mirrors max_mag above.
        """
        other = _convert_other(other, raiseit=True)
        if context is None:
            context = getcontext()
        if self._is_special or other._is_special:
            # If one operand is a quiet NaN and the other is number, then the
            # number is always returned
            sn = self._isnan()
            on = other._isnan()
            if sn or on:
                if on == 1 and sn == 0:
                    return self._fix(context)
                if sn == 1 and on == 0:
                    return other._fix(context)
                return self._check_nans(other, context)
        c = self.copy_abs()._cmp(other.copy_abs())
        if c == 0:
            # Equal magnitudes: total ordering breaks the tie.
            c = self.compare_total(other)
        if c == -1:
            ans = self
        else:
            ans = other
        return ans._fix(context)
    def next_minus(self, context=None):
        """Returns the largest representable number smaller than itself."""
        if context is None:
            context = getcontext()
        ans = self._check_nans(context=context)
        if ans:
            return ans
        if self._isinfinity() == -1:
            return _NegativeInfinity
        if self._isinfinity() == 1:
            # Predecessor of +Infinity is the largest finite number.
            return _dec_from_triple(0, '9'*context.prec, context.Etop())
        # Work on a scratch context that rounds towards -Infinity and
        # records no flags, so this operation is side-effect free.
        context = context.copy()
        context._set_rounding(ROUND_FLOOR)
        context._ignore_all_flags()
        new_self = self._fix(context)
        if new_self != self:
            # Rounding down already moved us to the next smaller value.
            return new_self
        # Already representable: subtract a quantum below Etiny, which
        # with ROUND_FLOOR yields the immediate predecessor.
        return self.__sub__(_dec_from_triple(0, '1', context.Etiny()-1),
                            context)
    def next_plus(self, context=None):
        """Returns the smallest representable number larger than itself."""
        if context is None:
            context = getcontext()
        ans = self._check_nans(context=context)
        if ans:
            return ans
        if self._isinfinity() == 1:
            return _Infinity
        if self._isinfinity() == -1:
            # Successor of -Infinity is the most negative finite number.
            return _dec_from_triple(1, '9'*context.prec, context.Etop())
        # Mirror of next_minus: scratch context rounding towards +Infinity.
        context = context.copy()
        context._set_rounding(ROUND_CEILING)
        context._ignore_all_flags()
        new_self = self._fix(context)
        if new_self != self:
            return new_self
        return self.__add__(_dec_from_triple(0, '1', context.Etiny()-1),
                            context)
    def next_toward(self, other, context=None):
        """Returns the number closest to self, in the direction towards other.

        The result is the closest representable number to self
        (excluding self) that is in the direction towards other,
        unless both have the same value.  If the two operands are
        numerically equal, then the result is a copy of self with the
        sign set to be the same as the sign of other.
        """
        other = _convert_other(other, raiseit=True)
        if context is None:
            context = getcontext()
        ans = self._check_nans(other, context)
        if ans:
            return ans
        comparison = self._cmp(other)
        if comparison == 0:
            return self.copy_sign(other)
        if comparison == -1:
            ans = self.next_plus(context)
        else: # comparison == 1
            ans = self.next_minus(context)
        # decide which flags to raise using value of ans; next_plus/minus
        # themselves signal nothing, so all flag handling happens here.
        if ans._isinfinity():
            context._raise_error(Overflow,
                                 'Infinite result from next_toward',
                                 ans._sign)
            context._raise_error(Inexact)
            context._raise_error(Rounded)
        elif ans.adjusted() < context.Emin:
            context._raise_error(Underflow)
            context._raise_error(Subnormal)
            context._raise_error(Inexact)
            context._raise_error(Rounded)
            # if precision == 1 then we don't raise Clamped for a
            # result 0E-Etiny.
            if not ans:
                context._raise_error(Clamped)
        return ans
def number_class(self, context=None):
"""Returns an indication of the class of self.
The class is one of the following strings:
sNaN
NaN
-Infinity
-Normal
-Subnormal
-Zero
+Zero
+Subnormal
+Normal
+Infinity
"""
if self.is_snan():
return "sNaN"
if self.is_qnan():
return "NaN"
inf = self._isinfinity()
if inf == 1:
return "+Infinity"
if inf == -1:
return "-Infinity"
if self.is_zero():
if self._sign:
return "-Zero"
else:
return "+Zero"
if context is None:
context = getcontext()
if self.is_subnormal(context=context):
if self._sign:
return "-Subnormal"
else:
return "+Subnormal"
# just a normal, regular, boring number, :)
if self._sign:
return "-Normal"
else:
return "+Normal"
    def radix(self):
        """Return Decimal(10), the radix (base) of the Decimal type."""
        return Decimal(10)
    def rotate(self, other, context=None):
        """Returns a rotated copy of self, value-of-other times.

        other must be an integral value in [-context.prec, context.prec];
        positive values rotate the coefficient left, negative right.
        """
        if context is None:
            context = getcontext()
        other = _convert_other(other, raiseit=True)
        ans = self._check_nans(other, context)
        if ans:
            return ans
        if other._exp != 0:
            return context._raise_error(InvalidOperation)
        if not (-context.prec <= int(other) <= context.prec):
            return context._raise_error(InvalidOperation)
        if self._isinfinity():
            return Decimal(self)
        # get values; pad (or trim) the coefficient to exactly
        # context.prec digits so the rotation window is well defined
        torot = int(other)
        rotdig = self._int
        topad = context.prec - len(rotdig)
        if topad > 0:
            rotdig = '0'*topad + rotdig
        elif topad < 0:
            rotdig = rotdig[-topad:]
        # let's rotate!  (slicing handles both rotation directions)
        rotated = rotdig[torot:] + rotdig[:torot]
        return _dec_from_triple(self._sign,
                                rotated.lstrip('0') or '0', self._exp)
    def scaleb(self, other, context=None):
        """Returns self operand after adding the second value to its exp.

        other must be integral; the bound below is wide enough that any
        in-range result survives, while out-of-range inputs are rejected
        before the exponent arithmetic could misbehave.
        """
        if context is None:
            context = getcontext()
        other = _convert_other(other, raiseit=True)
        ans = self._check_nans(other, context)
        if ans:
            return ans
        if other._exp != 0:
            return context._raise_error(InvalidOperation)
        liminf = -2 * (context.Emax + context.prec)
        limsup =  2 * (context.Emax + context.prec)
        if not (liminf <= int(other) <= limsup):
            return context._raise_error(InvalidOperation)
        if self._isinfinity():
            return Decimal(self)
        d = _dec_from_triple(self._sign, self._int, self._exp + int(other))
        d = d._fix(context)
        return d
    def shift(self, other, context=None):
        """Returns a shifted copy of self, value-of-other times.

        Like rotate, but digits shifted out are lost and vacated
        positions are zero filled.
        """
        if context is None:
            context = getcontext()
        other = _convert_other(other, raiseit=True)
        ans = self._check_nans(other, context)
        if ans:
            return ans
        if other._exp != 0:
            return context._raise_error(InvalidOperation)
        if not (-context.prec <= int(other) <= context.prec):
            return context._raise_error(InvalidOperation)
        if self._isinfinity():
            return Decimal(self)
        # get values; pad (or trim) the coefficient to context.prec digits
        torot = int(other)
        rotdig = self._int
        topad = context.prec - len(rotdig)
        if topad > 0:
            rotdig = '0'*topad + rotdig
        elif topad < 0:
            rotdig = rotdig[-topad:]
        # let's shift!
        if torot < 0:
            # Right shift: drop the low -torot digits.
            shifted = rotdig[:torot]
        else:
            # Left shift: append zeros, then keep the low prec digits.
            shifted = rotdig + '0'*torot
            shifted = shifted[-context.prec:]
        return _dec_from_triple(self._sign,
                                shifted.lstrip('0') or '0', self._exp)
# Support for pickling, copy, and deepcopy
def __reduce__(self):
return (self.__class__, (str(self),))
def __copy__(self):
if type(self) is Decimal:
return self # I'm immutable; therefore I am my own clone
return self.__class__(str(self))
def __deepcopy__(self, memo):
if type(self) is Decimal:
return self # My components are also immutable
return self.__class__(str(self))
    # PEP 3101 support.  The _localeconv keyword argument should be
    # considered private: it's provided for ease of testing only.
    def __format__(self, specifier, context=None, _localeconv=None):
        """Format a Decimal instance according to the given specifier.

        The specifier should be a standard format specifier, with the
        form described in PEP 3101.  Formatting types 'e', 'E', 'f',
        'F', 'g', 'G', 'n' and '%' are supported.  If the formatting
        type is omitted it defaults to 'g' or 'G', depending on the
        value of context.capitals.
        """
        # Note: PEP 3101 says that if the type is not present then
        # there should be at least one digit after the decimal point.
        # We take the liberty of ignoring this requirement for
        # Decimal---it's presumably there to make sure that
        # format(float, '') behaves similarly to str(float).
        if context is None:
            context = getcontext()
        spec = _parse_format_specifier(specifier, _localeconv=_localeconv)
        # special values don't care about the type or precision
        if self._is_special:
            sign = _format_sign(self._sign, spec)
            body = str(self.copy_abs())
            if spec['type'] == '%':
                body += '%'
            return _format_align(sign, body, spec)
        # a type of None defaults to 'g' or 'G', depending on context
        if spec['type'] is None:
            spec['type'] = ['g', 'G'][context.capitals]
        # if type is '%', adjust exponent of self accordingly
        # (multiply by 100; the '%' suffix is appended by _format_number)
        if spec['type'] == '%':
            self = _dec_from_triple(self._sign, self._int, self._exp+2)
        # round if necessary, taking rounding mode from the context
        rounding = context.rounding
        precision = spec['precision']
        if precision is not None:
            if spec['type'] in 'eE':
                self = self._round(precision+1, rounding)
            elif spec['type'] in 'fF%':
                self = self._rescale(-precision, rounding)
            elif spec['type'] in 'gG' and len(self._int) > precision:
                self = self._round(precision, rounding)
        # special case: zeros with a positive exponent can't be
        # represented in fixed point; rescale them to 0e0.
        if not self and self._exp > 0 and spec['type'] in 'fF%':
            self = self._rescale(0, rounding)
        # figure out placement of the decimal point:
        # dotplace is the number of digits of self._int that end up
        # to the left of the point.
        leftdigits = self._exp + len(self._int)
        if spec['type'] in 'eE':
            if not self and precision is not None:
                dotplace = 1 - precision
            else:
                dotplace = 1
        elif spec['type'] in 'fF%':
            dotplace = leftdigits
        elif spec['type'] in 'gG':
            if self._exp <= 0 and leftdigits > -6:
                dotplace = leftdigits
            else:
                dotplace = 1
        # find digits before and after decimal point, and get exponent
        if dotplace < 0:
            intpart = '0'
            fracpart = '0'*(-dotplace) + self._int
        elif dotplace > len(self._int):
            intpart = self._int + '0'*(dotplace-len(self._int))
            fracpart = ''
        else:
            intpart = self._int[:dotplace] or '0'
            fracpart = self._int[dotplace:]
        exp = leftdigits-dotplace
        # done with the decimal-specific stuff; hand over the rest
        # of the formatting to the _format_number function
        return _format_number(self._sign, intpart, fracpart, exp, spec)
def _dec_from_triple(sign, coefficient, exponent, special=False):
    """Build a Decimal directly from its internal fields.

    No validation, normalization (such as removal of leading zeros) or
    argument conversion is performed.

    This function is for *internal use only*.
    """
    result = object.__new__(Decimal)
    result._sign = sign
    result._int = coefficient
    result._exp = exponent
    result._is_special = special
    return result
# Register Decimal as a kind of Number (an abstract base class).
# However, do not register it as Real (because Decimals are not
# interoperable with floats).
_numbers.Number.register(Decimal)
class _ContextManager(object):
    """Helper for localcontext().

    __enter__() installs a copy of the supplied context as the active
    decimal context; __exit__() reinstates whatever context was active
    beforehand.
    """
    def __init__(self, new_context):
        # Copy up front so later mutations by the caller don't leak in.
        self.new_context = new_context.copy()
    def __enter__(self):
        self.saved_context = getcontext()
        setcontext(self.new_context)
        return self.new_context
    def __exit__(self, exc_type, exc_value, traceback):
        setcontext(self.saved_context)
class Context(object):
    """Contains the context for a Decimal instance.

    Contains:
    prec - precision (for use in rounding, division, square roots..)
    rounding - rounding type (how you round)
    traps - If traps[exception] = 1, then the exception is
            raised when it is caused.  Otherwise, a value is
            substituted in.
    flags - When an exception is caused, flags[exception] is set.
            (Whether or not the trap_enabler is set.)
            Should be reset by the user of the Decimal instance.
    Emin - Minimum exponent
    Emax - Maximum exponent
    capitals - If 1, 1*10^1 is printed as 1E+1.
               If 0, printed as 1e1.
    clamp - If 1, change exponents if too high (Default 0)
    """
    def __init__(self, prec=None, rounding=None, Emin=None, Emax=None,
                       capitals=None, clamp=None, flags=None, traps=None,
                       _ignored_flags=None):
        # Set defaults; for everything except flags and _ignored_flags,
        # inherit from DefaultContext.
        try:
            dc = DefaultContext
        except NameError:
            # DefaultContext doesn't exist yet while the module itself is
            # constructing it; in that bootstrap call every argument is
            # supplied explicitly, so dc is never actually read.
            pass
        self.prec = prec if prec is not None else dc.prec
        self.rounding = rounding if rounding is not None else dc.rounding
        self.Emin = Emin if Emin is not None else dc.Emin
        self.Emax = Emax if Emax is not None else dc.Emax
        self.capitals = capitals if capitals is not None else dc.capitals
        self.clamp = clamp if clamp is not None else dc.clamp
        if _ignored_flags is None:
            self._ignored_flags = []
        else:
            self._ignored_flags = _ignored_flags
        if traps is None:
            self.traps = dc.traps.copy()
        elif not isinstance(traps, dict):
            # A list/tuple of signals to enable; expand it into a full
            # signal dict (1 for listed signals, 0 for the rest).
            self.traps = dict((s, int(s in traps)) for s in _signals + traps)
        else:
            self.traps = traps
        if flags is None:
            self.flags = dict.fromkeys(_signals, 0)
        elif not isinstance(flags, dict):
            # Same list-to-dict expansion as for traps above.
            self.flags = dict((s, int(s in flags)) for s in _signals + flags)
        else:
            self.flags = flags
    def _set_integer_check(self, name, value, vmin, vmax):
        """Validate *value* as an int within [vmin, vmax], then set it.

        vmin may be the string '-inf' (no lower bound) and vmax may be
        'inf' (no upper bound).  Bypasses __setattr__ validation via
        object.__setattr__.
        """
        if not isinstance(value, int):
            raise TypeError("%s must be an integer" % name)
        if vmin == '-inf':
            if value > vmax:
                raise ValueError("%s must be in [%s, %d]. got: %s" % (name, vmin, vmax, value))
        elif vmax == 'inf':
            if value < vmin:
                raise ValueError("%s must be in [%d, %s]. got: %s" % (name, vmin, vmax, value))
        else:
            if value < vmin or value > vmax:
                raise ValueError("%s must be in [%d, %d]. got %s" % (name, vmin, vmax, value))
        return object.__setattr__(self, name, value)
def _set_signal_dict(self, name, d):
if not isinstance(d, dict):
raise TypeError("%s must be a signal dict" % d)
for key in d:
if not key in _signals:
raise KeyError("%s is not a valid signal dict" % d)
for key in _signals:
if not key in d:
raise KeyError("%s is not a valid signal dict" % d)
return object.__setattr__(self, name, d)
    def __setattr__(self, name, value):
        """Validate every attribute assignment on a Context.

        Only the documented attributes may be set, each through its own
        range/type check; anything else raises AttributeError.
        """
        if name == 'prec':
            return self._set_integer_check(name, value, 1, 'inf')
        elif name == 'Emin':
            return self._set_integer_check(name, value, '-inf', 0)
        elif name == 'Emax':
            return self._set_integer_check(name, value, 0, 'inf')
        elif name == 'capitals':
            return self._set_integer_check(name, value, 0, 1)
        elif name == 'clamp':
            return self._set_integer_check(name, value, 0, 1)
        elif name == 'rounding':
            if not value in _rounding_modes:
                # raise TypeError even for strings to have consistency
                # among various implementations.
                raise TypeError("%s: invalid rounding mode" % value)
            return object.__setattr__(self, name, value)
        elif name == 'flags' or name == 'traps':
            return self._set_signal_dict(name, value)
        elif name == '_ignored_flags':
            # Internal bookkeeping list; no validation needed.
            return object.__setattr__(self, name, value)
        else:
            raise AttributeError(
                "'decimal.Context' object has no attribute '%s'" % name)
    def __delattr__(self, name):
        # Context attributes are mandatory; deleting any of them would
        # leave the context in an unusable state.
        raise AttributeError("%s cannot be deleted" % name)
# Support for pickling, copy, and deepcopy
def __reduce__(self):
flags = [sig for sig, v in self.flags.items() if v]
traps = [sig for sig, v in self.traps.items() if v]
return (self.__class__,
(self.prec, self.rounding, self.Emin, self.Emax,
self.capitals, self.clamp, flags, traps))
def __repr__(self):
"""Show the current context."""
s = []
s.append('Context(prec=%(prec)d, rounding=%(rounding)s, '
'Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d, '
'clamp=%(clamp)d'
% vars(self))
names = [f.__name__ for f, v in self.flags.items() if v]
s.append('flags=[' + ', '.join(names) + ']')
names = [t.__name__ for t, v in self.traps.items() if v]
s.append('traps=[' + ', '.join(names) + ']')
return ', '.join(s) + ')'
def clear_flags(self):
"""Reset all flags to zero"""
for flag in self.flags:
self.flags[flag] = 0
def clear_traps(self):
"""Reset all traps to zero"""
for flag in self.traps:
self.traps[flag] = 0
def _shallow_copy(self):
"""Returns a shallow copy from self."""
nc = Context(self.prec, self.rounding, self.Emin, self.Emax,
self.capitals, self.clamp, self.flags, self.traps,
self._ignored_flags)
return nc
def copy(self):
"""Returns a deep copy from self."""
nc = Context(self.prec, self.rounding, self.Emin, self.Emax,
self.capitals, self.clamp,
self.flags.copy(), self.traps.copy(),
self._ignored_flags)
return nc
__copy__ = copy
    def _raise_error(self, condition, explanation = None, *args):
        """Handles an error

        If the flag is in _ignored_flags, returns the default response.
        Otherwise, it sets the flag, then, if the corresponding
        trap_enabler is set, it reraises the exception.  Otherwise, it returns
        the default value after setting the flag.
        """
        # Map a derived condition (e.g. ConversionSyntax) onto the signal
        # whose flag/trap it belongs to (e.g. InvalidOperation).
        error = _condition_map.get(condition, condition)
        if error in self._ignored_flags:
            # Don't touch the flag
            return error().handle(self, *args)
        self.flags[error] = 1
        if not self.traps[error]:
            # The errors define how to handle themselves.
            # Note: the *condition* (not the mapped signal) computes the
            # substituted result.
            return condition().handle(self, *args)
        # Errors should only be risked on copies of the context
        # self._ignored_flags = []
        raise error(explanation)
def _ignore_all_flags(self):
"""Ignore all flags, if they are raised"""
return self._ignore_flags(*_signals)
def _ignore_flags(self, *flags):
"""Ignore the flags, if they are raised"""
# Do not mutate-- This way, copies of a context leave the original
# alone.
self._ignored_flags = (self._ignored_flags + list(flags))
return list(flags)
def _regard_flags(self, *flags):
"""Stop ignoring the flags, if they are raised"""
if flags and isinstance(flags[0], (tuple,list)):
flags = flags[0]
for flag in flags:
self._ignored_flags.remove(flag)
    # We inherit object.__hash__, so we must deny this explicitly
    # (Contexts are mutable and must not be usable as dict keys).
    __hash__ = None
    def Etiny(self):
        """Returns Etiny (= Emin - prec + 1), the minimum exponent
        of a subnormal result."""
        return int(self.Emin - self.prec + 1)
    def Etop(self):
        """Returns maximum exponent (= Emax - prec + 1) of a
        full-precision coefficient."""
        return int(self.Emax - self.prec + 1)
def _set_rounding(self, type):
"""Sets the rounding type.
Sets the rounding type, and returns the current (previous)
rounding type. Often used like:
context = context.copy()
# so you don't change the calling context
# if an error occurs in the middle.
rounding = context._set_rounding(ROUND_UP)
val = self.__sub__(other, context=context)
context._set_rounding(rounding)
This will make it round up for that operation.
"""
rounding = self.rounding
self.rounding = type
return rounding
    def create_decimal(self, num='0'):
        """Creates a new Decimal instance but using self as context.

        This method implements the to-number operation of the
        IBM Decimal specification."""
        # The spec is stricter than the Decimal constructor: reject
        # surrounding whitespace and digit-grouping underscores.
        if isinstance(num, str) and (num != num.strip() or '_' in num):
            return self._raise_error(ConversionSyntax,
                                     "trailing or leading whitespace and "
                                     "underscores are not permitted.")
        d = Decimal(num, context=self)
        # A NaN payload must fit in the available coefficient digits.
        if d._isnan() and len(d._int) > self.prec - self.clamp:
            return self._raise_error(ConversionSyntax,
                                     "diagnostic info too long in NaN")
        return d._fix(self)
    def create_decimal_from_float(self, f):
        """Creates a new Decimal instance from a float but rounding using self
        as the context.

        >>> context = Context(prec=5, rounding=ROUND_DOWN)
        >>> context.create_decimal_from_float(3.1415926535897932)
        Decimal('3.1415')
        >>> context = Context(prec=5, traps=[Inexact])
        >>> context.create_decimal_from_float(3.1415926535897932)
        Traceback (most recent call last):
            ...
        decimal.Inexact

        """
        d = Decimal.from_float(f)       # An exact conversion
        return d._fix(self)             # Apply the context rounding
    def create_decimal_from_float(self, f):
        """Creates a new Decimal instance from a float but rounding using self
        as the context.

        The conversion itself is exact; only the final _fix applies the
        context's precision and may signal (e.g. Inexact).

        >>> context = Context(prec=5, rounding=ROUND_DOWN)
        >>> context.create_decimal_from_float(3.1415926535897932)
        Decimal('3.1415')
        >>> context = Context(prec=5, traps=[Inexact])
        >>> context.create_decimal_from_float(3.1415926535897932)
        Traceback (most recent call last):
            ...
        decimal.Inexact

        """
        d = Decimal.from_float(f)       # An exact conversion
        return d._fix(self)             # Apply the context rounding
    # Methods
    # The methods below mirror the corresponding Decimal operations:
    # each converts its argument(s) and delegates, supplying self as
    # the context.
    def abs(self, a):
        """Returns the absolute value of the operand.

        If the operand is negative, the result is the same as using the minus
        operation on the operand.  Otherwise, the result is the same as using
        the plus operation on the operand.

        >>> ExtendedContext.abs(Decimal('2.1'))
        Decimal('2.1')
        >>> ExtendedContext.abs(Decimal('-100'))
        Decimal('100')
        >>> ExtendedContext.abs(Decimal('101.5'))
        Decimal('101.5')
        >>> ExtendedContext.abs(Decimal('-101.5'))
        Decimal('101.5')
        >>> ExtendedContext.abs(-1)
        Decimal('1')
        """
        a = _convert_other(a, raiseit=True)
        return a.__abs__(context=self)
    def add(self, a, b):
        """Return the sum of the two operands.

        >>> ExtendedContext.add(Decimal('12'), Decimal('7.00'))
        Decimal('19.00')
        >>> ExtendedContext.add(Decimal('1E+2'), Decimal('1.01E+4'))
        Decimal('1.02E+4')
        >>> ExtendedContext.add(1, Decimal(2))
        Decimal('3')
        >>> ExtendedContext.add(Decimal(8), 5)
        Decimal('13')
        >>> ExtendedContext.add(5, 5)
        Decimal('10')
        """
        a = _convert_other(a, raiseit=True)
        r = a.__add__(b, context=self)
        if r is NotImplemented:
            # __add__ could not convert b; surface that as a TypeError.
            raise TypeError("Unable to convert %s to Decimal" % b)
        else:
            return r
    def _apply(self, a):
        # Internal helper: round a to this context, return as a string.
        return str(a._fix(self))
    def canonical(self, a):
        """Returns the same Decimal object.

        As we do not have different encodings for the same number, the
        received object already is in its canonical form.

        >>> ExtendedContext.canonical(Decimal('2.50'))
        Decimal('2.50')
        """
        if not isinstance(a, Decimal):
            raise TypeError("canonical requires a Decimal as an argument.")
        return a.canonical()
    # Comparison wrappers: each delegates to the Decimal method of the
    # same name, with self supplied as the context where one is taken.
    def compare(self, a, b):
        """Compares values numerically.

        If the signs of the operands differ, a value representing each operand
        ('-1' if the operand is less than zero, '0' if the operand is zero or
        negative zero, or '1' if the operand is greater than zero) is used in
        place of that operand for the comparison instead of the actual
        operand.

        The comparison is then effected by subtracting the second operand from
        the first and then returning a value according to the result of the
        subtraction: '-1' if the result is less than zero, '0' if the result is
        zero or negative zero, or '1' if the result is greater than zero.

        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('3'))
        Decimal('-1')
        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.1'))
        Decimal('0')
        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.10'))
        Decimal('0')
        >>> ExtendedContext.compare(Decimal('3'), Decimal('2.1'))
        Decimal('1')
        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('-3'))
        Decimal('1')
        >>> ExtendedContext.compare(Decimal('-3'), Decimal('2.1'))
        Decimal('-1')
        >>> ExtendedContext.compare(1, 2)
        Decimal('-1')
        >>> ExtendedContext.compare(Decimal(1), 2)
        Decimal('-1')
        >>> ExtendedContext.compare(1, Decimal(2))
        Decimal('-1')
        """
        a = _convert_other(a, raiseit=True)
        return a.compare(b, context=self)
    def compare_signal(self, a, b):
        """Compares the values of the two operands numerically.

        It's pretty much like compare(), but all NaNs signal, with signaling
        NaNs taking precedence over quiet NaNs.

        >>> c = ExtendedContext
        >>> c.compare_signal(Decimal('2.1'), Decimal('3'))
        Decimal('-1')
        >>> c.compare_signal(Decimal('2.1'), Decimal('2.1'))
        Decimal('0')
        >>> c.flags[InvalidOperation] = 0
        >>> print(c.flags[InvalidOperation])
        0
        >>> c.compare_signal(Decimal('NaN'), Decimal('2.1'))
        Decimal('NaN')
        >>> print(c.flags[InvalidOperation])
        1
        >>> c.flags[InvalidOperation] = 0
        >>> print(c.flags[InvalidOperation])
        0
        >>> c.compare_signal(Decimal('sNaN'), Decimal('2.1'))
        Decimal('NaN')
        >>> print(c.flags[InvalidOperation])
        1
        >>> c.compare_signal(-1, 2)
        Decimal('-1')
        >>> c.compare_signal(Decimal(-1), 2)
        Decimal('-1')
        >>> c.compare_signal(-1, Decimal(2))
        Decimal('-1')
        """
        a = _convert_other(a, raiseit=True)
        return a.compare_signal(b, context=self)
    def compare_total(self, a, b):
        """Compares two operands using their abstract representation.

        This is not like the standard compare, which use their numerical
        value.  Note that a total ordering is defined for all possible abstract
        representations.

        >>> ExtendedContext.compare_total(Decimal('12.73'), Decimal('127.9'))
        Decimal('-1')
        >>> ExtendedContext.compare_total(Decimal('-127'), Decimal('12'))
        Decimal('-1')
        >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.3'))
        Decimal('-1')
        >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.30'))
        Decimal('0')
        >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('12.300'))
        Decimal('1')
        >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('NaN'))
        Decimal('-1')
        >>> ExtendedContext.compare_total(1, 2)
        Decimal('-1')
        >>> ExtendedContext.compare_total(Decimal(1), 2)
        Decimal('-1')
        >>> ExtendedContext.compare_total(1, Decimal(2))
        Decimal('-1')
        """
        a = _convert_other(a, raiseit=True)
        return a.compare_total(b)
    def compare_total_mag(self, a, b):
        """Compares two operands using their abstract representation ignoring sign.

        Like compare_total, but with operand's sign ignored and assumed to be 0.
        """
        a = _convert_other(a, raiseit=True)
        return a.compare_total_mag(b)
    # Copy and division wrappers: thin delegations to the Decimal
    # methods/operators of the same name.
    def copy_abs(self, a):
        """Returns a copy of the operand with the sign set to 0.

        >>> ExtendedContext.copy_abs(Decimal('2.1'))
        Decimal('2.1')
        >>> ExtendedContext.copy_abs(Decimal('-100'))
        Decimal('100')
        >>> ExtendedContext.copy_abs(-1)
        Decimal('1')
        """
        a = _convert_other(a, raiseit=True)
        return a.copy_abs()
    def copy_decimal(self, a):
        """Returns a copy of the decimal object.

        >>> ExtendedContext.copy_decimal(Decimal('2.1'))
        Decimal('2.1')
        >>> ExtendedContext.copy_decimal(Decimal('-1.00'))
        Decimal('-1.00')
        >>> ExtendedContext.copy_decimal(1)
        Decimal('1')
        """
        a = _convert_other(a, raiseit=True)
        return Decimal(a)
    def copy_negate(self, a):
        """Returns a copy of the operand with the sign inverted.

        >>> ExtendedContext.copy_negate(Decimal('101.5'))
        Decimal('-101.5')
        >>> ExtendedContext.copy_negate(Decimal('-101.5'))
        Decimal('101.5')
        >>> ExtendedContext.copy_negate(1)
        Decimal('-1')
        """
        a = _convert_other(a, raiseit=True)
        return a.copy_negate()
    def copy_sign(self, a, b):
        """Copies the second operand's sign to the first one.

        In detail, it returns a copy of the first operand with the sign
        equal to the sign of the second operand.

        >>> ExtendedContext.copy_sign(Decimal( '1.50'), Decimal('7.33'))
        Decimal('1.50')
        >>> ExtendedContext.copy_sign(Decimal('-1.50'), Decimal('7.33'))
        Decimal('1.50')
        >>> ExtendedContext.copy_sign(Decimal( '1.50'), Decimal('-7.33'))
        Decimal('-1.50')
        >>> ExtendedContext.copy_sign(Decimal('-1.50'), Decimal('-7.33'))
        Decimal('-1.50')
        >>> ExtendedContext.copy_sign(1, -2)
        Decimal('-1')
        >>> ExtendedContext.copy_sign(Decimal(1), -2)
        Decimal('-1')
        >>> ExtendedContext.copy_sign(1, Decimal(-2))
        Decimal('-1')
        """
        a = _convert_other(a, raiseit=True)
        return a.copy_sign(b)
    def divide(self, a, b):
        """Decimal division in a specified context.

        >>> ExtendedContext.divide(Decimal('1'), Decimal('3'))
        Decimal('0.333333333')
        >>> ExtendedContext.divide(Decimal('2'), Decimal('3'))
        Decimal('0.666666667')
        >>> ExtendedContext.divide(Decimal('5'), Decimal('2'))
        Decimal('2.5')
        >>> ExtendedContext.divide(Decimal('1'), Decimal('10'))
        Decimal('0.1')
        >>> ExtendedContext.divide(Decimal('12'), Decimal('12'))
        Decimal('1')
        >>> ExtendedContext.divide(Decimal('8.00'), Decimal('2'))
        Decimal('4.00')
        >>> ExtendedContext.divide(Decimal('2.400'), Decimal('2.0'))
        Decimal('1.20')
        >>> ExtendedContext.divide(Decimal('1000'), Decimal('100'))
        Decimal('10')
        >>> ExtendedContext.divide(Decimal('1000'), Decimal('1'))
        Decimal('1000')
        >>> ExtendedContext.divide(Decimal('2.40E+6'), Decimal('2'))
        Decimal('1.20E+6')
        >>> ExtendedContext.divide(5, 5)
        Decimal('1')
        >>> ExtendedContext.divide(Decimal(5), 5)
        Decimal('1')
        >>> ExtendedContext.divide(5, Decimal(5))
        Decimal('1')
        """
        a = _convert_other(a, raiseit=True)
        r = a.__truediv__(b, context=self)
        if r is NotImplemented:
            # The operator could not convert b; surface it as TypeError.
            raise TypeError("Unable to convert %s to Decimal" % b)
        else:
            return r
    def divide_int(self, a, b):
        """Divides two numbers and returns the integer part of the result.

        >>> ExtendedContext.divide_int(Decimal('2'), Decimal('3'))
        Decimal('0')
        >>> ExtendedContext.divide_int(Decimal('10'), Decimal('3'))
        Decimal('3')
        >>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3'))
        Decimal('3')
        >>> ExtendedContext.divide_int(10, 3)
        Decimal('3')
        >>> ExtendedContext.divide_int(Decimal(10), 3)
        Decimal('3')
        >>> ExtendedContext.divide_int(10, Decimal(3))
        Decimal('3')
        """
        a = _convert_other(a, raiseit=True)
        r = a.__floordiv__(b, context=self)
        if r is NotImplemented:
            raise TypeError("Unable to convert %s to Decimal" % b)
        else:
            return r
    def divmod(self, a, b):
        """Return (a // b, a % b).

        >>> ExtendedContext.divmod(Decimal(8), Decimal(3))
        (Decimal('2'), Decimal('2'))
        >>> ExtendedContext.divmod(Decimal(8), Decimal(4))
        (Decimal('2'), Decimal('0'))
        >>> ExtendedContext.divmod(8, 4)
        (Decimal('2'), Decimal('0'))
        >>> ExtendedContext.divmod(Decimal(8), 4)
        (Decimal('2'), Decimal('0'))
        >>> ExtendedContext.divmod(8, Decimal(4))
        (Decimal('2'), Decimal('0'))
        """
        a = _convert_other(a, raiseit=True)
        r = a.__divmod__(b, context=self)
        if r is NotImplemented:
            raise TypeError("Unable to convert %s to Decimal" % b)
        else:
            return r
def exp(self, a):
"""Returns e ** a.
>>> c = ExtendedContext.copy()
>>> c.Emin = -999
>>> c.Emax = 999
>>> c.exp(Decimal('-Infinity'))
Decimal('0')
>>> c.exp(Decimal('-1'))
Decimal('0.367879441')
>>> c.exp(Decimal('0'))
Decimal('1')
>>> c.exp(Decimal('1'))
Decimal('2.71828183')
>>> c.exp(Decimal('0.693147181'))
Decimal('2.00000000')
>>> c.exp(Decimal('+Infinity'))
Decimal('Infinity')
>>> c.exp(10)
Decimal('22026.4658')
"""
a =_convert_other(a, raiseit=True)
return a.exp(context=self)
def fma(self, a, b, c):
"""Returns a multiplied by b, plus c.
The first two operands are multiplied together, using multiply,
the third operand is then added to the result of that
multiplication, using add, all with only one final rounding.
>>> ExtendedContext.fma(Decimal('3'), Decimal('5'), Decimal('7'))
Decimal('22')
>>> ExtendedContext.fma(Decimal('3'), Decimal('-5'), Decimal('7'))
Decimal('-8')
>>> ExtendedContext.fma(Decimal('888565290'), Decimal('1557.96930'), Decimal('-86087.7578'))
Decimal('1.38435736E+12')
>>> ExtendedContext.fma(1, 3, 4)
Decimal('7')
>>> ExtendedContext.fma(1, Decimal(3), 4)
Decimal('7')
>>> ExtendedContext.fma(1, 3, Decimal(4))
Decimal('7')
"""
a = _convert_other(a, raiseit=True)
return a.fma(b, c, context=self)
def is_canonical(self, a):
"""Return True if the operand is canonical; otherwise return False.
Currently, the encoding of a Decimal instance is always
canonical, so this method returns True for any Decimal.
>>> ExtendedContext.is_canonical(Decimal('2.50'))
True
"""
if not isinstance(a, Decimal):
raise TypeError("is_canonical requires a Decimal as an argument.")
return a.is_canonical()
def is_finite(self, a):
"""Return True if the operand is finite; otherwise return False.
A Decimal instance is considered finite if it is neither
infinite nor a NaN.
>>> ExtendedContext.is_finite(Decimal('2.50'))
True
>>> ExtendedContext.is_finite(Decimal('-0.3'))
True
>>> ExtendedContext.is_finite(Decimal('0'))
True
>>> ExtendedContext.is_finite(Decimal('Inf'))
False
>>> ExtendedContext.is_finite(Decimal('NaN'))
False
>>> ExtendedContext.is_finite(1)
True
"""
a = _convert_other(a, raiseit=True)
return a.is_finite()
def is_infinite(self, a):
"""Return True if the operand is infinite; otherwise return False.
>>> ExtendedContext.is_infinite(Decimal('2.50'))
False
>>> ExtendedContext.is_infinite(Decimal('-Inf'))
True
>>> ExtendedContext.is_infinite(Decimal('NaN'))
False
>>> ExtendedContext.is_infinite(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_infinite()
def is_nan(self, a):
"""Return True if the operand is a qNaN or sNaN;
otherwise return False.
>>> ExtendedContext.is_nan(Decimal('2.50'))
False
>>> ExtendedContext.is_nan(Decimal('NaN'))
True
>>> ExtendedContext.is_nan(Decimal('-sNaN'))
True
>>> ExtendedContext.is_nan(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_nan()
def is_normal(self, a):
"""Return True if the operand is a normal number;
otherwise return False.
>>> c = ExtendedContext.copy()
>>> c.Emin = -999
>>> c.Emax = 999
>>> c.is_normal(Decimal('2.50'))
True
>>> c.is_normal(Decimal('0.1E-999'))
False
>>> c.is_normal(Decimal('0.00'))
False
>>> c.is_normal(Decimal('-Inf'))
False
>>> c.is_normal(Decimal('NaN'))
False
>>> c.is_normal(1)
True
"""
a = _convert_other(a, raiseit=True)
return a.is_normal(context=self)
def is_qnan(self, a):
"""Return True if the operand is a quiet NaN; otherwise return False.
>>> ExtendedContext.is_qnan(Decimal('2.50'))
False
>>> ExtendedContext.is_qnan(Decimal('NaN'))
True
>>> ExtendedContext.is_qnan(Decimal('sNaN'))
False
>>> ExtendedContext.is_qnan(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_qnan()
def is_signed(self, a):
"""Return True if the operand is negative; otherwise return False.
>>> ExtendedContext.is_signed(Decimal('2.50'))
False
>>> ExtendedContext.is_signed(Decimal('-12'))
True
>>> ExtendedContext.is_signed(Decimal('-0'))
True
>>> ExtendedContext.is_signed(8)
False
>>> ExtendedContext.is_signed(-8)
True
"""
a = _convert_other(a, raiseit=True)
return a.is_signed()
def is_snan(self, a):
"""Return True if the operand is a signaling NaN;
otherwise return False.
>>> ExtendedContext.is_snan(Decimal('2.50'))
False
>>> ExtendedContext.is_snan(Decimal('NaN'))
False
>>> ExtendedContext.is_snan(Decimal('sNaN'))
True
>>> ExtendedContext.is_snan(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_snan()
def is_subnormal(self, a):
"""Return True if the operand is subnormal; otherwise return False.
>>> c = ExtendedContext.copy()
>>> c.Emin = -999
>>> c.Emax = 999
>>> c.is_subnormal(Decimal('2.50'))
False
>>> c.is_subnormal(Decimal('0.1E-999'))
True
>>> c.is_subnormal(Decimal('0.00'))
False
>>> c.is_subnormal(Decimal('-Inf'))
False
>>> c.is_subnormal(Decimal('NaN'))
False
>>> c.is_subnormal(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_subnormal(context=self)
def is_zero(self, a):
    """Return True if the operand is a zero (of either sign), else False.

    >>> ExtendedContext.is_zero(Decimal('-0E+2'))
    True
    >>> ExtendedContext.is_zero(Decimal('2.50'))
    False
    >>> ExtendedContext.is_zero(0)
    True
    """
    return _convert_other(a, raiseit=True).is_zero()
def ln(self, a):
    """Return the natural (base e) logarithm of the operand.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> c.ln(Decimal('2.71828183'))
    Decimal('1.00000000')
    >>> c.ln(Decimal('0'))
    Decimal('-Infinity')
    """
    return _convert_other(a, raiseit=True).ln(context=self)
def log10(self, a):
    """Return the base 10 logarithm of the operand.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> c.log10(Decimal('2'))
    Decimal('0.301029996')
    >>> c.log10(Decimal('0.001'))
    Decimal('-3')
    >>> c.log10(0)
    Decimal('-Infinity')
    """
    return _convert_other(a, raiseit=True).log10(context=self)
def logb(self, a):
    """Return the exponent of the magnitude of the operand's MSD.

    The result is the integer exponent of the most significant digit
    of the operand (as though the operand were truncated to a single
    digit while keeping that digit's value, with no exponent limit).

    >>> ExtendedContext.logb(Decimal('250'))
    Decimal('2')
    >>> ExtendedContext.logb(Decimal('0.03'))
    Decimal('-2')
    >>> ExtendedContext.logb(Decimal('0'))
    Decimal('-Infinity')
    """
    return _convert_other(a, raiseit=True).logb(context=self)
def logical_and(self, a, b):
    """Apply the logical operation 'and' digit-wise between the operands.

    Both operands must be logical numbers (finite, nonnegative,
    exponent 0, digits all 0 or 1).

    >>> ExtendedContext.logical_and(Decimal('1100'), Decimal('1010'))
    Decimal('1000')
    >>> ExtendedContext.logical_and(110, 1101)
    Decimal('100')
    """
    return _convert_other(a, raiseit=True).logical_and(b, context=self)
def logical_invert(self, a):
    """Invert all the digits in the operand.

    The operand must be a logical number; the result has 'precision'
    digits, each the complement of the corresponding input digit.

    >>> ExtendedContext.logical_invert(Decimal('0'))
    Decimal('111111111')
    >>> ExtendedContext.logical_invert(Decimal('101010101'))
    Decimal('10101010')
    """
    return _convert_other(a, raiseit=True).logical_invert(context=self)
def logical_or(self, a, b):
    """Apply the logical operation 'or' digit-wise between the operands.

    Both operands must be logical numbers.

    >>> ExtendedContext.logical_or(Decimal('1100'), Decimal('1010'))
    Decimal('1110')
    >>> ExtendedContext.logical_or(110, 1101)
    Decimal('1111')
    """
    return _convert_other(a, raiseit=True).logical_or(b, context=self)
def logical_xor(self, a, b):
    """Apply the logical operation 'xor' digit-wise between the operands.

    Both operands must be logical numbers.

    >>> ExtendedContext.logical_xor(Decimal('1100'), Decimal('1010'))
    Decimal('110')
    >>> ExtendedContext.logical_xor(110, 1101)
    Decimal('1011')
    """
    return _convert_other(a, raiseit=True).logical_xor(b, context=self)
def max(self, a, b):
    """Compare two values numerically and return the maximum.

    If either operand is a NaN then the general rules apply.
    Otherwise the operands are compared as though by the compare
    operation; if numerically equal the left-hand operand is chosen,
    else the one closer to positive infinity.

    >>> ExtendedContext.max(Decimal('-10'), Decimal('3'))
    Decimal('3')
    >>> ExtendedContext.max(Decimal('7'), Decimal('NaN'))
    Decimal('7')
    >>> ExtendedContext.max(1, 2)
    Decimal('2')
    """
    return _convert_other(a, raiseit=True).max(b, context=self)
def max_mag(self, a, b):
    """Compare the operands numerically with their signs ignored and
    return the one with the larger absolute value.

    >>> ExtendedContext.max_mag(Decimal('7'), Decimal('-10'))
    Decimal('-10')
    >>> ExtendedContext.max_mag(1, -2)
    Decimal('-2')
    """
    return _convert_other(a, raiseit=True).max_mag(b, context=self)
def min(self, a, b):
    """Compare two values numerically and return the minimum.

    If either operand is a NaN then the general rules apply.
    Otherwise the operands are compared as though by the compare
    operation; if numerically equal the left-hand operand is chosen,
    else the one closer to negative infinity.

    >>> ExtendedContext.min(Decimal('-10'), Decimal('3'))
    Decimal('-10')
    >>> ExtendedContext.min(Decimal('7'), Decimal('NaN'))
    Decimal('7')
    >>> ExtendedContext.min(1, 2)
    Decimal('1')
    """
    return _convert_other(a, raiseit=True).min(b, context=self)
def min_mag(self, a, b):
    """Compare the operands numerically with their signs ignored and
    return the one with the smaller absolute value.

    >>> ExtendedContext.min_mag(Decimal('3'), Decimal('-2'))
    Decimal('-2')
    >>> ExtendedContext.min_mag(1, -2)
    Decimal('1')
    """
    return _convert_other(a, raiseit=True).min_mag(b, context=self)
def minus(self, a):
    """Unary prefix minus, as in Python.

    Evaluated with the same rules as subtract: minus(a) is computed
    as subtract('0', a) where the '0' has the same exponent as the
    operand.

    >>> ExtendedContext.minus(Decimal('1.3'))
    Decimal('-1.3')
    >>> ExtendedContext.minus(1)
    Decimal('-1')
    """
    return _convert_other(a, raiseit=True).__neg__(context=self)
def multiply(self, a, b):
    """Return the product of the two operands.

    Special values follow the general rules.  Otherwise the operands
    are multiplied together ('long multiplication'), producing a
    coefficient that may be as long as the sum of the lengths of the
    two operands.

    >>> ExtendedContext.multiply(Decimal('1.20'), Decimal('3'))
    Decimal('3.60')
    >>> ExtendedContext.multiply(Decimal('654321'), Decimal('654321'))
    Decimal('4.28135971E+11')
    >>> ExtendedContext.multiply(7, 7)
    Decimal('49')
    """
    product = _convert_other(a, raiseit=True).__mul__(b, context=self)
    # __mul__ returns NotImplemented when b cannot be coerced; surface
    # that as a TypeError rather than leaking the sentinel.
    if product is NotImplemented:
        raise TypeError("Unable to convert %s to Decimal" % b)
    return product
def next_minus(self, a):
    """Return the largest representable number smaller than a.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> ExtendedContext.next_minus(Decimal('1'))
    Decimal('0.999999999')
    >>> c.next_minus(Decimal('Infinity'))
    Decimal('9.99999999E+999')
    """
    return _convert_other(a, raiseit=True).next_minus(context=self)
def next_plus(self, a):
    """Return the smallest representable number larger than a.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> ExtendedContext.next_plus(Decimal('1'))
    Decimal('1.00000001')
    >>> c.next_plus(Decimal('-Infinity'))
    Decimal('-9.99999999E+999')
    """
    return _convert_other(a, raiseit=True).next_plus(context=self)
def next_toward(self, a, b):
    """Return the number closest to a that lies in the direction of b.

    The result is the closest representable number to the first
    operand (but not the first operand itself) in the direction of
    the second operand, unless both operands have the same value.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> c.next_toward(Decimal('1'), Decimal('2'))
    Decimal('1.00000001')
    >>> c.next_toward(Decimal('1'), Decimal('0'))
    Decimal('0.999999999')
    >>> c.next_toward(Decimal('0.00'), Decimal('-0.0000'))
    Decimal('-0.00')
    """
    return _convert_other(a, raiseit=True).next_toward(b, context=self)
def normalize(self, a):
    """Reduce the operand to its simplest form.

    Essentially a plus operation with all trailing zeros removed from
    the result.

    >>> ExtendedContext.normalize(Decimal('1.200'))
    Decimal('1.2')
    >>> ExtendedContext.normalize(Decimal('120.00'))
    Decimal('1.2E+2')
    >>> ExtendedContext.normalize(Decimal('0.00'))
    Decimal('0')
    """
    return _convert_other(a, raiseit=True).normalize(context=self)
def number_class(self, a):
    """Return a string indicating the class of the operand.

    The result is one of: '-Infinity', '-Normal', '-Subnormal',
    '-Zero', '+Zero', '+Subnormal', '+Normal', '+Infinity',
    'NaN', 'sNaN'.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> c.number_class(Decimal('2.50'))
    '+Normal'
    >>> c.number_class(Decimal('0.1E-999'))
    '+Subnormal'
    >>> c.number_class(Decimal('-0'))
    '-Zero'
    >>> c.number_class(Decimal('-NaN'))
    'NaN'
    >>> c.number_class(Decimal('sNaN'))
    'sNaN'
    """
    return _convert_other(a, raiseit=True).number_class(context=self)
def plus(self, a):
    """Unary prefix plus, as in Python.

    Evaluated with the same rules as add: plus(a) is computed as
    add('0', a) where the '0' has the same exponent as the operand,
    so the result is rounded to the current context.

    >>> ExtendedContext.plus(Decimal('-1.3'))
    Decimal('-1.3')
    >>> ExtendedContext.plus(-1)
    Decimal('-1')
    """
    return _convert_other(a, raiseit=True).__pos__(context=self)
def power(self, a, b, modulo=None):
    """Raise a to the power of b, reduced modulo `modulo` if given.

    With two arguments, compute a**b.  If a is negative then b must
    be integral.  The result will be inexact unless b is integral and
    the result is finite and can be expressed exactly in 'precision'
    digits.

    With three arguments, compute (a**b) % modulo.  For this form all
    three arguments must be integral, b must be nonnegative, at least
    one of a or b must be nonzero, and modulo must be nonzero with at
    most 'precision' digits.  The result equals (a**b) % modulo
    computed at unbounded precision, but is obtained more efficiently
    and is always exact.

    >>> c = ExtendedContext.copy()
    >>> c.Emin = -999
    >>> c.Emax = 999
    >>> c.power(Decimal('2'), Decimal('-3'))
    Decimal('0.125')
    >>> c.power(Decimal('10'), Decimal('0.301029996'))
    Decimal('2.00000000')
    >>> c.power(Decimal('0'), Decimal('0'))
    Decimal('NaN')
    >>> c.power(Decimal('3'), Decimal('7'), Decimal('16'))
    Decimal('11')
    >>> c.power(Decimal('23E12345'), Decimal('67E189'), Decimal('123456789'))
    Decimal('11729830')
    >>> ExtendedContext.power(7, 7)
    Decimal('823543')
    """
    result = _convert_other(a, raiseit=True).__pow__(b, modulo, context=self)
    # __pow__ returns NotImplemented when b cannot be coerced; surface
    # that as a TypeError rather than leaking the sentinel.
    if result is NotImplemented:
        raise TypeError("Unable to convert %s to Decimal" % b)
    return result
def quantize(self, a, b):
    """Return a value equal to 'a' (rounded), with the exponent of 'b'.

    The coefficient of the result is derived from that of the
    left-hand operand.  It may be rounded using the current rounding
    setting (if the exponent is being increased), multiplied by a
    positive power of ten (if the exponent is being decreased), or
    left unchanged (if the exponent already matches the right-hand
    operand's).

    Unlike other operations, if the length of the coefficient after
    the quantize operation would be greater than precision, an
    Invalid operation condition is raised, guaranteeing that (absent
    an error condition) the result's exponent equals the right-hand
    operand's.  Also unlike other operations, quantize never raises
    Underflow, even if the result is subnormal and inexact.

    >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.001'))
    Decimal('2.170')
    >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.1'))
    Decimal('2.2')
    >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+1'))
    Decimal('0E+1')
    >>> ExtendedContext.quantize(Decimal('2'), Decimal('Infinity'))
    Decimal('NaN')
    >>> ExtendedContext.quantize(Decimal('+35236450.6'), Decimal('1e-2'))
    Decimal('NaN')
    >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+1'))
    Decimal('2.2E+2')
    """
    return _convert_other(a, raiseit=True).quantize(b, context=self)
def radix(self):
    """Return Decimal(10): the radix of this arithmetic is always ten.

    >>> ExtendedContext.radix()
    Decimal('10')
    """
    return Decimal(10)
def remainder(self, a, b):
    """Return the remainder from integer division.

    The result is the residue of the dividend after integer division
    as described for divide-integer, rounded to precision digits if
    necessary.  The sign of a non-zero result matches that of the
    original dividend.

    This operation fails under the same conditions as integer
    division (that is, if integer division on the same two operands
    would fail, the remainder cannot be calculated).

    >>> ExtendedContext.remainder(Decimal('-10'), Decimal('3'))
    Decimal('-1')
    >>> ExtendedContext.remainder(Decimal('3.6'), Decimal('1.3'))
    Decimal('1.0')
    >>> ExtendedContext.remainder(22, 6)
    Decimal('4')
    """
    residue = _convert_other(a, raiseit=True).__mod__(b, context=self)
    # __mod__ returns NotImplemented when b cannot be coerced; surface
    # that as a TypeError rather than leaking the sentinel.
    if residue is NotImplemented:
        raise TypeError("Unable to convert %s to Decimal" % b)
    return residue
def remainder_near(self, a, b):
    """Return "a - b * n", where n is the integer nearest the exact
    value of "a / b" (if two integers are equally near, the even one
    is chosen).  If the result is equal to 0 then its sign will be
    the sign of a.

    This operation fails under the same conditions as integer
    division (that is, if integer division on the same two operands
    would fail, the remainder cannot be calculated).

    >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3'))
    Decimal('-0.9')
    >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6'))
    Decimal('-2')
    >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3'))
    Decimal('-0.3')
    >>> ExtendedContext.remainder_near(3, 11)
    Decimal('3')
    """
    return _convert_other(a, raiseit=True).remainder_near(b, context=self)
def rotate(self, a, b):
    """Return a rotated copy of a, b times.

    The coefficient of the result is a rotated copy of the digits in
    the coefficient of the first operand.  The number of places of
    rotation is taken from the absolute value of the second operand,
    rotating left when the second operand is positive and right
    otherwise.

    >>> ExtendedContext.rotate(Decimal('34'), Decimal('8'))
    Decimal('400000003')
    >>> ExtendedContext.rotate(Decimal('123456789'), Decimal('-2'))
    Decimal('891234567')
    >>> ExtendedContext.rotate(Decimal('123456789'), Decimal('+2'))
    Decimal('345678912')
    """
    return _convert_other(a, raiseit=True).rotate(b, context=self)
def same_quantum(self, a, b):
    """Return True if the two operands have the same exponent.

    The result is never affected by either the sign or the
    coefficient of either operand.

    >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.01'))
    True
    >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('1'))
    False
    >>> ExtendedContext.same_quantum(Decimal('Inf'), Decimal('-Inf'))
    True
    """
    return _convert_other(a, raiseit=True).same_quantum(b)
def scaleb(self, a, b):
    """Return the first operand with the second value added to its
    exponent.

    >>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('-2'))
    Decimal('0.0750')
    >>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('3'))
    Decimal('7.50E+3')
    >>> ExtendedContext.scaleb(1, 4)
    Decimal('1E+4')
    """
    return _convert_other(a, raiseit=True).scaleb(b, context=self)
def shift(self, a, b):
    """Return a shifted copy of a, b times.

    The coefficient of the result is a shifted copy of the digits in
    the coefficient of the first operand.  The number of places to
    shift is taken from the absolute value of the second operand,
    shifting left when the second operand is positive and right
    otherwise.  Digits shifted into the coefficient are zeros.

    >>> ExtendedContext.shift(Decimal('34'), Decimal('8'))
    Decimal('400000000')
    >>> ExtendedContext.shift(Decimal('123456789'), Decimal('-2'))
    Decimal('1234567')
    >>> ExtendedContext.shift(Decimal('123456789'), Decimal('+2'))
    Decimal('345678900')
    """
    return _convert_other(a, raiseit=True).shift(b, context=self)
def sqrt(self, a):
    """Square root of a non-negative number to context precision.

    If the result must be inexact, it is rounded using the
    round-half-even algorithm.

    >>> ExtendedContext.sqrt(Decimal('-0'))
    Decimal('-0')
    >>> ExtendedContext.sqrt(Decimal('1.00'))
    Decimal('1.0')
    >>> ExtendedContext.sqrt(Decimal('7'))
    Decimal('2.64575131')
    >>> ExtendedContext.sqrt(2)
    Decimal('1.41421356')
    """
    return _convert_other(a, raiseit=True).sqrt(context=self)
def subtract(self, a, b):
    """Return the difference between the two operands.

    >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.07'))
    Decimal('0.23')
    >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.30'))
    Decimal('0.00')
    >>> ExtendedContext.subtract(8, 5)
    Decimal('3')
    """
    difference = _convert_other(a, raiseit=True).__sub__(b, context=self)
    # __sub__ returns NotImplemented when b cannot be coerced; surface
    # that as a TypeError rather than leaking the sentinel.
    if difference is NotImplemented:
        raise TypeError("Unable to convert %s to Decimal" % b)
    return difference
def to_eng_string(self, a):
    """Convert to a string, using engineering notation if an exponent
    is needed.

    Engineering notation has an exponent which is a multiple of 3.
    This can leave up to 3 digits to the left of the decimal place
    and may require the addition of either one or two trailing zeros.
    The operation is not affected by the context.

    >>> ExtendedContext.to_eng_string(Decimal('123E+1'))
    '1.23E+3'
    >>> ExtendedContext.to_eng_string(Decimal('7E-7'))
    '700E-9'
    >>> ExtendedContext.to_eng_string(Decimal('0E+1'))
    '0.00E+3'
    """
    return _convert_other(a, raiseit=True).to_eng_string(context=self)
def to_sci_string(self, a):
    """Convert a number to a string using scientific notation.

    The operation is not affected by the context.
    """
    return _convert_other(a, raiseit=True).__str__(context=self)
def to_integral_exact(self, a):
    """Round to an integer, signaling Inexact/Rounded as appropriate.

    When the operand has a negative exponent, the result is the same
    as using the quantize() operation with the operand as the
    left-hand operand, 1E+0 as the right-hand operand, and the
    operand's precision as the precision setting; the Inexact and
    Rounded flags are allowed in this operation.  The rounding mode
    is taken from the context.

    >>> ExtendedContext.to_integral_exact(Decimal('2.1'))
    Decimal('2')
    >>> ExtendedContext.to_integral_exact(Decimal('-101.5'))
    Decimal('-102')
    >>> ExtendedContext.to_integral_exact(Decimal('10E+5'))
    Decimal('1.0E+6')
    """
    return _convert_other(a, raiseit=True).to_integral_exact(context=self)
def to_integral_value(self, a):
    """Round to an integer without signaling Inexact or Rounded.

    When the operand has a negative exponent, the result is the same
    as using the quantize() operation with the operand as the
    left-hand operand, 1E+0 as the right-hand operand, and the
    operand's precision as the precision setting, except that no
    flags will be set.  The rounding mode is taken from the context.

    >>> ExtendedContext.to_integral_value(Decimal('2.1'))
    Decimal('2')
    >>> ExtendedContext.to_integral_value(Decimal('-101.5'))
    Decimal('-102')
    >>> ExtendedContext.to_integral_value(Decimal('10E+5'))
    Decimal('1.0E+6')
    """
    return _convert_other(a, raiseit=True).to_integral_value(context=self)

# Backward-compatible alias for the pre-rename spelling of this operation.
to_integral = to_integral_value
class _WorkRep(object):
__slots__ = ('sign','int','exp')
# sign: 0 or 1
# int: int
# exp: None, int, or string
def __init__(self, value=None):
if value is None:
self.sign = None
self.int = 0
self.exp = None
elif isinstance(value, Decimal):
self.sign = value._sign
self.int = int(value._int)
self.exp = value._exp
else:
# assert isinstance(value, tuple)
self.sign = value[0]
self.int = value[1]
self.exp = value[2]
def __repr__(self):
return "(%r, %r, %r)" % (self.sign, self.int, self.exp)
__str__ = __repr__
def _normalize(op1, op2, prec = 0):
    """Normalizes op1, op2 to have the same exp and length of coefficient.

    Done during addition.

    NOTE: op1 and op2 are _WorkRep instances and are mutated in place;
    they are also returned for convenience.  `prec` is the context
    precision used to bound how far the smaller operand is stretched.
    """
    # Identify the operand with the larger exponent (tmp); it is the one
    # whose coefficient gets padded with zeros to match the other's exp.
    if op1.exp < op2.exp:
        tmp = op2
        other = op1
    else:
        tmp = op1
        other = op2

    # Let exp = min(tmp.exp - 1, tmp.adjusted() - precision - 1).
    # Then adding 10**exp to tmp has the same effect (after rounding)
    # as adding any positive quantity smaller than 10**exp; similarly
    # for subtraction.  So if other is smaller than 10**exp we replace
    # it with 10**exp.  This avoids tmp.exp - other.exp getting too large.
    tmp_len = len(str(tmp.int))
    other_len = len(str(other.int))
    exp = tmp.exp + min(-1, tmp_len - prec - 2)
    if other_len + other.exp - 1 < exp:
        # `other` is negligible at this precision: collapse it to a
        # single unit at exponent `exp` so the padding below stays small.
        other.int = 1
        other.exp = exp

    # Pad tmp's coefficient with zeros so both operands share other.exp.
    tmp.int *= 10 ** (tmp.exp - other.exp)
    tmp.exp = other.exp
    return op1, op2
##### Integer arithmetic functions used by ln, log10, exp and __pow__ #####

# Number of bits needed to represent the absolute value of an integer
# (0 -> 0).  Bound once at module level so hot loops pay a single lookup.
_nbits = int.bit_length
def _decimal_lshift_exact(n, e):
""" Given integers n and e, return n * 10**e if it's an integer, else None.
The computation is designed to avoid computing large powers of 10
unnecessarily.
>>> _decimal_lshift_exact(3, 4)
30000
>>> _decimal_lshift_exact(300, -999999999) # returns None
"""
if n == 0:
return 0
elif e >= 0:
return n * 10**e
else:
# val_n = largest power of 10 dividing n.
str_n = str(abs(n))
val_n = len(str_n) - len(str_n.rstrip('0'))
return None if val_n < -e else n // 10**-e
def _sqrt_nearest(n, a):
"""Closest integer to the square root of the positive integer n. a is
an initial approximation to the square root. Any positive integer
will do for a, but the closer a is to the square root of n the
faster convergence will be.
"""
if n <= 0 or a <= 0:
raise ValueError("Both arguments to _sqrt_nearest should be positive.")
b=0
while a != b:
b, a = a, a--n//a>>1
return a
def _rshift_nearest(x, shift):
"""Given an integer x and a nonnegative integer shift, return closest
integer to x / 2**shift; use round-to-even in case of a tie.
"""
b, q = 1 << shift, x >> shift
return q + (2*(x & (b-1)) + (q&1) > b)
def _div_nearest(a, b):
"""Closest integer to a/b, a and b positive integers; rounds to even
in the case of a tie.
"""
q, r = divmod(a, b)
return q + (2*r + (q&1) > b)
def _ilog(x, M, L = 8):
    """Integer approximation to M*log(x/M), with absolute error boundable
    in terms only of x/M.

    Given positive integers x and M, return an integer approximation to
    M * log(x/M).  For L = 8 and 0.1 <= x/M <= 10 the difference
    between the approximation and the exact result is at most 22.  For
    L = 8 and 1.0 <= x/M <= 10.0 the difference is at most 15.  In
    both cases these are upper bounds on the error; it will usually be
    much smaller."""
    # Strategy: with log1p(t) = log(1+t) we have
    # log(x/M) = log1p((x-M)/M), and the reduction
    #
    #    log1p(y) = 2*log1p(y/(1+sqrt(1+y)))
    #
    # is applied repeatedly until log1p's argument is smaller than
    # 2**-L in absolute value; a truncated Taylor series
    #
    #    log1p(y) ~ y - y**2/2 + y**3/3 - ... - (-y)**T/T
    #
    # then finishes the job.  All arithmetic is fixed point: a real
    # number z is represented by an integer approximation to z*M
    # (scaled by an extra 2**R after R reduction steps, to avoid loss
    # of precision).
    y = x - M

    # Argument reduction; R counts the reductions performed.  Stop once
    # |y / 2**R| (in the fixed-point scale) drops below M * 2**-L.
    R = 0
    while True:
        scaled = abs(y) << (L - R) if R <= L else abs(y) >> (R - L)
        if scaled < M:
            break
        root = _sqrt_nearest(M * (M + _rshift_nearest(y, R)), M)
        y = _div_nearest((M * y) << 1, M + root)
        R += 1

    # Taylor series with T = ceil(10*len(str(M)) / (3*L)) terms, enough
    # that the truncation error is negligible at this precision.
    T = -int(-10 * len(str(M)) // (3 * L))
    yshift = _rshift_nearest(y, R)
    w = _div_nearest(M, T)
    for k in range(T - 1, 0, -1):
        w = _div_nearest(M, k) - _div_nearest(yshift * w, M)

    return _div_nearest(w * y, M)
def _dlog10(c, e, p):
    """Given integers c, e and p with c > 0, p >= 0, compute an integer
    approximation to 10**p * log10(c*10**e), with an absolute error of
    at most 1.  Assumes that c*10**e is not exactly 1."""

    # increase precision by 2; compensate for this by dividing
    # final result by 100
    p += 2

    # write c*10**e as d*10**f with either:
    #   f >= 0 and 1 <= d <= 10, or
    #   f <= 0 and 0.1 <= d <= 1.
    # Thus for c*10**e close to 1, f = 0
    l = len(str(c))
    f = e+l - (e+l >= 1)

    if p > 0:
        M = 10**p
        k = e+p-f
        # rescale c so that _ilog sees an argument near M (i.e. d ~ 1..10)
        if k >= 0:
            c *= 10**k
        else:
            c = _div_nearest(c, 10**-k)

        log_d = _ilog(c, M)  # error < 5 + 22 = 27
        log_10 = _log10_digits(p)  # error < 1
        # convert natural log to base-10 log: log10(d) = log(d)/log(10)
        log_d = _div_nearest(log_d*M, log_10)
        log_tenpower = f*M  # exact
    else:
        # p <= 0: approximating log10(d) by 0 is already accurate enough
        log_d = 0  # error < 2.31
        log_tenpower = _div_nearest(f, 10**-p)  # error < 0.5

    # undo the two extra digits of precision added at the top
    return _div_nearest(log_tenpower+log_d, 100)
def _dlog(c, e, p):
    """Given integers c, e and p with c > 0, compute an integer
    approximation to 10**p * log(c*10**e), with an absolute error of
    at most 1.  Assumes that c*10**e is not exactly 1."""

    # Increase precision by 2. The precision increase is compensated
    # for at the end with a division by 100.
    p += 2

    # rewrite c*10**e as d*10**f with either f >= 0 and 1 <= d <= 10,
    # or f <= 0 and 0.1 <= d <= 1.  Then we can compute 10**p * log(c*10**e)
    # as 10**p * log(d) + 10**p*f * log(10).
    l = len(str(c))
    f = e+l - (e+l >= 1)

    # compute approximation to 10**p*log(d), with error < 27
    if p > 0:
        k = e+p-f
        if k >= 0:
            c *= 10**k
        else:
            c = _div_nearest(c, 10**-k)  # error of <= 0.5 in c

        # _ilog magnifies existing error in c by a factor of at most 10
        log_d = _ilog(c, 10**p)  # error < 5 + 22 = 27
    else:
        # p <= 0: just approximate the whole thing by 0; error < 2.31
        log_d = 0

    # compute approximation to f*10**p*log(10), with error < 11.
    if f:
        extra = len(str(abs(f)))-1
        if p + extra >= 0:
            # error in f * _log10_digits(p+extra) < |f| * 1 = |f|
            # after division, error < |f|/10**extra + 0.5 < 10 + 0.5 < 11
            f_log_ten = _div_nearest(f*_log10_digits(p+extra), 10**extra)
        else:
            f_log_ten = 0
    else:
        f_log_ten = 0

    # error in sum < 11+27 = 38; error after division < 0.38 + 0.5 < 1
    return _div_nearest(f_log_ten + log_d, 100)
class _Log10Memoize(object):
    """Class to compute, store, and allow retrieval of, digits of the
    constant log(10) = 2.302585....  This constant is needed by
    Decimal.ln, Decimal.log10, Decimal.exp and Decimal.__pow__."""
    def __init__(self):
        # seed with 47 known-correct digits; extended on demand by getdigits
        self.digits = "23025850929940456840179914546843642076011014886"

    def getdigits(self, p):
        """Given an integer p >= 0, return floor(10**p * log(10)).

        For example, self.getdigits(3) returns 2302.
        """
        # digits are stored as a string, for quick conversion to
        # integer in the case that we've already computed enough
        # digits; the stored digits should always be correct
        # (truncated, not rounded to nearest).
        if p < 0:
            raise ValueError("p should be nonnegative")

        if p >= len(self.digits):
            # compute p+3, p+6, p+9, ... digits; continue until at
            # least one of the extra digits is nonzero
            extra = 3
            while True:
                # compute p+extra digits, correct to within 1ulp
                M = 10**(p+extra+2)
                digits = str(_div_nearest(_ilog(10*M, M), 100))
                if digits[-extra:] != '0'*extra:
                    break
                extra += 3
            # keep all reliable digits so far; remove trailing zeros
            # and next nonzero digit
            self.digits = digits.rstrip('0')[:-1]

        return int(self.digits[:p+1])
# Module-level convenience: _log10_digits(p) returns floor(10**p * log(10)),
# backed by a single shared memoizing instance.
_log10_digits = _Log10Memoize().getdigits
def _iexp(x, M, L=8):
    """Given integers x and M, M > 0, such that x/M is small in absolute
    value, compute an integer approximation to M*exp(x/M).  For 0 <=
    x/M <= 2.4, the absolute error in the result is bounded by 60 (and
    is usually much smaller)."""

    # Algorithm: to compute exp(z) for a real number z, first divide z
    # by a suitable power R of 2 so that |z/2**R| < 2**-L.  Then
    # compute expm1(z/2**R) = exp(z/2**R) - 1 using the usual Taylor
    # series
    #
    #     expm1(x) = x + x**2/2! + x**3/3! + ...
    #
    # Now use the identity
    #
    #     expm1(2x) = expm1(x)*(expm1(x)+2)
    #
    # R times to compute the sequence expm1(z/2**R),
    # expm1(z/2**(R-1)), ... , exp(z/2), exp(z).

    # Find R such that x/2**R/M <= 2**-L
    R = _nbits((x<<L)//M)

    # Taylor series.  (2**L)**T > M
    T = -int(-10*len(str(M))//(3*L))
    y = _div_nearest(x, T)
    Mshift = M<<R
    # Horner-style evaluation of the truncated series, in fixed point
    for i in range(T-1, 0, -1):
        y = _div_nearest(x*(Mshift + y), Mshift * i)

    # Expansion: apply the doubling identity R times to undo the scaling
    for k in range(R-1, -1, -1):
        Mshift = M<<(k+2)
        y = _div_nearest(y*(y+Mshift), Mshift)

    return M+y
def _dexp(c, e, p):
    """Compute an approximation to exp(c*10**e), with p decimal places of
    precision.

    Returns integers d, f such that:

      10**(p-1) <= d <= 10**p, and
      (d-1)*10**f < exp(c*10**e) < (d+1)*10**f

    In other words, d*10**f is an approximation to exp(c*10**e) with p
    digits of precision, and with an error in d of at most 1.  This is
    almost, but not quite, the same as the error being < 1ulp: when d
    = 10**(p-1) the error could be up to 10 ulp."""

    # we'll call iexp with M = 10**(p+2), giving p+3 digits of precision
    p += 2

    # compute log(10) with extra precision = adjusted exponent of c*10**e
    extra = max(0, e + len(str(c)) - 1)
    q = p + extra

    # compute quotient c*10**e/(log(10)) = c*10**(e+q)/(log(10)*10**q),
    # rounding down
    shift = e+q
    if shift >= 0:
        cshift = c*10**shift
    else:
        cshift = c//10**-shift
    quot, rem = divmod(cshift, _log10_digits(q))

    # reduce remainder back to original precision
    rem = _div_nearest(rem, 10**extra)

    # error in result of _iexp < 120;  error after division < 0.62
    return _div_nearest(_iexp(rem, 10**p), 1000), quot - p + 3
def _dpower(xc, xe, yc, ye, p):
    """Given integers xc, xe, yc and ye representing Decimals x = xc*10**xe and
    y = yc*10**ye, compute x**y.  Returns a pair of integers (c, e) such that:

      10**(p-1) <= c <= 10**p, and
      (c-1)*10**e < x**y < (c+1)*10**e

    in other words, c*10**e is an approximation to x**y with p digits
    of precision, and with an error in c of at most 1.  (This is
    almost, but not quite, the same as the error being < 1ulp: when c
    == 10**(p-1) we can only guarantee error < 10ulp.)

    We assume that: x is positive and not equal to 1, and y is nonzero.
    """

    # Find b such that 10**(b-1) <= |y| <= 10**b
    b = len(str(abs(yc))) + ye

    # log(x) = lxc*10**(-p-b-1), to p+b+1 places after the decimal point
    lxc = _dlog(xc, xe, p+b+1)

    # compute product y*log(x) = yc*lxc*10**(-p-b-1+ye) = pc*10**(-p-1)
    shift = ye-b
    if shift >= 0:
        pc = lxc*yc*10**shift
    else:
        pc = _div_nearest(lxc*yc, 10**-shift)

    if pc == 0:
        # we prefer a result that isn't exactly 1; this makes it
        # easier to compute a correctly rounded result in __pow__
        if ((len(str(xc)) + xe >= 1) == (yc > 0)):  # if x**y > 1:
            coeff, exp = 10**(p-1)+1, 1-p
        else:
            coeff, exp = 10**p-1, -p
    else:
        # exponentiate: x**y = exp(y*log(x)), computed to p+1 digits,
        # then rounded back down to p digits
        coeff, exp = _dexp(pc, -(p+1), p+1)
        coeff = _div_nearest(coeff, 10)
        exp += 1

    return coeff, exp
def _log10_lb(c, correction = {
        '1': 100, '2': 70, '3': 53, '4': 40, '5': 31,
        '6': 23, '7': 16, '8': 10, '9': 5}):
    """Compute a lower bound for 100*log10(c) for a positive integer c.

    The (intentionally shared, read-only) default dict maps a leading
    digit d to 100 - floor(100*log10(d)), so that
    100*len(str(c)) - correction[first digit] <= 100*log10(c).
    """
    if c <= 0:
        # Bug fix: the guard rejects 0 as well, so the argument must be
        # strictly positive; the old message incorrectly said "nonnegative".
        raise ValueError("The argument to _log10_lb should be positive.")
    str_c = str(c)
    return 100*len(str_c) - correction[str_c[0]]
##### Helper Functions ####################################################
def _convert_other(other, raiseit=False, allow_float=False):
    """Convert other to Decimal.

    Verifies that it's ok to use in an implicit construction.
    If allow_float is true, allow conversion from float;  this
    is used in the comparison methods (__eq__ and friends).
    """
    # Pass Decimals straight through; promote ints (and, when requested,
    # floats) to Decimal; anything else either raises or signals
    # NotImplemented so Python can try the reflected operation.
    if isinstance(other, Decimal):
        result = other
    elif isinstance(other, int):
        result = Decimal(other)
    elif allow_float and isinstance(other, float):
        result = Decimal.from_float(other)
    elif raiseit:
        raise TypeError("Unable to convert %s to Decimal" % other)
    else:
        result = NotImplemented
    return result
def _convert_for_comparison(self, other, equality_op=False):
    """Given a Decimal instance self and a Python object other, return
    a pair (s, o) of Decimal instances such that "s op o" is
    equivalent to "self op other" for any of the 6 comparison
    operators "op".

    Returns (NotImplemented, NotImplemented) when no conversion applies.
    """
    if isinstance(other, Decimal):
        return self, other

    # Comparison with a Rational instance (also includes integers):
    # self op n/d <=> self*d op n (for n and d integers, d positive).
    # A NaN or infinity can be left unchanged without affecting the
    # comparison result.
    if isinstance(other, _numbers.Rational):
        if not self._is_special:
            # scale self's coefficient by other's denominator so both
            # sides can be compared as integers
            self = _dec_from_triple(self._sign,
                                    str(int(self._int) * other.denominator),
                                    self._exp)
        return self, Decimal(other.numerator)

    # Comparisons with float and complex types.  == and != comparisons
    # with complex numbers should succeed, returning either True or False
    # as appropriate.  Other comparisons return NotImplemented.
    if equality_op and isinstance(other, _numbers.Complex) and other.imag == 0:
        other = other.real

    if isinstance(other, float):
        context = getcontext()
        # equality merely raises the FloatOperation flag; ordered
        # comparisons signal it (raising if the trap is enabled)
        if equality_op:
            context.flags[FloatOperation] = 1
        else:
            context._raise_error(FloatOperation,
                "strict semantics for mixing floats and Decimals are enabled")
        return self, Decimal.from_float(other)
    return NotImplemented, NotImplemented
##### Setup Specific Contexts ############################################

# The default context prototype used by Context()
# Is mutable, so that new contexts can have different default values
DefaultContext = Context(
    prec=28, rounding=ROUND_HALF_EVEN,
    traps=[DivisionByZero, Overflow, InvalidOperation],
    flags=[],
    Emax=999999,
    Emin=-999999,
    capitals=1,
    clamp=0
)

# Pre-made alternate contexts offered by the specification
# Don't change these; the user should be able to select these
# contexts and be able to reproduce results from other implementations
# of the spec.

# "Basic" context from the General Decimal Arithmetic specification:
# 9 digits, half-up rounding, most conditions trapped.
BasicContext = Context(
    prec=9, rounding=ROUND_HALF_UP,
    traps=[DivisionByZero, Overflow, InvalidOperation, Clamped, Underflow],
    flags=[],
)

# "Extended" context: 9 digits, half-even rounding, no traps (all
# conditions are reported via flags only).
ExtendedContext = Context(
    prec=9, rounding=ROUND_HALF_EVEN,
    traps=[],
    flags=[],
)
##### crud for parsing strings #############################################
#
# Regular expression used for parsing numeric strings. Additional
# comments:
#
# 1. Uncomment the two '\s*' lines to allow leading and/or trailing
# whitespace. But note that the specification disallows whitespace in
# a numeric string.
#
# 2. For finite numbers (not infinities and NaNs) the body of the
# number between the optional sign and the optional exponent must have
# at least one decimal digit, possibly after the decimal point. The
# lookahead expression '(?=\d|\.\d)' checks this.
import re
_parser = re.compile(r""" # A numeric string consists of:
# \s*
(?P<sign>[-+])? # an optional sign, followed by either...
(
(?=\d|\.\d) # ...a number (with at least one digit)
(?P<int>\d*) # having a (possibly empty) integer part
(\.(?P<frac>\d*))? # followed by an optional fractional part
(E(?P<exp>[-+]?\d+))? # followed by an optional exponent, or...
|
Inf(inity)? # ...an infinity, or...
|
(?P<signal>s)? # ...an (optionally signaling)
NaN # NaN
(?P<diag>\d*) # with (possibly empty) diagnostic info.
)
# \s*
\Z
""", re.VERBOSE | re.IGNORECASE).match
_all_zeros = re.compile('0*$').match
_exact_half = re.compile('50*$').match
##### PEP3101 support functions ##############################################
# The functions in this section have little to do with the Decimal
# class, and could potentially be reused or adapted for other pure
# Python numeric classes that want to implement __format__
#
# A format specifier for Decimal looks like:
#
# [[fill]align][sign][#][0][minimumwidth][,][.precision][type]
# Parses the standard [[fill]align][sign][#][0][minwidth][,][.prec][type]
# format specifier described in the comment block above.
_parse_format_specifier_regex = re.compile(r"""\A
(?:
   (?P<fill>.)?
   (?P<align>[<>=^])
)?
(?P<sign>[-+ ])?
(?P<alt>\#)?
(?P<zeropad>0)?
(?P<minimumwidth>(?!0)\d+)?
(?P<thousands_sep>,)?
(?:\.(?P<precision>0|(?!0)\d+))?
(?P<type>[eEfFgGn%])?
\Z
""", re.VERBOSE|re.DOTALL)

# 're' is no longer needed at module level once the patterns above exist
del re

# The locale module is only needed for the 'n' format specifier.  The
# rest of the PEP 3101 code functions quite happily without it, so we
# don't care too much if locale isn't present.
try:
    import locale as _locale
except ImportError:
    pass
def _parse_format_specifier(format_spec, _localeconv=None):
    """Parse and validate a format specifier.

    Turns a standard numeric format specifier into a dict, with the
    following entries:

      fill: fill character to pad field to minimum width
      align: alignment type, either '<', '>', '=' or '^'
      sign: either '+', '-' or ' '
      minimumwidth: nonnegative integer giving minimum width
      zeropad: boolean, indicating whether to pad with zeros
      thousands_sep: string to use as thousands separator, or ''
      grouping: grouping for thousands separators, in format
        used by localeconv
      decimal_point: string to use for decimal point
      precision: nonnegative integer giving precision, or None
      type: one of the characters 'eEfFgG%', or None

    The _localeconv parameter exists so tests can inject a fake
    locale.localeconv() result.

    Raises ValueError for an invalid or self-contradictory specifier.
    """
    m = _parse_format_specifier_regex.match(format_spec)
    if m is None:
        raise ValueError("Invalid format specifier: " + format_spec)

    # get the dictionary
    format_dict = m.groupdict()

    # zeropad; defaults for fill and alignment.  If zero padding
    # is requested, the fill and align fields should be absent.
    fill = format_dict['fill']
    align = format_dict['align']
    format_dict['zeropad'] = (format_dict['zeropad'] is not None)
    if format_dict['zeropad']:
        if fill is not None:
            raise ValueError("Fill character conflicts with '0'"
                             " in format specifier: " + format_spec)
        if align is not None:
            raise ValueError("Alignment conflicts with '0' in "
                             "format specifier: " + format_spec)
    format_dict['fill'] = fill or ' '
    # PEP 3101 originally specified that the default alignment should
    # be left;  it was later agreed that right-aligned makes more sense
    # for numeric types.  See http://bugs.python.org/issue6857.
    format_dict['align'] = align or '>'

    # default sign handling: '-' for negative, '' for positive
    if format_dict['sign'] is None:
        format_dict['sign'] = '-'

    # minimumwidth defaults to 0; precision remains None if not given
    format_dict['minimumwidth'] = int(format_dict['minimumwidth'] or '0')
    if format_dict['precision'] is not None:
        format_dict['precision'] = int(format_dict['precision'])

    # if format type is 'g' or 'G' then a precision of 0 makes little
    # sense; convert it to 1.  Same if format type is unspecified.
    if format_dict['precision'] == 0:
        if format_dict['type'] is None or format_dict['type'] in 'gGn':
            format_dict['precision'] = 1

    # determine thousands separator, grouping, and decimal separator, and
    # add appropriate entries to format_dict
    if format_dict['type'] == 'n':
        # apart from separators, 'n' behaves just like 'g'
        format_dict['type'] = 'g'
        if _localeconv is None:
            _localeconv = _locale.localeconv()
        if format_dict['thousands_sep'] is not None:
            raise ValueError("Explicit thousands separator conflicts with "
                             "'n' type in format specifier: " + format_spec)
        format_dict['thousands_sep'] = _localeconv['thousands_sep']
        format_dict['grouping'] = _localeconv['grouping']
        format_dict['decimal_point'] = _localeconv['decimal_point']
    else:
        if format_dict['thousands_sep'] is None:
            format_dict['thousands_sep'] = ''
        # non-locale formatting always groups in threes with '.' point
        format_dict['grouping'] = [3, 0]
        format_dict['decimal_point'] = '.'

    return format_dict
def _format_align(sign, body, spec):
    """Given an unpadded, non-aligned numeric string 'body' and sign
    string 'sign', add padding and alignment conforming to the given
    format specifier dictionary 'spec' (as produced by
    parse_format_specifier).
    """
    # Fill characters needed to reach the minimum width (may be empty).
    padding = spec['fill'] * (spec['minimumwidth'] - len(sign) - len(body))

    align = spec['align']
    if align == '<':
        return sign + body + padding
    if align == '>':
        return padding + sign + body
    if align == '=':
        # Padding goes between the sign and the digits (zero-pad style).
        return sign + padding + body
    if align == '^':
        half = len(padding) // 2
        return padding[:half] + sign + body + padding[half:]
    raise ValueError('Unrecognised alignment field')
def _group_lengths(grouping):
    """Convert a localeconv-style grouping into a (possibly infinite)
    iterable of integers representing group lengths.
    """
    # localeconv()['grouping'] takes one of three forms:
    #
    #   (1) an empty list (no grouping), or
    #   (2) a nonempty list of positive integers terminated by 0
    #       (the final explicit group length repeats forever), or
    #   (3) a list of positive integers terminated by locale.CHAR_MAX
    #       (no grouping beyond the listed groups).
    from itertools import chain, repeat

    if not grouping:
        return []
    terminator = grouping[-1]
    if terminator == 0 and len(grouping) >= 2:
        return chain(grouping[:-1], repeat(grouping[-2]))
    if terminator == _locale.CHAR_MAX:
        return grouping[:-1]
    raise ValueError('unrecognised format for grouping')
def _insert_thousands_sep(digits, spec, min_width=1):
    """Insert thousands separators into a digit string.

    spec is a dictionary whose keys should include 'thousands_sep' and
    'grouping'; typically it's the result of parsing the format
    specifier using _parse_format_specifier.

    The min_width keyword argument gives the minimum length of the
    result, which will be padded on the left with zeros if necessary.

    If necessary, the zero padding adds an extra '0' on the left to
    avoid a leading thousands separator.  For example, inserting
    commas every three digits in '123456', with min_width=8, gives
    '0,123,456', even though that has length 9.
    """

    sep = spec['thousands_sep']
    grouping = spec['grouping']

    # Peel groups off the right-hand end of 'digits', zero-padding each
    # group as needed to satisfy min_width.
    groups = []
    for l in _group_lengths(grouping):
        if l <= 0:
            raise ValueError("group length should be positive")
        # max(..., 1) forces at least 1 digit to the left of a separator
        l = min(max(len(digits), min_width, 1), l)
        groups.append('0'*(l - len(digits)) + digits[-l:])
        digits = digits[:-l]
        min_width -= l
        if not digits and min_width <= 0:
            break
        min_width -= len(sep)
    else:
        # group lengths exhausted (finite grouping, case (3) in
        # _group_lengths): everything left over forms one final group
        l = max(len(digits), min_width, 1)
        groups.append('0'*(l - len(digits)) + digits[-l:])
    return sep.join(reversed(groups))
def _format_sign(is_negative, spec):
    """Determine sign character."""
    if is_negative:
        return '-'
    # For nonnegative values, '+' and ' ' specifiers force that sign
    # character; the default '-' specifier shows nothing.
    return spec['sign'] if spec['sign'] in ' +' else ''
def _format_number(is_negative, intpart, fracpart, exp, spec):
    """Format a number, given the following data:

    is_negative: true if the number is negative, else false
    intpart: string of digits that must appear before the decimal point
    fracpart: string of digits that must come after the point
    exp: exponent, as an integer
    spec: dictionary resulting from parsing the format specifier

    This function uses the information in spec to:
      insert separators (decimal separator and thousands separators)
      format the sign
      format the exponent
      add trailing '%' for the '%' type
      zero-pad if necessary
      fill and align if necessary
    """

    sign = _format_sign(is_negative, spec)

    # '#' (alt) forces a decimal point even with an empty fraction
    if fracpart or spec['alt']:
        fracpart = spec['decimal_point'] + fracpart

    if exp != 0 or spec['type'] in 'eE':
        # choose the exponent letter matching the requested type's case
        echar = {'E': 'E', 'e': 'e', 'G': 'E', 'g': 'e'}[spec['type']]
        fracpart += "{0}{1:+}".format(echar, exp)
    if spec['type'] == '%':
        fracpart += '%'

    if spec['zeropad']:
        # leftover width after sign and fraction is zero-filled into intpart
        min_width = spec['minimumwidth'] - len(fracpart) - len(sign)
    else:
        min_width = 0
    intpart = _insert_thousands_sep(intpart, spec, min_width)

    return _format_align(sign, intpart+fracpart, spec)
##### Useful Constants (internal use only) ################################

# Reusable defaults
_Infinity = Decimal('Inf')
_NegativeInfinity = Decimal('-Inf')
_NaN = Decimal('NaN')
_Zero = Decimal(0)
_One = Decimal(1)
_NegativeOne = Decimal(-1)

# _SignedInfinity[sign] is infinity w/ that sign
_SignedInfinity = (_Infinity, _NegativeInfinity)

# Constants related to the hash implementation;  hash(x) is based
# on the reduction of x modulo _PyHASH_MODULUS
_PyHASH_MODULUS = sys.hash_info.modulus
# hash values to use for positive and negative infinities, and nans
_PyHASH_INF = sys.hash_info.inf
_PyHASH_NAN = sys.hash_info.nan

# _PyHASH_10INV is the inverse of 10 modulo the prime _PyHASH_MODULUS
_PyHASH_10INV = pow(10, _PyHASH_MODULUS - 2, _PyHASH_MODULUS)
# sys was only needed for the hash constants above
del sys
| {
"content_hash": "4658e5819c246cc2d3ac4813a4f32686",
"timestamp": "",
"source": "github",
"line_count": 6434,
"max_line_length": 100,
"avg_line_length": 35.68542119987566,
"alnum_prop": 0.5518858885017421,
"repo_name": "MalloyPower/parsing-python",
"id": "6318a49ce706f0a4c1a809d57cac03875b031262",
"size": "230222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-3.6.0/Lib/_pydecimal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
# Module-level constant exposed through an accessor function.
value = 1


def get_value():
    """Return the module-level ``value`` constant."""
    return value
| {
"content_hash": "e0b1f06fe6d97cb1213e119f9c873538",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 16,
"avg_line_length": 11.25,
"alnum_prop": 0.6222222222222222,
"repo_name": "Hasimir/pyjs",
"id": "7358ac1faa5848415ff7662a887a828dfcc74ee2",
"size": "237",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "examples/libtest/builtin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4640"
},
{
"name": "Groff",
"bytes": "6633"
},
{
"name": "HTML",
"bytes": "10106"
},
{
"name": "JavaScript",
"bytes": "63385"
},
{
"name": "Makefile",
"bytes": "453"
},
{
"name": "Python",
"bytes": "5515375"
},
{
"name": "Shell",
"bytes": "4264"
}
],
"symlink_target": ""
} |
'''
Created on 2016. 11. 19
Updated on 2016. 01. 09
'''
from __future__ import print_function
import os
import shutil
import codecs
import cgi
import re
from dateutil import parser as dateparser
from commons import VersionUtil
from pytz import timezone
from utils import Progress
from bs4 import BeautifulSoup
class BugFilter:
    '''
    Extract bug reports to satisfy our criterions.

    Each parsed report is a dict of the form:
        {
            'description': '',
            'id': '',
            'summary': '',
            'resolution': '',
            'opendate': '',
            'fixdate': '',
            'version': '',
            'fixVersion': '',
            'type': '',
            'links': [
                {'type': '', 'description': '', 'id': number}, ...
            ]
        }
    '''
    __name__ = u'BugFilter'   # display name used in progress/log messages
    ProjectName = u''         # project key (filename prefix before the first '-')
    SourceBugPath = u''       # root directory containing raw bug report XML files
    gitlogs = None            # bug ID -> list of commit log entries (set in run)
    gitversions = None        # commit hash -> version string (set in run)
def __init__(self, _projectName, _srcbugPath):
self.__name__ = _projectName
self.ProjectName = _projectName
self.SourceBugPath = _srcbugPath
pass
@staticmethod
def unhash_folder(_src, _dest):
    '''
    hashed folder ==> unhashed folder
    example) path/aa/00/filename ==> path/filename
    :param _src: root of the hashed directory tree
    :param _dest: flat destination directory (created if missing)
    :return: None
    '''
    if os.path.exists(_dest) is False:
        os.makedirs(_dest)

    progress = Progress(u'Bug reports is merging', 20, 1000, False)
    progress.start()
    for root, dirs, files in os.walk(_src):
        for f in files:
            # flatten: copy every file directly into _dest
            # (files with the same name in different subfolders overwrite)
            shutil.copy(os.path.join(root, f), os.path.join(_dest, f))
            progress.check()
    progress.done()
# def show_versions(self, _bugitems):
# for bug in _bugitems:
# #if bug['id'] not in ['DATAREST-216', 'DATAREST-199']: continue
# print(bug['id'] + u':' + bug['version'])
#
# print(u'\n\n\n\n\n\n')
def run(self, _gitlogs, _gitversions):
    """
    Build the filtered bug list: load raw reports, attach fixed files
    from the git log, resolve duplicate groups, drop incomplete reports
    and normalise each report to its minimum version.
    :param _gitlogs: dict mapping bug IDs to lists of commit log entries
    :param _gitversions: dict mapping commit hashes to version strings
    :return: (bug list sorted by fix date ascending, duplicate groups)
    """
    self.gitlogs = _gitlogs
    self.gitversions = _gitversions
    bugitems = self.loads()
    bugitems = self.link_fixedFiles(bugitems)
    bugitems, dupgroups = self.make_dupgroups(bugitems)
    bugitems = self.filter(bugitems)
    bugitems.sort(self.cmp)  # Python 2 cmp-style sort; fixed time order ASC
    self.make_minimumVersion(bugitems)
    return bugitems, dupgroups
def loads(self):
    '''
    loads a raw file of bug report
    Walks SourceBugPath and parses every XML file whose name prefix
    (the part before the first '-') matches ProjectName.
    :return: list of parsed bug report dicts
    '''
    fileConnt = self.getFileCounts(self.SourceBugPath)
    bugitems = []

    # show progress
    progress = Progress(u'[%s] Loading bug reports'%self.__name__, 2, 10, True)
    progress.set_upperbound(fileConnt)
    progress.start()
    for root, dirs, files in os.walk(self.SourceBugPath):
        for f in files:
            # skip files that belong to a different project
            if f[:f.find(u'-')].strip().lower() != self.ProjectName.lower(): continue
            bugitem = self.get_bugitem(os.path.join(root, f))
            if bugitem is not None:
                bugitems.append(bugitem)
            progress.check()
    progress.done()
    return bugitems
def getFileCounts(self, _src):
'''
get the count of files
:param _src:
:return:
'''
count = 0
files = os.listdir(_src)
for aname in files:
path = os.path.join(_src, aname)
#stat_info = os.lstat(path)
#if stat.S_ISDIR(stat_info.st_mode):
if os.path.isdir(path):
count += self.getFileCounts(path)
else:
count += 1
return count
def get_bugitem(self, _filepath):
    '''
    Parse one bug report XML file into a dict; returns None when
    parsing or date conversion fails (the error is printed).
    :param _filepath: path to the XML bug report file
    :return: bug dict (see class docstring) or None
    '''
    # read xml data
    fobj = codecs.open(_filepath, 'r', 'utf-8')
    xmltext = fobj.read()
    fobj.close()

    try:
        # extract information
        doc = BeautifulSoup(xmltext, 'html.parser')
        # XML tag names and the corresponding keys of the result dict
        keys = ['description', 'key', 'summary', 'resolution', 'created', 'resolved', 'version', 'fixVersion', 'type']
        keymaps = ['description', 'id', 'summary', 'resolution', 'opendate', 'fixdate', 'version', 'fixVersion', 'type']
        bug = {}
        for idx in range(0, len(keymaps)):
            bug[keymaps[idx]] = u''
        for idx in range(0, len(keys)):
            findkey = 'item > ' + keys[idx].lower()
            items = doc.select(findkey)
            if len(items)==0: continue
            # multiple matching tags are joined with ', '
            for item in items:
                bug[keymaps[idx]] += (u', ' if len(bug[keymaps[idx]]) > 0 else u'') + item.get_text()
        bug['fixedFiles'] = []
        # duplicate bug report
        bug['links'] = self.get_links(doc)

        # Convert some formats (date and text...)
        # re.sub strips non-ASCII characters; cgi.escape HTML-escapes the rest
        bug['summary'] = cgi.escape(re.sub(r'[^\x00-\x80]+', '', bug['summary']))
        bug['description'] = BeautifulSoup(bug['description'], "html.parser").get_text()
        bug['description'] = cgi.escape(re.sub(r'[^\x00-\x80]+', '', bug['description']))
        # also drop stray ESC control characters
        bug['description'] = cgi.escape(re.sub(chr(27), '', bug['description']))
        # normalise dates to timezone-aware UTC datetimes
        t = dateparser.parse(bug['opendate'])
        bug['opendate'] = t.astimezone(timezone('UTC'))
        if bug['fixdate'] != u'':
            t = dateparser.parse(bug['fixdate'])
            bug['fixdate'] = t.astimezone(timezone('UTC'))
        else:
            bug['fixdate'] = None
    except Exception as e:
        print(e)
        return None
    return bug
def get_links(self, _doc):
    '''
    extract links in bug report file.
    :param _doc: BeautifulSoup document of the bug report
    :return: list of {'type': ..., 'description': ..., 'id': ...} dicts
    '''
    links = []
    issuetypes = _doc.select('item > issuelinks > issuelinktype')
    for issuetype in issuetypes:
        name = issuetype.select('name')
        if len(name)<=0: continue
        typename = name[0].get_text()

        subtypes = issuetype.select('outwardlinks')
        for subtype in subtypes:
            keyvalues = subtype.select('issuekey')
            for keyvalue in keyvalues:
                key_id = keyvalue.get_text()
                # outward links keep the full "PROJECT-N" key unchanged
                key_id = key_id
                links.append({'type':typename, 'description':subtype['description'], 'id':key_id})

        subtypes = issuetype.select('inwardlinks')
        for subtype in subtypes:
            keyvalues = subtype.select('issuekey')
            for keyvalue in keyvalues:
                key_id = keyvalue.get_text()
                # NOTE(review): inward links strip everything up to the last
                # '-', keeping only the number, while outward links above keep
                # the full key.  make_dupgroups splits link ids on '-' to get
                # the project, so this asymmetry looks suspicious -- confirm
                # whether it is intentional.
                key_id = key_id[key_id.rfind(u'-')+1:]
                links.append({'type':typename, 'description':subtype['description'], 'id':key_id})
    return links
def link_fixedFiles(self, _bugitems):
'''
Mapping answer files with Bug reports and git Log
The all related commit's files will be fixed files
:param _bugitems:
:return:
'''
for bug in _bugitems:
files = []
if bug['id'] not in self.gitlogs:
bug['fixedFiles'] = files
continue
logs = self.gitlogs[bug['id']] # get logs corresponding bug ID
for log in logs:
for filename in log['fixedFiles']:
changeType = log['fixedFiles'][filename]
clsName = self.get_classname(filename)
# check duplicate file
existIDX = -1
for idx in range(len(files)):
if files[idx]['name'] == clsName:
existIDX = idx
break
if existIDX == -1:
files.append({'type':changeType, 'name':clsName})
else:
# override the value if old is M and new is D.
if changeType == 'D':
files[existIDX]['type'] = changeType
bug['fixedFiles'] = files
return _bugitems
def get_classname(self, _filename):
    '''
    get class name from filepath
    :param _filename: path with '/' or '\\' separators
    :return: dotted name, trimmed to start at the first 'org.' package
             component when one is present
    '''
    # Normalise both separator styles to dots, then drop leading path
    # segments before the first '.org.' marker.
    dotted = _filename.replace(u'/', u'.').replace(u'\\', u'.')
    marker = dotted.find(u'.org.')
    return dotted[marker + 1:] if marker > 0 else dotted
def make_dupgroups(self, _bugitems):
    '''
    identify duplicate bug reports,
    we return groups of dup-set like below:
        dupgroups = [{'src':ID, 'dest':ID, 'fixedboth':True/False}, ...]
    if a bug report have fixedFiles, the report will be the master report.
    if the two duplicate reports have fixedFiles both, the low id report will be a master report and
    the fixedboth field will be set True.
    :param _bugitems: list of bug report dicts (modified in place)
    :return: (_bugitems, dupgroups)
    '''
    dupgroups = []
    visited = set([])
    for x in range(len(_bugitems)):
        src = _bugitems[x]
        worklist = []

        # find duplicate from all links, and add dup-groups
        for link in src['links']:
            # filter unrelated items: only same-project 'duplicate' links
            if link['type'].lower() != 'duplicate': continue
            project = link['id'][:link['id'].find('-')].strip()
            if project != self.ProjectName:continue
            if src['id'] in visited and link['id'] in visited: continue

            # find dest data
            # NOTE(review): if no loaded report matches link['id'], 'dest'
            # keeps its value from a previous iteration (or is unbound on
            # the first one) -- confirm links always refer to loaded reports.
            for y in range(len(_bugitems)):
                if x == y:continue
                if _bugitems[y]['id'] != link['id']: continue
                dest = _bugitems[y]
                break

            # add worklist: master (first element) is the report with
            # fixed files; with both fixed, the lower numeric id wins
            if len(src['fixedFiles'])>0 and len(dest['fixedFiles'])>0:
                if (src['id'][src['id'].find('-')+1:] <= dest['id'][dest['id'].find('-')+1:]):
                    worklist.append((src, dest, True))
                else:
                    worklist.append((dest, src, True))
            elif len(src['fixedFiles'])>0 and len(dest['fixedFiles'])==0:
                worklist.append((src, dest, False))
            elif len(src['fixedFiles'])==0 and len(dest['fixedFiles'])>0:
                worklist.append((dest, src, False))

        # append dupgroups and auxiliary works
        for src, dest, both in worklist:
            visited.add(src['id'])
            visited.add(dest['id'])
            self.complement_reports(src, dest, both)
            dupgroups.append({'src':src['id'], 'dest':dest['id'], 'fixedboth':both})

    return _bugitems, dupgroups
def complement_reports(self, _src, _dest, _both):
    '''
    complement information from the duplicate bug report
    :param _src: master report (has fixed files when _both is False)
    :param _dest: duplicate report
    :param _both: True when both reports had fixed files
    :return: None (both report dicts are updated in place)
    '''
    # sync fixedfile: copy from whichever side has them
    if _both is False:
        if len(_src['fixedFiles']) == 0:
            _src['fixedFiles'] = _dest['fixedFiles']
        else:
            _dest['fixedFiles'] = _src['fixedFiles']

    # sync version
    if _dest['version'] != u'' and _src['version'] == u'':
        _src['version'] = _dest['version']
    elif _src['version'] != u'' and _dest['version'] == u'':
        _dest['version'] = _src['version']
    elif _src['version'] == u'' and _dest['version'] == u'':
        # if both reports have no version, get version information from
        # the git repository
        v1 = self.get_gitversion(_src['id'])
        v2 = self.get_gitversion(_dest['id'])
        if v1 != u'' and v2 != u'':
            # use the earlier of the two versions
            _src['version'] = v1 if VersionUtil.cmpVersion(v1, v2) < 0 else v2
        elif v1 != u'':
            # bug fix: the old code only used git versions when BOTH were
            # available (its inner `if v1==u''` branch was dead), silently
            # dropping a single-sided hit
            _src['version'] = v1
        elif v2 != u'':
            _src['version'] = v2
        _dest['version'] = _src['version']

    # sync fixdate
    if _dest['fixdate'] != u'' and _src['fixdate'] == u'':
        _src['fixdate'] = _dest['fixdate']
    if _src['fixdate'] != u'' and _dest['fixdate'] == u'':
        # bug fix: the old code assigned _src['fixdate'] to itself here,
        # so _dest never received the fix date
        _dest['fixdate'] = _src['fixdate']
def get_gitversion(self, _id):
'''
get bug version information from git repository
:param _id:
:return:
'''
if _id not in self.gitlogs: return u''
min_version = u''
commits = self.gitlogs[_id]
for commit in commits:
if commit['hash'] not in self.gitversions: continue
version = self.gitversions[commit['hash']]
if version is None: continue
if min_version == u'': min_version = version
if VersionUtil.cmpVersion(version, min_version) < 0:
min_version = version
return min_version
def filter(self, _bugitems):
'''
Remove bug reports that is not satisfied the criteria from the _bugitems
:param _bugitems: list of bug reports
:return:
'''
noFileCount = 0
noDateCount = 0
noVersionCount = 0
onlyVersionCount = 0
removedCount = 0
newlist = []
for bug in _bugitems:
flagVersion=True
flagFiles=True
flagDate=True
if bug['version'].strip() ==u'':
noVersionCount += 1
flagVersion = False
if len(bug['fixedFiles']) == 0:
noFileCount += 1
flagFiles=False
if bug['fixdate'] is None:
noDateCount += 1
flagDate=False
if flagDate is False or flagFiles is False or flagVersion is False:
removedCount += 1
if flagVersion is False and flagDate is True and flagFiles is True:
onlyVersionCount += 1
continue
# we already filtered this types of bug
# if not (
# (bug['type'].lower() == 'bug' and bug['resolution'].lower() == 'fixed')
# or bug['resolution'].lower() =='duplicate'
# ): continue
newlist.append(bug)
print(u'[%s] Filter : %d fixedFiles, %d version, %d fixdate. :: %d/%d only no versions'% ( self.__name__,
noFileCount,
noVersionCount,
noDateCount,
onlyVersionCount,
removedCount))
print(u'[%s] Filter : %d remained list.'% (self.__name__, len(newlist)))
return newlist
def make_minimumVersion(self, _bugs):
for bug in _bugs:
min_version = u'10000.0' # assign big version
for version in bug['version'].split(u', '):
if VersionUtil.cmpVersion(version, min_version) < 0:
min_version = version
bug['version'] = min_version
pass
def cmp(self, x, y):
if x['fixdate'] < y['fixdate'] :
return -1
elif x['fixdate'] > y['fixdate']:
return 1
return 0
| {
"content_hash": "023ce755159de1db68cd586910624ec9",
"timestamp": "",
"source": "github",
"line_count": 428,
"max_line_length": 160,
"avg_line_length": 28.983644859813083,
"alnum_prop": 0.6237001209189843,
"repo_name": "irblsensitivity/irblsensitivity",
"id": "43c0b1f7b6baf9c447ae9724db90fda0592f60b3",
"size": "12428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/repository/BugFilter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1768144"
},
{
"name": "Python",
"bytes": "374811"
},
{
"name": "Shell",
"bytes": "2451"
}
],
"symlink_target": ""
} |
import tempfile
from django.core.files.uploadedfile import InMemoryUploadedFile # noqa
from django import http
from django.utils import http as utils_http
from mox import IsA # noqa
from openstack_horizon import api
from openstack_horizon.dashboards.project.containers import forms
from openstack_horizon.dashboards.project.containers import tables
from openstack_horizon.dashboards.project.containers import views
from openstack_horizon.test import helpers as test
from horizon_lib import exceptions
from horizon_lib.utils.urlresolvers import reverse # noqa
# Container fixtures covering the tricky cases: a space, a '%', and a
# non-ASCII character.
CONTAINER_NAME_1 = u"container one%\u6346"
CONTAINER_NAME_2 = u"container_two\u6346"
# URL-quoted forms, as they should appear exactly once in rendered pages.
CONTAINER_NAME_1_QUOTED = utils_http.urlquote(CONTAINER_NAME_1)
CONTAINER_NAME_2_QUOTED = utils_http.urlquote(CONTAINER_NAME_2)
# Double-quoted names: if one of these shows up in a response, the view
# urlquoted an already-quoted name.
INVALID_CONTAINER_NAME_1 = utils_http.urlquote(CONTAINER_NAME_1_QUOTED)
INVALID_CONTAINER_NAME_2 = utils_http.urlquote(CONTAINER_NAME_2_QUOTED)
CONTAINER_INDEX_URL = reverse('horizon:project:containers:index')
INVALID_PATHS = []  # lazily-filled cache; see invalid_paths()
def invalid_paths():
    """Return (and lazily cache) paths that must never appear in responses.

    Covers both the double-quoted container URLs and the raw (unquoted)
    names concatenated onto the index URL.
    """
    if not INVALID_PATHS:
        for quoted in (CONTAINER_NAME_1_QUOTED, CONTAINER_NAME_2_QUOTED):
            INVALID_PATHS.append(
                reverse('horizon:project:containers:index',
                        args=(tables.wrap_delimiter(quoted), )))
        for raw in (CONTAINER_NAME_1, CONTAINER_NAME_2):
            INVALID_PATHS.append(CONTAINER_INDEX_URL + raw)
    return INVALID_PATHS
class SwiftTests(test.TestCase):
    """Tests for the Swift containers panel (containers and objects).

    Every test stubs the ``api.swift`` layer with mox expectations before
    exercising a view, form, or table action.  Many tests also verify that
    container names never appear double-urlquoted in the rendered output
    (see ``invalid_paths`` / ``_test_invalid_paths``).
    """
    def _test_invalid_paths(self, response):
        """Assert no double-quoted container path leaked into *response*."""
        for x in invalid_paths():
            self.assertNotContains(response, x)
    @test.create_stubs({api.swift: ('swift_get_containers',)})
    def test_index_no_container_selected(self):
        """Index without a selected container lists all containers."""
        containers = self.containers.list()
        api.swift.swift_get_containers(IsA(http.HttpRequest), marker=None) \
            .AndReturn((containers, False))
        self.mox.ReplayAll()
        res = self.client.get(CONTAINER_INDEX_URL)
        self.assertTemplateUsed(res, 'project/containers/index.html')
        self.assertIn('table', res.context)
        resp_containers = res.context['table'].data
        self.assertEqual(len(resp_containers), len(containers))
    @test.create_stubs({api.swift: ('swift_delete_container', )})
    def test_delete_container(self):
        """Deleting each (empty) container redirects back to the index."""
        for container in self.containers.list():
            self.mox.ResetAll()  # mandatory in a for loop
            api.swift.swift_delete_container(IsA(http.HttpRequest),
                                             container.name)
            self.mox.ReplayAll()
            action_string = u"containers__delete__%s" % container.name
            form_data = {"action": action_string}
            req = self.factory.post(CONTAINER_INDEX_URL, form_data)
            table = tables.ContainersTable(req, self.containers.list())
            handled = table.maybe_handle()
            self.assertEqual(handled['location'], CONTAINER_INDEX_URL)
    @test.create_stubs({api.swift: ('swift_get_objects', )})
    def test_delete_container_nonempty(self):
        """Deleting a non-empty container is refused with an error message."""
        container = self.containers.first()
        objects = self.objects.list()
        api.swift.swift_get_objects(IsA(http.HttpRequest),
                                    container.name).AndReturn([objects, False])
        self.mox.ReplayAll()
        action_string = u"containers__delete__%s" % container.name
        form_data = {"action": action_string}
        req = self.factory.post(CONTAINER_INDEX_URL, form_data)
        req.META['HTTP_REFERER'] = '%s/%s' % (CONTAINER_INDEX_URL,
                                              container.name)
        table = tables.ContainersTable(req, self.containers.list())
        # I'd prefer to call a self.assertRedirectnoFollow,
        # but constructing the response object is a different paradigm
        # from constructing the table and calling the maybe_handle method.
        # I'd appreciate any suggestions on how this should properly be done.
        self.assertRaises(exceptions.Http302, table.maybe_handle)
        self.assertEqual(unicode(list(req._messages)[0].message),
                         u"The container cannot be deleted "
                         u"since it's not empty.")
    def test_create_container_get(self):
        """GET on the create-container view renders its template."""
        res = self.client.get(reverse('horizon:project:containers:create'))
        self.assertTemplateUsed(res, 'project/containers/create.html')
    @test.create_stubs({api.swift: ('swift_create_container',)})
    def test_create_container_post(self):
        """POSTing the create form makes a private container and redirects."""
        for container in self.containers.list():
            self.mox.ResetAll()  # mandatory in a for loop
            api.swift.swift_create_container(IsA(http.HttpRequest),
                                             container.name,
                                             metadata=({'is_public': False}))
            self.mox.ReplayAll()
            formData = {'name': container.name,
                        'access': "private",
                        'method': forms.CreateContainer.__name__}
            res = self.client.post(
                reverse('horizon:project:containers:create'), formData)
            args = (tables.wrap_delimiter(container.name),)
            url = reverse('horizon:project:containers:index', args=args)
            self.assertRedirectsNoFollow(res, url)
    @test.create_stubs({api.swift: ('swift_update_container', )})
    def test_update_container_to_public(self):
        """The make_public table action flips is_public to True."""
        container = self.containers.get(name=u"container one%\u6346")
        api.swift.swift_update_container(IsA(http.HttpRequest),
                                         container.name,
                                         metadata=({'is_public': True}))
        self.mox.ReplayAll()
        action_string = u"containers__make_public__%s" % container.name
        form_data = {"action": action_string}
        req = self.factory.post(CONTAINER_INDEX_URL, form_data)
        table = tables.ContainersTable(req, self.containers.list())
        handled = table.maybe_handle()
        self.assertEqual(handled['location'], CONTAINER_INDEX_URL)
    @test.create_stubs({api.swift: ('swift_update_container', )})
    def test_update_container_to_private(self):
        """The make_private table action flips is_public to False."""
        container = self.containers.get(name=u"container_two\u6346")
        api.swift.swift_update_container(IsA(http.HttpRequest),
                                         container.name,
                                         metadata=({'is_public': False}))
        self.mox.ReplayAll()
        action_string = u"containers__make_private__%s" % container.name
        form_data = {"action": action_string}
        req = self.factory.post(CONTAINER_INDEX_URL, form_data)
        table = tables.ContainersTable(req, self.containers.list())
        handled = table.maybe_handle()
        self.assertEqual(handled['location'], CONTAINER_INDEX_URL)
    @test.create_stubs({api.swift: ('swift_get_containers',
                                    'swift_get_objects')})
    def test_index_container_selected(self):
        """Index with a container selected lists its objects, properly quoted."""
        containers = (self.containers.list(), False)
        ret = (self.objects.list(), False)
        api.swift.swift_get_containers(IsA(http.HttpRequest),
                                       marker=None).AndReturn(containers)
        api.swift.swift_get_objects(IsA(http.HttpRequest),
                                    self.containers.first().name,
                                    marker=None,
                                    prefix=None).AndReturn(ret)
        self.mox.ReplayAll()
        container_name = self.containers.first().name
        res = self.client.get(
            reverse('horizon:project:containers:index',
                    args=[tables.wrap_delimiter(container_name)]))
        self.assertTemplateUsed(res, 'project/containers/index.html')
        # UTF8 encoding here to ensure there aren't problems with Nose output.
        expected = [obj.name.encode('utf8') for obj in self.objects.list()]
        self.assertQuerysetEqual(res.context['objects_table'].data,
                                 expected,
                                 lambda obj: obj.name.encode('utf8'))
        # Check if the two forms' URL are properly 'urlquote()d'.
        form_action = ' action="%s%s/" ' % (CONTAINER_INDEX_URL,
                                            CONTAINER_NAME_1_QUOTED)
        self.assertContains(res, form_action, count=2)
        self._test_invalid_paths(res)
    @test.create_stubs({api.swift: ('swift_upload_object',)})
    def test_upload(self):
        """Uploading a file posts multipart data and redirects to the index."""
        container = self.containers.first()
        obj = self.objects.first()
        OBJECT_DATA = 'objectData'
        temp_file = tempfile.TemporaryFile()
        temp_file.write(OBJECT_DATA)
        temp_file.flush()
        temp_file.seek(0)
        api.swift.swift_upload_object(IsA(http.HttpRequest),
                                      container.name,
                                      obj.name,
                                      IsA(InMemoryUploadedFile)).AndReturn(obj)
        self.mox.ReplayAll()
        upload_url = reverse('horizon:project:containers:object_upload',
                             args=[container.name])
        res = self.client.get(upload_url)
        self.assertTemplateUsed(res, 'project/containers/upload.html')
        self.assertContains(res, 'enctype="multipart/form-data"')
        self._test_invalid_paths(res)
        formData = {'method': forms.UploadObject.__name__,
                    'container_name': container.name,
                    'name': obj.name,
                    'object_file': temp_file}
        res = self.client.post(upload_url, formData)
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_upload_object',)})
    def test_upload_without_file(self):
        """Uploading with no file still succeeds (creates an empty object)."""
        container = self.containers.first()
        obj = self.objects.first()
        api.swift.swift_upload_object(IsA(http.HttpRequest),
                                      container.name,
                                      obj.name,
                                      None).AndReturn(obj)
        self.mox.ReplayAll()
        upload_url = reverse('horizon:project:containers:object_upload',
                             args=[container.name])
        res = self.client.get(upload_url)
        self.assertTemplateUsed(res, 'project/containers/upload.html')
        res = self.client.get(upload_url)
        self.assertContains(res, 'enctype="multipart/form-data"')
        self.assertNotContains(res, INVALID_CONTAINER_NAME_1)
        self.assertNotContains(res, INVALID_CONTAINER_NAME_2)
        formData = {'method': forms.UploadObject.__name__,
                    'container_name': container.name,
                    'name': obj.name,
                    'object_file': None}
        res = self.client.post(upload_url, formData)
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_create_pseudo_folder',)})
    def test_create_pseudo_folder(self):
        """Creating a pseudo folder appends '/' and redirects to the index."""
        container = self.containers.first()
        obj = self.objects.first()
        api.swift.swift_create_pseudo_folder(IsA(http.HttpRequest),
                                             container.name,
                                             obj.name + "/").AndReturn(obj)
        self.mox.ReplayAll()
        create_pseudo_folder_url = reverse('horizon:project:containers:'
                                           'create_pseudo_folder',
                                           args=[container.name])
        res = self.client.get(create_pseudo_folder_url)
        self.assertTemplateUsed(res,
                                'project/containers/create_pseudo_folder.html')
        self._test_invalid_paths(res)
        formData = {'method': forms.CreatePseudoFolder.__name__,
                    'container_name': container.name,
                    'name': obj.name}
        res = self.client.post(create_pseudo_folder_url, formData)
        index_url = reverse('horizon:project:containers:index',
                            args=[tables.wrap_delimiter(container.name)])
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_delete_object',)})
    def test_delete(self):
        """The delete_object table action removes an object."""
        container = self.containers.first()
        obj = self.objects.first()
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        api.swift.swift_delete_object(IsA(http.HttpRequest),
                                      container.name,
                                      obj.name)
        self.mox.ReplayAll()
        action_string = "objects__delete_object__%s" % obj.name
        form_data = {"action": action_string}
        req = self.factory.post(index_url, form_data)
        kwargs = {"container_name": container.name}
        table = tables.ObjectsTable(req, self.objects.list(), **kwargs)
        handled = table.maybe_handle()
        self.assertEqual(handled['location'], index_url)
    @test.create_stubs({api.swift: ('swift_delete_object',)})
    def test_delete_pseudo_folder(self):
        """Deleting a pseudo folder targets the name with a trailing '/'."""
        container = self.containers.first()
        folder = self.folder.first()
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        api.swift.swift_delete_object(IsA(http.HttpRequest),
                                      container.name,
                                      folder.name + '/')
        self.mox.ReplayAll()
        action_string = "objects__delete_object__%s/%s" % (container.name,
                                                           folder.name)
        form_data = {"action": action_string}
        req = self.factory.post(index_url, form_data)
        kwargs = {"container_name": container.name}
        table = tables.ObjectsTable(req, self.folder.list(), **kwargs)
        handled = table.maybe_handle()
        self.assertEqual(handled['location'], index_url)
    @test.create_stubs({api.swift: ('swift_get_object',)})
    def test_download(self):
        """Downloads return the object bytes with a sane Content-Disposition."""
        for container in self.containers.list():
            for obj in self.objects.list():
                self.mox.ResetAll()  # mandatory in a for loop
                api.swift.swift_get_object(IsA(http.HttpRequest),
                                           container.name,
                                           obj.name).AndReturn(obj)
                self.mox.ReplayAll()
                download_url = reverse(
                    'horizon:project:containers:object_download',
                    args=[container.name, obj.name])
                res = self.client.get(download_url)
                self.assertEqual(res.content, obj.data)
                self.assertTrue(res.has_header('Content-Disposition'))
                self.assertNotContains(res, INVALID_CONTAINER_NAME_1)
                self.assertNotContains(res, INVALID_CONTAINER_NAME_2)
                # Check that the returned Content-Disposition filename is well
                # surrounded by double quotes and with commas removed
                expected_name = '"%s"' % obj.name.replace(
                    ',', '').encode('utf-8')
                self.assertEqual(
                    res.get('Content-Disposition'),
                    'attachment; filename=%s' % expected_name
                )
    @test.create_stubs({api.swift: ('swift_get_containers',)})
    def test_copy_index(self):
        """GET on the copy view renders its form with proper quoting."""
        ret = (self.containers.list(), False)
        api.swift.swift_get_containers(IsA(http.HttpRequest)).AndReturn(ret)
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:project:containers:object_copy',
                                      args=[self.containers.first().name,
                                            self.objects.first().name]))
        self.assertTemplateUsed(res, 'project/containers/copy.html')
        self.assertNotContains(res, INVALID_CONTAINER_NAME_1)
        self.assertNotContains(res, INVALID_CONTAINER_NAME_2)
    @test.create_stubs({api.swift: ('swift_get_containers',
                                    'swift_copy_object')})
    def test_copy(self):
        """POSTing the copy form copies between containers and redirects."""
        container_1 = self.containers.get(name=CONTAINER_NAME_1)
        container_2 = self.containers.get(name=CONTAINER_NAME_2)
        obj = self.objects.first()
        ret = (self.containers.list(), False)
        api.swift.swift_get_containers(IsA(http.HttpRequest)).AndReturn(ret)
        api.swift.swift_copy_object(IsA(http.HttpRequest),
                                    container_1.name,
                                    obj.name,
                                    container_2.name,
                                    obj.name)
        self.mox.ReplayAll()
        formData = {'method': forms.CopyObject.__name__,
                    'new_container_name': container_2.name,
                    'new_object_name': obj.name,
                    'orig_container_name': container_1.name,
                    'orig_object_name': obj.name}
        copy_url = reverse('horizon:project:containers:object_copy',
                           args=[container_1.name, obj.name])
        res = self.client.post(copy_url, formData)
        args = (tables.wrap_delimiter(container_2.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_get_containers',
                                    'swift_copy_object')})
    def test_copy_get(self):
        """The copy form pre-fills the destination with '<name>.copy.<ext>'."""
        original_name = u"test.txt"
        copy_name = u"test.copy.txt"
        container = self.containers.first()
        obj = self.objects.get(name=original_name)
        ret = (self.containers.list(), False)
        api.swift.swift_get_containers(IsA(http.HttpRequest)).AndReturn(ret)
        self.mox.ReplayAll()
        copy_url = reverse('horizon:project:containers:object_copy',
                           args=[container.name, obj.name])
        res = self.client.get(copy_url)
        # The copy's name must appear in initial data
        pattern = ('<input id="id_new_object_name" value="%s" '
                   'name="new_object_name" type="text" '
                   'class="form-control" maxlength="255" />' % copy_name)
        self.assertContains(res, pattern, html=True)
    def test_get_copy_name(self):
        """CopyView.get_copy_name inserts '.copy' before the extension."""
        self.assertEqual(views.CopyView.get_copy_name('test.txt'),
                         'test.copy.txt')
        self.assertEqual(views.CopyView.get_copy_name('test'),
                         'test.copy')
    @test.create_stubs({api.swift: ('swift_upload_object',)})
    def test_update_with_file(self):
        """Updating an object with a new file re-uploads and redirects."""
        container = self.containers.first()
        obj = self.objects.first()
        OBJECT_DATA = 'objectData'
        temp_file = tempfile.TemporaryFile()
        temp_file.write(OBJECT_DATA)
        temp_file.flush()
        temp_file.seek(0)
        api.swift.swift_upload_object(IsA(http.HttpRequest),
                                      container.name,
                                      obj.name,
                                      IsA(InMemoryUploadedFile)).AndReturn(obj)
        self.mox.ReplayAll()
        update_url = reverse('horizon:project:containers:object_update',
                             args=[container.name, obj.name])
        res = self.client.get(update_url)
        self.assertTemplateUsed(res, 'project/containers/update.html')
        self.assertContains(res, 'enctype="multipart/form-data"')
        self._test_invalid_paths(res)
        formData = {'method': forms.UpdateObject.__name__,
                    'container_name': container.name,
                    'name': obj.name,
                    'object_file': temp_file}
        res = self.client.post(update_url, formData)
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_upload_object',)})
    def test_update_without_file(self):
        """Updating without a file performs no upload and just redirects."""
        container = self.containers.first()
        obj = self.objects.first()
        self.mox.ReplayAll()
        update_url = reverse('horizon:project:containers:object_update',
                             args=[container.name, obj.name])
        res = self.client.get(update_url)
        self.assertTemplateUsed(res, 'project/containers/update.html')
        self.assertContains(res, 'enctype="multipart/form-data"')
        self._test_invalid_paths(res)
        formData = {'method': forms.UpdateObject.__name__,
                    'container_name': container.name,
                    'name': obj.name}
        res = self.client.post(update_url, formData)
        args = (tables.wrap_delimiter(container.name),)
        index_url = reverse('horizon:project:containers:index', args=args)
        self.assertRedirectsNoFollow(res, index_url)
    @test.create_stubs({api.swift: ('swift_get_container', )})
    def test_view_container(self):
        """The container detail view shows the name exactly once."""
        for container in self.containers.list():
            self.mox.ResetAll()  # mandatory in a for loop
            api.swift.swift_get_container(IsA(http.HttpRequest),
                                          container.name,
                                          with_data=False) \
                .AndReturn(container)
            self.mox.ReplayAll()
            view_url = reverse('horizon:project:containers:container_detail',
                               args=[container.name])
            res = self.client.get(view_url)
            self.assertTemplateUsed(res,
                                    'project/containers/container_detail.html')
            self.assertContains(res, container.name, 1, 200)
            self.assertNotContains(res, INVALID_CONTAINER_NAME_1)
            self.assertNotContains(res, INVALID_CONTAINER_NAME_2)
    @test.create_stubs({api.swift: ('swift_get_object', )})
    def test_view_object(self):
        """The object detail view shows the object name exactly once."""
        for container in self.containers.list():
            for obj in self.objects.list():
                self.mox.ResetAll()  # mandatory in a for loop
                api.swift.swift_get_object(IsA(http.HttpRequest),
                                           container.name,
                                           obj.name,
                                           with_data=False) \
                    .AndReturn(obj)
                self.mox.ReplayAll()
                view_url = reverse('horizon:project:containers:object_detail',
                                   args=[container.name, obj.name])
                res = self.client.get(view_url)
                self.assertTemplateUsed(
                    res, 'project/containers/object_detail.html')
                self.assertContains(res, obj.name, 1, 200)
                self._test_invalid_paths(res)
    def test_wrap_delimiter(self):
        """wrap_delimiter appends '/' once and never urlquotes."""
        expected = {
            'containerA': 'containerA/',
            'containerB%': 'containerB%/',  # no urlquote() should occur
            'containerC/': 'containerC/',  # already wrapped name
            'containerD/objectA': 'containerD/objectA/'
        }
        for name, expected_name in expected.items():
            self.assertEqual(tables.wrap_delimiter(name), expected_name)
| {
"content_hash": "b2bb04e4767e07503a11350ab6beddda",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 79,
"avg_line_length": 45.47490347490348,
"alnum_prop": 0.5734844625573102,
"repo_name": "mrunge/openstack_horizon",
"id": "c9dacbc7d65ce86524a9b0bfa5ea1f90d471291a",
"size": "24320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_horizon/dashboards/project/containers/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "63809"
},
{
"name": "JavaScript",
"bytes": "40"
},
{
"name": "Python",
"bytes": "3460539"
},
{
"name": "Shell",
"bytes": "16000"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from . import views
# Route table for the users app: registration, session handling, and the
# members listing, each backed by a class-based view.
urlpatterns = [
    url(
        regex=r'^register/$',
        view=views.RegisterView.as_view(),
        name='register'
    ),
    url(
        regex=r'^login/$',
        view=views.LoginView.as_view(),
        name='login'
    ),
    url(
        regex=r'^logout/$',
        view=views.LogoutView.as_view(),
        name='logout'
    ),
    url(
        regex=r'^members/$',
        view=views.MembersView.as_view(),
        name='members'
    ),
]
| {
"content_hash": "5968b4ae158dcfb51ebb20d5aea30708",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 42,
"avg_line_length": 18.178571428571427,
"alnum_prop": 0.5088408644400786,
"repo_name": "Nikola-K/django-template",
"id": "00f91847ec0dab1a2bf5a1b1c6c17d890e894b5f",
"size": "509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "users/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "864"
},
{
"name": "HTML",
"bytes": "8576"
},
{
"name": "Python",
"bytes": "23317"
}
],
"symlink_target": ""
} |
import os
import json
import random
import subprocess
# import flask web microframework
from flask import Flask
from flask import request
from flask import abort
# import from the 21 Developer Library
from two1.lib.wallet import Wallet
from two1.lib.bitserv.flask import Payment
app = Flask(__name__)
# 21 developer-library wallet; Payment wires micropayment checks into Flask
# routes via the @payment.required decorator used below.
wallet = Wallet()
payment = Payment(app, wallet)
def get_fortune_text():
    """Run the system fortune binary and return its stdout.

    Returns None when the process wrote anything to stderr.  The process
    is killed and reaped if it does not finish within 10 seconds.
    """
    proc = subprocess.Popen(["/usr/games/fortune"],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        outs, errs = proc.communicate(timeout=10)
    except subprocess.TimeoutExpired:
        # BUGFIX: the original caught a bare `TimeoutExpired`, a name that
        # was never imported, so a timeout raised NameError instead.
        proc.kill()
        outs, errs = proc.communicate()
    if errs:
        return None
    return outs
# Paid endpoint: returns one fortune cookie text.
@app.route('/fortune')
@payment.required(10)
def get_fortune():
    """Return a fortune, or abort with HTTP 500 when generation fails."""
    text = get_fortune_text()
    if text is None:
        abort(500)
    return text
@app.route('/')
def get_info():
    """Describe this service and its per-endpoint pricing as JSON."""
    manifest = {
        "name": "fortune",
        "version": 100,
        "pricing": {
            "/fortune": {
                "minimum": 10
            },
        }
    }
    payload = json.dumps(manifest, indent=2)
    headers = {
        'Content-length': len(payload),
        'Content-type': 'application/json',
    }
    return (payload, 200, headers)
if __name__ == '__main__':
    # Bind on all interfaces at the service's published port.
    app.run(host='0.0.0.0', port=12012)
| {
"content_hash": "1d3467c402982b6db7fb04684e023114",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 99,
"avg_line_length": 20.967741935483872,
"alnum_prop": 0.6184615384615385,
"repo_name": "jgarzik/playground21",
"id": "39d67dac59de39903d2c5ebcdc5d9a4bd80e3f8c",
"size": "1300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fortune/fortune-server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87820"
},
{
"name": "Shell",
"bytes": "414"
}
],
"symlink_target": ""
} |
from logging import getLogger
from rester.http import HttpClient
from rester.struct import DictWrapper
from testfixtures import log_capture
import collections
import re
import traceback
Failure = collections.namedtuple("Failure", "errors output")
class TestCaseExec(object):
    """Executes the steps of one REST test case and collects the results.

    NOTE(review): this module is Python 2 code (`unicode`, `basestring`,
    `exec`-based evaluation) — confirm the supported interpreter before
    modernizing.
    """
    logger = getLogger(__name__)
    def __init__(self, case, options):
        # `case` carries the steps, variables, filename and request options.
        self.case = case
        self.options = options
        self.passed = []
        self.failed = []
        self.skipped = []
    def __call__(self):
        """Run every step, capturing logs per step; return a result dict."""
        # What was this?
        #skip_all_subsequent_tests = False
        for step in self.case.steps:
            self.logger.debug('Test Step Name : %s', step.name)
            if step.get('skip', False):
                self.logger.info('\n=======> Skipping test case : ' + step.name)
                self.skipped.append(step)
                continue
            # log_capture() collects log records emitted while the step runs
            @log_capture()
            def _run(l):
                failures = self._execute_test_step(step)
                return failures, l
            f, logs = _run()
            if f:
                self.failed.append((step, Failure(f.errors, "".join(self._format_logs(logs)))))
            else:
                self.passed.append(step)
        return self._result()
    def _result(self):
        """Summarize passed/failed/skipped step names into a plain dict."""
        d = dict(name=self.case.filename,
                 passed=sorted([step.name for step in self.passed]),
                 failed=None,
                 skipped=sorted([step.name for step in self.skipped]))
        d['failed'] = f = []
        for step, failure in self.failed:
            f.append(dict(name=step.name, errors=failure.errors, logs=failure.output))
        return d
    def _format_logs(self, lc):
        """Yield one 'LEVEL: logger - message' line per captured record."""
        for r in lc.actual():
            yield "%s: %s - %s\n" % (r[1], r[0], r[2])
    def _build_param_dict(self, test_step):
        """Expand variables in the step's request params; return a dict."""
        params = {}
        if hasattr(test_step, 'params') and test_step.params is not None:
            # NOTE(review): the double .items().items() suggests test_step.params
            # is a wrapper whose .items() returns a dict — confirm against the
            # config/struct module.
            for key, value in test_step.params.items().items():
                params[key] = self.case.variables.expand(value)
        return params
    def _execute_test_step(self, test_step):
        """Issue the step's HTTP request and run its assertions.

        Returns a Failure when anything failed, else None after running the
        step's postAsserts assignments.
        """
        http_client = HttpClient(**self.case.request_opts)
        failures = Failure([], None)  # errors list is mutated in place below
        try:
            method = getattr(test_step, 'method', 'get')
            is_raw = getattr(test_step, 'raw', False)
            self.logger.info('\n=======> Executing TestStep : %s, method : %s', test_step.name, method)
            # process and set up headers
            headers = {}
            if hasattr(test_step, 'headers') and test_step.headers is not None:
                self.logger.debug('Found Headers')
                for key, value in test_step.headers.items().items():
                    headers[key] = self.case.variables.expand(value)
            # process and set up params
            params = self._build_param_dict(test_step)
            url = self.case.variables.expand(test_step.apiUrl)
            self.logger.debug('Evaluated URL : %s', url)
            response_wrapper = http_client.request(url, method, headers, params, is_raw)
            # expected_status = getattr(getattr(test_step, 'asserts'), 'status', 200)
            # if response_wrapper.status != expected_status:
            #    failures.errors.append("status(%s) != expected status(%s)" % (response_wrapper.status, expected_status))
            if hasattr(test_step, "asserts"):
                asserts = test_step.asserts
                if hasattr(asserts, "headers"):
                    self._assert_element_list('Header', failures, test_step, response_wrapper.headers, test_step.asserts.headers.items().items())
                if hasattr(asserts, "payload"):
                    self.logger.debug('Evaluating Response Payload')
                    self._assert_element_list('Payload', failures, test_step, response_wrapper.body, test_step.asserts.payload.items().items())
            else:
                self.logger.warn('\n=======> No "asserts" element found in TestStep %s', test_step.name)
        except Exception as inst:
            failures.errors.append(traceback.format_exc())
            self.logger.error('ERROR !!! TestStep %s Failed to execute !!! %s \
                \n !!! Will ignore all assignment statements as part of TestStep', test_step.name, inst)
            self.logger.exception('Exception')
        if failures.errors:
            return failures
        # execute all the assignment statements
        if hasattr(test_step, 'postAsserts') and test_step.postAsserts is not None:
            for key, value in test_step.postAsserts.items().items():
                self._process_post_asserts(response_wrapper.body, key, value)
        return None
    def _assert_element_list(self, section, failures, test_step, response, assert_list):
        """Evaluate each (key, expression) assertion against `response`.

        Failure messages are appended to `failures.errors`; nothing is
        returned.  Assertions may name a JSON type, start with a -gt/-lt/...
        operator, or be an arbitrary 'exec' expression.
        """
        self.logger.debug("Inside assert_element_list: %s", response)
        test_step.assertResults = []
        for key, value in assert_list:
            self.logger.debug('key : %s, value : %s', key, value)
            json_eval_expr = getattr(response, key, '')
            if json_eval_expr is None:
                # NOTE(review): the trailing ', key' makes this a TUPLE, not a
                # formatted string — the %s placeholder is never substituted.
                assert_message = 'assert statement :%s not found in target response', key
                self.logger.error('%s', assert_message)
                failures.errors.append(assert_message)
                continue
            self.logger.debug('---> json_eval_expr : %s and type : %s', json_eval_expr, type(json_eval_expr))
            # check for basic JSON types
            json_types = {'Integer':'int', 'String':'str', 'Array':'list', 'Float':'float', 'Boolean':'bool', 'Object':'dict'}
            if value in json_types:
                self.logger.info('Found json type : %s ', value)
                # normalize wrapper/unicode values before the type comparison
                if type(json_eval_expr) == DictWrapper:
                    value = 'Object'
                    json_eval_expr = {}
                if type(json_eval_expr) == unicode:
                    json_eval_expr = ''
                value = eval(json_types[value])
                json_eval_expr = type(json_eval_expr)
            # Check for logical operators
            logic_ops = {'-gt':'>', '-ge':'>=', '-lt':'<', '-le':'<=', '-ne':'!=', '-eq':'==', 'exec': 'exec'}
            lg_op_expr = check_for_logical_op(value)
            if lg_op_expr:
                self.logger.debug("---> Found lg_op_expr : " + lg_op_expr)
                # check_for_logical_op only returns tokens present in
                # logic_ops, so final_lg_op is always bound here
                if lg_op_expr in logic_ops:
                    final_lg_op = logic_ops[lg_op_expr]
                    value = value[len(lg_op_expr):]
                    self.logger.debug(" -----> Rest of the expression : " + value)
            else:
                # - If no operators found then assume '=='
                final_lg_op = logic_ops['-eq']
            self.logger.debug("---> Final final_lg_op : " + final_lg_op)
            # do variable expansion...
            value = self.case.variables.expand(value)
            self.logger.debug(' ---> final evaluated expression : %s and type %s ', value, type(value))
            if isinstance(json_eval_expr, basestring):
                value = str(value)
            # construct the logical assert expression
            if final_lg_op != 'exec':
                assert_expr = 'json_eval_expr {0} value'.format(final_lg_op)
                assert_literal_expr = "{}:{{{}}} {} {}".format(key, json_eval_expr, final_lg_op, value)
                self.logger.debug(' ---> Assert_expr : ' + assert_expr)
                assert_result = eval(assert_expr)
            else:
                # NOTE(review): evaluating user-supplied expressions with
                # eval/exec — acceptable only for trusted test definitions.
                assert_expr = 'exec_result = {0}'.format(value)
                assert_literal_expr = '"f({0}) <- {1}"'.format(json_eval_expr, value)
                exec(assert_expr)
                assert_result = _evaluate(value, json_eval_expr)
            self.logger.debug('assert evaluation result : %s', assert_result)
            if not assert_result:
                assert_message = '{} Assert Statement : {} ---> Fail!'.format(section, assert_literal_expr)
                self.logger.error('%s', assert_message)
                failures.errors.append(assert_message)
            else:
                assert_message = '{} Assert Statement : {} ----> Pass!'.format(section, assert_literal_expr)
                self.logger.info('%s', assert_message)
    def _process_post_asserts(self, response, key, value):
        """Store the response attribute named `value` under variable `key`."""
        self.logger.debug("evaled value: {}".format(getattr(response, value, '')))
        self.case.variables.add_variable(key, getattr(response, value, ''))
def _evaluate(clause, value):
assert_expr = 'result = {0}'.format(clause)
#self.logger.debug(' ---> Assert_exec : ' + assert_expr)
exec(assert_expr)
return result #@UndefinedVariable
def check_for_logical_op(expression):
    """Return the operator token at the start of *expression*
    (one of -lt/-le/-gt/-ge/-eq/-ne/exec), or None when absent
    or when *expression* is not a string."""
    if not expression or not isinstance(expression, basestring):
        return None
    match = re.compile("-lt|-le|-gt|-ge|-eq|-ne|exec").match(expression)
    return match.group() if match else None
| {
"content_hash": "4705abad4608826f681d831d11ff6474",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 145,
"avg_line_length": 42.23364485981308,
"alnum_prop": 0.5525558751936269,
"repo_name": "baiyunping333/Rester",
"id": "5f3c7d77e51d67f49ab08d15d537a69354345ef6",
"size": "9038",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rester/exc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20565"
}
],
"symlink_target": ""
} |
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_full_argument_spec,
)
# Module metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {'status': 'preview',
                    'supported_by': 'community',
                    'version': '1.0'}
# The DOCUMENTATION / EXAMPLES / RETURN strings below are parsed by
# ansible-doc at runtime; their contents must stay exactly as written.
DOCUMENTATION = '''
---
module: ovirt_vms_facts
short_description: Retrieve facts about one or more oVirt virtual machines
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt virtual machines."
notes:
- "This module creates a new top-level C(ovirt_vms) fact, which
contains a list of virtual machines."
options:
pattern:
description:
- "Search term which is accepted by oVirt search backend."
- "For example to search VM X from cluster Y use following pattern:
name=X and cluster=Y"
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all VMs which names start with C(centos) and
# belong to cluster C(west):
- ovirt_vms_facts:
pattern: name=centos* and cluster=west
- debug:
var: ovirt_vms
'''
RETURN = '''
ovirt_vms:
description: "List of dictionaries describing the VMs. VM attribues are mapped to dictionary keys,
all VMs attributes can be found at following url: https://ovirt.example.com/ovirt-engine/api/model#types/vm."
returned: On success.
type: list
'''
def main():
    """Entry point: gather facts about oVirt VMs matching ``pattern``.

    Publishes the matching VMs as the top-level ``ovirt_vms`` Ansible fact
    (a list of dicts, one per VM). Never reports ``changed``.
    """
    argument_spec = ovirt_full_argument_spec(
        pattern=dict(default='', required=False),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    connection = None
    try:
        connection = create_connection(module.params.pop('auth'))
        vms_service = connection.system_service().vms_service()
        vms = vms_service.list(search=module.params['pattern'])
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_vms=[
                    get_dict_of_struct(c) for c in vms
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Guard: if create_connection() itself raised, `connection` would
        # previously be unbound and this line raised NameError, masking the
        # real error reported by fail_json above.
        if connection is not None:
            connection.close(logout=False)


if __name__ == '__main__':
    main()
| {
"content_hash": "b852fac18603f6632b376506d9817b97",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 127,
"avg_line_length": 28.83132530120482,
"alnum_prop": 0.6435436690346845,
"repo_name": "nwiizo/workspace_2017",
"id": "2a11ad75280bd8e89fef8135b0b075c0d6f447e7",
"size": "3129",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "ansible-modules-extras/cloud/ovirt/ovirt_vms_facts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "173"
},
{
"name": "C++",
"bytes": "7105"
},
{
"name": "CSS",
"bytes": "50021"
},
{
"name": "Go",
"bytes": "112005"
},
{
"name": "HTML",
"bytes": "66435"
},
{
"name": "JavaScript",
"bytes": "73266"
},
{
"name": "Makefile",
"bytes": "1227"
},
{
"name": "PHP",
"bytes": "3916"
},
{
"name": "PowerShell",
"bytes": "277598"
},
{
"name": "Python",
"bytes": "11925958"
},
{
"name": "Ruby",
"bytes": "3779"
},
{
"name": "Rust",
"bytes": "1484076"
},
{
"name": "Shell",
"bytes": "86558"
}
],
"symlink_target": ""
} |
# Public names re-exported from the pyndn submodules below.
__all__ = ['NDN', 'Closure', 'ContentObject', 'Interest', 'Key', 'Name']
import sys as _sys
try:
    from pyndn.NDN import *
    from pyndn.Closure import *
    from pyndn.ContentObject import *
    from pyndn.Interest import *
    from pyndn.Key import *
    from pyndn.Name import *
    from pyndn import NameCrypto
except ImportError:
    # Drop the half-initialised package from sys.modules so that a later
    # `import pyndn` retries the imports from scratch instead of returning
    # a broken module object; then propagate the original error.
    del _sys.modules[__name__]
    raise
#def name_compatibility():
#    global _name_immutable
#
#    _name_immutable = 1
| {
"content_hash": "1213d9c8205dadca7aa2ceab06f5b7cf",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 72,
"avg_line_length": 20.571428571428573,
"alnum_prop": 0.7013888888888888,
"repo_name": "cawka/packaging-PyNDN",
"id": "aad14d619a04b1f1b19edfe07381a35e30ae4f4c",
"size": "647",
"binary": false,
"copies": "2",
"ref": "refs/heads/ubuntu/precise",
"path": "python/pyndn/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "185314"
},
{
"name": "C++",
"bytes": "8097"
},
{
"name": "Python",
"bytes": "70124"
},
{
"name": "Shell",
"bytes": "54"
}
],
"symlink_target": ""
} |
from io import BytesIO
from urllib.parse import urlparse,parse_qs
import requests
from PIL import Image
from openpyxl import load_workbook
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from openpyxl.drawing.image import Image as Image2
def Init(username, departId):
    """Build the base staff-identity parameters shared by ESS requests.

    Args:
        username: staff login id ('staffId' in the payload).
        departId: department id of the staff account.

    Returns:
        dict carrying the identity plus the fixed subsystem/eparchy codes.
    """
    identity = dict(
        staffId=username,
        departId=departId,
        subSysCode='BSS',
        eparchyCode='0010',
    )
    return identity
def GetRandomImage(session):
    ################################
    # Fetch the site's CAPTCHA image, show it to the user, and return the
    # code they type in (random_code).
    ################################
    PAYLOAD_IMAGE = {
        'mode':'validate',
        'width':'60',
        'height':'20'}
    # verify=False: the host presents a certificate requests cannot verify.
    r = session.get('https://123.125.98.209/image',
                    params = PAYLOAD_IMAGE,
                    verify=False)
    i = Image.open(BytesIO(r.content))
    i.show()
    random_code = input('test:')
    return random_code
def LoginSystem(username, password, random_code, session):
    ################################
    # Log in to the site: post the username/password (plus the CAPTCHA code)
    # and let the session collect the BSS/ESS cookies.
    ################################
    DATA_LOGINSYS = {
        'service':'direct/1/LoginProxy/$Form',
        'sp':'S0',
        # Form0 is the server-side list of expected form field names.
        'Form0':'ACTION_MODE,STAFF_ID,LOGIN_PASSWORD,NEED_SMS_VERIFY,SUBSYS_CODE,LOGIN_TYPE,authDomainType,soap,menuId,error,authType,authSys,LOGIN_PROVINCE_CODE,VERIFY_CODE,WHITE_LIST_LOGIN,IPASS_SERVICE_URL,IPASS_CHECK_MESSAGE,IPASS_LOGIN_PROVINCE,SIGNATURE_CODE,SIGNATURE_DATA,IPASS_LOGIN,IPASS_ACTIVATE,NEED_INSTALL_CERT,IPASS_INSTALL_RESULT,IPASS_INSTALL_MESSAGE,IPASS_LOGINOUT_DOMAIN,btnProxyLogin',
        'STAFF_ID':username,
        'LOGIN_PASSWORD':password,
        'NEED_SMS_VERIFY':'',
        'SUBSYS_CODE':'',
        'LOGIN_TYPE':'redirectLogin',
        'authDomainType':'',
        'soap':'',
        'menuId':'',
        'error':'',
        'authType':'',
        'authSys':'',
        'LOGIN_PROVINCE_CODE':'0011',
        'VERIFY_CODE':random_code,
        'WHITE_LIST_LOGIN':'',
        'IPASS_SERVICE_URL':'http://132.35.102.170:7001/n6IpassAutherService/services/IpassAutherService',
        'IPASS_CHECK_MESSAGE':'',
        'IPASS_LOGIN_PROVINCE':'0091',
        'SIGNATURE_CODE':'',
        'SIGNATURE_DATA':'',
        'IPASS_LOGIN':'',
        'IPASS_ACTIVATE':'',
        'NEED_INSTALL_CERT':'',
        'IPASS_INSTALL_RESULT':'',
        'IPASS_INSTALL_MESSAGE':'',
        'IPASS_LOGINOUT_DOMAIN':'http://ess.10010.com/essframe',
        # The submit button's caption must be sent GBK-encoded, as the site
        # itself serves GBK pages.
        'btnProxyLogin':'提交查询内容'.encode('GBK')
        }
    session.post(
        'https://123.125.98.209/essframe',
        headers = {'Referer': 'https://123.125.98.209/essframe?service=page/LoginProxy&login_type=redirectLogin'},
        data = DATA_LOGINSYS,
        verify=False)
def FindService(username, dict_userid, session):
    ################################
    # Scrape the navigation page and collect, per menu entry, the query
    # parameters needed to open that service (merged with the identity
    # parameters from Init()).
    ################################
    urls = {}
    page = session.get(
        'https://123.125.98.209/essframe?service=page/Nav&STAFF_ID='+username,
        headers = {'Accept': 'application/x-ms-application, image/jpeg, application/xaml+xml, image/gif, image/pjpeg, application/x-ms-xbap, */*'},
        cookies = {'LOGIN_SUBSYS_CODEBSS':'CRM'},
        verify=False)
    soup = BeautifulSoup(page.text,'html.parser')
    for tag in soup.findAll(menuid=True):
        # Was a bare `except:` — narrowed to the errors this body can hit:
        # KeyError (no onclick attribute), IndexError (unexpected onclick
        # format), AttributeError (malformed tag). Menu entries that fail
        # still get an empty dict, preserving the best-effort behaviour.
        try:
            # onclick looks like "...;open('<url>')..." — pull out <url>.
            url = tag['onclick']
            url = url.split(';')[1].split('\'')[1]
            url_params = parse_qs(urlparse(url).query,True)
            # parse_qs wraps every value in a list; flatten to scalars.
            for key, value in url_params.items():
                url_params[key] = value[0]
            url_params.update(dict_userid)
            urls[tag.string]=url_params
        except (KeyError, IndexError, AttributeError):
            urls[tag.string]={}
    return urls
def LoginEssSystem(username, departId, password, session):
    """Run the full ESS login flow and return the service-url map.

    Prompts interactively for the CAPTCHA via GetRandomImage().
    """
    identity = Init(username, departId)
    captcha = GetRandomImage(session)
    LoginSystem(username, password, captcha, session)
    return FindService(username, identity, session)
def LoginService(urls, ServiceName, part, session):
    ###############################
    # Open one service page using the parameters scraped by FindService().
    # Example: LoginService(urls, '局方停机', 'personalserv', session)
    ###############################
    session.get(
        'https://123.125.98.209/'+ part,
        params=urls[ServiceName],
        headers={'Referer':'https://123.125.98.209/essframe?service=page/'},
        verify=False)
if __name__ == '__main__':
    # NOTE(review): credentials are hard-coded in source; consider loading
    # them from the environment or a config file kept out of version control.
    USERNAME = 'wangping80'
    DEPARTID = '11a0271'
    PASSWORD = 'aUiJOHzTKG1V/avl/jU3gMdE+Ns='
    session = requests.Session()
    urls = LoginEssSystem(USERNAME,DEPARTID,PASSWORD,session)
    LoginService(urls, '局方停机','personalserv', session)
| {
"content_hash": "66bcb38a9603982f6376cae998266589",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 402,
"avg_line_length": 34.59230769230769,
"alnum_prop": 0.6041805648209918,
"repo_name": "user5111/ESS-SDK-for-Human",
"id": "cc563c3a0606926b22c715d0ed2f13a796349741",
"size": "4699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "login.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28983"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from datetime import datetime, timedelta
from uuid import UUID
from flask import after_this_request, g, session
from oauthlib.oauth2 import FatalClientError, InvalidClientIdError
from indico.core.db import db
from indico.modules.oauth import logger, oauth
from indico.modules.oauth.models.applications import OAuthApplication
from indico.modules.oauth.models.tokens import OAuthGrant, OAuthToken
from indico.util.date_time import now_utc
class DisabledClientIdError(FatalClientError):
    """Raised when the OAuth application exists but was disabled by an admin."""
    # Error code surfaced to the OAuth client in the error response.
    error = 'application_disabled_by_admin'
@oauth.clientgetter
def load_client(client_id):
    """Look up the OAuth application registered under *client_id*.

    Raises:
        InvalidClientIdError: if *client_id* is not a valid UUID hex string
            or no application with that id exists.
        DisabledClientIdError: if the application exists but is disabled.
    """
    try:
        UUID(hex=client_id)
    except ValueError:
        raise InvalidClientIdError
    app = OAuthApplication.find_first(client_id=client_id)
    if app is None:
        # Unknown client id: report it like a malformed one instead of
        # crashing with AttributeError on `app.is_enabled` below.
        raise InvalidClientIdError
    if not app.is_enabled:
        raise DisabledClientIdError
    return app
@oauth.grantgetter
def load_grant(client_id, code):  # pragma: no cover
    """Return the stored authorization grant for (client_id, code)."""
    return OAuthGrant.get(client_id, code)
@oauth.grantsetter
def save_grant(client_id, code, request, *args, **kwargs):
    """Persist a short-lived authorization grant for the logged-in user."""
    # Grants are only valid for 120 seconds — long enough for the client to
    # exchange the code for a token.
    expires = datetime.utcnow() + timedelta(seconds=120)
    grant = OAuthGrant(client_id=client_id, code=code['code'], redirect_uri=request.redirect_uri,
                       user=session.user, scopes=request.scopes, expires=expires)
    grant.save()
    return grant
@oauth.tokengetter
def load_token(access_token, refresh_token=None):
    """Resolve an access token string to its OAuthToken row.

    Returns None for a missing/malformed/unknown token or one whose
    application has been disabled. As a side effect, schedules an update of
    the token's last-used timestamp after the current request.
    """
    if not access_token:
        return None
    # ugly hack so we can know in other places that we received a token
    # e.g. to show an error if there was an invalid token specified but
    # not if there was no token at all
    g.received_oauth_token = True
    try:
        UUID(hex=access_token)
    except ValueError:
        # malformed oauth token
        return None
    token = OAuthToken.find(access_token=access_token).options(db.joinedload(OAuthToken.application)).first()
    if not token or not token.application.is_enabled:
        return None
    token_id = token.id  # avoid DetachedInstanceError in the callback
    @after_this_request
    def _update_last_use(response):
        with db.tmp_session() as sess:
            # do not modify `token` directly, it's attached to a different session!
            sess.query(OAuthToken).filter_by(id=token_id).update({OAuthToken.last_used_dt: now_utc()})
            sess.commit()
        return response
    return token
@oauth.tokensetter
def save_token(token_data, request, *args, **kwargs):
    """Create or update the OAuthToken for (application, user).

    Reuses an existing token row where possible: new scopes are merged in
    (rotating the access token), and an unchanged scope set keeps the old
    access token. Mutates *token_data* in place to reflect what is stored.
    """
    # For the implicit flow
    # Check issue: https://github.com/lepture/flask-oauthlib/issues/209
    if request.grant_type == 'authorization_code':
        user = request.user
    elif request.grant_type is None:  # implicit flow
        user = session.user
    else:
        raise ValueError('Invalid grant_type')
    requested_scopes = set(token_data['scope'].split())
    token = OAuthToken.find_first(OAuthApplication.client_id == request.client.client_id,
                                  OAuthToken.user == user,
                                  _join=OAuthApplication)
    if token is None:
        application = OAuthApplication.find_one(client_id=request.client.client_id)
        token = OAuthToken(application=application, user=user)
        db.session.add(token)
        token.access_token = token_data['access_token']
        token.scopes = requested_scopes
    elif requested_scopes - token.scopes:
        logger.info('Added scopes to %s: %s', token, requested_scopes - token.scopes)
        # use the new access_token when extending scopes
        token.access_token = token_data['access_token']
        token.scopes |= requested_scopes
    else:
        token_data['access_token'] = token.access_token
    token_data.pop('refresh_token', None)  # we don't support refresh tokens so far
    token_data.pop('expires_in', None)  # our tokens currently do not expire
    return token
| {
"content_hash": "614471293b2cac7431eab8c41def3578",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 109,
"avg_line_length": 36.76190476190476,
"alnum_prop": 0.6873056994818653,
"repo_name": "mic4ael/indico",
"id": "3597fd4a1074a18b1457659a47f6f34e8139f672",
"size": "4074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indico/modules/oauth/provider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "553825"
},
{
"name": "HTML",
"bytes": "1375160"
},
{
"name": "JavaScript",
"bytes": "1852830"
},
{
"name": "Mako",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "4612709"
},
{
"name": "Shell",
"bytes": "2665"
},
{
"name": "TeX",
"bytes": "23292"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
} |
"""
==================================================
Generalized Relevance Learning Vector Quantization
==================================================
This example shows how GRLVQ projects and classifies.
The plot shows the target class of each data point
(big circle) and which class was predicted (smaller circle). It also
shows the prototypes (black diamond) and their labels (small point inside the diamond).
The projected data is shown in the right plot.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn_lvq import GrlvqModel
from sklearn_lvq.utils import plot2d
print(__doc__)
nb_ppc = 100
toy_label = np.append(np.zeros(nb_ppc), np.ones(nb_ppc), axis=0)
print('GRLVQ:')
toy_data = np.append(
np.random.multivariate_normal([0, 0], np.array([[0.3, 0], [0, 4]]),
size=nb_ppc),
np.random.multivariate_normal([4, 4], np.array([[0.3, 0], [0, 4]]),
size=nb_ppc), axis=0)
grlvq = GrlvqModel()
grlvq.fit(toy_data, toy_label)
plot2d(grlvq, toy_data, toy_label, 1, 'grlvq')
print('classification accuracy:', grlvq.score(toy_data, toy_label))
plt.show()
| {
"content_hash": "89a09cb9b0892dc1ff6be5b46de6f28e",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 87,
"avg_line_length": 33.05714285714286,
"alnum_prop": 0.6153846153846154,
"repo_name": "MrNuggelz/sklearn-glvq",
"id": "1da2c47d2364dff8352e78d858b75b12fce9750c",
"size": "1157",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "examples/plot_grlvq.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "Python",
"bytes": "130642"
},
{
"name": "Shell",
"bytes": "3152"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import os
import procrunner
import pytest
# Compiled C++ test executables, given relative to $LIBTBX_BUILD/dials.
cpp_tests = [
    # Paths are under /build/
    "tests/algorithms/spatial_indexing/tst_collision_detection",
    "tests/algorithms/spot_prediction/tst_reeke_model",
]
@pytest.mark.parametrize(
    "executable", cpp_tests, ids=[p.replace("/", "-") for p in cpp_tests]
)
def test_cpp_program(executable):
    """Run a compiled C++ test executable and require a clean, quiet exit."""
    if "LIBTBX_BUILD" not in os.environ:
        # Fixed message: the variable actually consulted is LIBTBX_BUILD,
        # not LIBTBX_ENV as the old skip reason claimed.
        pytest.skip("LIBTBX_BUILD is unset; don't know how to find test executable")
    full_path = os.path.join(
        os.environ["LIBTBX_BUILD"], "dials", *(executable.split("/"))
    )
    print(full_path)
    result = procrunner.run([full_path])
    assert not result.returncode and not result.stderr
| {
"content_hash": "26797e184cb7340e740e90e3bf13e1c3",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 82,
"avg_line_length": 27.14814814814815,
"alnum_prop": 0.679399727148704,
"repo_name": "dials/dials",
"id": "e4c33b0ba2b2eecf9cef3a064334b2b47f894b5d",
"size": "733",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_cpp_components.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "379"
},
{
"name": "C++",
"bytes": "1758129"
},
{
"name": "CMake",
"bytes": "34388"
},
{
"name": "Dockerfile",
"bytes": "329"
},
{
"name": "Gherkin",
"bytes": "400"
},
{
"name": "HTML",
"bytes": "25033"
},
{
"name": "Makefile",
"bytes": "76"
},
{
"name": "Python",
"bytes": "6147100"
},
{
"name": "Shell",
"bytes": "6419"
}
],
"symlink_target": ""
} |
def __load():
    # py2exe bootstrap: load the bundled PIL._imaging C extension from a
    # .pyd file sitting next to the frozen executable (or sys.prefix when
    # not running from a zip archive).
    import imp, os, sys
    try:
        dirname = os.path.dirname(__loader__.archive)
    except NameError:
        # Not running from a zipimport archive; fall back to sys.prefix.
        dirname = sys.prefix
    path = os.path.join(dirname, 'PIL._imaging.pyd')
    #print "py2exe extension module", __name__, "->", path
    mod = imp.load_dynamic(__name__, path)
##    mod.frozen = 1
__load()
del __load
| {
"content_hash": "01c23433516c142c74fa13257b2ce103",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 58,
"avg_line_length": 29,
"alnum_prop": 0.5804597701149425,
"repo_name": "nawarian/PHPBot",
"id": "b17de0a0f6b724e9c9e15766d9967329961ee47a",
"size": "349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ext/pyautogui/bin/build/bdist.win-amd64/winexe/temp/PIL._imaging.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "26269"
},
{
"name": "Python",
"bytes": "3897"
},
{
"name": "Tcl",
"bytes": "1237789"
}
],
"symlink_target": ""
} |
from cached_property import cached_property
from decimal import Decimal, ROUND_HALF_UP
from frozendict import frozendict
def round_currency(dec):
    """Round *dec* to two decimal places with half-up (currency) rounding.

    e.g. Decimal('12.9699999999') becomes Decimal('12.97')
    """
    two_places = Decimal('0.01')
    return dec.quantize(two_places, rounding=ROUND_HALF_UP)
class Locality(object):
    """
    A locality (place) record as returned by the API.

    Wraps the raw response dict in a frozendict and exposes its fields as
    read-only properties; missing fields read as None.
    """

    def __init__(self, locality_dict):
        # Freeze the payload so attribute reads are effectively immutable.
        self._d = frozendict(locality_dict)

    def __str__(self):
        return self.location

    def __repr__(self):
        return '<Locality \'{}\'>'.format(self.location)

    def _field(self, name):
        # Single point of access to the underlying (frozen) payload.
        return self._d.get(name)

    @property
    def as_dict(self):
        """
        Mutable dict copy of all attributes, e.g. for caching purposes
        (this object's own attributes are read-only).
        """
        return dict(self._d)

    @property
    def category(self):
        return self._field('category')

    @property
    def id(self):
        return self._field('id')

    @property
    def latitude(self):
        return self._field('latitude')

    @property
    def longitude(self):
        return self._field('longitude')

    @property
    def location(self):
        return self._field('location')

    @property
    def postcode(self):
        return self._field('postcode')

    @property
    def state(self):
        return self._field('state')
class Parcel(object):
    """
    A parcel with three dimensions and a weight.

    Unspecified dimensions default to 0; unknown keyword arguments are
    rejected with ValueError.
    """

    # Class-level defaults; instances override them via __init__ kwargs.
    height = 0
    weight = 0
    length = 0
    width = 0

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            if hasattr(self, name):
                setattr(self, name, value)
            else:
                raise ValueError('Parcel object does not have a {} attribute'.format(name))
class PostageService(object):
    """
    A postage service entry as returned by the PAC.

    Monetary fields are exposed as Decimals rounded to cents; the option
    list is built lazily and cached.
    """

    def __init__(self, service_dict):
        self._d = frozendict(service_dict)

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<PostageService \'{}\'>'.format(self.code)

    @property
    def code(self):
        return self._d.get('code')

    @property
    def max_extra_cover(self):
        raw = self._d.get('max_extra_cover')
        return round_currency(Decimal(str(raw)))

    @property
    def name(self):
        return self._d.get('name')

    # Cached so repeated access does not rebuild the ServiceOption list.
    @cached_property
    def options(self):
        raw_options = self._d.get('options').get('option')
        return [ServiceOption(entry) for entry in raw_options]

    @property
    def price(self):
        return round_currency(Decimal(self._d.get('price')))
class ServiceOption(object):
    """
    Represents a postal service option. May contain suboptions which are
    represented by ServiceSubOption instances.
    """

    def __init__(self, option_dict):
        self._d = frozendict(option_dict)

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<ServiceOption \'{}\'>'.format(self.code)

    @property
    def code(self):
        return self._d.get('code')

    @property
    def name(self):
        return self._d.get('name')

    # Cache this so we're not constructing a new list of options all the time
    @cached_property
    def suboptions(self):
        """List of ServiceSubOption for this option (empty if none)."""
        options = self._d.get('suboptions', {}).get('option')
        if options is None:
            return []
        # The API returns a bare dict when there is exactly one suboption;
        # normalise to a list. isinstance() (vs the old `type(...) != list`)
        # also correctly accepts list subclasses.
        if not isinstance(options, list):
            options = [options]
        return [ServiceSubOption(o) for o in options]
class ServiceSubOption(ServiceOption):
    """
    A suboption of a postal service — typically an extra service such as
    extra cover or signature on delivery.

    Behaviourally identical to ServiceOption; the subclass exists only so
    the two kinds of option can be distinguished.
    """

    def __repr__(self):
        return '<ServiceSubOption \'{0}\'>'.format(self.code)
| {
"content_hash": "c2efc4826e8b53b418229aad15f6981d",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 88,
"avg_line_length": 25.506410256410255,
"alnum_prop": 0.6059311384770043,
"repo_name": "sjkingo/python-auspost-pac",
"id": "1011a52197403427a771032288c50c81d7ad1151",
"size": "3979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "auspost_pac/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "18296"
},
{
"name": "Shell",
"bytes": "64"
}
],
"symlink_target": ""
} |
from __future__ import generators
def run(test):
    """Invoke test.check(i) once for each integer i in 0..9."""
    index = 0
    while index < 10:
        test.check(index)
        index += 1
| {
"content_hash": "fdaed88d493a146a8ea42d436dc67d20",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 20.2,
"alnum_prop": 0.5841584158415841,
"repo_name": "QQuick/Transcrypt",
"id": "23f88106ce9280f695b4d897c72f87cc07c09b29",
"size": "101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "transcrypt/development/automated_tests/__future__/generators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5571"
},
{
"name": "C++",
"bytes": "603"
},
{
"name": "HTML",
"bytes": "28940"
},
{
"name": "JavaScript",
"bytes": "662845"
},
{
"name": "Makefile",
"bytes": "5733"
},
{
"name": "Python",
"bytes": "1347703"
},
{
"name": "Shell",
"bytes": "471"
}
],
"symlink_target": ""
} |
import flask
from web_site.web.siteroot.controller import mod as siteroot_module
def _build_app(config_to_use,
               template_folder='web/templates',
               static_folder='web/static'):
    """Create the Flask app, load *config_to_use*, and register blueprints.

    *config_to_use* is a dotted path passed to ``config.from_object``.
    """
    application = flask.Flask(
        __name__,
        template_folder=template_folder,
        static_folder=static_folder,
    )
    application.config.from_object(config_to_use)
    application.register_blueprint(siteroot_module)
    return application
def main():
    """Build the app from the default config and start the dev server."""
    app = _build_app('config.Config')
    # app.config below reads from the loaded Config object, NOT from the
    # dotted-path string passed above.
    app.run(host=app.config['DASHBOARD_HOST'],
            port=app.config['DASHBOARD_PORT'],
            debug=app.config['DEBUG'])


if __name__ == '__main__':
    main()
| {
"content_hash": "12d65b95497e6ea8a40bfd41e9b7f602",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 67,
"avg_line_length": 31.12,
"alnum_prop": 0.589974293059126,
"repo_name": "MadDonkeySoftware/pyDuesPal",
"id": "a1b4d5e64f0b807c2f6c2b58dc3fd0abe73ae472",
"size": "778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web_site/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "229"
},
{
"name": "Python",
"bytes": "1424"
}
],
"symlink_target": ""
} |
import pytest
from pybind11_tests import local_bindings as m
def test_load_external():
    """Load a `py::module_local` type that's only registered in an external module"""
    import pybind11_cross_module_tests as cm
    assert m.load_external1(cm.ExternalType1(11)) == 11
    assert m.load_external2(cm.ExternalType2(22)) == 22
    # Passing the wrong external type must be rejected by overload matching.
    with pytest.raises(TypeError) as excinfo:
        assert m.load_external2(cm.ExternalType1(21)) == 21
    assert "incompatible function arguments" in str(excinfo.value)
    with pytest.raises(TypeError) as excinfo:
        assert m.load_external1(cm.ExternalType2(12)) == 12
    assert "incompatible function arguments" in str(excinfo.value)
def test_local_bindings():
    """Tests that duplicate `py::module_local` class bindings work across modules"""
    # Make sure we can load the second module with the conflicting (but local) definition:
    import pybind11_cross_module_tests as cm
    # Each module's LocalType has different behaviour and extra methods.
    i1 = m.LocalType(5)
    assert i1.get() == 4
    assert i1.get3() == 8
    i2 = cm.LocalType(10)
    assert i2.get() == 11
    assert i2.get2() == 12
    assert not hasattr(i1, 'get2')
    assert not hasattr(i2, 'get3')
    # Loading within the local module
    assert m.local_value(i1) == 5
    assert cm.local_value(i2) == 10
    # Cross-module loading works as well (on failure, the type loader looks for
    # external module-local converters):
    assert m.local_value(i2) == 10
    assert cm.local_value(i1) == 5
def test_nonlocal_failure():
    """Tests that attempting to register a non-local type in multiple modules fails"""
    import pybind11_cross_module_tests as cm
    # The second (non-local) registration must be rejected at runtime.
    with pytest.raises(RuntimeError) as excinfo:
        cm.register_nonlocal()
    assert str(excinfo.value) == 'generic_type: type "NonLocalType" is already registered!'
def test_duplicate_local():
    """Tests expected failure when registering a class twice with py::local in the same module"""
    with pytest.raises(RuntimeError) as excinfo:
        m.register_local_external()
    import pybind11_tests
    # The expected message depends on whether the test module was built with
    # class support enabled.
    assert str(excinfo.value) == (
        'generic_type: type "LocalExternal" is already registered!'
        if hasattr(pybind11_tests, 'class_') else 'test_class not enabled')
def test_stl_bind_local():
    """Module-local bound STL containers convert values across modules."""
    import pybind11_cross_module_tests as cm
    v1, v2 = m.LocalVec(), cm.LocalVec()
    # Each module's LocalType applies a different offset on construction.
    v1.append(m.LocalType(1))
    v1.append(m.LocalType(2))
    v2.append(cm.LocalType(1))
    v2.append(cm.LocalType(2))
    # Cross module value loading:
    v1.append(cm.LocalType(3))
    v2.append(m.LocalType(3))
    assert [i.get() for i in v1] == [0, 1, 2]
    assert [i.get() for i in v2] == [2, 3, 4]
    v3, v4 = m.NonLocalVec(), cm.NonLocalVec2()
    v3.append(m.NonLocalType(1))
    v3.append(m.NonLocalType(2))
    v4.append(m.NonLocal2(3))
    v4.append(m.NonLocal2(4))
    assert [i.get() for i in v3] == [1, 2]
    assert [i.get() for i in v4] == [13, 14]
    # Module-local bound maps behave the same way.
    d1, d2 = m.LocalMap(), cm.LocalMap()
    d1["a"] = v1[0]
    d1["b"] = v1[1]
    d2["c"] = v2[0]
    d2["d"] = v2[1]
    assert {i: d1[i].get() for i in d1} == {'a': 0, 'b': 1}
    assert {i: d2[i].get() for i in d2} == {'c': 2, 'd': 3}
def test_stl_bind_global():
    """Globally bound STL containers cannot be re-registered by a second module."""
    import pybind11_cross_module_tests as cm
    with pytest.raises(RuntimeError) as excinfo:
        cm.register_nonlocal_map()
    assert str(excinfo.value) == 'generic_type: type "NonLocalMap" is already registered!'
    with pytest.raises(RuntimeError) as excinfo:
        cm.register_nonlocal_vec()
    assert str(excinfo.value) == 'generic_type: type "NonLocalVec" is already registered!'
    with pytest.raises(RuntimeError) as excinfo:
        cm.register_nonlocal_map2()
    assert str(excinfo.value) == 'generic_type: type "NonLocalMap2" is already registered!'
def test_mixed_local_global():
    """Local types take precedence over globally registered types: a module with a `module_local`
    type can be registered even if the type is already registered globally.  With the module,
    casting will go to the local type; outside the module casting goes to the global type."""
    import pybind11_cross_module_tests as cm
    m.register_mixed_global()
    m.register_mixed_local()
    a = []
    a.append(m.MixedGlobalLocal(1))
    a.append(m.MixedLocalGlobal(2))
    a.append(m.get_mixed_gl(3))
    a.append(m.get_mixed_lg(4))
    assert [x.get() for x in a] == [101, 1002, 103, 1004]
    # After the second module registers its own versions, construction inside
    # each module picks that module's local binding (200/2000 offsets for cm).
    cm.register_mixed_global_local()
    cm.register_mixed_local_global()
    a.append(m.MixedGlobalLocal(5))
    a.append(m.MixedLocalGlobal(6))
    a.append(cm.MixedGlobalLocal(7))
    a.append(cm.MixedLocalGlobal(8))
    a.append(m.get_mixed_gl(9))
    a.append(m.get_mixed_lg(10))
    a.append(cm.get_mixed_gl(11))
    a.append(cm.get_mixed_lg(12))
    assert [x.get() for x in a] == \
        [101, 1002, 103, 1004, 105, 1006, 207, 2008, 109, 1010, 211, 2012]
def test_internal_locals_differ():
    """Makes sure the internal local type map differs across the two modules"""
    import pybind11_cross_module_tests as cm
    assert m.local_cpp_types_addr() != cm.local_cpp_types_addr()
def test_stl_caster_vs_stl_bind(msg):
    """One module uses a generic vector caster from `<pybind11/stl.h>` while the other
    exports `std::vector<int>` via `py:bind_vector` and `py::module_local`"""
    import pybind11_cross_module_tests as cm
    v1 = cm.VectorInt([1, 2, 3])
    # The generic caster accepts both the bound vector and a plain list;
    # the bound version only accepts the bound vector type.
    assert m.load_vector_via_caster(v1) == 6
    assert cm.load_vector_via_binding(v1) == 6
    v2 = [1, 2, 3]
    assert m.load_vector_via_caster(v2) == 6
    with pytest.raises(TypeError) as excinfo:
        # Just the call: the old `== 6` comparison here was dead code, since
        # the call raises before the comparison is evaluated (flake8 B015).
        cm.load_vector_via_binding(v2)
    assert msg(excinfo.value) == """
        load_vector_via_binding(): incompatible function arguments. The following argument types are supported:
            1. (arg0: pybind11_cross_module_tests.VectorInt) -> int
    Invoked with: [1, 2, 3]
    """  # noqa: E501 line too long
def test_cross_module_calls():
    """Instances of module-local types round-trip through functions of either module."""
    import pybind11_cross_module_tests as cm
    v1 = m.LocalVec()
    v1.append(m.LocalType(1))
    v2 = cm.LocalVec()
    v2.append(cm.LocalType(2))
    # Returning the self pointer should get picked up as returning an existing
    # instance (even when that instance is of a foreign, non-local type).
    assert m.return_self(v1) is v1
    assert cm.return_self(v2) is v2
    assert m.return_self(v2) is v2
    assert cm.return_self(v1) is v1
    assert m.LocalVec is not cm.LocalVec
    # Returning a copy, on the other hand, always goes to the local type,
    # regardless of where the source type came from.
    assert type(m.return_copy(v1)) is m.LocalVec
    assert type(m.return_copy(v2)) is m.LocalVec
    assert type(cm.return_copy(v1)) is cm.LocalVec
    assert type(cm.return_copy(v2)) is cm.LocalVec
    # Test the example given in the documentation (which also tests inheritance casting):
    mycat = m.Cat("Fluffy")
    mydog = cm.Dog("Rover")
    assert mycat.get_name() == "Fluffy"
    assert mydog.name() == "Rover"
    assert m.Cat.__base__.__name__ == "Pet"
    assert cm.Dog.__base__.__name__ == "Pet"
    assert m.Cat.__base__ is not cm.Dog.__base__
    assert m.pet_name(mycat) == "Fluffy"
    assert m.pet_name(mydog) == "Rover"
    assert cm.pet_name(mycat) == "Fluffy"
    assert cm.pet_name(mydog) == "Rover"
    assert m.MixGL is not cm.MixGL
    a = m.MixGL(1)
    b = cm.MixGL(2)
    assert m.get_gl_value(a) == 11
    assert m.get_gl_value(b) == 12
    assert cm.get_gl_value(a) == 101
    assert cm.get_gl_value(b) == 102
    c, d = m.MixGL2(3), cm.MixGL2(4)
    with pytest.raises(TypeError) as excinfo:
        m.get_gl_value(c)
    # Check the exception's message via .value, consistent with the rest of
    # this file (`str(excinfo)` is the ExceptionInfo wrapper, not the error).
    assert "incompatible function arguments" in str(excinfo.value)
    with pytest.raises(TypeError) as excinfo:
        m.get_gl_value(d)
    assert "incompatible function arguments" in str(excinfo.value)
| {
"content_hash": "341e37297159af9d35affd823eea8f3a",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 107,
"avg_line_length": 34.42035398230089,
"alnum_prop": 0.6563825684535287,
"repo_name": "allison-group/indigo-bondorder",
"id": "b3dc3619c5d466d9f4364f1d147161db6018f230",
"size": "7779",
"binary": false,
"copies": "24",
"ref": "refs/heads/master",
"path": "external/pybind11/tests/test_local_bindings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "210781"
},
{
"name": "CMake",
"bytes": "9152"
},
{
"name": "Python",
"bytes": "136155"
}
],
"symlink_target": ""
} |
"""
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from collections import OrderedDict
import pymsteams
from pymsteams import TeamsWebhookException
from streamalert.alert_processor.helpers import compose_alert
from streamalert.alert_processor.outputs.output_base import (
OutputDispatcher,
OutputProperty,
StreamAlertOutput,
)
from streamalert.shared.logger import get_logger
LOGGER = get_logger(__name__)
@StreamAlertOutput
class TeamsOutput(OutputDispatcher):
"""TeamsOutput handles all alert dispatching for Microsoft Teams"""
__service__ = "teams"
@classmethod
def get_user_defined_properties(cls):
"""Get properties that must be assigned by the user when configuring a new Microsoft Teams
output. This should be sensitive or unique information for this use-case that needs
to come from the user.
Every output should return a dict that contains a 'descriptor' with a description of the
integration being configured.
Microsoft Teams also requires a user provided 'webhook' url that is composed of the Team's
api url and the unique integration key for this output. This value should be should be
masked during input and is a credential requirement.
Returns:
OrderedDict: Contains various OutputProperty items
"""
return OrderedDict(
[
(
"descriptor",
OutputProperty(
description="a short and unique descriptor for this service configuration "
"(ie: name of Team the webhook relates too)"
),
),
(
"url",
OutputProperty(
description="the full teams webhook url, including the secret",
mask_input=True,
cred_requirement=True,
),
),
]
)
@classmethod
def _format_message(cls, alert, publication, webhook_url):
"""Format the message to be sent to Teams
Args:
alert (Alert): The alert
publication (dict): Alert relevant to the triggered rule
webhook_url (str): The webhook_url to send the card too
Returns:
pymsteams.connectorcard: The message to be sent to Teams
The card will look like (by Default):
StreamAlert Rule Triggered: rule_name
Rule Description:
This will be the docstring from the rule, sent as the rule_description
Record:
key value
key value
...
"""
# Presentation defaults
default_title = "StreamAlert Rule Triggered: {}".format(alert.rule_name)
default_description = alert.rule_description
default_color = "E81123" # Red in Hexstring format
# Special field that Publishers can use to customize the message
title = publication.get("@teams.title", default_title)
description = publication.get("@teams.description", default_description)
card_color = publication.get("@teams.card_color", default_color)
with_record = publication.get("@teams.with_record", True)
# Instantiate the card with the url
teams_card = pymsteams.connectorcard(webhook_url)
# Set the cards title, text and color
teams_card.title(title)
teams_card.text(description)
teams_card.color(card_color)
# Add the Alert Section
teams_card.addSection(cls._generate_alert_section(alert))
if with_record:
# Add the record Section
teams_card.addSection(cls._generate_record_section(alert.record))
if "@teams.additional_card_sections" in publication:
teams_card = cls._add_additional_sections(
teams_card, publication["@teams.additional_card_sections"]
)
if "@teams.buttons" in publication:
teams_card = cls._add_buttons(
teams_card, publication["@teams.buttons"]
)
return teams_card
@classmethod
def _generate_record_section(cls, record):
"""Generate the record section
This adds the entire record to a section as key/value pairs
Args:
record (dict): The record that triggered the alert
Returns:
record_section (pymsteams.cardsection): record section for the outgoing card
"""
# Instantiate the card section
record_section = pymsteams.cardsection()
# Set the title
record_section.activityTitle("StreamAlert Alert Record")
# Add the record as key/value pairs
for key, value in record.items():
record_section.addFact(key, str(value))
return record_section
@classmethod
def _generate_alert_section(cls, alert):
"""Generate the alert section
Args:
alert (Alert): The alert
Returns:
alert_section (pymsteams.cardsection): alert section for the outgoing card
"""
# Instantiate the card
alert_section = pymsteams.cardsection()
# Set the title
alert_section.activityTitle("Alert Info")
# Add basic information to the alert section
alert_section.addFact("rule_name", alert.rule_name)
alert_section.addFact("alert_id", alert.alert_id)
return alert_section
@staticmethod
def _add_additional_sections(teams_card, additional_sections):
"""Add additional card sections to the teams card
Args:
teams_card (pymsteams.connectorcard): Teams connector card
additional_sections (list[pymsteams.cardsection]):
Additional sections to be added to the card. Each section should be of
type: pymsteams.cardsection and have their relevant fields filled out.
Please review the pymsteams documentation for additional information.
Returns:
teams_card (pymsteams.connectorcard): teams_card with additional sections added
"""
if not isinstance(additional_sections, list):
LOGGER.debug("additional_sections is not a list, converting")
additional_sections = [additional_sections]
for additional_section in additional_sections:
if not isinstance(additional_section, pymsteams.cardsection):
LOGGER.error(
"additional_section: %s is not an instance of %s",
additional_section,
pymsteams.cardsection,
)
continue
teams_card.addSection(additional_section)
return teams_card
@staticmethod
def _add_buttons(teams_card, buttons):
"""Add buttons to the teams card
Args:
teams_card (pymsteams.connectorcard): Teams connector card
buttons (list[(text, url)]):
Buttons to place on the card, should be a list of tuples containing
the text and the url
Returns:
teams_card (pymsteams.connectorcard): teams_card with buttons added
"""
for button_text, button_url in buttons:
teams_card.addLinkButton(button_text, button_url)
return teams_card
def _dispatch(self, alert, descriptor):
"""Sends the Teams Card to Teams
Publishing:
By default the teams output sends a teams card comprising some default intro text
and a section containing:
* title with rule name
* alert description
* alert record (as a section of key/value pairs)
To override this behavior use the following fields:
- @teams.title (str):
Replaces the title of the teams connector card.
- @teams.description (str):
Replaces the text of the team connector card
- @teams.card_color (str):
Replaces the default color of the connector card (red)
Note: colors are represented by hex string
- @teams.with_record (bool):
Set to False, to remove the alert record section. Useful if you want to have a
more targeted approach for the alert
- @teams.additional_card_sections (list[pymsteams.cardsection]):
Pass in additional sections you want to send on the message.
@see cls._add_additional_sections() for more info
- @teams.buttons (list[(text, url)])
Pass a list of tuples containing the button text and url
These will be placed at the bottom of a teams card
Args:
alert (Alert): Alert instance which triggered a rule
descriptor (str): Output descriptor
Returns:
bool: True if alert was sent successfully, False otherwise
"""
creds = self._load_creds(descriptor)
if not creds:
LOGGER.error("No credentials found for descriptor: %s", descriptor)
return False
# Create the publication
publication = compose_alert(alert, self, descriptor)
# Format the message
teams_card = self._format_message(alert, publication, creds["url"])
try:
teams_card.send()
except TeamsWebhookException as err:
LOGGER.error("Error Sending Alert to Teams: %s", err)
return False
return True
| {
"content_hash": "b5378791afdc54d92fdb91a294ccd03f",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 99,
"avg_line_length": 35.282758620689656,
"alnum_prop": 0.6112197028928851,
"repo_name": "airbnb/streamalert",
"id": "e97a615fe1848d9a7cceea6721ce9befcd9431fc",
"size": "10232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "streamalert/alert_processor/outputs/teams.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "142275"
},
{
"name": "Python",
"bytes": "2209853"
},
{
"name": "Shell",
"bytes": "2975"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
# Route table for the user-center (ucenter) app.
# NOTE(review): uses the legacy ``patterns()`` helper with dotted-string view
# paths, matching the Django version this project targets.
urlpatterns = patterns('',
    # Both the app root and /userInfo/ resolve to the same profile view
    # (they also share the name 'ucenter_userInfo')
    url(r'^$', 'ucenter.views.userInfo', name='ucenter_userInfo'),
    url(r'^userInfo/$', 'ucenter.views.userInfo', name='ucenter_userInfo'),
    url(r'^userEdit/$', 'ucenter.views.userEdit', name='ucenter_userEdit'),
    url(r'^loginHistory/', 'ucenter.views.loginHistory', name='ucenter_loginHistory'),
    # Session / account lifecycle views
    url(r'^login/', 'ucenter.views.login', name='ucenter_login'),
    url(r'^logout/', 'ucenter.views.logout', name='ucenter_logout'),
    url(r'^register/', 'ucenter.views.register', name='ucenter_register'),
    # Keyed account-activation link: /<username>/activating/<key>/
    url(r'^(?P<username>[\.\w]+)/activating/(?P<key>\w+)/$', 'ucenter.views.activating', name='ucenter_activating'),
    # Password-recovery flow: request, keyed reset link, and in-session change
    url(r'^passwordForget/$', 'ucenter.views.passwordForget', name='ucenter_passwordForget'),
    url(r'^(?P<username>[\.\w]+)/passwordReset/(?P<key>\w+)/$', 'ucenter.views.passwordReset', name='ucenter_passwordReset'),
    url(r'^changePassword/$', 'ucenter.views.changePassword', name='ucenter_changePassword'),
)
| {
"content_hash": "3c9004474f42c64a195d534f7bf2f84f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 125,
"avg_line_length": 57.05555555555556,
"alnum_prop": 0.6728334956183057,
"repo_name": "kendazheng/wizcloud2",
"id": "d3ecfb0ff0818b578a1f268553835106264ec0fd",
"size": "1052",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "wizcloud/ucenter/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "368897"
},
{
"name": "HTML",
"bytes": "779419"
},
{
"name": "JavaScript",
"bytes": "3010901"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "591646"
},
{
"name": "Shell",
"bytes": "592"
}
],
"symlink_target": ""
} |
""" decorator """
import time
import traceback
import functools
from threading import Thread
def clock(ms=True):
    """Decorator factory: print wall-clock and CPU time of each call.

    Args:
        ms: if True, report times in milliseconds; otherwise in seconds.

    Returns:
        A decorator that wraps *func*, prints its run/cpu time after each
        call and returns the wrapped function's result unchanged.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # BUG FIX: time.clock() was removed in Python 3.8; prefer
            # time.process_time() when it exists and fall back to
            # time.clock on Python 2. print(...) with a single argument
            # is valid in both Python 2 and 3 (was a py2 print statement).
            cpu_clock = getattr(time, 'process_time', None) or time.clock
            wall_start, cpu_start = time.time(), cpu_clock()
            ret = func(*args, **kwargs)
            wall = time.time() - wall_start
            cpu = cpu_clock() - cpu_start
            print('run time: %.1fms' % (wall * 1000) if ms else 'run time: %.4fs' % wall)
            print('cpu time: %.1fms' % (cpu * 1000) if ms else 'cpu time: %.4fs' % cpu)
            return ret
        return wrapper
    return decorator
def catch(func):
    """Decorator: swallow any exception raised by *func*.

    On success the wrapped function's return value is passed through; on
    failure a banner with the function name plus the traceback is printed
    and None is returned.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # BUG FIX: py2-only print statement -> print(...) call, which is
            # valid in both Python 2 and 3 for a single argument.
            print('>>>>>>>>' + wrapper.__name__ + '>>>>>>>>')
            traceback.print_exc()
    return wrapper
def trace_run(func):
    """Decorator: print the function name and a timestamp on every call,
    then run the function and return its result unchanged."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # BUG FIX: py2-only print statement -> print(...) call, valid on
        # both Python 2 and 3; message format is unchanged.
        print('>>>>>>>> function *%s* is called :: %s >>>>>>>>' %
              (wrapper.__name__,
               time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))))
        return func(*args, **kwargs)
    return wrapper
# NOTE(review): ``async`` became a reserved keyword in Python 3.7, so this
# decorator only works on Python 2; renaming it would break existing callers.
def async(f):
    """Decorator: run the wrapped function in a background thread.

    Each call starts a new (non-daemon) Thread and returns immediately
    with None; the thread handle and f's return value are discarded.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        thr = Thread(target=f, args=args, kwargs=kwargs)
        thr.start()
    return wrapper
| {
"content_hash": "6ba77a2a46c95c555e0db304b6a34dc0",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 116,
"avg_line_length": 28.595744680851062,
"alnum_prop": 0.5446428571428571,
"repo_name": "baixuexue123/note",
"id": "3cc6a1fdea9c28dc6653724edfc685859e726940",
"size": "1369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/libs/pysaoke/decorators.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "9553"
},
{
"name": "Go",
"bytes": "123014"
},
{
"name": "HTML",
"bytes": "6704"
},
{
"name": "JavaScript",
"bytes": "1782"
},
{
"name": "Python",
"bytes": "487091"
},
{
"name": "Shell",
"bytes": "9074"
},
{
"name": "Vim script",
"bytes": "5287"
}
],
"symlink_target": ""
} |
"""
Command-line interface to the Stackops Automation API.
"""
from __future__ import print_function
import argparse
import glob
import imp
import itertools
import os
import pkgutil
import sys
import logging
import six
from automationclient import client
from automationclient import exceptions as exc
import automationclient.extension
from automationclient.openstack.common import strutils
from automationclient import utils
from automationclient.v1_1 import shell as shell_v1
# Defaults used when neither CLI options nor environment variables are set
DEFAULT_OS_AUTOMATION_API_VERSION = "1.1"
DEFAULT_AUTOMATION_ENDPOINT_TYPE = 'publicURL'
DEFAULT_AUTOMATION_SERVICE_TYPE = 'automation'
# Module-level logger; a handler is attached in setup_debugging() on --debug
logger = logging.getLogger(__name__)
class AutomationClientArgumentParser(argparse.ArgumentParser):
    """ArgumentParser whose error path points users at ``help <subcommand>``."""

    def __init__(self, *args, **kwargs):
        super(AutomationClientArgumentParser, self).__init__(*args, **kwargs)

    def error(self, message):
        """error(message: string)

        Prints a usage message incorporating the message to stderr and
        exits.
        """
        self.print_usage(sys.stderr)
        #FIXME(lzyeval): if changes occur in argparse.ArgParser._check_value
        marker = ' (choose from'
        # Drop argparse's "(choose from ...)" suffix and split the program
        # name into the main command and the subcommand.
        error_text = message.split(marker)[0]
        main_prog, _, sub_prog = self.prog.partition(' ')
        self.exit(2, "error: %(errmsg)s\nTry '%(mainp)s help %(subp)s'"
                     " for more information.\n" %
                  {'errmsg': error_text,
                   'mainp': main_prog,
                   'subp': sub_prog})
class StackopsAutomationShell(object):
    """CLI driver: builds the argparse tree, resolves credentials from flags
    and environment variables, authenticates and dispatches the subcommand."""
    def get_base_parser(self):
        """Build the top-level parser with the global (pre-subcommand)
        options; most defaults come from OS_* / AUTOMATION_* env vars."""
        parser = AutomationClientArgumentParser(
            prog='automation',
            description=__doc__.strip(),
            epilog='See "automation help COMMAND" '
                   'for help on a specific command.',
            add_help=False,
            formatter_class=StackopsHelpFormatter,
        )
        # Global arguments
        parser.add_argument('-h', '--help',
                            action='store_true',
                            help=argparse.SUPPRESS)
        parser.add_argument('--version',
                            action='version',
                            version=automationclient.__version__)
        parser.add_argument('--debug',
                            action='store_true',
                            default=utils.env('AUTOMATIONCLIENT_DEBUG',
                                              default=False),
                            help="Print debugging output")
        parser.add_argument('--os-username',
                            metavar='<auth-user-name>',
                            default=utils.env('OS_USERNAME',
                                              'AUTOMATION_USERNAME'),
                            help='Defaults to env[OS_USERNAME].')
        # The underscore spellings below are hidden aliases kept for
        # backwards compatibility with older option names.
        parser.add_argument('--os_username',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-password',
                            metavar='<auth-password>',
                            default=utils.env('OS_PASSWORD',
                                              'AUTOMATION_PASSWORD'),
                            help='Defaults to env[OS_PASSWORD].')
        parser.add_argument('--os_password',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-tenant-name',
                            metavar='<auth-tenant-name>',
                            default=utils.env('OS_TENANT_NAME',
                                              'AUTOMATION_PROJECT_ID'),
                            help='Defaults to env[OS_TENANT_NAME].')
        parser.add_argument('--os_tenant_name',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-tenant-id',
                            metavar='<auth-tenant-id>',
                            default=utils.env('OS_TENANT_ID',
                                              'AUTOMATION_TENANT_ID'),
                            help='Defaults to env[OS_TENANT_ID].')
        parser.add_argument('--os_tenant_id',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-auth-url',
                            metavar='<auth-url>',
                            default=utils.env('OS_AUTH_URL',
                                              'AUTOMATION_URL'),
                            help='Defaults to env[OS_AUTH_URL].')
        parser.add_argument('--os_auth_url',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-region-name',
                            metavar='<region-name>',
                            default=utils.env('OS_REGION_NAME',
                                              'AUTOMATION_REGION_NAME'),
                            help='Defaults to env[OS_REGION_NAME].')
        parser.add_argument('--os_region_name',
                            help=argparse.SUPPRESS)
        parser.add_argument('--service-type',
                            metavar='<service-type>',
                            help='Defaults to automation for most actions')
        parser.add_argument('--service_type',
                            help=argparse.SUPPRESS)
        parser.add_argument('--service-name',
                            metavar='<service-name>',
                            default=utils.env('AUTOMATION_SERVICE_NAME'),
                            help='Defaults to env[AUTOMATION_SERVICE_NAME]')
        parser.add_argument('--service_name',
                            help=argparse.SUPPRESS)
        parser.add_argument('--endpoint-type',
                            metavar='<endpoint-type>',
                            default=utils.env('AUTOMATION_ENDPOINT_TYPE',
                                              default=
                                              DEFAULT_AUTOMATION_ENDPOINT_TYPE
                                              ),
                            help='Defaults to env[AUTOMATION_ENDPOINT_TYPE] '
                                 'or ' + DEFAULT_AUTOMATION_ENDPOINT_TYPE +
                                 '.')
        parser.add_argument('--endpoint_type',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-automation-api-version',
                            metavar='<automation-api-ver>',
                            default=utils.env('OS_AUTOMATION_API_VERSION',
                                              default=
                                              DEFAULT_OS_AUTOMATION_API_VERSION
                                              ),
                            help='Accepts 1.1 or 2,defaults '
                                 'to env[OS_AUTOMATION_API_VERSION].')
        parser.add_argument('--os_automation_api_version',
                            help=argparse.SUPPRESS)
        parser.add_argument('--os-cacert',
                            metavar='<ca-certificate>',
                            default=utils.env('OS_CACERT', default=None),
                            help='Specify a CA bundle file to use in '
                                 'verifying a TLS (https) server certificate. '
                                 'Defaults to env[OS_CACERT]')
        parser.add_argument('--insecure',
                            default=utils.env('AUTOMATIONCLIENT_INSECURE',
                                              default=False),
                            action='store_true',
                            help='Explicitly allow automationclient to perform'
                                 ' "insecure" TLS (https) requests. The '
                                 'server\'s certificate will not be verified '
                                 'against any certificate authorities. This '
                                 'option should be used with caution.')
        parser.add_argument('--retries',
                            metavar='<retries>',
                            type=int,
                            default=0,
                            help='Number of retries.')
        # FIXME(dtroyer): The args below are here for diablo compatibility,
        # remove them in folsum cycle
        # alias for --os-username, left in for backwards compatibility
        parser.add_argument('--username',
                            help=argparse.SUPPRESS)
        # alias for --os-region_name, left in for backwards compatibility
        parser.add_argument('--region_name',
                            help=argparse.SUPPRESS)
        # alias for --os-password, left in for backwards compatibility
        parser.add_argument('--apikey', '--password', dest='apikey',
                            default=utils.env('AUTOMATION_API_KEY'),
                            help=argparse.SUPPRESS)
        # alias for --os-tenant-name, left in for backward compatibility
        parser.add_argument('--projectid', '--tenant_name', dest='projectid',
                            default=utils.env('AUTOMATION_PROJECT_ID'),
                            help=argparse.SUPPRESS)
        # alias for --os-auth-url, left in for backward compatibility
        parser.add_argument('--url', '--auth_url', dest='url',
                            default=utils.env('AUTOMATION_URL'),
                            help=argparse.SUPPRESS)
        return parser
    def get_subcommand_parser(self, version):
        """Extend the base parser with one subparser per do_* action found in
        the versioned shell module, on this class and in extensions."""
        parser = self.get_base_parser()
        self.subcommands = {}
        subparsers = parser.add_subparsers(metavar='<subcommand>')
        try:
            actions_module = {
                '1.1': shell_v1,
            }[version]
        except KeyError:
            # Unknown API version: fall back to the v1.1 action set
            actions_module = shell_v1
        self._find_actions(subparsers, actions_module)
        self._find_actions(subparsers, self)
        for extension in self.extensions:
            self._find_actions(subparsers, extension.module)
        self._add_bash_completion_subparser(subparsers)
        return parser
    def _discover_extensions(self, version):
        """Collect Extension objects from the python path and the versioned
        contrib/ directory of this package."""
        extensions = []
        for name, module in itertools.chain(
                self._discover_via_python_path(version),
                self._discover_via_contrib_path(version)):
            extension = automationclient.extension.Extension(name, module)
            extensions.append(extension)
        return extensions
    def _discover_via_python_path(self, version):
        """Yield (name, module) for importable modules whose name ends in
        'python_automationclient_ext'."""
        for (module_loader, name, ispkg) in pkgutil.iter_modules():
            if name.endswith('python_automationclient_ext'):
                if not hasattr(module_loader, 'load_module'):
                    # Python 2.6 compat: actually get an ImpImporter obj
                    module_loader = module_loader.find_module(name)
                module = module_loader.load_module(name)
                yield name, module
    def _discover_via_contrib_path(self, version):
        """Yield (name, module) for every *.py file (except __init__) found in
        this package's v<version>/contrib directory."""
        module_path = os.path.dirname(os.path.abspath(__file__))
        version_str = "v%s" % version.replace('.', '_')
        ext_path = os.path.join(module_path, version_str, 'contrib')
        ext_glob = os.path.join(ext_path, "*.py")
        for ext_path in glob.iglob(ext_glob):
            name = os.path.basename(ext_path)[:-3]
            if name == "__init__":
                continue
            module = imp.load_source(name, ext_path)
            yield name, module
    def _add_bash_completion_subparser(self, subparsers):
        """Register the hidden bash_completion subcommand."""
        subparser = subparsers.add_parser(
            'bash_completion',
            add_help=False,
            formatter_class=StackopsHelpFormatter)
        self.subcommands['bash_completion'] = subparser
        subparser.set_defaults(func=self.do_bash_completion)
    def _find_actions(self, subparsers, actions_module):
        """Create a subparser for every do_* callable on actions_module,
        using its docstring for help text and its 'arguments' attribute
        (set by the @utils.arg decorator) for per-command options."""
        for attr in (a for a in dir(actions_module) if a.startswith('do_')):
            # I prefer to be hypen-separated instead of underscores.
            command = attr[3:].replace('_', '-')
            callback = getattr(actions_module, attr)
            desc = callback.__doc__ or ''
            help = desc.strip().split('\n')[0]
            arguments = getattr(callback, 'arguments', [])
            subparser = subparsers.add_parser(
                command,
                help=help,
                description=desc,
                add_help=False,
                formatter_class=StackopsHelpFormatter)
            subparser.add_argument('-h', '--help',
                                   action='help',
                                   help=argparse.SUPPRESS,)
            self.subcommands[command] = subparser
            for (args, kwargs) in arguments:
                subparser.add_argument(*args, **kwargs)
            subparser.set_defaults(func=callback)
    def setup_debugging(self, debug):
        """Attach a DEBUG-level stream handler to the module logger when
        --debug was given; no-op otherwise."""
        if not debug:
            return
        streamhandler = logging.StreamHandler()
        streamformat = "%(levelname)s (%(module)s:%(lineno)d) %(message)s"
        streamhandler.setFormatter(logging.Formatter(streamformat))
        logger.setLevel(logging.DEBUG)
        logger.addHandler(streamhandler)
    def main(self, argv):
        """Top-level driver: parse argv, resolve credentials (flags, env vars
        and legacy aliases), build the client, authenticate and dispatch."""
        # Parse args once to find version and debug settings
        parser = self.get_base_parser()
        (options, args) = parser.parse_known_args(argv)
        self.setup_debugging(options.debug)
        # build available subcommands based on version
        self.extensions = self._discover_extensions(
            options.os_automation_api_version)
        self._run_extension_hooks('__pre_parse_args__')
        subcommand_parser = self.get_subcommand_parser(
            options.os_automation_api_version)
        self.parser = subcommand_parser
        if options.help or not argv:
            subcommand_parser.print_help()
            return 0
        args = subcommand_parser.parse_args(argv)
        self._run_extension_hooks('__post_parse_args__', args)
        # Short-circuit and deal with help right away.
        if args.func == self.do_help:
            self.do_help(args)
            return 0
        elif args.func == self.do_bash_completion:
            self.do_bash_completion(args)
            return 0
        (os_username, os_password, os_tenant_name, os_auth_url,
         os_region_name, os_tenant_id, endpoint_type, insecure,
         service_type, service_name, username, apikey, projectid,
         url, region_name, cacert) = \
            (args.os_username, args.os_password,
             args.os_tenant_name, args.os_auth_url,
             args.os_region_name, args.os_tenant_id,
             args.endpoint_type, args.insecure,
             args.service_type, args.service_name,
             args.username, args.apikey, args.projectid,
             args.url, args.region_name, args.os_cacert)
        if not endpoint_type:
            endpoint_type = DEFAULT_AUTOMATION_ENDPOINT_TYPE
        if not service_type:
            service_type = DEFAULT_AUTOMATION_SERVICE_TYPE
        # A subcommand may pin its own service type; it wins over defaults
        service_type = utils.get_service_type(args.func) or service_type
        #FIXME(usrleon): Here should be restrict for project id same as
        # for os_username or os_password but for compatibility it is not.
        # For authenticated commands, fall back to the legacy diablo-era
        # aliases (--username/--apikey/--projectid/--url) when the os-*
        # options are unset, raising CommandError when nothing is supplied.
        if not utils.isunauthenticated(args.func):
            if not os_username:
                if not username:
                    raise exc.CommandError(
                        "You must provide a username "
                        "via either --os-username or env[OS_USERNAME]")
                else:
                    os_username = username
            if not os_password:
                if not apikey:
                    raise exc.CommandError("You must provide a password "
                                           "via either --os-password or via "
                                           "env[OS_PASSWORD]")
                else:
                    os_password = apikey
            if not (os_tenant_name or os_tenant_id):
                if not projectid:
                    raise exc.CommandError("You must provide a tenant_id "
                                           "via either --os-tenant-id or "
                                           "env[OS_TENANT_ID]")
                else:
                    os_tenant_name = projectid
            if not os_auth_url:
                if not url:
                    raise exc.CommandError(
                        "You must provide an auth url "
                        "via either --os-auth-url or env[OS_AUTH_URL]")
                else:
                    os_auth_url = url
            if not os_region_name and region_name:
                os_region_name = region_name
        if not (os_tenant_name or os_tenant_id):
            raise exc.CommandError(
                "You must provide a tenant_id "
                "via either --os-tenant-id or env[OS_TENANT_ID]")
        if not os_auth_url:
            raise exc.CommandError(
                "You must provide an auth url "
                "via either --os-auth-url or env[OS_AUTH_URL]")
        self.cs = client.Client(options.os_automation_api_version, os_username,
                                os_password, os_tenant_name, os_auth_url,
                                insecure, region_name=os_region_name,
                                tenant_id=os_tenant_id,
                                endpoint_type=endpoint_type,
                                extensions=self.extensions,
                                service_type=service_type,
                                service_name=service_name,
                                retries=options.retries,
                                http_log_debug=args.debug,
                                cacert=cacert)
        try:
            if not utils.isunauthenticated(args.func):
                self.cs.authenticate()
        except exc.Unauthorized:
            raise exc.CommandError("Invalid OpenStack Automation credentials.")
        except exc.AuthorizationFailure:
            raise exc.CommandError("Unable to authorize user")
        # Refuse to run against an endpoint speaking a different API version
        endpoint_api_version = \
            self.cs.get_automation_api_version_from_endpoint()
        if endpoint_api_version != options.os_automation_api_version:
            msg = (("Automation API version is set to %s "
                    "but you are accessing a %s endpoint. "
                    "Change its value via either --os-automation-api-version "
                    "or env[OS_AUTOMATION_API_VERSION]")
                   % (options.os_automation_api_version, endpoint_api_version))
            raise exc.InvalidAPIVersion(msg)
        args.func(self.cs, args)
    def _run_extension_hooks(self, hook_type, *args, **kwargs):
        """Run hooks for all registered extensions."""
        for extension in self.extensions:
            extension.run_hooks(hook_type, *args, **kwargs)
    def do_bash_completion(self, args):
        """Print arguments for bash_completion.
        Prints all of the commands and options to stdout so that the
        automation.bash_completion script doesn't have to hard code them.
        """
        commands = set()
        options = set()
        for sc_str, sc in list(self.subcommands.items()):
            commands.add(sc_str)
            for option in list(sc._optionals._option_string_actions.keys()):
                options.add(option)
        commands.remove('bash-completion')
        commands.remove('bash_completion')
        print(' '.join(commands | options))
    @utils.arg('command', metavar='<subcommand>', nargs='?',
               help='Display help for <subcommand>')
    def do_help(self, args):
        """
        Display help about this program or one of its subcommands.
        """
        if args.command:
            if args.command in self.subcommands:
                self.subcommands[args.command].print_help()
            else:
                raise exc.CommandError("'%s' is not a valid subcommand" %
                                       args.command)
        else:
            self.parser.print_help()
# Help formatter that title-cases section headings for nicer shell help.
class StackopsHelpFormatter(argparse.HelpFormatter):
    def start_section(self, heading):
        # Capitalize the first character of the heading, keep the rest as-is
        capitalized = heading[0].upper() + heading[1:]
        super(StackopsHelpFormatter, self).start_section(capitalized)
def main():
    """Console entry point: decode argv, run the shell and map errors to
    exit codes (130 for Ctrl-C, 1 for any other failure)."""
    try:
        if sys.version_info >= (3, 0):
            StackopsAutomationShell().main(sys.argv[1:])
        else:
            # Python 2: argv arrives as bytes and must be decoded first
            StackopsAutomationShell().main(map(strutils.safe_decode,
                                               sys.argv[1:]))
    except KeyboardInterrupt:
        print("... terminating automation client", file=sys.stderr)
        sys.exit(130)
    except Exception as e:
        logger.debug(e, exc_info=1)
        # BUG FIX: Exception.message does not exist on Python 3, so the
        # original `e.message` raised AttributeError and masked the real
        # error even though this function explicitly supports Python 3.
        # Fall back to the exception object itself; the isinstance check
        # below converts it to a string exactly as before.
        message = getattr(e, 'message', e)
        if not isinstance(message, six.string_types):
            message = str(message)
        print("ERROR: %s" % strutils.safe_encode(message), file=sys.stderr)
        sys.exit(1)
if __name__ == "__main__":
main()
| {
"content_hash": "4dc2e2bcb6cf1383075c328c567cd917",
"timestamp": "",
"source": "github",
"line_count": 515,
"max_line_length": 79,
"avg_line_length": 40.58058252427185,
"alnum_prop": 0.5187329537298435,
"repo_name": "StackOps/python-automationclient",
"id": "c77dba7ac4f39bc8e496f0b92e30e5717af11c18",
"size": "21579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "automationclient/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arc",
"bytes": "533"
},
{
"name": "CSS",
"bytes": "4176"
},
{
"name": "Python",
"bytes": "428565"
},
{
"name": "Shell",
"bytes": "12215"
}
],
"symlink_target": ""
} |
"""
Created on Thu Dec 11 13:55:17 2014
@author: sm1fg
This is the main module to construct a magnetohydrostatic solar atmosphere,
given a specified magnetic network of self-similar magnetic flux tubes and
save the output to gdf format.
To select an existing configuration change the import as model_pars, set Nxyz,
xyz_SI and any other special parameters, then execute mhs_atmosphere.
To add new configurations:
add the model options to set_options in parameters/options.py;
add options required in parameters/model_pars.py;
add alternative empirical data sets to hs_model/;
add an alternative table to interpolate_atmosphere in hs_model/hs_atmosphere.py;
add option to get_flux_tubes in mhs_model/flux_tubes.py
If an alternative formulation of the flux tube is required add options to
construct_magnetic_field and construct_pairwise_field in
mhs_model/flux_tubes.py
Plotting options are included in plot/mhs_plot.py
"""
import os
import numpy as np
import pysac.mhs_atmosphere as atm
import astropy.units as u
from pysac.mhs_atmosphere.parameters.model_pars import spruit as model_pars
#==============================================================================
#check whether mpi is required and the number of procs = size
#==============================================================================
try:
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
l_mpi = True
l_mpi = l_mpi and (size != 1)
except ImportError:
l_mpi = False
rank = 0
size = 1
#==============================================================================
#set up model parameters
#==============================================================================
local_procs=1
#optional coordinate - resolution
model_pars['Nxyz'] = [64,64,128] # 3D grid
model_pars['xyz'] = [-0.63*u.Mm,0.63*u.Mm,-0.63*u.Mm,0.63*u.Mm,0.0*u.Mm,12.7*u.Mm] #grid size
#standard set of logical switches
option_pars = atm.set_options(model_pars, l_mpi, l_gdf=True)
#standard conversion to dimensionless units and physical constants
scales, physical_constants = \
atm.get_parameters()
# select the option in the next line
option_pars['l_linear'] = True
# Alfven speed constant along the axis of the flux tube
if option_pars['l_const']:
option_pars['l_B0_quadz'] = True
model_pars['chrom_scale'] *= 5e1
model_pars['p0'] *= 1.5e1
physical_constants['gravity'] *= 1.
model_pars['radial_scale'] *= 1.
# Alfven speed proportional to sqrt(Z) along the axis of the flux tube
elif option_pars['l_sqrt']:
option_pars['l_B0_rootz'] = True
model_pars['chrom_scale'] *= 5.65e-3
model_pars['p0'] *= 1.
physical_constants['gravity'] *= 7.5e3
model_pars['radial_scale'] *= 0.7
# Alfven speed proportional to Z along the axis of the flux tube
elif option_pars['l_linear']:
option_pars['l_B0_rootz'] = True
model_pars['chrom_scale'] *= 0.062
model_pars['p0'] *= 3e2
physical_constants['gravity'] *= 8e3
model_pars['radial_scale'] *= 1.
# Alfven speed proportional to Z^2 along the axis of the flux tube
elif option_pars['l_square']:
option_pars['l_B0_rootz'] = True
model_pars['chrom_scale'] *= 1.65
model_pars['p0'] *= 2e4
physical_constants['gravity'] *= 5e4
model_pars['radial_scale'] *= 1.
# Alfven speed not defined along the axis of the flux tube
else:
option_pars['l_B0_rootz'] = True
model_pars['chrom_scale'] *= 1.
model_pars['p0'] *= 1.
#obtain code coordinates and model parameters in astropy units
coords = atm.get_coords(model_pars['Nxyz'], u.Quantity(model_pars['xyz']))
#==============================================================================
#calculate 1d hydrostatic balance from empirical density profile
#==============================================================================
pressure_Z, rho_Z, Rgas_Z = atm.get_spruit_hs(coords['Z'],
model_pars,
physical_constants,
option_pars
)
#==============================================================================
# load flux tube footpoint parameters
#==============================================================================
# axial location and value of Bz at each footpoint
xi, yi, Si = atm.get_flux_tubes(
model_pars,
coords,
option_pars
)
#==============================================================================
# split domain into processes if mpi
#==============================================================================
ax, ay, az = np.mgrid[coords['xmin']:coords['xmax']:1j*model_pars['Nxyz'][0],
coords['ymin']:coords['ymax']:1j*model_pars['Nxyz'][1],
coords['zmin']:coords['zmax']:1j*model_pars['Nxyz'][2]]
# split the grid between processes for mpi
if l_mpi:
x_chunks = np.array_split(ax, size, axis=0)
y_chunks = np.array_split(ay, size, axis=0)
z_chunks = np.array_split(az, size, axis=0)
x = comm.scatter(x_chunks, root=0)
y = comm.scatter(y_chunks, root=0)
z = comm.scatter(z_chunks, root=0)
else:
x, y, z = ax, ay, az
x = u.Quantity(x, unit=coords['xmin'].unit)
y = u.Quantity(y, unit=coords['ymin'].unit)
z = u.Quantity(z, unit=coords['zmin'].unit)
#==============================================================================
# initialize zero arrays in which to add magnetic field and mhs adjustments
#==============================================================================
Bx = u.Quantity(np.zeros(x.shape), unit=u.T) # magnetic x-component
By = u.Quantity(np.zeros(x.shape), unit=u.T) # magnetic y-component
Bz = u.Quantity(np.zeros(x.shape), unit=u.T) # magnetic z-component
pressure_m = u.Quantity(np.zeros(x.shape), unit=u.Pa) # magneto-hydrostatic adjustment to pressure
rho_m = u.Quantity(np.zeros(x.shape), unit=u.kg/u.m**3) # magneto-hydrostatic adjustment to density
# initialize zero arrays in which to add balancing forces and magnetic tension
Fx = u.Quantity(np.zeros(x.shape), unit=u.N/u.m**3) # balancing force x-component
Fy = u.Quantity(np.zeros(x.shape), unit=u.N/u.m**3) # balancing force y-component
# total tension force for comparison with residual balancing force
Btensx = u.Quantity(np.zeros(x.shape), unit=u.N/u.m**3)
Btensy = u.Quantity(np.zeros(x.shape), unit=u.N/u.m**3)
#==============================================================================
#calculate the magnetic field and pressure/density balancing expressions
#==============================================================================
# Loop over unordered pairs (i, j), i <= j, of flux tubes:
#  - i == j: the tube's own field plus its self mhs corrections;
#  - i != j: the pairwise interaction corrections and balancing forces.
for i in range(0,model_pars['nftubes']):
    for j in range(i,model_pars['nftubes']):
        if rank == 0:
            print'calculating ij-pair:',i,j
        if i == j:
            pressure_mi, rho_mi, Bxi, Byi ,Bzi, B2x, B2y =\
                atm.construct_magnetic_field(
                                             x, y, z,
                                             xi[i], yi[i], Si[i],
                                             model_pars, option_pars,
                                             physical_constants,
                                             scales
                                            )
            # superpose this tube's field onto the running totals
            Bx, By, Bz = Bxi+Bx, Byi+By ,Bzi+Bz
            Btensx += B2x
            Btensy += B2y
            pressure_m += pressure_mi
            rho_m += rho_mi
        else:
            # cross terms contribute corrections and balancing forces only,
            # not additional field components
            pressure_mi, rho_mi, Fxi, Fyi, B2x, B2y =\
                atm.construct_pairwise_field(
                                             x, y, z,
                                             xi[i], yi[i],
                                             xi[j], yi[j], Si[i], Si[j],
                                             model_pars,
                                             option_pars,
                                             physical_constants,
                                             scales
                                            )
            pressure_m += pressure_mi
            rho_m += rho_mi
            Fx += Fxi
            Fy += Fyi
            Btensx += B2x
            Btensy += B2y
# clear some memory
del pressure_mi, rho_mi, Bxi, Byi ,Bzi, B2x, B2y
#==============================================================================
# Construct 3D hs arrays and then add the mhs adjustments to obtain atmosphere
#==============================================================================
# select the 1D array spanning the local mpi process; the add/sub of dz to
# ensure all indices are used, but only once
indz = np.where(coords['Z'] >= z.min()-0.1*coords['dz']) and \
np.where(coords['Z'] <= z.max()+0.1*coords['dz'])
pressure_z, rho_z, Rgas_z = pressure_Z[indz], rho_Z[indz], Rgas_Z[indz]
# local proc 3D mhs arrays
pressure, rho = atm.mhs_3D_profile(z,
pressure_z,
rho_z,
pressure_m,
rho_m
)
magp = (Bx**2 + By**2 + Bz**2)/(2.*physical_constants['mu0'])
if rank ==0:
print'max B corona = ',magp[:,:,-1].max().decompose()
energy = atm.get_internal_energy(pressure,
magp,
physical_constants)
#============================================================================
# Save data for SAC and plotting
#============================================================================
# set up data directory and file names
# may be worthwhile locating on /data if files are large
datadir = os.path.expanduser('~/Documents/mhs_atmosphere/'+model_pars['model']+'/')
# main SAC initialisation file
filename = datadir + model_pars['model'] + option_pars['suffix']
if not os.path.exists(datadir):
    os.makedirs(datadir)
# background source terms, 3D auxiliaries, and 1D profiles respectively
sourcefile = datadir + model_pars['model'] + '_sources' + option_pars['suffix']
aux3D = datadir + model_pars['model'] + '_3Daux' + option_pars['suffix']
aux1D = datadir + model_pars['model'] + '_1Daux' + option_pars['suffix']
# save the variables for the initialisation of a SAC simulation
# (density, field components and internal energy on the local grid)
atm.save_SACvariables(
              filename,
              rho,
              Bx,
              By,
              Bz,
              energy,
              option_pars,
              physical_constants,
              coords,
              model_pars['Nxyz']
             )
# save the balancing forces as the background source terms for SAC simulation
atm.save_SACsources(
              sourcefile,
              Fx,
              Fy,
              option_pars,
              physical_constants,
              coords,
              model_pars['Nxyz']
             )
# save auxilliary variable and 1D profiles for plotting and analysis
# broadcast the 1D gas-constant profile to the full 3D grid
Rgas = u.Quantity(np.zeros(x.shape), unit=Rgas_z.unit)
Rgas[:] = Rgas_z
# ideal-gas temperature T = p / (rho R)
temperature = pressure/rho/Rgas
if not option_pars['l_hdonly']:
    inan = np.where(magp <=1e-7*pressure.min())
    # NOTE(review): magpbeta is an alias of magp, not a copy, so the
    # in-place floor below also modifies magp used for alfven -- confirm
    # this is intended (use magp.copy() if not).
    magpbeta = magp
    magpbeta[inan] = 1e-7*pressure.min() # low pressure floor to avoid NaN
    # plasma beta = gas pressure / magnetic pressure
    pbeta  = pressure/magpbeta
else:
    pbeta  = magp+1.0 #dummy to avoid NaN
# Alfven speed sqrt(2 p_mag / rho) and adiabatic sound speed
alfven = np.sqrt(2.*magp/rho)
if rank == 0:
    print'Alfven speed Z.min to Z.max =',\
        alfven[model_pars['Nxyz'][0]/2,model_pars['Nxyz'][1]/2, 0].decompose(),\
        alfven[model_pars['Nxyz'][0]/2,model_pars['Nxyz'][1]/2,-1].decompose()
cspeed = np.sqrt(physical_constants['gamma']*pressure/rho)
# 3D diagnostic fields (mhs corrections, temperature, beta, speeds, tension)
atm.save_auxilliary3D(
              aux3D,
              pressure_m,
              rho_m,
              temperature,
              pbeta,
              alfven,
              cspeed,
              Btensx,
              Btensy,
              option_pars,
              physical_constants,
              coords,
              model_pars['Nxyz']
             )
# 1D background hydrostatic profiles used to build the atmosphere
atm.save_auxilliary1D(
              aux1D,
              pressure_Z,
              rho_Z,
              Rgas_Z,
              option_pars,
              physical_constants,
              coords,
              model_pars['Nxyz']
             )
| {
"content_hash": "9ed03424f7b2cf7b55c046559587d7ce",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 104,
"avg_line_length": 41.61224489795919,
"alnum_prop": 0.4974660781428805,
"repo_name": "Cadair/pysac",
"id": "eec28e614262f3792d2cf32923e0678de8f41ae3",
"size": "12258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/mhs_atmosphere/spruit_atmosphere.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "249672"
}
],
"symlink_target": ""
} |
import node as n
import pytest
def test_init_Node():
    '''Verify that new Nodes are instantiated correctly.

    BUGFIX: the original asserted two-element tuples, e.g.
    ``assert (t.next, None)`` -- a non-empty tuple is always truthy, so
    those assertions could never fail.  Also, the node is constructed
    with data=1, so the expected data value is 1, not 2.
    '''
    t = n.Node(1)
    assert isinstance(t, n.Node)
    assert t.next is None
    assert t.data == 1
| {
"content_hash": "5f8798f5180bd4eebc96cb31b16f7a5e",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 59,
"avg_line_length": 19.636363636363637,
"alnum_prop": 0.6388888888888888,
"repo_name": "jacquestardie/data-structures",
"id": "a4c132ab78d365f2329412897120ccbe8c68ed0b",
"size": "216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node/test_node.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23297"
}
],
"symlink_target": ""
} |
import os
import sys
def main():
    """Entry point for Django's command-line utility."""
    # Default the settings module; an externally exported value wins.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mycustomapi.settings")
    # Imported lazily so DJANGO_SETTINGS_MODULE is set before Django loads.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
if __name__ == "__main__":
    main()
| {
"content_hash": "19702546a2685e1baccffc00e2910764",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 75,
"avg_line_length": 19.76923076923077,
"alnum_prop": 0.688715953307393,
"repo_name": "regulusweb/django-oscar-api",
"id": "5de2d30ce1040d748d766ef4d4dc1c57450f2d8b",
"size": "279",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "demosite/manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "567"
},
{
"name": "Python",
"bytes": "193228"
}
],
"symlink_target": ""
} |
import m5
from m5.objects import *
def init_fs(system, membus, cpus=1):
    """Wire up an x86 full-system platform on *system*.

    Attaches a PC platform and IO bus, bridges them to *membus*, builds
    the SMBIOS / Intel MP tables for *cpus* processors, and writes the
    e820 physical-memory map.
    NOTE(review): assumes the caller has already set system.membus and
    system.mem_ranges -- confirm against the calling config script.
    """
    system.pc = Pc()
    # Constants similar to x86_traits.hh
    IO_address_space_base = 0x8000000000000000
    pci_config_address_space_base = 0xc000000000000000
    interrupts_address_space_base = 0xa000000000000000
    APIC_range_size = 1 << 12;
    # North Bridge
    system.iobus = IOXBar()
    system.bridge = Bridge(delay='50ns')
    system.bridge.master = system.iobus.slave
    system.bridge.slave = membus.master
    # Allow the bridge to pass through:
    # 1) kernel configured PCI device memory map address: address range
    # [0xC0000000, 0xFFFF0000). (The upper 64kB are reserved for m5ops.)
    # 2) the bridge to pass through the IO APIC (two pages, already contained in 1),
    # 3) everything in the IO address range up to the local APIC, and
    # 4) then the entire PCI address space and beyond.
    system.bridge.ranges = \
        [
        AddrRange(0xC0000000, 0xFFFF0000),
        AddrRange(IO_address_space_base,
                  interrupts_address_space_base - 1),
        AddrRange(pci_config_address_space_base,
                  Addr.max)
        ]
    # Create a bridge from the IO bus to the memory bus to allow access to
    # the local APIC (two pages)
    system.apicbridge = Bridge(delay='50ns')
    system.apicbridge.slave = system.iobus.master
    system.apicbridge.master = membus.slave
    # one APIC page range per CPU
    system.apicbridge.ranges = [AddrRange(interrupts_address_space_base,
                                          interrupts_address_space_base +
                                          cpus * APIC_range_size
                                          - 1)]
    # connect the io bus
    system.pc.attachIO(system.iobus)
    # Add a tiny cache to the IO bus.
    # This cache is required for the classic memory model to maintain coherence
    system.iocache = Cache(assoc=8,
                           hit_latency = 50,
                           response_latency = 50,
                           mshrs = 20,
                           size = '1kB',
                           tgts_per_mshr = 12,
                           forward_snoops = False,
                           addr_ranges = system.mem_ranges)
    system.iocache.cpu_side = system.iobus.master
    system.iocache.mem_side = system.membus.slave
    system.intrctrl = IntrControl()
    ###############################################
    # Add in a Bios information structure.
    system.smbios_table.structures = [X86SMBiosBiosInformation()]
    # Set up the Intel MP table
    base_entries = []
    ext_entries = []
    # This is the entries for the processors.
    # You need to make multiple of these if you have multiple processors
    # Note: Only one entry should have the flag bootstrap = True!
    for i in range(cpus):
        bp = X86IntelMPProcessor(
                local_apic_id = i,
                local_apic_version = 0x14,
                enable = True,
                bootstrap = (i ==0))
        base_entries.append(bp)
    # The IO APIC gets the next APIC id after the CPUs.
    io_apic = X86IntelMPIOAPIC(
            id = cpus,
            version = 0x11,
            enable = True,
            address = 0xfec00000)
    system.pc.south_bridge.io_apic.apic_id = io_apic.id
    base_entries.append(io_apic)
    pci_bus = X86IntelMPBus(bus_id = 0, bus_type='PCI')
    base_entries.append(pci_bus)
    isa_bus = X86IntelMPBus(bus_id = 1, bus_type='ISA')
    base_entries.append(isa_bus)
    # ISA bus is the subtractive-decode child of the PCI bus.
    connect_busses = X86IntelMPBusHierarchy(bus_id=1,
            subtractive_decode=True, parent_bus=0)
    ext_entries.append(connect_busses)
    # Route PCI device 4 INTA# to IO APIC pin 16.
    pci_dev4_inta = X86IntelMPIOIntAssignment(
            interrupt_type = 'INT',
            polarity = 'ConformPolarity',
            trigger = 'ConformTrigger',
            source_bus_id = 0,
            source_bus_irq = 0 + (4 << 2),
            dest_io_apic_id = io_apic.id,
            dest_io_apic_intin = 16)
    base_entries.append(pci_dev4_inta)
    def assignISAInt(irq, apicPin):
        # Each ISA IRQ is routed twice: once to the legacy 8259 PIC (via
        # the ExtInt entry on APIC pin 0) and once to the IO APIC pin.
        assign_8259_to_apic = X86IntelMPIOIntAssignment(
                interrupt_type = 'ExtInt',
                polarity = 'ConformPolarity',
                trigger = 'ConformTrigger',
                source_bus_id = 1,
                source_bus_irq = irq,
                dest_io_apic_id = io_apic.id,
                dest_io_apic_intin = 0)
        base_entries.append(assign_8259_to_apic)
        assign_to_apic = X86IntelMPIOIntAssignment(
                interrupt_type = 'INT',
                polarity = 'ConformPolarity',
                trigger = 'ConformTrigger',
                source_bus_id = 1,
                source_bus_irq = irq,
                dest_io_apic_id = io_apic.id,
                dest_io_apic_intin = apicPin)
        base_entries.append(assign_to_apic)
    # IRQ0 (timer) goes to APIC pin 2; IRQ1 to pin 1; IRQ2 (cascade) is
    # skipped; IRQs 3-14 map straight through.
    assignISAInt(0, 2)
    assignISAInt(1, 1)
    for i in range(3, 15):
        assignISAInt(i, i)
    system.intel_mp_table.base_entries = base_entries
    system.intel_mp_table.ext_entries = ext_entries
    # This is setting up the physical memory layout
    # Each entry represents a physical address range
    # The last entry in this list is the main system memory
    # Note: If you are configuring your system to use more than 3 GB then you
    # will need to make significant changes to this section
    entries = \
       [
        # Mark the first megabyte of memory as reserved
        X86E820Entry(addr = 0, size = '639kB', range_type = 1),
        X86E820Entry(addr = 0x9fc00, size = '385kB', range_type = 2),
        # Mark the rest of physical memory as available
        X86E820Entry(addr = 0x100000,
                size = '%dB' % (system.mem_ranges[0].size() - 0x100000),
                range_type = 1),
        ]
    # Mark [mem_size, 3GB) as reserved if memory less than 3GB, which force
    # IO devices to be mapped to [0xC0000000, 0xFFFF0000). Requests to this
    # specific range can pass though bridge to iobus.
    entries.append(X86E820Entry(addr = system.mem_ranges[0].size(),
        size='%dB' % (0xC0000000 - system.mem_ranges[0].size()),
        range_type=2))
    # Reserve the last 64kB of the 32-bit address space for the m5op interface
    entries.append(X86E820Entry(addr=0xFFFF0000, size='64kB', range_type=2))
    system.e820_table.entries = entries
| {
"content_hash": "4c84e4ba2a372ae16555c6e730938ab1",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 85,
"avg_line_length": 40.80392156862745,
"alnum_prop": 0.5897805542207272,
"repo_name": "Nirvedh/CoarseCoherence",
"id": "53a6a949e4de9e58da3882030bf7f25f15f38073",
"size": "6243",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "configs/full_system/x86.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "239800"
},
{
"name": "C",
"bytes": "1027678"
},
{
"name": "C++",
"bytes": "14956416"
},
{
"name": "CMake",
"bytes": "2202"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "HTML",
"bytes": "136898"
},
{
"name": "Hack",
"bytes": "2489"
},
{
"name": "Java",
"bytes": "3096"
},
{
"name": "M4",
"bytes": "49620"
},
{
"name": "Makefile",
"bytes": "39164"
},
{
"name": "Matlab",
"bytes": "1973"
},
{
"name": "Perl",
"bytes": "33602"
},
{
"name": "Protocol Buffer",
"bytes": "7033"
},
{
"name": "Python",
"bytes": "4051519"
},
{
"name": "Shell",
"bytes": "65994"
},
{
"name": "VimL",
"bytes": "4335"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
} |
import dbutils
def get_service():
    """Fetch all domain-management records.

    Returns:
        list of dict: one dict per ``service_manage`` row, keyed by
        ('id', 'domain_name', 'username', 'password', 'function').
        (The original stray doc-string claimed True/False, which was wrong.)
    """
    _columns = ('id', 'domain_name', 'username', 'password', 'function')
    _sql = 'select * from service_manage'
    _count, _rt_list = dbutils.execute_sql(_sql, fetch=True)
    # zip each row tuple with the column names to build one dict per record
    return [dict(zip(_columns, _row)) for _row in _rt_list]
def servicedel(uid):
    """Delete the ``service_manage`` row whose primary key is *uid*.

    Returns:
        bool: True if a row was deleted, False otherwise.
    """
    sql = 'delete from service_manage where id=%s'
    _count, _rt_list = dbutils.execute_sql(sql, uid)
    # execute_sql reports the affected-row count; non-zero means success
    return _count != 0
def update_service(_url, _username, _password, _func, _id):
    """Update the record *_id* with new domain / credential / function values.

    Returns:
        bool: True if a row was updated, False otherwise.
    """
    sql = 'update service_manage set domain_name=%s, username=%s, password=%s, function=%s where id=%s'
    args = (_url, _username, _password, _func, _id)
    _count, _rt_list = dbutils.execute_sql(sql, args)
    return _count != 0
def add_service(_url, _username, _password, _func):
    """Insert a new domain-management record.

    Returns:
        bool: True if the row was inserted, False otherwise.
    """
    sql = 'insert into service_manage(domain_name,username,password,function) values(%s,%s,%s,%s)'
    args = (_url, _username, _password, _func)
    _count, _rt_list = dbutils.execute_sql(sql, args)
    return _count != 0
"content_hash": "6065f9ab09baaec0dcc8a2e23a6035af",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 103,
"avg_line_length": 29.41025641025641,
"alnum_prop": 0.6259808195292066,
"repo_name": "51reboot/actual_09_homework",
"id": "da73d2706a7e379dbd73747f7009e43c6bd60e96",
"size": "1497",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "09/tanshuai/cmdb_v6/user/services2db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4623850"
},
{
"name": "HTML",
"bytes": "90670692"
},
{
"name": "JavaScript",
"bytes": "31827839"
},
{
"name": "Nginx",
"bytes": "1073"
},
{
"name": "PHP",
"bytes": "349512"
},
{
"name": "Python",
"bytes": "1705997"
},
{
"name": "Shell",
"bytes": "10001"
},
{
"name": "Smarty",
"bytes": "342164"
}
],
"symlink_target": ""
} |
"""
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment so the key never lands in version control;
# raises KeyError at import time if 'secret_key' is unset (fail fast).
SECRET_KEY = os.environ['secret_key']

# SECURITY WARNING: don't run with debug turned on in production!
# Any value other than the literal string 'False' enables DEBUG -- same
# behavior as the original `False if (... == 'False') else True`, written
# idiomatically.
DEBUG = os.environ['debug'] != 'False'

ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.sites',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # DRF + token auth and django-rest-auth registration endpoints
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'rest_auth.registration',
    # django-allauth backs rest_auth's registration flow
    'allauth',
    'allauth.account',
    # django-cors-headers (CorsMiddleware below)
    'corsheaders',
]
# Required by django.contrib.sites / allauth.
SITE_ID = 1
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CorsMiddleware must come before CommonMiddleware.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# In development, dump outgoing mail to the console instead of sending it.
if DEBUG:
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
# Open CORS completely only while debugging.
CORS_ORIGIN_ALLOW_ALL = DEBUG
| {
"content_hash": "b8307ef850664f4a6a533f90c51d4252",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 91,
"avg_line_length": 25.094202898550726,
"alnum_prop": 0.6832226393300607,
"repo_name": "recadoc/recadoc-api",
"id": "61ae27c5518e0334341b2d27580ef2bb57ee3e2a",
"size": "3463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5535"
}
],
"symlink_target": ""
} |
from django.shortcuts import render_to_response
from tumblog.models import *
def create(request, blogslug):
    """Render the tumblog admin "create post" page for the blog *blogslug*.

    Raises Blog.DoesNotExist (surfacing as a 500) if no blog matches.
    """
    blog = Blog.objects.get(slug=blogslug)
    return render_to_response('tumblog/admin_create.html', {'blog': blog})
| {
"content_hash": "b37c26e16c253ad401a9c9a0efa5b51f",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 74,
"avg_line_length": 37.833333333333336,
"alnum_prop": 0.748898678414097,
"repo_name": "lygaret/django-tumblog",
"id": "51ed47e693b8c96597dbccaa7cc3f0b1d51d1277",
"size": "227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tumblog/views/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "51590"
}
],
"symlink_target": ""
} |
from pycryptics.grammar.cfg import generate_grammar
from pycryptics.grammar.memo_chart import MemoChart
from nltk import parse
def generate_clues(constraints):
    """Parse the constraint phrases with the cryptic-clue grammar.

    Returns every Earley parse of ``constraints.phrases``, each one
    annotated with *constraints* via ``set_constraints``.
    """
    grammar = generate_grammar(constraints.phrases)
    chart_parser = parse.EarleyChartParser(grammar, chart_class=MemoChart)
    candidate_clues = chart_parser.nbest_parse(constraints.phrases)
    for clue in candidate_clues:
        clue.set_constraints(constraints)
    return candidate_clues
| {
"content_hash": "a96c7edf29625d3b78724173b96ce711",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 62,
"avg_line_length": 33.25,
"alnum_prop": 0.7644110275689223,
"repo_name": "rdeits/cryptics",
"id": "9d849b17164984aaf7efe34a19fc313a6dfb672c",
"size": "399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycryptics/grammar/clue_parse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1260"
},
{
"name": "Go",
"bytes": "18409"
},
{
"name": "HTML",
"bytes": "1544122"
},
{
"name": "JavaScript",
"bytes": "1711"
},
{
"name": "Jupyter Notebook",
"bytes": "23572"
},
{
"name": "PHP",
"bytes": "25856"
},
{
"name": "Python",
"bytes": "118345"
},
{
"name": "Ruby",
"bytes": "2648"
},
{
"name": "Shell",
"bytes": "152"
}
],
"symlink_target": ""
} |
import pandas as pd
from pandas_profiling import ProfileReport
def test_issue351():
    """Regression test for issue #351: a mixed str/int index column must be
    reported as the "Unsupported" type instead of crashing the profiler."""
    # Single column holding both a string and an int, promoted to the index.
    data = pd.DataFrame(["Jan", 1]).set_index(0)
    profile = ProfileReport(data, progress_bar=False)
    assert profile.get_description()["variables"]["0"]["type"] == "Unsupported"
| {
"content_hash": "0cec55da8f40ac832c022332e7394e1c",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 79,
"avg_line_length": 30,
"alnum_prop": 0.7,
"repo_name": "JosPolfliet/pandas-profiling",
"id": "ce2936b6a2d5d1aca8fcde0b8cd7d16ad64fb875",
"size": "270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/issues/test_issue351.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "24527"
},
{
"name": "PowerShell",
"bytes": "3111"
},
{
"name": "Python",
"bytes": "61100"
}
],
"symlink_target": ""
} |
"""Support for Brunt Blind Engine covers."""
import logging
from brunt import BruntAPI
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverDevice,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
COVER_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
DEVICE_CLASS = "window"
ATTR_REQUEST_POSITION = "request_position"
NOTIFICATION_ID = "brunt_notification"
NOTIFICATION_TITLE = "Brunt Cover Setup"
ATTRIBUTION = "Based on an unofficial Brunt SDK."
CLOSED_POSITION = 0
OPEN_POSITION = 100
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the brunt platform."""
    username = config[CONF_USERNAME]
    password = config[CONF_PASSWORD]
    bapi = BruntAPI(username=username, password=password)
    try:
        things = bapi.getThings()["things"]
        if not things:
            _LOGGER.error("No things present in account.")
        else:
            # One cover entity per "thing"; True => poll state before adding.
            add_entities(
                [
                    BruntDevice(bapi, thing["NAME"], thing["thingUri"])
                    for thing in things
                ],
                True,
            )
    except (TypeError, KeyError, NameError, ValueError) as ex:
        _LOGGER.error("%s", ex)
        # Surface the failure in the frontend as a persistent notification.
        hass.components.persistent_notification.create(
            "Error: {}<br />"
            "You will need to restart hass after fixing."
            "".format(ex),
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID,
        )
class BruntDevice(CoverDevice):
    """
    Representation of a Brunt cover device.

    Contains the common logic for all Brunt devices.
    """

    def __init__(self, bapi, name, thing_uri):
        """Init the Brunt device."""
        self._bapi = bapi
        self._name = name
        self._thing_uri = thing_uri
        self._state = {}  # last state dict fetched from the Brunt API
        self._available = None  # None until the first update() completes

    @property
    def name(self):
        """Return the name of the device as reported by tellcore."""
        return self._name

    @property
    def available(self):
        """Could the device be accessed during the last update call."""
        return self._available

    @property
    def current_cover_position(self):
        """
        Return current position of cover.

        None is unknown, 0 is closed, 100 is fully open.
        """
        pos = self._state.get("currentPosition")
        # BUGFIX: compare against None explicitly -- 0 (fully closed) is
        # falsy, so the previous `if pos` reported a closed cover as unknown.
        return int(pos) if pos is not None else None

    @property
    def request_cover_position(self):
        """
        Return request position of cover.

        The request position is the position of the last request
        to Brunt, at times there is a diff of 1 to current
        None is unknown, 0 is closed, 100 is fully open.
        """
        pos = self._state.get("requestPosition")
        # Same falsy-zero fix as current_cover_position.
        return int(pos) if pos is not None else None

    @property
    def move_state(self):
        """
        Return current moving state of cover.

        None is unknown, 0 when stopped, 1 when opening, 2 when closing
        """
        mov = self._state.get("moveState")
        # Same falsy-zero fix: 0 means "stopped", not "unknown".
        return int(mov) if mov is not None else None

    @property
    def is_opening(self):
        """Return if the cover is opening or not."""
        return self.move_state == 1

    @property
    def is_closing(self):
        """Return if the cover is closing or not."""
        return self.move_state == 2

    @property
    def device_state_attributes(self):
        """Return the detailed device state attributes."""
        return {
            ATTR_ATTRIBUTION: ATTRIBUTION,
            ATTR_REQUEST_POSITION: self.request_cover_position,
        }

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return DEVICE_CLASS

    @property
    def supported_features(self):
        """Flag supported features."""
        return COVER_FEATURES

    @property
    def is_closed(self):
        """Return true if cover is closed, else False."""
        return self.current_cover_position == CLOSED_POSITION

    def update(self):
        """Poll the current state of the device."""
        try:
            self._state = self._bapi.getState(thingUri=self._thing_uri).get("thing")
            self._available = True
        except (TypeError, KeyError, NameError, ValueError) as ex:
            _LOGGER.error("%s", ex)
            # Mark unavailable but keep the last known state dict.
            self._available = False

    def open_cover(self, **kwargs):
        """Set the cover to the open position."""
        self._bapi.changeRequestPosition(OPEN_POSITION, thingUri=self._thing_uri)

    def close_cover(self, **kwargs):
        """Set the cover to the closed position."""
        self._bapi.changeRequestPosition(CLOSED_POSITION, thingUri=self._thing_uri)

    def set_cover_position(self, **kwargs):
        """Set the cover to a specific position."""
        self._bapi.changeRequestPosition(
            kwargs[ATTR_POSITION], thingUri=self._thing_uri
        )
| {
"content_hash": "030af91517392632a75de99784266606",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 84,
"avg_line_length": 29.40223463687151,
"alnum_prop": 0.6146684400532016,
"repo_name": "postlund/home-assistant",
"id": "373c33394413c8fbfcd6768a94ba05f9d23fb250",
"size": "5263",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/brunt/cover.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
} |
__source__ = 'https://leetcode.com/problems/permutations/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/permutations.py
# Time: O(n!) //n factorial
# Space: O(n)
# Math/Brute Force Search
#
# Description: Leetcode # 46. Permutations
#
# Given a collection of numbers, return all possible permutations.
#
# For example,
# [1,2,3] have the following permutations:
# [1,2,3], [1,3,2], [2,1,3], [2,3,1], [3,1,2], and [3,2,1].
#
# Companies
# LinkedIn Microsoft
# Related Topics
# Backtracking
# Similar Questions
# Next Permutation Permutations II Permutation Sequence Combinations
#
# Note:
# 1)
# - Permutation dfs for loop always starts with 0 as [1,2,3] and [3,2,1] are diff
# - combination dfs for loop starts with next idx (i + 1) and needs to pass in start idx as parameter, [1,2,3] and [3,2,1] are the same
# 2) for permutation, ex [1,2,3]
# there will be a lot of duplicated calculations for dfs:
# - use visited = boolean[] to avoid result for [1,1,1]
# - if input has duplicated element, ex [1,1,2,3]
# use both visited and hashset(declared before enter forloop, no need to pass in as parameter) in dfs helper function
import unittest
class Solution:
    """Enumerate all permutations of a list by DFS backtracking."""
    # @param num, a list of integer
    # @return a list of lists of integers
    def permute(self, num):
        """Return every permutation of *num*, in DFS (index) order."""
        result = []
        used = [False] * len(num)  # marks elements already placed in cur
        self.permuteRecu(result, used, [], num)
        return result

    def permuteRecu(self, result, used, cur, num):
        """Backtracking helper: extend *cur* with each still-unused element."""
        if len(cur) == len(num):
            # snapshot cur -- it is mutated while backtracking
            result.append(list(cur))
            return
        # BUGFIX: use range() instead of the Python-2-only xrange(), which
        # raises NameError under Python 3; range() works on both.
        for i in range(len(num)):
            if not used[i]:
                used[i] = True
                cur.append(num[i])
                self.permuteRecu(result, used, cur, num)
                cur.pop()
                used[i] = False
class SolutionOther:
    """Permutations via repeated in-place next-permutation stepping.

    All methods mutate the *num* argument in place.
    """
    # @param num, a list of integer
    # @return a list of lists of integers
    def permute(self, num):
        # sort so stepping starts from the lexicographically smallest order
        num.sort()
        ans = [num[:]]
        while self.next_permutation(num):
            ans.append(num[:])
        #print ans
        return ans
    def permuteUnique(self, num):
        # next_permutation naturally skips duplicate arrangements, so the
        # same driver handles inputs with repeated elements
        #need to sort num in duplicate condition
        num.sort()
        ans = [num[:]]
        while self.next_permutation(num):
            ans.append(num[:])
        #print ans
        return ans
    def next_permutation(self, num):
        # Advance num to its next lexicographic permutation in place.
        # Returns False (num unchanged) when already at the last one.
        # 1) find the rightmost ascent num[i] < num[i+1]
        for i in range(len(num)-2, -1, -1):
            #print "this round start with i=",i, num[i], num[i+1]
            if num[i] < num[i+1]:
                break
        else:
            # fully descending -> last permutation reached
            return False
        # 2) swap num[i] with the rightmost element greater than it
        for j in range(len(num)-1, i, -1):
            #print "j=", j,"i= ",i
            if num[j] > num[i]:
                num[i], num[j] = num[j], num[i]
                break
        # 3) reverse the suffix after position i
        for j in range(0, (len(num) - i) //2):
            #print "j=", j,"(len(num) - i) //2= ",(len(num) - i) //2
            num[i+j+1], num[len(num)-j-1] = num[len(num)-j-1], num[i+j+1]
        return True
    def nextPermutation(self, num):
        # NOTE(review): `ans = (num[:])` overwrites rather than appends, so
        # this returns the *final* permutation after stepping to exhaustion,
        # not the immediate next one -- confirm this is the intended quirk.
        #num.sort()
        ans = [num[:]]
        while self.next_permutation(num):
            ans = (num[:])
        #print ans
        return ans
class TestMethods(unittest.TestCase):
    # Placeholder harness; the real exercise calls are kept below as
    # commented-out examples.
    def test_Local(self):
        """Smoke test so the unittest runner has something to execute."""
        self.assertEqual(1, 1)
        #print test.permute([4,5,6])
        #print test.permuteUnique([2,2,1,1])
        #print test.nextPermutation([1,2])
        #print test.nextPermutation([6,7,5,3,5,6,2,9,1,2,7,0,9])
        #for i in range(3, -1, -1):
        #    print i
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought:
general ideas:
1)
- Permutation dfs for loop always starts with 0 as [1,2,3] and [3,2,1] are diff
- combination dfs for loop starts with next idx (i + 1) and needs to pass in start idx as parameter,
[1,2,3] and [3,2,1] are the same
2) for permutation, ex [1,2,3]
there will be a lot of duplicated calculations for dfs:
- use visited = boolean[] to avoid result for [1,1,1]
- if input has duplicated element, ex [1,1,2,3]
use both visited and hashset(declared before enter forloop, no need to pass in as parameter) in dfs helper function
# Thought: https://leetcode.com/problems/find-the-closest-palindrome/solution/
template1) use visited = boolean[] to avoid [1,1,1]
# 2ms 99.93%
class Solution {
    //without boolean[] used, you'll see [1,1,1] show up as reuse of the same element
//also permutation forloop index starts with 0
public List<List<Integer>> permute(int[] nums) {
List<List<Integer>> list = new ArrayList<>();
backtrack(list, new ArrayList<>(), nums, new boolean[nums.length]); //use boolean arr to track element
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, boolean[] used){
if (tempList.size() == nums.length) {
list.add(new ArrayList<>(tempList));
return;
}
for (int i = 0; i < nums.length; i++) {
if (!used[i]) {
tempList.add(nums[i]);
used[i] = true;
backtrack(list, tempList, nums, used);
tempList.remove(tempList.size() - 1);
used[i] = false;
}
}
}
}
# Template2) //note, does not work with duplicated elements
# 3ms 76.09%
class Solution {
public List<List<Integer>> permute(int[] nums) {
List<List<Integer>> result = new ArrayList<>();
if (nums == null || nums.length == 0) {
return result;
}
permute(nums, 0, result, new ArrayList<Integer>());
return result;
}
private void permute(int[] nums, int index, List<List<Integer>> result, List<Integer> curr) {
if (index == nums.length) {
result.add(new ArrayList<Integer>(curr));
return;
}
for (int i = 0; i <= curr.size(); i++) {
curr.add(i, nums[index]);
permute(nums, index + 1, result, curr);
curr.remove(i);
}
}
}
# 2ms 99.93%
class Solution {
public List<List<Integer>> permute(int[] nums) {
int len = nums.length;
if (len == 0) {
return new ArrayList<>();
}
return permute(nums, 0);
}
private List<List<Integer>> permute(int[] nums, int index) {
List<List<Integer>> result = new ArrayList<>();
if (index == nums.length) {
result.add(new ArrayList<>());
return result;
}
for (List<Integer> list : permute(nums, index + 1)) {
for (int i = 0; i <= list.size(); i++) {
List<Integer> newList = new ArrayList<>(list);
newList.add(i, nums[index]);
result.add(newList);
}
}
return result;
}
}
# 3ms 76.09%
class Solution {
public List<List<Integer>> permute(int[] nums) {
List<List<Integer>> res = new ArrayList<>();
if(nums == null || nums.length == 0) {
return res;
}
help(res, nums, 0);
return res;
}
private void help(List<List<Integer>> res, int[] nums, int index) {
if(index == nums.length) {
List<Integer> tmp = new ArrayList<>();
for(int num : nums) {
tmp.add(num);
}
res.add(tmp);
return;
}
for(int i = index; i < nums.length; i++) {
swap(nums, index, i);
help(res, nums, index + 1);
swap(nums, index, i);
}
}
private void swap(int[] nums, int x, int y) {
int tmp = nums[x];
nums[x] = nums[y];
nums[y] = tmp;
}
}
Iteration:
the basic idea is, to permute n numbers, we can add the nth number into the resulting
List<List<Integer>> from the n-1 numbers, in every possible position.
For example, if the input num[] is {1,2,3}: First, add 1 into the initial List<List<Integer>> (let's call it "answer").
Then, 2 can be added in front or after 1. So we have to copy the List<Integer> in answer
(it's just {1}), add 2 in position 0 of {1}, then copy the original {1} again, and add 2 in position 1.
Now we have an answer of {{2,1},{1,2}}. There are 2 lists in the current answer.
Then we have to add 3. first copy {2,1} and {1,2}, add 3 in position 0;
then copy {2,1} and {1,2}, and add 3 into position 1,
then do the same thing for position 3. Finally we have 2*3=6 lists in answer, which is what we want.
# 2ms 99.93%
class Solution {
public List<List<Integer>> permute(int[] nums) {
LinkedList<List<Integer>> res = new LinkedList<List<Integer>>();
if (nums == null || nums.length == 0) return res;
res.add(new ArrayList<>());
for (int n : nums) {
int size = res.size();
for (int i = size; i > 0; i--) {
List<Integer> tmp = res.pollFirst();
for (int j = 0; j <= tmp.size(); j++) {
List<Integer> cur = new ArrayList<Integer>(tmp);
cur.add(j, n);
res.add(cur);
}
}
}
return res;
}
}
'''
templates = '''
A general approach to backtracking questions in Java (Subsets, Permutations, Combination Sum, Palindrome Partioning)
This structure might apply to many other backtracking questions,
but here I am just going to demonstrate Subsets, Permutations, and Combination Sum.
Subsets : https://leetcode.com/problems/subsets/
public List<List<Integer>> subsets(int[] nums) {
List<List<Integer>> list = new ArrayList<>();
Arrays.sort(nums);
backtrack(list, new ArrayList<>(), nums, 0);
return list;
}
private void backtrack(List<List<Integer>> list , List<Integer> tempList, int [] nums, int start){
list.add(new ArrayList<>(tempList));
for(int i = start; i < nums.length; i++){
tempList.add(nums[i]);
backtrack(list, tempList, nums, i + 1);
tempList.remove(tempList.size() - 1);
}
}
Subsets II (contains duplicates) : https://leetcode.com/problems/subsets-ii/
public List<List<Integer>> subsetsWithDup(int[] nums) {
List<List<Integer>> list = new ArrayList<>();
Arrays.sort(nums);
backtrack(list, new ArrayList<>(), nums, 0);
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, int start){
list.add(new ArrayList<>(tempList));
for(int i = start; i < nums.length; i++){
if(i > start && nums[i] == nums[i-1]) continue; // skip duplicates
tempList.add(nums[i]);
backtrack(list, tempList, nums, i + 1);
tempList.remove(tempList.size() - 1);
}
}
Permutations : https://leetcode.com/problems/permutations/
public List<List<Integer>> permute(int[] nums) {
List<List<Integer>> list = new ArrayList<>();
// Arrays.sort(nums); // not necessary
backtrack(list, new ArrayList<>(), nums);
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums){
if(tempList.size() == nums.length){
list.add(new ArrayList<>(tempList));
} else{
for(int i = 0; i < nums.length; i++){
if(tempList.contains(nums[i])) continue; // element already exists, skip
tempList.add(nums[i]);
backtrack(list, tempList, nums);
tempList.remove(tempList.size() - 1);
}
}
}
Permutations II (contains duplicates) : https://leetcode.com/problems/permutations-ii/
public List<List<Integer>> permuteUnique(int[] nums) {
List<List<Integer>> list = new ArrayList<>();
Arrays.sort(nums);
backtrack(list, new ArrayList<>(), nums, new boolean[nums.length]);
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, boolean [] used){
if(tempList.size() == nums.length){
list.add(new ArrayList<>(tempList));
} else{
for(int i = 0; i < nums.length; i++){
if(used[i] || i > 0 && nums[i] == nums[i-1] && !used[i - 1]) continue;
used[i] = true;
tempList.add(nums[i]);
backtrack(list, tempList, nums, used);
used[i] = false;
tempList.remove(tempList.size() - 1);
}
}
}
Combination Sum : https://leetcode.com/problems/combination-sum/
public List<List<Integer>> combinationSum(int[] nums, int target) {
List<List<Integer>> list = new ArrayList<>();
Arrays.sort(nums);
backtrack(list, new ArrayList<>(), nums, target, 0);
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, int remain, int start){
if(remain < 0) return;
else if(remain == 0) list.add(new ArrayList<>(tempList));
else{
for(int i = start; i < nums.length; i++){
tempList.add(nums[i]);
backtrack(list, tempList, nums, remain - nums[i], i); // not i + 1 because we can reuse same elements
tempList.remove(tempList.size() - 1);
}
}
}
Combination Sum II (can't reuse same element) : https://leetcode.com/problems/combination-sum-ii/
public List<List<Integer>> combinationSum2(int[] nums, int target) {
List<List<Integer>> list = new ArrayList<>();
Arrays.sort(nums);
backtrack(list, new ArrayList<>(), nums, target, 0);
return list;
}
private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, int remain, int start){
if(remain < 0) return;
else if(remain == 0) list.add(new ArrayList<>(tempList));
else{
for(int i = start; i < nums.length; i++){
if(i > start && nums[i] == nums[i-1]) continue; // skip duplicates
tempList.add(nums[i]);
backtrack(list, tempList, nums, remain - nums[i], i + 1);
tempList.remove(tempList.size() - 1);
}
}
}
Palindrome Partitioning : https://leetcode.com/problems/palindrome-partitioning/
public List<List<String>> partition(String s) {
List<List<String>> list = new ArrayList<>();
backtrack(list, new ArrayList<>(), s, 0);
return list;
}
public void backtrack(List<List<String>> list, List<String> tempList, String s, int start){
if(start == s.length())
list.add(new ArrayList<>(tempList));
else{
for(int i = start; i < s.length(); i++){
if(isPalindrome(s, start, i)){
tempList.add(s.substring(start, i + 1));
backtrack(list, tempList, s, i + 1);
tempList.remove(tempList.size() - 1);
}
}
}
}
public boolean isPalindrome(String s, int low, int high){
while(low < high)
if(s.charAt(low++) != s.charAt(high--)) return false;
return true;
}
'''
| {
"content_hash": "131a6c4704d96f79380ae1a0d459d26a",
"timestamp": "",
"source": "github",
"line_count": 439,
"max_line_length": 135,
"avg_line_length": 33.428246013667426,
"alnum_prop": 0.5758773424190801,
"repo_name": "JulyKikuAkita/PythonPrac",
"id": "329227fa570872b60dd8046f5b4774ff79d1d1ac",
"size": "14675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cs15211/Permutations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "191608"
},
{
"name": "HTML",
"bytes": "647778"
},
{
"name": "Python",
"bytes": "5429558"
}
],
"symlink_target": ""
} |
import os
#------------------------------------------------------------------------------
# xbmc related
#------------------------------------------------------------------------------
import xbmc, xbmcaddon
# Add-on handle used for settings access and localized strings.
__settings__ = xbmcaddon.Addon(id='plugin.video.mrknow')
# Absolute path to the add-on icon, used as default notification image.
__icon__ = xbmcaddon.Addon(id='plugin.video.mrknow').getAddonInfo('icon')
# Shortcut for looking up localized strings by id.
translate = __settings__.getLocalizedString
enable_debug = True
# NOTE(review): this binds the xbmc.getLanguage function itself, not the
# current language string -- callers must invoke language(); confirm the
# missing () is intentional.
language = xbmc.getLanguage
class Mode2:
    # Integer action codes dispatched through plugin:// URL parameters
    # (first navigation level). Values are embedded in URLs and must stay
    # stable.
    # NOTE(review): VIEW and DOWNLOADCUSTOMMODULE share the value 110, so
    # the two actions are indistinguishable at dispatch time -- confirm
    # whether VIEW was meant to be 101.
    UPDATE = 100
    VIEW = 110
    PLAY = 102
    QUEUE = 103
    DOWNLOAD = 104
    EXECUTE = 105
    ADDTOFAVOURITES = 106
    REMOVEFROMFAVOURITES = 107
    EDITITEM = 108
    ADDITEM = 109
    DOWNLOADCUSTOMMODULE = 110
    REMOVEFROMCUSTOMMODULES = 111
    INSTALLADDON = 112
    CHROME = 113
class Mode3:
    # Second-level action codes; each mirrors the Mode2 constant plus 100.
    # NOTE(review): VIEW and DOWNLOADCUSTOMMODULE share the value 210
    # (same collision as in Mode2) -- confirm whether VIEW was meant to
    # be 201.
    UPDATE = 200
    VIEW = 210
    PLAY = 202
    QUEUE = 203
    DOWNLOAD = 204
    EXECUTE = 205
    ADDTOFAVOURITES = 206
    REMOVEFROMFAVOURITES = 207
    EDITITEM = 208
    ADDITEM = 209
    DOWNLOADCUSTOMMODULE = 210
    REMOVEFROMCUSTOMMODULES = 211
    INSTALLADDON = 212
    CHROME = 213
"""
def log(msg, level=xbmc.LOGDEBUG):
plugin = "Mrknow"
msg = msg.encode('utf-8')
xbmc.log("[%s] %s" % (plugin, msg.__str__()), level)
"""
def log(msg, level=xbmc.LOGNOTICE):
    """Write *msg*, tagged with the add-on id, to the Kodi log and stdout."""
    tag = "plugin.video.mrknow"
    line = "[%s] %s" % (tag, msg.encode('utf-8'))
    xbmc.log(line, level)
    print(line)
def getSetting(name):
    """Return the add-on setting stored under *name* (always a string)."""
    return __settings__.getSetting(name)
def setSetting(name, value):
    """Persist *value* under the add-on setting called *name*."""
    __settings__.setSetting(id=name, value=value)
def showNotification(title, message, timeout=2000, icon=__icon__):
    """Pop up a Kodi notification with *title* and *message*.

    timeout is in milliseconds; icon defaults to the add-on icon and is
    omitted from the builtin call when falsy.
    """
    def clean(s):
        # Builtin arguments must be plain byte strings.
        return str(s.encode('utf-8', 'ignore'))

    args = [clean(title), clean(message), str(timeout)]
    if icon:
        args.append(icon)
    xbmc.executebuiltin('Notification(%s)' % ','.join(args))
def runPlugin(url):
    """Ask Kodi to run the plugin addressed by *url*."""
    xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
#------------------------------------------------------------------------------
# dialogs
#------------------------------------------------------------------------------
from dialogs.dialogQuestion import DialogQuestion
from dialogs.dialogBrowser import DialogBrowser
from dialogs.dialogInfo import DialogInfo
from dialogs.dialogError import DialogError
from utils.xbmcUtils import getKeyboard
from utils import fileUtils as fu
def ask(question):
    """Show a yes/no dialog and return the user's answer."""
    return DialogQuestion().ask(question)
def showInfo(message):
    """Display *message* in an informational dialog."""
    DialogInfo().show(message)
def showError(message):
    """Display *message* in an error dialog."""
    DialogError().show(message)
def browseFolders(head):
    """Open a folder-browser dialog titled *head* and return the selection."""
    return DialogBrowser().browseFolders(head)
def showOSK(defaultText='', title='', hidden=False):
    """Open the on-screen keyboard and return the text the user entered.

    hidden=True masks the input (password entry).
    """
    return getKeyboard(defaultText, title, hidden)
#------------------------------------------------------------------------------
# web related
#------------------------------------------------------------------------------
from utils.regexUtils import parseTextToGroups
from utils.webUtils import CachedWebRequest
import cookielib
def getHTML(url, form_data='', referer='', xml=False, mobile=False, ignoreCache=False, demystify=False):
    """Fetch *url* through the shared caching web client and return its source.

    Cookies are persisted in an LWP jar inside the plugin cache directory;
    all remaining flags are passed straight through to CachedWebRequest.
    """
    jar_path = xbmc.translatePath(os.path.join(Paths.cacheDir, 'cookies.lwp'))
    client = CachedWebRequest(jar_path, Paths.cacheDir)
    return client.getSource(url, form_data, referer, xml, mobile, ignoreCache, demystify)
def getCookies(cookieName, domain):
    """Return the value of the first stored cookie whose name contains
    *cookieName* and whose domain contains *domain*, or None if absent."""
    jar_path = xbmc.translatePath(os.path.join(Paths.cacheDir, 'cookies.lwp'))
    jar = cookielib.LWPCookieJar()
    jar.load(jar_path, ignore_discard=True)
    for cookie in jar:
        if domain in cookie.domain and cookieName in cookie.name:
            return cookie.value
def parseWebsite(source, regex, referer='', variables=None):
    """Download *source* and apply *regex* to the retrieved page.

    Without *variables*, return the first match group tuple, or '' when
    nothing matched.  With *variables* (a list of names), return a dict
    mapping each name to the captured group at the same index ('' for
    every name when nothing matched).

    Fix: *variables* previously defaulted to a mutable list ([]); it now
    defaults to None, which behaves identically to the old empty-list
    default for every existing caller.
    """
    data = getHTML(source, None, referer)
    groups = parseTextToGroups(data, regex)
    if not variables:
        return groups[0] if groups else ''
    result = {}
    for i, name in enumerate(variables):
        result[name] = groups[i] if groups else ''
    return result
def clearCache():
    """Create the cache directory on first run, otherwise wipe its contents."""
    cacheDir = Paths.cacheDir
    if not os.path.exists(cacheDir):
        # NOTE: 0777 is a Python 2 octal literal (world-writable directory).
        os.mkdir(cacheDir, 0777)
        print('Cache directory created' + str(cacheDir))
    else:
        fu.clearDirectory(cacheDir)
        print('Cache directory purged')
#------------------------------------------------------------------------------
# classes with constants
#------------------------------------------------------------------------------
class Paths:
    """Filesystem locations used by the add-on, resolved once at import time."""
    # Installed add-on directory and its bundled resources.
    rootDir = xbmc.translatePath(__settings__.getAddonInfo('path')).decode('utf-8')
    resDir = os.path.join(rootDir, 'resources')
    imgDir = os.path.join(resDir, 'images')
    modulesDir = os.path.join(resDir, 'modules')
    catchersDir = os.path.join(resDir,'catchers')
    dictsDir = os.path.join(resDir,'dictionaries')
    pluginFanart = os.path.join(rootDir, 'fanart.jpg')
    pluginFanart1 = os.path.join(rootDir, 'fanart1.jpg')
    defaultVideoIcon = os.path.join(imgDir, 'video.png')
    defaultCategoryIcon = os.path.join(imgDir, 'folder.png')
    # Writable per-profile data directory (cache, favourites, custom modules).
    pluginDataDir = xbmc.translatePath(__settings__.getAddonInfo('profile')).decode('utf-8')
    # NOTE(review): debug print executes as a side effect of the class body
    # on every import.
    print("pluginDataDir",pluginDataDir)
    cacheDir = os.path.join(pluginDataDir, 'cache')
    favouritesFolder = os.path.join(pluginDataDir, 'favourites')
    favouritesFile = os.path.join(favouritesFolder, 'favourites.cfg')
    customModulesDir = os.path.join(pluginDataDir, 'custom')
    customModulesFile = os.path.join(customModulesDir, 'custom.cfg')
    # Remote repository URLs; empty means "not configured".
    catchersRepo = ''
    modulesRepo = ''
    customModulesRepo = ''
    # Kodi's own favourites file (shared with other add-ons).
    xbmcFavouritesFile = xbmc.translatePath( 'special://profile/favourites.xml' )
| {
"content_hash": "158a1becc5bdd0614e144c475ea904e0",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 104,
"avg_line_length": 31.04950495049505,
"alnum_prop": 0.5988520408163265,
"repo_name": "rysson/filmkodi",
"id": "37539299473b5a66e7ff09abad23f6a99c86aa3e",
"size": "6297",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plugin.video.mrknow/lib/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7510"
},
{
"name": "Python",
"bytes": "8058464"
},
{
"name": "Shell",
"bytes": "18531"
}
],
"symlink_target": ""
} |
import os

# Directory containing this experiment's input files.
exp_dir = os.path.dirname(__file__)

# Path to the MGRA executable, overridable through the MGRA_PATH
# environment variable; falls back to the "indel_mgra" binary on $PATH.
# (Collapses the previous get(..., None) + explicit None-check into a
# single lookup with a default -- identical behavior.)
mgra_path = os.environ.get('MGRA_PATH', 'indel_mgra')
# Experiment configuration consumed by the gos-asm pipeline runner.
configuration = {
    "experiment_name": "Chimp 2000",
    "experiment_info": """
6 genomes are taken as input: Chimpanzee, Mouse, Rat, Dog, Opossum.
Chimpanzee genome is fragment with repeats of length >= 2000 bp
All duplicated genes are filtered out.
(((chimpanzee,(mouse, rat)),(cat,dog)),opossum); is utilized for observed genome set.
""",
    # Inputs and output location for the gos-asm step.
    "gos-asm": {
        "input": {
            "block_orders_file_paths": [
                os.path.join(exp_dir, "blocks.txt")
            ],
            "phylogenetic_tree": "(((chimpanzee,(mouse, rat)),(cat,dog)),opossum);",
            "target_organisms": ["chimpanzee"],
            "repeats_bridges_file": os.path.join(exp_dir, "bridges.txt")
        },
        "output": {
            "dir": os.path.join(exp_dir, "output")
        }
    },
    # External MGRA tool location (see mgra_path above).
    "mgra": {
        "executable_path": mgra_path,
    },
    # Ordered pipeline stages executed by the driver.
    "algorithm": {
        "executable_containers": [
        ],
        "pipeline": {
            "entries_names": ["task_input",                        # reading data
                              "tmc_wrapper_CCA_balanced",          # ###
                              "cyclic_wrapper_MGRA_CCA_balanced",  # Assembly points detection
                              "tmc_wrapper_phylo",                 # ###
                              "task_output"]                       # outputting data
        }
    }
}
| {
"content_hash": "fa0ed53b7b387ac2f6632f7c14634a7d",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 97,
"avg_line_length": 36.395348837209305,
"alnum_prop": 0.4792332268370607,
"repo_name": "aganezov/gos-asm",
"id": "86fa42b072608441ff32b49c1a5ef7183dc71f98",
"size": "1589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gos_asm/examples/w_repeats/chimp/2000/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89108"
},
{
"name": "Shell",
"bytes": "1577"
}
],
"symlink_target": ""
} |
import unittest
import time
# Dead branch: gives IDEs/linters a binding for mldb_wrapper without
# executing anything at runtime.
if False:
    mldb_wrapper = None
from mldb import mldb, MldbUnitTest, ResponseException
class ConstOptimTest(MldbUnitTest):
    """Checks that a deterministic function call with constant arguments is
    hoisted out of the per-row loop, by comparing wall-clock time against
    the same query using a function declared non-deterministic."""

    @classmethod
    def setUpClass(self):
        # 2000-row dataset so the per-row cost of an unoptimized call is
        # measurable.
        ds = mldb.create_dataset({ "id": "sample", "type": "sparse.mutable" })
        for num in range(0,2000):
            ds.record_row("a" + str(num),[["x", num, 0]])
        ds.commit()

    def test_fetcher_call(self):
        # Deterministic fetcher: the constant-argument call should be
        # evaluated once, not per row.
        mldb.put('/v1/functions/fetch', {
            "type": 'fetcher',
            "params": {}
        })
        # Baseline: single evaluation outside any row loop.
        startTime = time.perf_counter()
        mldb.query('SELECT blob_length(fetch({\'file://mldb/testing/logo-new.jpg\' as url})[content]) as x')
        deltaT = time.perf_counter() - startTime
        mldb.log(deltaT)
        # Same call over the 2000-row dataset; should still be ~one fetch.
        startTime = time.perf_counter()
        mldb.query('SELECT x, blob_length(fetch({\'file://mldb/testing/logo-new.jpg\' as url})[content]) as y FROM sample')
        optimizedDeltaT = time.perf_counter() - startTime
        mldb.log(optimizedDeltaT)
        # Non-deterministic variant: the optimizer must NOT hoist it.
        mldb.put('/v1/functions/fetch2', {
            "type": 'fetcher',
            "params": {},
            "deterministic" : False,
        })
        startTime = time.perf_counter()
        mldb.query('SELECT x, blob_length(fetch2({\'file://mldb/testing/logo-new.jpg\' as url})[content]) as y FROM sample')
        nonOptimizedDeltaT = time.perf_counter() - startTime
        # NOTE(review): logs deltaT, not nonOptimizedDeltaT -- looks like a
        # copy-paste slip; confirm and fix separately.
        mldb.log(deltaT)
        self.assertTrue(nonOptimizedDeltaT > optimizedDeltaT)
mldb.run_tests()
| {
"content_hash": "8b2766f239565499eb7ec9e3410f1691",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 124,
"avg_line_length": 31.851063829787233,
"alnum_prop": 0.5938543754175016,
"repo_name": "mldbai/mldb",
"id": "388dfd55b475d7406597495eef9af9cf3991fdb3",
"size": "1642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testing/MLDB-1935-const-optim.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "643"
},
{
"name": "C",
"bytes": "11754639"
},
{
"name": "C++",
"bytes": "14072572"
},
{
"name": "CMake",
"bytes": "2737"
},
{
"name": "CSS",
"bytes": "17037"
},
{
"name": "Dockerfile",
"bytes": "1591"
},
{
"name": "Fortran",
"bytes": "16349"
},
{
"name": "HTML",
"bytes": "311171"
},
{
"name": "JavaScript",
"bytes": "2209253"
},
{
"name": "Jupyter Notebook",
"bytes": "7661154"
},
{
"name": "Makefile",
"bytes": "290745"
},
{
"name": "Perl",
"bytes": "3890"
},
{
"name": "Python",
"bytes": "1422764"
},
{
"name": "Shell",
"bytes": "32489"
},
{
"name": "Smarty",
"bytes": "2938"
},
{
"name": "SourcePawn",
"bytes": "52752"
}
],
"symlink_target": ""
} |
'''
Created by auto_sdk on 2014-12-17 17:22:51
'''
from top.api.base import RestApi
class ItemsInventoryGetRequest(RestApi):
    """Auto-generated TOP request wrapper for taobao.items.inventory.get.

    Each attribute mirrors an optional API request parameter; attributes
    left as None are omitted from the outgoing request.
    """
    def __init__(self,domain='gw.api.taobao.com',port=80):
        RestApi.__init__(self,domain, port)
        self.banner = None
        self.cid = None
        self.end_modified = None
        self.fields = None
        self.has_discount = None
        self.is_cspu = None
        self.is_ex = None
        self.is_taobao = None
        self.order_by = None
        self.page_no = None
        self.page_size = None
        self.q = None
        self.seller_cids = None
        self.start_modified = None

    def getapiname(self):
        # API method name sent to the TOP gateway.
        return 'taobao.items.inventory.get'
| {
"content_hash": "a596031dc478da54df5b7085e40002e0",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 55,
"avg_line_length": 26.041666666666668,
"alnum_prop": 0.664,
"repo_name": "CooperLuan/devops.notes",
"id": "e484ea7ef381ea6558cce6f8631abaa7fe34caac",
"size": "625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taobao/top/api/rest/ItemsInventoryGetRequest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1505"
},
{
"name": "JavaScript",
"bytes": "29"
},
{
"name": "Python",
"bytes": "211546"
},
{
"name": "Shell",
"bytes": "150"
}
],
"symlink_target": ""
} |
# -*- coding: UTF-8 -*-
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext as _
# Hard requirement: the current conference must be configured; fail fast
# at import time with an explicit message otherwise.
try:
    CONFERENCE = settings.CONFERENCE_CONFERENCE
except AttributeError:
    raise ImproperlyConfigured('Current conference not set (CONFERENCE_CONFERENCE)')
TEMPLATE_FOR_AJAX_REQUEST = getattr(settings, 'CONFERENCE_TEMPLATE_FOR_AJAX_REQUEST', True)
GOOGLE_MAPS = getattr(settings, 'CONFERENCE_GOOGLE_MAPS', None)
MIMETYPE_NAME_CONVERSION_DICT = getattr(settings, 'CONFERENCE_MIMETYPE_NAME_CONVERSION_DICT', {
'application/zip': 'ZIP Archive',
'application/pdf': 'PDF Document',
'application/vnd.ms-powerpoint': 'PowerPoint',
'application/vnd.oasis.opendocument.presentation': 'ODP Document',
}
)
FORMS = {
'PaperSubmission': 'conference.forms.SubmissionForm',
'AdditionalPaperSubmission': 'conference.forms.TalkForm',
'Profile': 'conference.forms.ProfileForm',
'EventBooking': 'conference.forms.EventBookingForm',
}
FORMS.update(getattr(settings, 'CONFERENCE_FORMS', {}))
import os
# FIXME: This part is hardcoded, why ?
#MAX_TICKETS = os.environ.get("MAX_TICKETS")
# (artcz) Setting to 6 because that was/is the value in the javascript for the
# cart.
MAX_TICKETS = 6
# URL that will receive a user who tries to access the paper submission when the CFP is closed
# None if a 404 is returned
CFP_CLOSED = getattr(settings, 'CONFERENCE_CFP_CLOSED', None)
VOTING_CLOSED = getattr(settings, 'CONFERENCE_VOTING_CLOSED', None)
VOTING_OPENED = getattr(settings, 'CONFERENCE_VOTING_OPENED', lambda conf, user: conf.voting())
# Callable to check whether the user passed may vote
VOTING_ALLOWED = getattr(settings, 'CONFERENCE_VOTING_ALLOWED', lambda user: True)
VOTING_DISALLOWED = getattr(settings, 'CONFERENCE_VOTING_DISALLOWED', None)
# List of emails to send notification to
SEND_EMAIL_TO = getattr(settings, 'CONFERENCE_SEND_EMAIL_TO', None)
STUFF_DIR = getattr(settings, 'CONFERENCE_STUFF_DIR', settings.MEDIA_ROOT)
STUFF_URL = getattr(settings, 'CONFERENCE_STUFF_URL', settings.MEDIA_URL)
TALKS_RANKING_FILE = getattr(settings, 'CONFERENCE_TALKS_RANKING_FILE', None)
VIDEO_DOWNLOAD_FALLBACK = getattr(settings, 'CONFERENCE_VIDEO_DOWNLOAD_FALLBACK', True)
DEFAULT_VOTING_TALK_TYPES = (
('all', 'All'),
('s', 'Talks'),
('t', 'Trainings'),
('p', 'Poster'),
)
TALK_TYPES_TO_BE_VOTED = getattr(settings, 'CONFERENCE_VOTING_TALK_TYPES',
DEFAULT_VOTING_TALK_TYPES)
def _CONFERENCE_TICKETS(conf, ticket_type=None, fare_code=None):
    """Default ticket lookup: all tickets of *conf*, optionally narrowed by
    fare ticket_type and/or fare code (a trailing '%' turns the code into
    a prefix match)."""
    from conference import models
    qs = models.Ticket.objects.filter(fare__conference=conf)
    if ticket_type:
        qs = qs.filter(fare__ticket_type=ticket_type)
    if fare_code:
        if fare_code.endswith('%'):
            qs = qs.filter(fare__code__startswith=fare_code[:-1])
        else:
            qs = qs.filter(fare__code=fare_code)
    return qs
CONFERENCE_TICKETS = getattr(settings, 'CONFERENCE_TICKETS', _CONFERENCE_TICKETS)
# TICKET_BADGE_ENABLED enable or disable the ability to generate badge by admin
TICKET_BADGE_ENABLED = getattr(settings, 'CONFERENCE_TICKET_BADGE_ENABLED', False)
# The generation of the badges is a process in 3 steps:
# Step 1: The list (or QuerySet) of the tickets that we want is passed
# to the TICKET_BADGE_PREPARE_FUNCTION function; this must return a list
# where each element is a "ticket group". Tickets can be grouped in any way
# (usually by conference, in case the tickets passed in belong
# to more than one conference).
# Each group is a dict with 2 keys, 'plugin' and 'tickets':
#
# "tickets" is a list of tickets that will be encoded in JSON and passed as input
# to TICKET_BADGE_PROG; this list will be passed to the script specified via "plugin".
# "plugin is the absolute path to the python script with plugin configuration for use
# with the TICKED_BADGE_PROG program.
#
# Step 2: The TICKET_BADGE_PROG program will be invoked following the output of
# TICKET_BADGE_PREPARE_FUNCTION; the output of TICKET_BADGE_PROG is a tar file
# containing the TIFF pages with badges ready to go to press.
#
# Step 3: While running, TICKET_BADGE_PROG invokes the configuration plugin;
# this plugin must expose two functions: "tickets" and "ticket".
# "tickets" is invoked with the program input (generated by TICKET_BADGE_PREPARE_FUNCTION)
# and must return a dictionary grouping the tickets according to any criterion.
# The keys of the dictionary are arbitrary and are only used to generate the
# names of the tar files; the values must in turn be dictionaries with three
# keys: "image", "attendees", "max_width".
#
# "image" must be an instance of PIL.Image, "max_width" is the width maximum
# in pixels used to write text and "attendees" list of participants (one participant can be anything).
#
# "ticket" is called passing a copy of the image returned by "tickets" and the instance
# of a single participant, it must return the image of badge.
import os.path
import conference
TICKED_BADGE_PROG = getattr(settings, 'CONFERENCE_TICKED_BADGE_PROG',
os.path.join(os.path.dirname(conference.__file__), 'utils', 'ticket_badge.py'))
TICKET_BADGE_PROG_ARGS = getattr(settings, 'CONFERENCE_TICKET_BADGE_PROG_ARGS', ['-e', '1', '-n', '6'])
TICKET_BADGE_PROG_ARGS_ADMIN = getattr(settings, 'CONFERENCE_TICKET_BADGE_PROG_ARGS', ['-e', '0', '-p', 'A4', '-n', '2'])
TICKET_BADGE_PREPARE_FUNCTION = getattr(settings, 'CONFERENCE_TICKET_BADGE_PREPARE_FUNCTION', lambda tickets: [])
SCHEDULE_ATTENDEES = getattr(settings, 'CONFERENCE_SCHEDULE_ATTENDEES', lambda schedule, forecast=False: 0)
ADMIN_ATTENDEE_STATS = getattr(settings, 'CONFERENCE_ADMIN_ATTENDEE_STATS', ())
X_SENDFILE = getattr(settings, 'CONFERENCE_X_SENDFILE', None)
TALK_VIDEO_ACCESS = getattr(settings, 'CONFERENCE_TALK_VIDEO_ACCESS', lambda r, t: True)
TALK_SUBMISSION_LANGUAGES = getattr(
settings,
'CONFERENCE_TALK_SUBMISSION_LANGUAGES',
settings.LANGUAGES)
# Voting talk types (only the first letter of TALK_TYPE)
# NOTE(review): this re-assigns DEFAULT_VOTING_TALK_TYPES and (below)
# TALK_TYPES_TO_BE_VOTED, silently overriding the definitions made earlier
# in this module -- confirm which of the two blocks is the intended one.
DEFAULT_VOTING_TALK_TYPES = (
    ('t', 'Talks'),
    ('r', 'Trainings'),
    #('p', 'Poster'),
    #('n', 'Panel'),
    #('h', 'Help desk'),
)

# List of emails to send talk submission email notifications to
TALK_SUBMISSION_NOTIFICATION_EMAIL = getattr(
    settings,
    'CONFERENCE_TALK_SUBMISSION_NOTIFICATION_EMAIL',
    None)

TALK_TYPES_TO_BE_VOTED = getattr(settings, 'CONFERENCE_VOTING_TALK_TYPES', DEFAULT_VOTING_TALK_TYPES)
# absolute path of a file used to log the email sent from the admin (tickets
# stats section); the log file is also used to show a list of "last recently
# sent email" in the admin page.
ADMIN_TICKETS_STATS_EMAIL_LOG = getattr(settings, 'CONFERENCE_ADMIN_TICKETS_STATS_EMAIL_LOG', None)
ADMIN_TICKETS_STATS_EMAIL_LOAD_LIBRARY = getattr(settings, 'CONFERENCE_ADMIN_TICKETS_STATS_EMAIL_LOAD_LIBRARY', ['conference'])
def _VIDEO_COVER_EVENTS(conference):
    """Default implementation: the ids of every event of *conference*."""
    from conference import dataaccess
    return [event['id'] for event in dataaccess.events(conf=conference)]
VIDEO_COVER_EVENTS = getattr(settings, 'CONFERENCE_VIDEO_COVER_EVENTS', _VIDEO_COVER_EVENTS)
def _VIDEO_COVER_IMAGE(conference, eid, type='front', thumb=False):
    """Default hook: no custom cover image is available for any event."""
    return None
VIDEO_COVER_IMAGE = getattr(settings, 'CONFERENCE_VIDEO_COVER_IMAGE', _VIDEO_COVER_IMAGE)
# Built-in oEmbed endpoints and the URL patterns each one serves.
_OEMBED_PROVIDERS = (
    ('https://www.youtube.com/oembed',
        ('https://www.youtube.com/*', 'http://www.youtube.com/*')),
    ('http://vimeo.com/api/oembed.json',
        ('http://vimeo.com/*', 'https://vimeo.com/*',
         'http://vimeo.com/groups/*/videos/*', 'https://vimeo.com/groups/*/videos/*')),
    ('http://lab.viddler.com/services/oembed/',
        ('http://*.viddler.com/*',))
)
OEMBED_PROVIDERS = getattr(settings, 'CONFERENCE_OEMBED_PROVIDERS', _OEMBED_PROVIDERS)

import oembed

# Single shared consumer, wired to every configured endpoint at import time.
OEMBED_CONSUMER = oembed.OEmbedConsumer()
for p, urls in OEMBED_PROVIDERS:
    endpoint = oembed.OEmbedEndpoint(p, urls)
    OEMBED_CONSUMER.addEndpoint(endpoint)

# Regex rewrites applied to video URLs to get their canonical oEmbed form.
OEMBED_URL_FIX = (
    (r'https?://vimeopro.com.*/(\d+)$', r'https://vimeo.com/\1'),
)
| {
"content_hash": "d18e9c3251fb3341dc95f52c35595e00",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 127,
"avg_line_length": 41.71573604060914,
"alnum_prop": 0.7190313944998783,
"repo_name": "artcz/epcon",
"id": "937641cfde5bf8ce2f4df5055b3db27f021552a1",
"size": "8218",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev/ep2018",
"path": "conference/settings.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ASP",
"bytes": "1490"
},
{
"name": "CSS",
"bytes": "4775032"
},
{
"name": "HTML",
"bytes": "2124034"
},
{
"name": "JavaScript",
"bytes": "3337089"
},
{
"name": "Makefile",
"bytes": "3338"
},
{
"name": "PHP",
"bytes": "4506"
},
{
"name": "Python",
"bytes": "1066620"
},
{
"name": "Ruby",
"bytes": "1870"
},
{
"name": "Shell",
"bytes": "2522"
},
{
"name": "XSLT",
"bytes": "5122"
}
],
"symlink_target": ""
} |
from pythonforandroid.toolchain import CythonRecipe
class CymunkRecipe(CythonRecipe):
    # Builds the cymunk (Chipmunk2D physics bindings) Cython extension
    # from the upstream master archive.
    version = 'master'
    url = 'https://github.com/tito/cymunk/archive/{version}.zip'
    name = 'cymunk'
    # Tuple inside the list means "one of": python2 OR python3crystax.
    depends = [('python2', 'python3crystax')]

recipe = CymunkRecipe()
| {
"content_hash": "1a80a144f5dadce9d3c8890fa3313449",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 64,
"avg_line_length": 22.416666666666668,
"alnum_prop": 0.7026022304832714,
"repo_name": "wexi/python-for-android",
"id": "c9733e3e7ce2ce057ff52dfcd666eb2b0e24a135",
"size": "269",
"binary": false,
"copies": "4",
"ref": "refs/heads/local",
"path": "pythonforandroid/recipes/cymunk/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "70942"
},
{
"name": "C++",
"bytes": "491"
},
{
"name": "CMake",
"bytes": "250"
},
{
"name": "CSS",
"bytes": "3487"
},
{
"name": "HTML",
"bytes": "11631"
},
{
"name": "Java",
"bytes": "511420"
},
{
"name": "Makefile",
"bytes": "27280"
},
{
"name": "Python",
"bytes": "2941232"
},
{
"name": "Shell",
"bytes": "5340"
}
],
"symlink_target": ""
} |
import requests
import sys

# Reuse one HTTP session so the Client-ID header rides along on every call.
session = requests.Session()
session.headers.update({'Client-ID': 'CLIENT_ID'})

# The channel name/id comes from the first command-line argument.
response = session.get('https://mixer.com/api/v1/channels/{}'.format(sys.argv[1]))

total_viewers = response.json()['viewersTotal']
print("You have {} viewers...".format(total_viewers))
| {
"content_hash": "c86a55cf0cee143e424b36d31b901045",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 84,
"avg_line_length": 28.1,
"alnum_prop": 0.7046263345195729,
"repo_name": "WatchBeam/developers",
"id": "014483d55a4bb4f8e4824039a1b655ec148105db",
"size": "281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tutorials/code/python/rest/2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "52466"
},
{
"name": "HTML",
"bytes": "140632"
},
{
"name": "Java",
"bytes": "10442"
},
{
"name": "JavaScript",
"bytes": "37007"
},
{
"name": "Makefile",
"bytes": "291"
},
{
"name": "Protocol Buffer",
"bytes": "8166"
},
{
"name": "Python",
"bytes": "1905"
},
{
"name": "Shell",
"bytes": "1132"
}
],
"symlink_target": ""
} |
from multiconf import mc_config, ConfigItem, RepeatableConfigItem, ConfigBuilder, MC_REQUIRED
from multiconf.decorators import nested_repeatables, named_as, required
from multiconf.envs import EnvFactory
from .utils.tstclasses import ItemWithName
# Two-environment factory used by the tests: 'pprd' (pre-prod) and 'prod'.
ef = EnvFactory()
pprd = ef.Env('pprd')
prod = ef.Env('prod')
def test_configbuilders_alternating_with_items_repeatable_multilevel_required():
class some_item(ConfigItem):
xx = 1
class another_item(ConfigItem):
xx = 2
@required('some_item')
@named_as('inners')
class InnerItem(RepeatableConfigItem):
def __init__(self, name, some_attribute=MC_REQUIRED):
super().__init__(mc_key=name)
self.name = name
self.some_attribute = some_attribute
class InnerBuilder(ConfigBuilder):
def __init__(self):
super().__init__()
self.some_attribute = MC_REQUIRED
def mc_build(self):
InnerItem('innermost', self.some_attribute)
@nested_repeatables('inners')
@required('another_item')
class MiddleItem(RepeatableConfigItem):
def __init__(self, mc_key):
super().__init__(mc_key=mc_key)
self.id = mc_key
self.another_attribute = MC_REQUIRED
class MiddleBuilder(ConfigBuilder):
def __init__(self, name):
super().__init__()
self.name = name
self.builder_attribute = MC_REQUIRED
def mc_build(self):
with MiddleItem(self.name) as mi:
mi.setattr('another_attribute', default=9)
another_item()
class OuterBuilder(ConfigBuilder):
def __init__(self):
super().__init__()
def mc_build(self):
with MiddleBuilder('base') as mb:
mb.builder_attribute = 1
with InnerBuilder() as ib:
ib.some_attribute = 1
some_item()
class another_item(ConfigItem):
xx = 2
@nested_repeatables('MiddleItems')
class OuterItem(ConfigItem):
pass
@mc_config(ef, load_now=True)
def config(_):
with ItemWithName('myp'):
with OuterItem():
OuterBuilder()
cr = config(prod).ItemWithName
cr.json(builders=True)
# TODO, verify values
| {
"content_hash": "d7d4ee667d089c9bedab25c03144abfa",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 93,
"avg_line_length": 28.378048780487806,
"alnum_prop": 0.5857327030511388,
"repo_name": "lhupfeldt/multiconf",
"id": "91967d8c1f7964754edceb792bf1426fba6bbc96",
"size": "2458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/mixed_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "31"
},
{
"name": "Python",
"bytes": "760680"
}
],
"symlink_target": ""
} |
from django.conf.urls import include, patterns, url
from . import views
# URL routes for the reviewer "lookup" tool: user, app and transaction
# drill-down pages plus the JSON search endpoints used by the home page.
# These views all start with user ID.
user_patterns = patterns('',
url(r'^summary$', views.user_summary, name='lookup.user_summary'),
url(r'^purchases$', views.user_purchases,
name='lookup.user_purchases'),
url(r'^activity$', views.user_activity, name='lookup.user_activity'),
url(r'^delete$', views.user_delete, name='lookup.user_delete'),
)
# These views all start with app/addon ID.
app_patterns = patterns('',
url(r'^summary$', views.app_summary, name='lookup.app_summary'),
)
# These views all start with transaction ID.
transaction_patterns = patterns('',
url(r'^refund$', views.transaction_refund,
name='lookup.transaction_refund'),
url(r'^summary$', views.transaction_summary,
name='lookup.transaction_summary'),
)
# Top-level patterns; the three groups above are mounted under prefixes
# that capture the relevant ID and pass it to every included view.
urlpatterns = patterns('',
url(r'^$', views.home, name='lookup.home'),
url(r'^bango-portal/(?P<package_id>[^/]+)/$',
views.bango_portal_from_package,
name='lookup.bango_portal_from_package'),
url(r'^user_search\.json$', views.user_search,
name='lookup.user_search'),
url(r'^transaction_search$', views.transaction_search,
name='lookup.transaction_search'),
url(r'^app_search\.json$', views.app_search,
name='lookup.app_search'),
(r'^app/(?P<addon_id>[^/]+)/', include(app_patterns)),
(r'^transaction/(?P<tx_uuid>[^/]+)/', include(transaction_patterns)),
(r'^user/(?P<user_id>[^/]+)/', include(user_patterns)),
)
| {
"content_hash": "fe3306ff4c409449de2f3c9c59ce56dd",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 73,
"avg_line_length": 34.22222222222222,
"alnum_prop": 0.6441558441558441,
"repo_name": "jinankjain/zamboni",
"id": "6da75cd4d447c178229c3aaef846e0f27e16e02b",
"size": "1540",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "mkt/lookup/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Django settings for Gravitas project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l0hmpby63v4ilp&b!92t6_c8+1gb(ga9=m%)tkctk_8-lm4ldh'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.courses.apps.CoursesConfig',
'apps.quiz.apps.QuizConfig',
'apps.users.apps.UsersConfig',
'apps.documents.apps.DocumentsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Gravitas.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Gravitas.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_ROOT = ''
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
)
# media files, this will need to be changed in the full version
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
#login info
LOGIN_REDIRECT_URL = '/staticpages/homepage/'
| {
"content_hash": "8e5995c88fa690c25426e24207b28e88",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 91,
"avg_line_length": 26.92361111111111,
"alnum_prop": 0.6935775083827702,
"repo_name": "Sirikam/Gravitas",
"id": "8bd45cb5e7f90be59d4828b546b6088904149d52",
"size": "3877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Gravitas/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12975"
},
{
"name": "HTML",
"bytes": "34714"
},
{
"name": "JavaScript",
"bytes": "1844"
},
{
"name": "Python",
"bytes": "110107"
}
],
"symlink_target": ""
} |
from mutagen.mp3 import MP3
from muzicast.collection.formats._id3 import ID3File
# File extensions this module claims; presumably used by the collection
# scanner to route files here — confirm against the formats loader.
extensions = [".mp3", ".mp2"]
# MP3/MP2 handler: tag behaviour is inherited from ID3File; `Kind` points
# at mutagen's MP3 class (likely consumed by ID3File — verify in _id3).
class MP3File(ID3File):
format = "MP3"
Kind = MP3
# Module-level alias; the loader appears to look up `info` to find the
# handler class for this format.
info = MP3File
| {
"content_hash": "fc789e967e40e198d1e9764ffb286c2f",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 52,
"avg_line_length": 18.7,
"alnum_prop": 0.6951871657754011,
"repo_name": "nikhilm/muzicast",
"id": "a82532d6ffcbed7ef6fbdce68ead9d9638654e49",
"size": "441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "muzicast/collection/formats/mp3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "66800"
},
{
"name": "Python",
"bytes": "183169"
}
],
"symlink_target": ""
} |
"""Test label RPCs.
RPCs tested are:
- getaddressesbylabel
- listaddressgroupings
- setlabel
"""
from collections import defaultdict
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet_util import test_address
# Functional test exercising the wallet label RPCs end to end:
# getaddressesbylabel, listaddressgroupings, setlabel and friends.
class WalletLabelsTest(BitcoinTestFramework):
# Single fresh-chain node: the balance assertions below assume exact,
# unspent block subsidies.
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Check that there's no UTXO on the node
node = self.nodes[0]
assert_equal(len(node.listunspent()), 0)
# Note each time we call generate, all generated coins go into
# the same address, so we call twice to get two addresses w/50 each
self.generatetoaddress(node,
nblocks=1, address=node.getnewaddress(
label='coinbase'))
self.generatetoaddress(node,
nblocks=101,
address=node.getnewaddress(
label='coinbase'))
assert_equal(node.getbalance(), 100000000)
# there should be 2 address groups
# each with 1 address with a balance of 50 Bitcoins
address_groups = node.listaddressgroupings()
assert_equal(len(address_groups), 2)
# the addresses aren't linked now, but will be after we send to the
# common address
linked_addresses = set()
# Each grouping entry is (address, balance, label).
for address_group in address_groups:
assert_equal(len(address_group), 1)
assert_equal(len(address_group[0]), 3)
assert_equal(address_group[0][1], 50000000)
assert_equal(address_group[0][2], 'coinbase')
linked_addresses.add(address_group[0][0])
# send 50 from each address to a third address not in this wallet
common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr"
node.sendmany(
amounts={common_address: 100000000},
subtractfeefrom=[common_address],
minconf=1,
)
# there should be 1 address group, with the previously
# unlinked addresses now linked (they both have 0 balance)
address_groups = node.listaddressgroupings()
assert_equal(len(address_groups), 1)
assert_equal(len(address_groups[0]), 2)
assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses)
assert_equal([a[1] for a in address_groups[0]], [0, 0])
self.generate(node, 1)
# we want to reset so that the "" label has what's expected.
# otherwise we're off by exactly the fee amount as that's mined
# and matures in the next 100 blocks
amount_to_send = 1000000
# Create labels and make sure subsequent label API calls
# recognize the label/address associations.
labels = [Label(name)
for name in ("a", "b", "c", "d", "e")]
for label in labels:
address = node.getnewaddress(label.name)
label.add_receive_address(address)
label.verify(node)
# Check all labels are returned by listlabels.
assert_equal(node.listlabels(), sorted(
['coinbase'] + [label.name for label in labels]))
# Send a transaction to each label.
for label in labels:
node.sendtoaddress(label.addresses[0], amount_to_send)
label.verify(node)
# Check the amounts received.
self.generate(node, 1)
for label in labels:
assert_equal(
node.getreceivedbyaddress(label.addresses[0]), amount_to_send)
assert_equal(node.getreceivedbylabel(label.name), amount_to_send)
# Forward one payment between labels in a ring: each label pays the
# next, so every address ends up receiving twice in total.
for i, label in enumerate(labels):
to_label = labels[(i + 1) % len(labels)]
node.sendtoaddress(to_label.addresses[0], amount_to_send)
self.generate(node, 1)
for label in labels:
address = node.getnewaddress(label.name)
label.add_receive_address(address)
label.verify(node)
assert_equal(node.getreceivedbylabel(label.name), 2000000)
label.verify(node)
self.generate(node, 101)
# Check that setlabel can assign a label to a new unused address.
for label in labels:
address = node.getnewaddress()
node.setlabel(address, label.name)
label.add_address(address)
label.verify(node)
# The default "" label holds no addresses at this point, so the RPC
# must fail with the dedicated error code.
assert_raises_rpc_error(-11,
"No addresses with label",
node.getaddressesbylabel,
"")
# Check that addmultisigaddress can assign labels.
if not self.options.descriptors:
for label in labels:
addresses = []
for _ in range(10):
addresses.append(node.getnewaddress())
multisig_address = node.addmultisigaddress(
5, addresses, label.name)['address']
label.add_address(multisig_address)
label.purpose[multisig_address] = "send"
label.verify(node)
self.generate(node, 101)
# Check that setlabel can change the label of an address from a
# different label.
change_label(node, labels[0].addresses[0], labels[0], labels[1])
# Check that setlabel can set the label of an address already
# in the label. This is a no-op.
change_label(node, labels[2].addresses[0], labels[2], labels[2])
class Label:
    """Local bookkeeping for a single wallet label.

    Mirrors the addresses the node should report for this label so the
    test can cross-check the wallet RPCs against expected state.
    """

    def __init__(self, name):
        self.name = name
        # Most recently generated receiving address, if any.
        self.receive_address = None
        # Every address ever assigned to this label, in assignment order.
        self.addresses = []
        # address -> purpose; anything not explicitly set is "receive".
        self.purpose = defaultdict(lambda: "receive")

    def add_address(self, address):
        # Guard against double-registration before recording the address.
        assert_equal(address not in self.addresses, True)
        self.addresses.append(address)

    def add_receive_address(self, address):
        # Receiving addresses are tracked the same way as any other address.
        self.add_address(address)

    def verify(self, node):
        """Assert the node's label RPCs agree with this local record."""
        if self.receive_address is not None:
            assert self.receive_address in self.addresses
        for address in self.addresses:
            test_address(node, address, labels=[self.name])
        assert self.name in node.listlabels()
        expected = {
            address: {"purpose": self.purpose[address]}
            for address in self.addresses
        }
        assert_equal(node.getaddressesbylabel(self.name), expected)
# Move `address` from old_label to new_label via the setlabel RPC, mirror
# the move in the local Label records, then re-verify both against the node.
# Calling with old_label is new_label is a supported no-op.
def change_label(node, address, old_label, new_label):
assert_equal(address in old_label.addresses, True)
node.setlabel(address, new_label.name)
old_label.addresses.remove(address)
new_label.add_address(address)
old_label.verify(node)
new_label.verify(node)
# Standard functional-test entry point.
if __name__ == '__main__':
WalletLabelsTest().main()
| {
"content_hash": "4e78b1ac137b0f6b2d73ad2c2ffe3b30",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 88,
"avg_line_length": 38.166666666666664,
"alnum_prop": 0.6000845189463304,
"repo_name": "Bitcoin-ABC/bitcoin-abc",
"id": "8bb0bd6db749637c0dad9d543ed2de9c57c26bc6",
"size": "7313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/wallet_labels.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "C",
"bytes": "1160721"
},
{
"name": "C++",
"bytes": "9817660"
},
{
"name": "CMake",
"bytes": "195193"
},
{
"name": "CSS",
"bytes": "4284"
},
{
"name": "Dockerfile",
"bytes": "3559"
},
{
"name": "HTML",
"bytes": "25754"
},
{
"name": "Java",
"bytes": "41238"
},
{
"name": "JavaScript",
"bytes": "2366459"
},
{
"name": "Kotlin",
"bytes": "3712"
},
{
"name": "M4",
"bytes": "31132"
},
{
"name": "Makefile",
"bytes": "100617"
},
{
"name": "Objective-C++",
"bytes": "5811"
},
{
"name": "PHP",
"bytes": "94504"
},
{
"name": "Perl",
"bytes": "4551"
},
{
"name": "PowerShell",
"bytes": "2277"
},
{
"name": "Python",
"bytes": "2706993"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Ruby",
"bytes": "21108"
},
{
"name": "Rust",
"bytes": "54953"
},
{
"name": "Sage",
"bytes": "39795"
},
{
"name": "Shell",
"bytes": "167526"
},
{
"name": "TypeScript",
"bytes": "66320"
}
],
"symlink_target": ""
} |
from neutron.openstack.common import importutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.plugins.mlnx.common import comm_utils
from neutron.plugins.mlnx.common import exceptions
zmq = importutils.try_import('eventlet.green.zmq')
LOG = logging.getLogger(__name__)
class EswitchUtils(object):
    """ZeroMQ REQ/REP client for the eSwitch daemon (eSwitchD).

    Requests are serialized as JSON and sent over a lazily created REQ
    socket; replies are parsed by parse_response_msg().
    """

    def __init__(self, daemon_endpoint, timeout):
        """Store connection parameters.

        :param daemon_endpoint: ZeroMQ endpoint of eSwitchD.
        :param timeout: poll timeout for replies, in milliseconds.
        :raises SystemExit: if eventlet's green zmq is unavailable.
        """
        if not zmq:
            msg = _("Failed to import eventlet.green.zmq. "
                    "Won't connect to eSwitchD - exiting...")
            LOG.error(msg)
            raise SystemExit(1)
        # The socket is created lazily on first use; see _conn below.
        self.__conn = None
        self.daemon = daemon_endpoint
        self.timeout = timeout

    @property
    def _conn(self):
        """Return the REQ socket, creating and registering it on first use."""
        if self.__conn is None:
            context = zmq.Context()
            socket = context.socket(zmq.REQ)
            # LINGER 0: drop unsent messages immediately on close.
            socket.setsockopt(zmq.LINGER, 0)
            socket.connect(self.daemon)
            self.__conn = socket
            self.poller = zmq.Poller()
            self.poller.register(self._conn, zmq.POLLIN)
        return self.__conn

    @comm_utils.RetryDecorator(exceptions.RequestTimeout)
    def send_msg(self, msg):
        """Send a JSON request and wait up to self.timeout for the reply.

        On timeout the socket is torn down (a REQ socket is stuck mid-cycle
        after an unanswered send) and RequestTimeout is raised; the
        decorator retries the whole exchange.
        """
        self._conn.send(msg)
        socks = dict(self.poller.poll(self.timeout))
        if socks.get(self._conn) == zmq.POLLIN:
            recv_msg = self._conn.recv()
            response = self.parse_response_msg(recv_msg)
            return response
        else:
            # Force a fresh connection on the next request.
            self._conn.setsockopt(zmq.LINGER, 0)
            self._conn.close()
            self.poller.unregister(self._conn)
            self.__conn = None
            raise exceptions.RequestTimeout()

    def parse_response_msg(self, recv_msg):
        """Decode a daemon reply.

        Returns the 'response' payload (or None) on success; raises
        OperationFailed on a FAIL or unknown status.
        """
        msg = jsonutils.loads(recv_msg)
        if msg['status'] == 'OK':
            if 'response' in msg:
                return msg.get('response')
            return
        elif msg['status'] == 'FAIL':
            msg_dict = dict(action=msg['action'], reason=msg['reason'])
            error_msg = _("Action %(action)s failed: %(reason)s") % msg_dict
        else:
            error_msg = _("Unknown operation status %s") % msg['status']
        LOG.error(error_msg)
        raise exceptions.OperationFailed(err_msg=error_msg)

    def get_attached_vnics(self):
        """Query eSwitchD for the vNICs attached on all fabrics."""
        LOG.debug(_("get_attached_vnics"))
        msg = jsonutils.dumps({'action': 'get_vnics', 'fabric': '*'})
        vnics = self.send_msg(msg)
        return vnics

    def set_port_vlan_id(self, physical_network,
                         segmentation_id, port_mac):
        """Assign a VLAN to the port identified by its MAC on a fabric."""
        LOG.debug(_("Set Vlan %(segmentation_id)s on Port %(port_mac)s "
                    "on Fabric %(physical_network)s"),
                  {'port_mac': port_mac,
                   'segmentation_id': segmentation_id,
                   'physical_network': physical_network})
        msg = jsonutils.dumps({'action': 'set_vlan',
                               'fabric': physical_network,
                               'port_mac': port_mac,
                               'vlan': segmentation_id})
        self.send_msg(msg)

    def define_fabric_mappings(self, interface_mapping):
        """Register each fabric -> physical interface mapping with eSwitchD."""
        for fabric, phy_interface in interface_mapping.iteritems():
            LOG.debug(_("Define Fabric %(fabric)s on interface %(ifc)s"),
                      {'fabric': fabric,
                       'ifc': phy_interface})
            msg = jsonutils.dumps({'action': 'define_fabric_mapping',
                                   'fabric': fabric,
                                   'interface': phy_interface})
            self.send_msg(msg)

    def port_up(self, fabric, port_mac):
        """Mark the port with MAC `port_mac` as up on `fabric`."""
        LOG.debug(_("Port Up for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        # Bug fix: previously sent the literal string 'port_mac' instead of
        # the MAC variable, so the daemon could never match the port (cf.
        # port_down/port_release below, which pass the variable).
        msg = jsonutils.dumps({'action': 'port_up',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               'mac': port_mac})
        self.send_msg(msg)

    def port_down(self, fabric, port_mac):
        """Mark the port with MAC `port_mac` as down on `fabric`."""
        LOG.debug(_("Port Down for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        msg = jsonutils.dumps({'action': 'port_down',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               'mac': port_mac})
        self.send_msg(msg)

    def port_release(self, fabric, port_mac):
        """Release the port with MAC `port_mac` on `fabric`."""
        LOG.debug(_("Port Release for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        msg = jsonutils.dumps({'action': 'port_release',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               'mac': port_mac})
        self.send_msg(msg)

    def get_eswitch_ports(self, fabric):
        # TODO(irena) - to implement for next phase
        return {}

    def get_eswitch_id(self, fabric):
        # TODO(irena) - to implement for next phase
        return ""
| {
"content_hash": "d1979fa8c08c8901a47f32ce50e8ba1b",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 76,
"avg_line_length": 39.74803149606299,
"alnum_prop": 0.5239698890649762,
"repo_name": "vbannai/neutron",
"id": "924be790f459b2eabda2ffea7a98ee78f5e0b7aa",
"size": "5686",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/plugins/mlnx/agent/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60527"
},
{
"name": "Python",
"bytes": "9344274"
},
{
"name": "Shell",
"bytes": "9202"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from __future__ import absolute_import
from django.utils.translation import ugettext as _
"""Base classes for different plugin objects.
* BasePlugin: Create a wiki_plugin.py with a class that inherits from BasePlugin.
* PluginSidebarFormMixin: Mix in this class in the form that should be rendered in the editor sidebar
* PluginSettingsFormMixin: ..and this one for a form in the settings tab.
Please have a look in wiki.models.pluginbase to see where to inherit your
plugin's models.
"""
from django import forms
# Declarative base class for wiki plugins: subclasses override the class
# attributes below; no behaviour lives here.
class BasePlugin(object):
"""Plugins should inherit from this"""
# Must fill in!
slug = None
# Optional
settings_form = None # A form class to add to the settings tab
urlpatterns = {
# General urlpatterns that will reside in /wiki/plugins/plugin-slug/...
'root': [],
# urlpatterns that receive article_id or urlpath, i.e.
# /wiki/ArticleName/plugin/plugin-slug/...
'article': [],
}
article_tab = None # (_('Attachments'), "fa fa-file")
article_view = None # A view for article_id/plugin/slug/
# A list of notification handlers to be subscribed if the notification
# system is active
notifications = []
# Example
# [{'model': models.AttachmentRevision,
# 'message': lambda obj: _("A file was changed: %s") % obj.get_filename(),
# 'key': ARTICLE_EDIT,
# 'created': True,
# 'get_article': lambda obj: obj.attachment.article}
# ]
# Markdown extensions contributed by this plugin; presumably registered
# with the wiki's markdown pipeline — confirm in the plugin registry.
markdown_extensions = []
# Static assets for the plugin's UI, in Django form-Media style
# (js list, css dict) — verify how the templates consume this.
class RenderMedia:
js = []
css = {}
class PluginSidebarFormMixin(forms.ModelForm):
    """Mixin for plugin forms rendered in the article editor's sidebar.

    Carries the unsaved article title and content as hidden fields
    alongside the sidebar form's own data.
    """

    unsaved_article_title = forms.CharField(
        required=True, widget=forms.HiddenInput())
    unsaved_article_content = forms.CharField(
        required=False, widget=forms.HiddenInput())

    def get_usermessage(self):
        """Hook for a user-facing message after saving; no-op by default."""
        pass
class PluginSettingsFormMixin(object):
    """Mixin for plugin forms shown on the article settings tab."""

    # Heading rendered above this plugin's settings section.
    settings_form_headline = _('Settings for plugin')
    # Relative ordering among the plugins' settings sections.
    settings_order = 1
    # Presumably gates the form to users with write access — confirm in
    # the settings view that consumes this flag.
    settings_write_access = False

    def get_usermessage(self):
        """Hook for a user-facing message after saving; no-op by default."""
        pass
| {
"content_hash": "4e6d3ee5fbe228ce96fc881bb63dd4a5",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 102,
"avg_line_length": 32.01449275362319,
"alnum_prop": 0.6301493888637393,
"repo_name": "skbly7/serc",
"id": "cfed95032100de93453b2b096d7abc36b6a52170",
"size": "2209",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "website/wiki/core/plugins/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "167250"
},
{
"name": "GCC Machine Description",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "127197"
},
{
"name": "JavaScript",
"bytes": "276437"
},
{
"name": "Python",
"bytes": "659443"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
} |
r"""
path
====
Types for handling paths.
"""
from os import path
from ..modules.classes import CustomType
class PathBase(CustomType):
    """Base custom type for path validation.

    Wraps a predicate ``func``: a value is accepted when ``func(value)``
    equals the expected result ``ret``.

    Args:
        func (callable): The function used to check the value.
        ret: The return value expected from func; values that fail the
            expectation are invalid.
        **Config (kwargs): Configuration forwarded to the custom type.
    """

    def __init__(self, func, ret=True, **Config):
        CustomType.__init__(self, **Config)
        self.func = func
        self.ret = ret

    def __call__(self, val):
        outcome = self.func(val)
        if outcome != self.ret:
            raise ValueError()
        return val
# Ready-made path types built on PathBase; each validates its input path
# string and returns it unchanged when valid.
exists = PathBase(path.exists, type_str='an existing path')
notExists = PathBase(path.exists, False, type_str='a non-existent path')
# "free": nothing exists at the path yet, but its parent directory does.
free = PathBase(lambda x: not path.exists(x) and path.exists(path.dirname(x)), True, type_str='a free path')
parentExists = PathBase(lambda x: path.exists(path.dirname(x)), True, type_str='a path with an existing parent')
isdir = PathBase(path.isdir, type_str='a dir')
isfile = PathBase(path.isfile, type_str='a file')
isabs = PathBase(path.isabs, type_str='an absolute path')
isrelative = PathBase(path.isabs, False, type_str='a relative path')
# #ToDo: Add isdescendant.
# #ToDo: Add isvalid. It seems to be quite complex. Refer: http://stackoverflow.com/questions/9532499/check-whether-a-path-is-valid-in-python-without-creating-a-file-at-the-paths-ta
# Build a type accepting paths that exist relative to the given root.
def exists_in(root): # ToDo: Replace this function with a CustomType based class
return PathBase(lambda val: path.exists(path.join(root, val)), type_str='a path under %s' % root)
| {
"content_hash": "f0f3e76d157890d3d61820ddaa268ff2",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 181,
"avg_line_length": 37.44186046511628,
"alnum_prop": 0.7086956521739131,
"repo_name": "Laufire/ec",
"id": "8e66c37417d0e5688a687f240dc95d016a74afff",
"size": "1610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ec/types/path.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "76634"
}
],
"symlink_target": ""
} |
"""The data range path specification implementation."""
from dfvfs.lib import definitions
from dfvfs.path import factory
from dfvfs.path import path_spec
class DataRangePathSpec(path_spec.PathSpec):
  """Class that implements the data range path specification.

  Attributes:
    range_offset (int): start offset of the data range.
    range_size (int): size of the data range.
  """

  TYPE_INDICATOR = definitions.TYPE_INDICATOR_DATA_RANGE

  def __init__(self, parent=None, range_offset=None, range_size=None, **kwargs):
    """Initializes the path specification.

    Note that the data range path specification must have a parent.

    Args:
      parent (Optional[PathSpec]): parent path specification.
      range_offset (Optional[int]): start offset of the data range.
      range_size (Optional[int]): size of the data range.

    Raises:
      ValueError: when range offset, range size or parent are not set.
    """
    # Compare the offset against None explicitly: an offset of 0 is a
    # valid start of range, but the previous truthiness check rejected it.
    if range_offset is None or not range_size or not parent:
      raise ValueError(u'Missing range offset, range size or parent value.')

    super(DataRangePathSpec, self).__init__(parent=parent, **kwargs)
    self.range_offset = range_offset
    self.range_size = range_size

  @property
  def comparable(self):
    """str: comparable representation of the path specification."""
    sub_comparable_string = (
        u'range_offset: 0x{0:08x}, range_size: 0x{1:08x}').format(
            self.range_offset, self.range_size)
    return self._GetComparable(sub_comparable_string=sub_comparable_string)


factory.Factory.RegisterPathSpec(DataRangePathSpec)
| {
"content_hash": "715c1ab93a6ff252f1502ae718561478",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 80,
"avg_line_length": 33.744680851063826,
"alnum_prop": 0.7112232030264817,
"repo_name": "dc3-plaso/dfvfs",
"id": "5963c530761e61a4be57be9278eb0c7180509f95",
"size": "1610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dfvfs/path/data_range_path_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "609"
},
{
"name": "Python",
"bytes": "1397977"
},
{
"name": "Shell",
"bytes": "1522"
}
],
"symlink_target": ""
} |
import sensor, image, time

# Configure the camera: grayscale QVGA, then let it settle for 2 seconds.
sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QVGA)
sensor.skip_frames(time = 2000)
clock = time.clock()

while True:
    clock.tick()
    frame = sensor.snapshot()
    # Gamma, contrast, and brightness correction are applied to each color
    # channel. The values are scaled to the range per color channel per
    # image type...
    frame.gamma_corr(gamma = 0.5, contrast = 1.0, brightness = 0.0)
    print(clock.fps())
| {
"content_hash": "89b12e3fbe60758af967b4519314cb90",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 87,
"avg_line_length": 29.5,
"alnum_prop": 0.711864406779661,
"repo_name": "iabdalkader/openmv",
"id": "7c735fd1bf73d510afa9cb61b0a66d0d9befd673",
"size": "636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/examples/Arduino/Portenta-H7/04-Image-Filters/gamma_correction.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "569030"
},
{
"name": "C",
"bytes": "100413378"
},
{
"name": "C++",
"bytes": "97780"
},
{
"name": "CMake",
"bytes": "10173"
},
{
"name": "Dockerfile",
"bytes": "874"
},
{
"name": "Makefile",
"bytes": "72669"
},
{
"name": "Python",
"bytes": "1197447"
},
{
"name": "Shell",
"bytes": "3220"
}
],
"symlink_target": ""
} |
import csv
import os
import shutil
import itertools
def delete_dirs(ls_dir):
    """Recursively remove every directory in *ls_dir*; missing paths are
    ignored (rmtree runs with ignore_errors=True)."""
    for target in ls_dir:
        print("removing " + target)
        shutil.rmtree(target, ignore_errors=True)
# Ensure every directory in `ls_dir` exists, creating (and logging) any
# that are missing. Python 2 file: note the statement-form print below.
def test_data_dirs(ls_dir):
for dir in ls_dir:
if not os.path.exists(dir):
os.makedirs(dir)
print "Wrote Directory: " + dir
def csv_to_ilist(file, field):
    """Yield the value of column *field* for every row of CSV *file*.

    Fix: the file is opened via a context manager so the handle is closed
    when the generator is exhausted (previously it leaked). Plain "r" mode
    replaces the deprecated "rU"; the csv reader handles both newline
    styles either way.
    """
    with open(file, "r") as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            yield row[field]
def take(n, iterable):
    "Return first n items of the iterable as a list"
    head = itertools.islice(iterable, n)
    return list(head)
def csv_to_dict(file, keys):
    """Read CSV *file* and return its rows as dicts restricted to *keys*.

    Fix: the file handle is now closed before returning (previously it
    leaked). Plain "r" mode replaces the deprecated "rU".
    """
    with open(file, "r") as csvfile:
        data = list(csv.DictReader(csvfile))
    return [dict((k, d[k]) for k in keys) for d in data]
def request_to_file(text, pmid, dir):
""" Save text to xml file using pmid
"""
# NOTE(review): Python 2 idiom — writes UTF-8 encoded bytes through a
# text-mode handle; under Python 3 this would need mode "wb".
file_xml = dir + pmid + ".xml"
f = open(file_xml, 'w')
f.write(text.encode('utf8'))
f.close()
# The written path is returned so callers can log or reuse it.
return(file_xml)
# Ensure <dir><file>.csv exists; if not, create it and write the header row
# (fully quoted). Python 2 file: statement-form prints below.
def test_csv_exist(dir, file, header):
""" check to see if a file exists. If not, write header.
"""
filename = dir + file + ".csv"
print filename
if os.path.isfile(filename):
print "File exists, no need to write headers"
else:
print "File does not, exist, creating headers"
# "w+" truncates/creates; only the header row is written here.
outcsv = open(filename, "w+")
writer = csv.writer(outcsv, quoting=csv.QUOTE_ALL)
print os.getcwd()
writer.writerow(header)
outcsv.close()
def append_csv(dir, file, data, header):
    """Append each row dict in *data* to <dir><file>.csv.

    Rows are written fully quoted, in *header* column order. Fixes: the
    handle is managed by a context manager so it is closed even when a row
    fails to serialize, and dead commented-out code was removed.
    """
    filename = dir + file + ".csv"
    with open(filename, "a") as out:
        writer = csv.DictWriter(out, fieldnames=header, quoting=csv.QUOTE_ALL)
        for row in data:
            writer.writerow(row)
| {
"content_hash": "76a92fc02205e63608177a3a25cef8f5",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 74,
"avg_line_length": 25.671232876712327,
"alnum_prop": 0.5939167556029883,
"repo_name": "McKenzieKohn/scopus_spider",
"id": "b261456adaefc1ffe3c987002dcde84551ad903a",
"size": "1874",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "build/lib/scopus_spider/data_tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "95065"
}
],
"symlink_target": ""
} |
'''
Nosvideo urlresolver plugin
Copyright (C) 2013 Vinnydude
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from lib import helpers
from urlresolver9 import common
from urlresolver9.resolver import UrlResolver, ResolverError
# Resolver for nosvideo.com / noslocker.com: scrapes the embed page for
# direct .mp4 stream URLs. Python 2 source: the '\u=' in `pattern` below is
# a literal backslash-u in a py2 str (it would be a syntax error in py3).
class NosvideoResolver(UrlResolver):
name = "nosvideo"
domains = ["nosvideo.com", "noslocker.com"]
pattern = '(?://|\.)(nosvideo.com|noslocker.com)/(?:\?v\=|embed/|.+?\u=)?([0-9a-zA-Z]+)'
def __init__(self):
self.net = common.Net()
# Fetch the embed page and collect unique mp4 URLs found inside <script>
# blocks; the user picks one via helpers.pick_source. The Referer header
# is forwarded with the chosen stream URL.
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
headers = {'User-Agent': common.FF_USER_AGENT, 'Referer': web_url}
html = self.net.http_GET(web_url, headers=headers).content
sources = []
streams = set()
count = 1
# NOTE(review): the inner loop rebinds `match`, shadowing the outer
# loop variable — works, but fragile if the outer match is needed later.
for match in re.finditer('<script.*?</script>', html):
for match in re.finditer("'(http[^']*v\.mp4)", match.group(0)):
stream_url = match.group(1)
# `streams` de-duplicates; `sources` keeps labeled, ordered entries.
if stream_url not in streams:
sources.append(('Source %s' % (count), stream_url))
streams.add(stream_url)
count += 1
return helpers.pick_source(sources) + helpers.append_headers(headers)
def get_url(self, host, media_id):
return 'http://nosvideo.com/embed/%s' % media_id
| {
"content_hash": "1f389b5a4a9b52c9e7e569683c1da513",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 92,
"avg_line_length": 37.509803921568626,
"alnum_prop": 0.6534239414532148,
"repo_name": "mrknow/filmkodi",
"id": "025426cdf7c5673896848cca8ab4d2e85d1de7f1",
"size": "1913",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "script.mrknow.urlresolver/lib/urlresolver9/plugins/nosvideo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7510"
},
{
"name": "Python",
"bytes": "8058464"
},
{
"name": "Shell",
"bytes": "18531"
}
],
"symlink_target": ""
} |
import time
class DateObject:
    """Date helpers for age calculation and month lengths.

    Any "current date" component not supplied by the caller defaults to
    the local system clock.
    """

    def getYear(self):
        """Return the current local year."""
        return time.localtime(time.time())[0]

    def getMonth(self):
        """Return the current local month (1-12)."""
        return time.localtime(time.time())[1]

    def getDay(self):
        """Return the current local day of the month."""
        return time.localtime(time.time())[2]

    def getDetailedAge(self, dobMonth, dobYear, dobDay = None, curMonth = None, curYear = None, curDay = None):
        """Return the age as a (years, months) tuple.

        Missing current-date components default to today. When the day of
        birth is known and has not yet been reached in the current month,
        one month is subtracted from the count.
        """
        if curMonth is None:
            curMonth = self.getMonth()
        if curYear is None:
            curYear = self.getYear()
        if curDay is None:
            curDay = self.getDay()
        curMonths = curYear * 12 + (curMonth - 1)
        dobMonths = dobYear * 12 + (dobMonth - 1)
        # Bug fix: the "day not yet reached" adjustment used to apply only
        # when dobMonth == curMonth, overstating the age by one month for
        # every other pairing (e.g. born Jan 20, asked on Feb 10).
        if dobDay is not None and dobDay > curDay:
            curMonths -= 1
        ageMonths = curMonths - dobMonths
        return (int(ageMonths / 12), ageMonths % 12)

    def getAge(self, dobMonth, dobYear, dobDay = None, curMonth = None, curYear = None, curDay = None):
        """Return the age in whole years (see getDetailedAge)."""
        return self.getDetailedAge(dobMonth, dobYear, dobDay=dobDay, curMonth=curMonth, curYear=curYear, curDay=curDay)[0]

    def getNumDaysInMonth(self, month = None, year = None):
        """Return the number of days in the given month/year (defaults to now)."""
        def isLeapYear(year):
            # Gregorian rule: divisible by 4, except centuries that are
            # not divisible by 400.
            if year % 400 == 0:
                return 1
            if year % 100 == 0:
                return 0
            if year % 4 == 0:
                return 1
            return 0
        m = self.getMonth() if month is None else month
        y = self.getYear() if year is None else year
        if m == 2:
            return 29 if isLeapYear(y) else 28
        if m in (1, 3, 5, 7, 8, 10, 12):
            return 31
        return 30
| {
"content_hash": "aea414d0bf35f1701b1fea06f1f9e693",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 122,
"avg_line_length": 29.9375,
"alnum_prop": 0.4900835073068894,
"repo_name": "silly-wacky-3-town-toon/SOURCE-COD",
"id": "f354a1a24128d13870d1734d0ff0e75e89312849",
"size": "1916",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "toontown/login/DateObject.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10249"
},
{
"name": "C",
"bytes": "1752256"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "5485400"
},
{
"name": "Emacs Lisp",
"bytes": "210083"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "NSIS",
"bytes": "1009050"
},
{
"name": "Objective-C",
"bytes": "21821"
},
{
"name": "PLSQL",
"bytes": "10200"
},
{
"name": "Pascal",
"bytes": "4986"
},
{
"name": "Perl6",
"bytes": "30612"
},
{
"name": "Puppet",
"bytes": "259"
},
{
"name": "Python",
"bytes": "33566014"
},
{
"name": "Shell",
"bytes": "14642"
},
{
"name": "Tcl",
"bytes": "2084458"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class WinRMConfiguration(Model):
"""Describes Windows Remote Management configuration of the VM.
:param listeners: The list of Windows Remote Management listeners
:type listeners:
list[~azure.mgmt.compute.v2015_06_15.models.WinRMListener]
"""
_attribute_map = {
'listeners': {'key': 'listeners', 'type': '[WinRMListener]'},
}
def __init__(self, listeners=None):
super(WinRMConfiguration, self).__init__()
self.listeners = listeners
| {
"content_hash": "f1371607971482d9bdf92ee61c584d89",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 69,
"avg_line_length": 29.61111111111111,
"alnum_prop": 0.6697936210131332,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "efc18d734d09fb8af513d1815997048c96a3abef",
"size": "1007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-compute/azure/mgmt/compute/v2015_06_15/models/win_rm_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
} |
from django.views.generic import ListView, DetailView
from apps.tasks.models import Task
class TasksIndexPage(ListView):
model = Task
class TaskPage(DetailView):
    """Generic detail view for a single Task object."""
    model = Task
| {
"content_hash": "3e2de488684f9cf1b2e90f0942b646e5",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 53,
"avg_line_length": 17.09090909090909,
"alnum_prop": 0.7606382978723404,
"repo_name": "PyNSKOrg/pynsk.ru",
"id": "bac177ffa8c76aadc32f61d64a2670f345b11a7e",
"size": "203",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/tasks/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "333150"
},
{
"name": "HTML",
"bytes": "161065"
},
{
"name": "JavaScript",
"bytes": "54186"
},
{
"name": "Nginx",
"bytes": "1215"
},
{
"name": "Python",
"bytes": "105894"
},
{
"name": "Shell",
"bytes": "772"
}
],
"symlink_target": ""
} |
'''
A simple test-case that ensures the forward and backward passes
of the fast, cythonized factorization machine match the forward
and backward passes of the slow, autodiff version.
'''
from __future__ import print_function
import numpy as np
import torch
import torch.nn.functional as F
from fmpytorch.second_order.second_order_naive import SecondOrderInteraction as SOISlow
from fmpytorch.second_order.second_order_fast import SecondOrderInteraction as SOIFast
from torch.autograd import Variable
INPUT_SIZE = 50   # feature dimension of each input sample
BATCH_SIZE = 32   # number of samples per batch
N_FACTORS = 5     # latent factors in the second-order interaction
N_TESTS = 10      # random batches compared per dtype
class ModelSlow(torch.nn.Module):
    """Thin module wrapping the naive (autodiff) second-order layer."""

    def __init__(self):
        super(ModelSlow, self).__init__()
        self.second_order = SOISlow(INPUT_SIZE, N_FACTORS)

    def forward(self, x):
        return self.second_order(x)
class ModelFast(torch.nn.Module):
    """Thin module wrapping the cythonized second-order layer."""

    def __init__(self):
        super(ModelFast, self).__init__()
        self.second_order = SOIFast(INPUT_SIZE, N_FACTORS)

    def forward(self, x):
        return self.second_order(x)
def _forward_backward_check(dtype):
    """Check that the naive and cythonized second-order layers agree.

    Both models are constructed from identical RNG seeds so their
    parameters match, then for N_TESTS random batches the forward
    outputs, the parameter gradients and the input gradients are
    asserted numerically equal for the given numpy dtype.

    Fix: batch construction now uses the declared BATCH_SIZE constant
    instead of a duplicated hard-coded 32.
    """
    # Re-seed before constructing each model so both receive the same
    # initial weights — the seeding order here is load-bearing.
    np.random.seed(1)
    torch.manual_seed(1)
    slow = ModelSlow()
    np.random.seed(1)
    torch.manual_seed(1)
    fast = ModelFast()
    if dtype is np.float64:
        slow.double()
        fast.double()
    for i in range(N_TESTS):
        input = np.random.random((BATCH_SIZE, INPUT_SIZE)).astype(dtype)
        x_slow = Variable(torch.from_numpy(input),
                          requires_grad=True)
        x_fast = Variable(torch.from_numpy(input),
                          requires_grad=True)
        y = Variable(torch.from_numpy(np.random.random((BATCH_SIZE, 1)).astype(dtype)))
        out_slow = slow(x_slow)
        out_fast = fast(x_fast)
        assert np.allclose(out_slow.data.numpy(),
                           out_fast.data.numpy()), "Forward passes differed for {}".format(dtype)
        loss_slow = F.mse_loss(out_slow, y)
        loss_fast = F.mse_loss(out_fast, y)
        loss_slow.backward()
        loss_fast.backward()
        for var_slow, var_fast in zip(slow.parameters(), fast.parameters()):
            assert np.allclose(var_slow.grad.data.numpy(),
                               var_fast.grad.data.numpy()), "Backward passes differed for {}".format(dtype)
        assert np.allclose(x_slow.grad.data.numpy(),
                           x_fast.grad.data.numpy()), "Backward passes differed for {}".format(dtype)
def test_forward_backward_float():
    # Single-precision equivalence check.
    _forward_backward_check(np.float32)
def test_forward_backward_double():
    # Double-precision equivalence check.
    _forward_backward_check(np.float64)
| {
"content_hash": "15ac7e575125bccda6bf1826e7abe777",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 107,
"avg_line_length": 30.8,
"alnum_prop": 0.6191749427043545,
"repo_name": "jmhessel/fmpytorch",
"id": "3baee29351768937878d4c0b690f328ec7274128",
"size": "2618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fmpytorch/tests/test_second_order.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17097"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.conf import settings
class Migration(SchemaMigration):
    """South schema migration: creates/drops the DepthSounding table."""

    def forwards(self, orm):
        # Adding model 'DepthSounding'
        db.create_table('depth_range_depthsounding', (
            ('geometry', self.gf('django.contrib.gis.db.models.fields.PointField')(srid=settings.GEOMETRY_DB_SRID)),
            ('depth_ft', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal('depth_range', ['DepthSounding'])

    def backwards(self, orm):
        # Deleting model 'DepthSounding'
        db.delete_table('depth_range_depthsounding')

    # Frozen ORM state used by South when replaying this migration.
    models = {
        'depth_range.depthsounding': {
            'Meta': {'object_name': 'DepthSounding'},
            'depth_ft': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'geometry': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': str(settings.GEOMETRY_DB_SRID)}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['depth_range']
| {
"content_hash": "55d379cf9d18da38599ad94988eb0471",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 121,
"avg_line_length": 37.57142857142857,
"alnum_prop": 0.6045627376425855,
"repo_name": "Alwnikrotikz/marinemap",
"id": "06c39b43722f546ce30ed26bb1255a489cee439f",
"size": "1333",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lingcod/depth_range/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "62866"
},
{
"name": "HTML",
"bytes": "229261"
},
{
"name": "JavaScript",
"bytes": "1435695"
},
{
"name": "PLpgSQL",
"bytes": "3371"
},
{
"name": "Python",
"bytes": "1152113"
},
{
"name": "Shell",
"bytes": "12077"
}
],
"symlink_target": ""
} |
import os
import sys
import vstruct.qt as vs_qt
import envi.expression as e_expr
import envi.qt.config as e_q_config
import vqt.cli as vq_cli
import vqt.main as vq_main
import vqt.colors as vq_colors
import vqt.qpython as vq_python
import vqt.application as vq_app
import vivisect.cli as viv_cli
import vivisect.base as viv_base
import vivisect.vdbext as viv_vdbext
import vivisect.qt.tips as viv_q_tips
import vivisect.qt.views as viv_q_views
import vivisect.qt.memory as viv_q_memory
import vivisect.qt.remote as viv_q_remote
import vivisect.qt.ustruct as viv_q_ustruct
import vivisect.extensions as viv_extensions
import vivisect.qt.funcgraph as viv_q_funcgraph
import vivisect.qt.funcviews as viv_q_funcviews
import vivisect.qt.symboliks as viv_q_symboliks
import vivisect.remote.share as viv_share
import vivisect.analysis.generic.symswitchcase as symswitch
from PyQt5 import QtCore
from PyQt5.QtWidgets import QInputDialog
from vqt.common import *
from vivisect.const import *
from vqt.main import getOpenFileName, getSaveFileName
from vqt.saveable import compat_isNone
# Shorthand aliases for the Qt dock areas used by this module.
dock_top = QtCore.Qt.TopDockWidgetArea
dock_right = QtCore.Qt.RightDockWidgetArea
class VQVivMainWindow(viv_base.VivEventDist, vq_app.VQMainCmdWindow):
    """
    The main Vivisect QT window.

    Builds the menu bar, hotkeys and dockable analysis views for a single
    workspace (vw), routes workspace prints into the GUI canvas, and
    persists/restores dock layouts via QSettings.
    """

    # Child windows may emit this on "navigate" requests...
    # vivNavSignal = QtCore.pyqtSignal(str, name='vivNavSignal')
    vivMemColorSignal = QtCore.pyqtSignal(dict, name='vivMemColorSignal')

    def __init__(self, vw):
        """Bind this window to the given workspace and build menus/hotkeys."""
        self.vw = vw
        vw._viv_gui = self
        # DEV: hijack the workspace's vprint so that they get routed to the UI canvas
        # and not out to the stdout
        vw.vprint = self.vprint
        viv_base.VivEventDist.__init__(self, vw=vw)
        vq_app.VQMainCmdWindow.__init__(self, 'Vivisect', vw)

        self.vqAddMenuField('&File.Open', self._menuFileOpen)
        self.vqAddMenuField('&File.Save', self._menuFileSave)
        self.vqAddMenuField('&File.Save As', self._menuFileSaveAs)
        self.vqAddMenuField('&File.Save to Server', self._menuFileSaveServer)
        self.vqAddMenuField('&File.Quit', self.close)
        self.vqAddMenuField('&Edit.&Preferences', self._menuEditPrefs)
        self.vqAddMenuField('&View.&Exports', self._menuViewExports)
        self.vqAddMenuField('&View.&Functions', self._menuViewFunctions)
        self.vqAddMenuField('&View.&Imports', self._menuViewImports)
        self.vqAddMenuField('&View.&Names', self._menuViewNames)
        self.vqAddMenuField('&View.&Memory', self._menuViewMemory)
        self.vqAddMenuField('&View.&Function Graph', self._menuViewFuncGraph)
        self.vqAddMenuField('&View.&Strings', self._menuViewStrings)
        self.vqAddMenuField('&View.&Structures', self._menuViewStructs)
        self.vqAddMenuField('&View.&Segments', self._menuViewSegments)
        self.vqAddMenuField('&View.&Symboliks', self._menuViewSymboliks)
        self.vqAddMenuField('&View.&Layouts.&Set Default', self._menuViewLayoutsSetDefault)
        self.vqAddMenuField('&View.&Layouts.&Save', self._menuViewLayoutsSave)
        self.vqAddMenuField('&View.&Layouts.&Load', self._menuViewLayoutsLoad)
        self.vqAddMenuField('&Share.Share Workspace', self._menuShareWorkspace)
        self.vqAddMenuField('&Share.Connect to Shared Workspace', self._menuShareConnect)
        self.vqAddMenuField('&Share.Connect To Workspace Server', self._menuShareConnectServer)
        self.vqAddMenuField('&Tools.&Python', self._menuToolsPython)
        self.vqAddMenuField('&Tools.&Debug', self._menuToolsDebug)
        self.vqAddMenuField('&Tools.&Analysis.&Reanalyze Switchcase', self._menuToolsReSwitchCase)
        self.vqAddMenuField('&Tools.&Structures.Add Namespace', self._menuToolsStructNames)
        self.vqAddMenuField('&Tools.&Structures.New', self._menuToolsUStructNew)
        # Dynamic menus rebuild their entries from the callback each time.
        self.vqAddDynMenu('&Tools.&Structures.&Edit', self._menuToolsUStructEdit)
        self.vqAddDynMenu('&Tools.&Va Sets', self._menuToolsVaSets)
        self.vqAddMenuField('&Window.&Fullscreen', self._menuWindowFullscreen)
        self.vqAddMenuField('&Window.&Maximized', self._menuWindowMaximize)
        self.vqAddMenuField('&Window.&Normal', self._menuWindowNormal)

        self.vw.vprint('Welcome to Vivisect (Qt Edition)!')
        self.vw.vprint('Random Tip: %s' % viv_q_tips.getRandomTip())
        if len(self.vqGetDockWidgets()) == 0:
            self.vw.vprint('\n')
            #self.vw.vprint(' ')
            self.vw.vprint('Looks like you have an empty layout!')
            self.vw.vprint('Use View->Layouts->Load and select vivisect/qt/default.lyt')
        fname = os.path.basename(self.vw.getMeta('StorageName', 'Unknown'))
        self.setWindowTitle('Vivisect: %s' % fname)
        self.windowState = QtCore.Qt.WindowNoState

        # Keyboard shortcuts for the common file operations.
        self.addHotKey('ctrl+o', 'file:open')
        self.addHotKeyTarget('file:open', self._menuFileOpen)
        self.addHotKey('ctrl+s', 'file:save')
        self.addHotKeyTarget('file:save', self._menuFileSave)
        self.addHotKey('ctrl+S', 'file:saveas')
        self.addHotKeyTarget('file:saveas', self._menuFileSaveAs)
        self.addHotKey('ctrl+meta+C', 'file:connecttoserver')
        self.addHotKeyTarget('file:connecttoserver', self._menuShareConnectServer)
        self.addHotKey('ctrl+w', 'file:quit')
        self.addHotKeyTarget('file:quit', self.close)

    def vprint(self, msg, addnl=True):
        """Write msg to the GUI canvas (installed as the workspace vprint)."""
        # ripped and modded from envi/cli.py
        self.vw.canvas.write(msg)
        if addnl:
            self.vw.canvas.write('\n')

    def getLocation(self, va):
        """Print details about the workspace location at va to the canvas."""
        loctup = self.vw.getLocation(va)
        if loctup is None:
            self.vw.vprint('Location not found!')
        else:
            name = loc_type_names.get(loctup[L_LTYPE], 'Unspecified')
            self.vw.vprint('\nVA: %s' % hex(loctup[L_VA]))
            self.vw.vprint(' Size: %d' % loctup[L_SIZE])
            self.vw.vprint(' Type: %s' % name)
            self.vw.vprint(' Info: %s' % str(loctup[L_TINFO]))
            self.vw.vprint(' Repr: %s' % self.vw.reprLocation(loctup)[:64])

    def setVaName(self, va, parent=None):
        """Prompt the user for a (unique) name and apply it to va."""
        if parent is None:
            parent = self
        curname = self.vw.getName(va)
        if curname is None:
            curname = ''
        name, ok = QInputDialog.getText(parent, 'Enter...', 'Name', text=curname)
        if ok:
            name = str(name)
            # Refuse duplicate names rather than silently re-pointing one.
            if self.vw.vaByName(name):
                raise Exception('Duplicate Name: %s' % name)
            self.vw.makeName(va, name)

    def setVaComment(self, va, parent=None):
        """Prompt for a single-line comment on va."""
        if parent is None:
            parent = self
        curcomment = self.vw.getComment(va)
        if curcomment is None:
            curcomment = ''
        comment, ok = QInputDialog.getText(parent, 'Enter...', 'Comment', text=curcomment)
        if ok:
            self.vw.setComment(va, str(comment))

    def setVaMultilineComment(self, va, parent=None):
        """Prompt for a multi-line comment on va."""
        if parent is None:
            parent = self
        curcomment = self.vw.getComment(va)
        if curcomment is None:
            curcomment = ''
        comment, ok = QInputDialog.getMultiLineText(parent, 'Enter...', 'Comment', text=curcomment)
        if ok:
            self.vw.setComment(va, str(comment))

    def addVaXref(self, va, parent=None):
        """Prompt for a target expression and add a code xref va -> target."""
        if parent is None:
            parent = self
        xtova, ok = QInputDialog.getText(parent, 'Enter...', 'Make Code Xref 0x%x -> ' % va)
        if ok:
            try:
                val = self.vw.parseExpression(str(xtova))
                if self.vw.isValidPointer(val):
                    self.vw.addXref(va, val, REF_CODE)
                else:
                    self.vw.vprint("Invalid Expression: %s (%s)" % (xtova, val))
            except Exception as e:
                self.vw.vprint(repr(e))

    def reanalyzeSwitchCase(self, va, parent=None):
        """Prompt for a timeout and re-run switch-case analysis on va."""
        if parent is None:
            parent = self
        timestr, ok = QInputDialog.getText(parent, 'Re-Analyze Switchcase', 'Enter Timeout (secs) for analysis (0x%x): ' % va, text="300")
        if ok:
            try:
                timeout = self.vw.parseExpression(str(timestr))
                symswitch.analyzeJmp(self.vw, va, timeout=timeout)
            except Exception as e:
                self.vw.vprint(repr(e))

    def setFuncLocalName(self, fva, offset, atype, aname):
        """Prompt for and set the name of a function-local at offset in fva."""
        curname = ''
        if self.vw.getFunctionLocal(fva, offset):
            curtype, curname = self.vw.getFunctionLocal(fva, offset)
        newname, ok = QInputDialog.getText(self, 'Enter...', 'Local Name', text=curname)
        if ok:
            self.vw.setFunctionLocal(fva, offset, LSYM_NAME, (atype, str(newname)))

    def setFuncArgName(self, fva, idx, atype, aname):
        """Prompt for and set the name of argument idx of function fva."""
        curname = ''
        if len(self.vw.getFunctionArgs(fva)) > idx:
            curtype, curname = self.vw.getFunctionArgs(fva)[idx]
        newname, ok = QInputDialog.getText(self, 'Enter...', 'Argument Name', text=curname)
        if ok:
            self.vw.setFunctionArg(fva, idx, atype, str(newname))

    def showFuncCallGraph(self, fva):
        """Open a floating call-graph view for function fva."""
        callview = viv_q_funcviews.FuncCallsView(self.vw)
        callview.functionSelected(fva)
        callview.show()
        self.vqDockWidget(callview, floating=True)

    def makeStruct(self, va, parent=None):
        """Prompt for a structure type and apply it at va.

        Returns the chosen structure name, or None if cancelled.
        """
        if parent is None:
            parent = self
        sname = vs_qt.selectStructure(self.vw.vsbuilder, parent=parent)
        if sname is not None:
            self.vw.makeStructure(va, sname)
        return sname

    def addBookmark(self, va, parent=None):
        """Prompt for a bookmark name and record (va, name) in the Bookmarks va set."""
        if parent is None:
            parent = self
        bname, ok = QInputDialog.getText(parent, 'Enter...', 'Bookmark Name')
        if ok:
            self.vw.setVaSetRow('Bookmarks', (va, str(bname)))

    def getMemoryWidgets(self):
        # All currently-open memory view dock widgets (may be empty).
        return self.views.get('VQVivMemoryView', [])

    def getMemWidgetsByName(self, name='viv', firstonly=True):
        '''
        Returns a list of Memory View Widgets with the given name.
        If "firstonly" is True, only return the first one or None(not a list)
        Returns a tuple of (Widget, DockWidget). The "Widget" is obtained from
        the DockWidget, but they both have different powers.
        '''
        # NOTE(review): `logger` is not defined in this module's visible
        # imports — presumably provided by the `vqt.common` star import;
        # confirm before relying on it.
        logger.debug("getWindowsByName(%r, firstonly=%r)", name, firstonly)
        out = []
        for vqDW in self.getMemoryWidgets():
            w = vqDW.widget()
            if w.getEnviNavName() == name:
                if firstonly:
                    return w, vqDW
                out.append((w,vqDW))
        if firstonly: # if firstonly and we don't have one, return None
            return None
        return out

    def getFuncGraphs(self):
        # All currently-open function-graph dock widgets (may be empty).
        return self.views.get('VQVivFuncgraphView', [])

    def getFuncGraphsByName(self, name='FuncGraph0', firstonly=True):
        '''
        Returns a list of Dock Widgets which have a "getEnviNavName"
        This includes MemoryViews and FuncGraphs
        '''
        logger.debug("getFuncGraphsByName()")
        out = []
        for vqDW in self.getFuncGraphs():
            w = vqDW.widget()
            if name != w.getEnviNavName():
                continue
            if firstonly:
                return w, vqDW
            out.append((w, vqDW))
        if firstonly: # if firstonly and we don't have one, return None
            return None
        return out

    def sendMemWidgetTo(self, va, wname='viv', firstonly=False):
        '''
        Tells the named Envi Nav Widget to navigate to the given VA
        '''
        logger.debug("sendMemWidgetsTo(0x%x, wname=%r)", va, wname)
        for win in self.getMemWidgetsByName(wname, firstonly=False):
            w, vqFW = win
            logger.debug("sending %r to %r", w, hex(va))
            w.enviNavGoto(hex(va))
            if firstonly:
                break
        return True

    def sendFuncGraphTo(self, va, wname='FuncGraph0', firstonly=False):
        '''
        Tells the named Envi Nav Widget to navigate to the given VA
        '''
        logger.debug("sendFuncGraphTo(0x%x, wname=%r)", va, wname)
        for win in self.getFuncGraphsByName(wname, firstonly=False):
            w, vqFW = win
            logger.debug("sending %r to %r", w, hex(va))
            w.enviNavGoto(hex(va))
            if firstonly:
                break
        return True

    def getCliBar(self):
        '''
        Returns the CLI Bar object
        '''
        for c in self.children():
            if isinstance(c, vq_cli.VQCli):
                return c

    def getCliText(self):
        '''
        Get the text from the GUI's CLI Bar (at the bottom)
        '''
        cli = self.getCliBar()
        return cli.input.text()

    def setCliText(self, text):
        '''
        Set the text in the GUI's CLI Bar (at the bottom)
        '''
        logger.debug("setCliText(%r)" % text)
        cli = self.getCliBar()
        cli.input.setText(text)

    def _menuEditPrefs(self):
        # Open a tabbed config editor over the viv and vdb config trees.
        configs = []
        configs.append(('Vivisect', self.vw.config.viv))
        configs.append(('Vdb', self.vw.config.vdb))
        self._cfg_widget = e_q_config.EnviConfigTabs(configs)
        self._cfg_widget.show()

    def _menuToolsUStructNew(self):
        # Open a fresh user-structure editor in a floating dock.
        u = viv_q_ustruct.UserStructEditor(self.vw)
        w = self.vqDockWidget(u, floating=True)
        w.resize(600, 600)

    def _menuToolsUStructEdit(self, name=None):
        # Dyn-menu callback: with no name, return the menu entries;
        # with a name, open that structure for editing.
        if name is None:
            return self.vw.getUserStructNames()
        u = viv_q_ustruct.UserStructEditor(self.vw, name=name)
        w = self.vqDockWidget(u, floating=True)
        w.resize(600, 600)

    def _menuToolsVaSets(self, name=None):
        # Dyn-menu callback: with no name, return the va set names;
        # with a name, open a view on that va set.
        if name is None:
            return self.vw.getVaSetNames()
        view = viv_q_views.VQVivVaSetView(self.vw, self, name)
        self.vqDockWidget(view)

    def delFunction(self, fva, parent=None):
        """Delete function fva after a Yes/No confirmation dialog."""
        if parent is None:
            parent = self
        yn, ok = QInputDialog.getItem(self, 'Delete Function', 'Confirm:', ('No', 'Yes'), 0, False)
        if ok and yn == 'Yes':
            self.vw.delFunction(fva)

    def vqInitDockWidgetClasses(self):
        """Register every dockable view class this window can instantiate."""
        exprloc = e_expr.MemoryExpressionLocals(self.vw, symobj=self.vw)
        exprloc['vw'] = self.vw
        exprloc['vwqgui'] = self
        exprloc['vprint'] = self.vw.vprint
        self.vqAddDockWidgetClass(viv_q_views.VQVivExportsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivFunctionsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivNamesView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivImportsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivSegmentsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivStringsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_views.VQVivStructsView, args=(self.vw, self))
        self.vqAddDockWidgetClass(vq_python.VQPythonView, args=(exprloc, self))
        self.vqAddDockWidgetClass(viv_q_memory.VQVivMemoryView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_funcgraph.VQVivFuncgraphView, args=(self.vw, self))
        self.vqAddDockWidgetClass(viv_q_symboliks.VivSymbolikFuncPane, args=(self.vw, self))

    # NOTE(review): bare `@idlethread` here (vs `@vq_main.idlethread` used
    # below) presumably comes from the `vqt.common` star import — confirm.
    @idlethread
    def vqRestoreGuiSettings(self, settings, guid=None):
        '''
        Restores GUI settings (size/layout/views) based on:
        * GUID
        * Filename(s)
        * Default Layout
        If workspace is connected to a server, we wait for a GUID to be present before proceeding
        '''
        if self.vw.server and not guid:
            # wait until the GUID has been loaded from the remote workspace before continuing
            self.vw._load_guid.wait()
        if not guid:
            guid = self.vw.getVivGuid()
        logger.debug("vqRestoreGuiSettings() -> guid=%r vw.server=%r", guid, self.vw.server)
        logger.debug("attempting to load GUI settings based on GUID: %s", guid)
        dwcls = settings.value('%s/DockClasses' % guid)
        state = settings.value('%s/DockState' % guid)
        geom = settings.value('%s/DockGeometry' % guid)
        stub = '%s/' % guid
        # Fall back from GUID-keyed settings to filename-keyed settings...
        if compat_isNone(dwcls):
            names = list(self.vw.filemeta.keys())
            names.sort()
            name = '+'.join(names)
            logger.debug("attempting to load GUI settings based on Filename(s): %r", name)
            dwcls = settings.value('%s/DockClasses' % name)
            state = settings.value('%s/DockState' % name)
            geom = settings.value('%s/DockGeometry' % name)
            stub = '%s/' % name
        # ...and finally to the unkeyed default layout.
        if compat_isNone(dwcls):
            logger.debug("loading default GUI settings")
            dwcls = settings.value('DockClasses')
            state = settings.value('DockState')
            geom = settings.value('DockGeometry')
            stub = ''
        if not compat_isNone(dwcls):
            for i, clsname in enumerate(dwcls):
                name = 'VQDockWidget%d' % i
                try:
                    tup = self.vqBuildDockWidget(str(clsname), floating=False)
                    if tup is not None:
                        d, obj = tup
                        d.setObjectName(name)
                        d.vqRestoreState(settings, name, stub)
                        d.show()
                except Exception as e:
                    self.vw.vprint('Error Building: %s: %s' % (clsname, e))
        # Once dock widgets are loaded, we can restoreState
        if not compat_isNone(state):
            self.restoreState(state)
        if not compat_isNone(geom):
            self.restoreGeometry(geom)
        # Just get all the resize activities done...
        vq_main.eatevents()
        for w in self.vqGetDockWidgets():
            w.show()
        return True

    def vqSaveGuiSettings(self, settings):
        """Persist the current dock layout under both GUID and filename keys."""
        dock_classes = []
        guid = self.vw.getVivGuid()
        names = list(self.vw.filemeta.keys())
        names.sort()
        vivname = '+'.join(names)
        # Enumerate the current dock windows and set
        # their names by their list order...
        for i, w in enumerate(self.vqGetDockWidgets()):
            widget = w.widget()
            dock_classes.append(widget.__class__.__name__)
            name = 'VQDockWidget%d' % i
            w.setObjectName(name)
            w.vqSaveState(settings, '%s/%s' % (guid, name))
            w.vqSaveState(settings, '%s/%s' % (vivname, name))
        geom = self.saveGeometry()
        state = self.saveState()
        # first store for this specific workspace
        settings.setValue('%s/DockClasses' % guid, dock_classes)
        settings.setValue('%s/DockGeometry' % guid, geom)
        settings.setValue('%s/DockState' % guid, state)
        # next store for this filename
        settings.setValue('%s/DockClasses' % vivname, dock_classes)
        settings.setValue('%s/DockGeometry' % vivname, geom)
        settings.setValue('%s/DockState' % vivname, state)
        # don't store the default. that should be saved manually

    def vqGetDockWidgetsByName(self, name='viv', firstonly=False):
        '''
        Get list of DockWidgets matching a given name (default is 'viv').
        Returns a list of tuples (window, DockWidget)
        If firstonly==True, returns the first tuple, not a list.
        '''
        out = []
        for vqDW in self.vqGetDockWidgets():
            w = vqDW.widget()
            if hasattr(w, 'getEnviNavName') and w.getEnviNavName() == name:
                if firstonly:
                    return w, vqDW
                out.append((w,vqDW))
        return out

    def _menuToolsDebug(self):
        # Launch vdb attached to this GUI.
        viv_vdbext.runVdb(self)

    def _menuFileOpen(self):
        """Prompt for a file, load it into the workspace, and analyze it."""
        # TODO: Add something to change the workspace storage name,
        # and also to list the currently loaded files
        # Right now it'll successively create storage files
        fname = getOpenFileName(self, 'Open...')
        if fname is None or not len(fname):
            return
        self.vw.vprint('Opening %s' % fname)
        self.setWindowTitle('Vivisect: %s' % fname)
        self.vw.loadFromFile(str(fname))
        self.vw.vprint('Analyzing %s' % fname)
        self.vw.analyze()
        self.vw.vprint('%s is ready!' % fname)

    @vq_main.workthread
    def _menuFileSave(self, fullsave=False, filename=None):
        """Save the workspace on a worker thread (refuses a plain local
        save when connected to a remote server)."""
        if self.vw.server and filename is None:
            self.vw.vprint("Connected to remote workspace, not saving locally.")
            self.vw.vprint("Use 'File->Save As' to create a local backup copy of the workspace.")
            return
        # duplicate filename, since saveWorkspace() with None as the filename
        # forces a local save
        fname = filename
        if not fname:
            fname = self.vw.getMeta("StorageName")
        self.vw.vprint('Saving workspace... (%r)' % fname)
        try:
            self.vw.saveWorkspace(fullsave=fullsave, filename=filename)
        except Exception as e:
            self.vw.vprint(str(e))
        else:
            self.vw.vprint('complete!')

    def _menuFileSaveAs(self):
        fname = getSaveFileName(self, 'Save As...')
        if fname is None or not len(fname):
            return
        self._menuFileSave(fullsave=True, filename=fname)

    def _menuFileSaveServer(self):
        viv_q_remote.saveToServer(self.vw, parent=self)

    def _menuViewLayoutsLoad(self):
        """Load a saved dock layout from an INI file."""
        fname = getOpenFileName(self, 'Load Layout')
        # NOTE(review): other file handlers here also bail on an empty
        # string (`not len(fname)`); consider the same check.
        if fname is None:
            return
        settings = QtCore.QSettings(fname, QtCore.QSettings.IniFormat)
        self.vqRestoreGuiSettings(settings)

    def _menuViewLayoutsSave(self):
        """Save the current dock layout to an INI file."""
        fname = getSaveFileName(self, 'Save Layout')
        if fname is None or not len(fname):
            return
        settings = QtCore.QSettings(fname, QtCore.QSettings.IniFormat)
        self.vqSaveGuiSettings(settings)

    def _menuViewLayoutsSetDefault(self):
        # Persist the current layout as the application-wide default.
        vq_app.VQMainCmdWindow.vqSaveGuiSettings(self, self._vq_settings)

    def _menuToolsReSwitchCase(self):
        """Pick a timed-out switchcase branch and re-run its analysis."""
        timeoutSwitches = self.vw.getVaSetRows('SwitchCases_TimedOut')
        dynd = DynamicDialog('Reanalze Switchcase')
        dynd.addComboBox('select', title='Select Switchcase Branch', itemlist=\
            ['0x%x: (failed at %d secs)' % (va, tosec) for va, tosec in timeoutSwitches], \
            dfltidx=0)
        dynd.addIntHexField('timeout', title='Timeout (some analysis can be very long)', dflt=300)
        results = dynd.prompt()
        if not results:
            return
        timeout = results.get('timeout')
        # Combo entries are "0xVA: (failed at N secs)"; parse the va back out.
        vastr, _ = results.get('select').split(':',1)
        va = int(vastr, 16)
        symswitch.analyzeJmp(self.vw, va, timeout=timeout)

    def _menuToolsStructNames(self):
        """Prompt for and register an additional structure namespace."""
        nsinfo = vs_qt.selectStructNamespace()
        if nsinfo is not None:
            nsname, modname = nsinfo
            self.vw.vprint('Adding struct namespace: %s' % nsname)
            self.vw.addStructureModule(nsname, modname)

    def _menuShareWorkspace(self):
        """Start sharing this workspace over the network."""
        self.vw.vprint('Sharing workspace...')
        daemon = viv_share.shareWorkspace(self.vw)
        self.vw.vprint('Workspace Listening Port: %d' % daemon.port)
        self.vw.vprint('Clients may now connect to your host on port %d' % daemon.port)

    def _menuShareConnect(self):
        viv_q_remote.openSharedWorkspace(self.vw, parent=self)

    def _menuShareConnectServer(self):
        viv_q_remote.openServerAndWorkspace(self.vw, parent=self)

    def _menuToolsPython(self):
        self.vqBuildDockWidget('VQPythonView', area=QtCore.Qt.RightDockWidgetArea)

    # Menu handlers below simply open the corresponding docked view.
    def _menuViewStrings(self):
        self.newStringsView()

    def _menuViewStructs(self):
        self.newStructsView()

    def _menuViewSegments(self):
        self.newSegmentsView()

    def _menuViewImports(self):
        self.newImportsView()

    def _menuViewExports(self):
        self.newExportsView()

    def _menuViewFunctions(self):
        self.newFunctionsView()

    def _menuViewNames(self):
        self.newNamesView()

    def _menuViewMemory(self):
        self.newMemoryView()

    def _menuViewFuncGraph(self):
        self.newFuncGraphView()

    def _menuViewSymboliks(self):
        self.newSymbolikFuncView()

    # The new*View methods build their dock widget on the GUI thread.
    @idlethread
    def newPythonView(self, floating=False):
        self.vqBuildDockWidget('VQPythonView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newStringsView(self, floating=False):
        self.vqBuildDockWidget('VQVivStringsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newStructsView(self, floating=False):
        self.vqBuildDockWidget('VQVivStructsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newSegmentsView(self, floating=False):
        self.vqBuildDockWidget('VQVivSegmentsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newImportsView(self, floating=False):
        self.vqBuildDockWidget('VQVivImportsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newExportsView(self, floating=False):
        self.vqBuildDockWidget('VQVivExportsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newFunctionsView(self, floating=False):
        self.vqBuildDockWidget('VQVivFunctionsView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newNamesView(self, floating=False):
        self.vqBuildDockWidget('VQVivNamesView', floating=floating, area=QtCore.Qt.RightDockWidgetArea)

    @idlethread
    def newMemoryView(self, name='viv', floating=False):
        dock, widget = self.vqBuildDockWidget('VQVivMemoryView', floating=floating, area=QtCore.Qt.TopDockWidgetArea)
        widget.setMemWindowName(name)

    @idlethread
    def newFuncGraphView(self, name=None, floating=False):
        dock, widget = self.vqBuildDockWidget('VQVivFuncgraphView', floating=floating, area=QtCore.Qt.TopDockWidgetArea)
        if name is not None:
            widget.setMemWindowName(name)

    @idlethread
    def newSymbolikFuncView(self, floating=False):
        self.vqBuildDockWidget('VivSymbolikFuncPane', floating=floating, area=QtCore.Qt.TopDockWidgetArea)

    def _menuWindowFullscreen(self):
        # Toggle: if already fullscreen, drop back to normal.
        if not self.windowState & QtCore.Qt.WindowFullScreen:
            self.windowState = QtCore.Qt.WindowFullScreen
            self.showFullScreen()
        else:
            self._menuWindowNormal()

    def _menuWindowMaximize(self):
        if not self.windowState & QtCore.Qt.WindowMaximized:
            self.windowState = QtCore.Qt.WindowMaximized
            self.showMaximized()

    def _menuWindowNormal(self):
        if not self.windowState & QtCore.Qt.WindowNoState:
            self.windowState = QtCore.Qt.WindowNoState
            self.showNormal()

    @vq_main.idlethread
    def _ve_fireEvent(self, event, edata):
        # Marshal workspace event delivery onto the GUI thread.
        return viv_base.VivEventDist._ve_fireEvent(self, event, edata)
@vq_main.idlethread
def runqt(vw, closeme=None):
    '''
    Instantiate and show a Vivisect QT main window for the given
    workspace when a main thread is already running, optionally
    closing a previous window. Returns the new main window.
    '''
    mainwin = VQVivMainWindow(vw)
    viv_extensions.loadExtensions(vw, mainwin)
    mainwin.show()
    if closeme:
        closeme.close()
    return mainwin
def main(vw):
    '''
    Start the QT application, show the Vivisect main window for the
    given workspace, and block in the event loop until the GUI exits.
    '''
    vq_main.startup(css=vq_colors.qt_matrix)
    mainwin = VQVivMainWindow(vw)
    viv_extensions.loadExtensions(vw, mainwin)
    mainwin.show()
    vq_main.main()
if __name__ == '__main__':
    # Standalone entry point: optionally load a workspace given as argv[1].
    vw = viv_cli.VivCli()
    if len(sys.argv) == 2:
        vw.loadWorkspace(sys.argv[1])
    main(vw)
| {
"content_hash": "e15ed5a743e24348122b9ca7932ed6c5",
"timestamp": "",
"source": "github",
"line_count": 739,
"max_line_length": 138,
"avg_line_length": 37.48443843031123,
"alnum_prop": 0.6249954875275261,
"repo_name": "atlas0fd00m/vivisect",
"id": "b18a38e043b315cda55ffed7436d29b156348efa",
"size": "27701",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vivisect/qt/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "167795"
},
{
"name": "CSS",
"bytes": "15980"
},
{
"name": "Makefile",
"bytes": "355"
},
{
"name": "Python",
"bytes": "17710506"
},
{
"name": "Shell",
"bytes": "476"
}
],
"symlink_target": ""
} |
import ctypes
import json
import locale
import multiprocessing
import os
import platform
import textwrap
import sys
from contextlib import redirect_stdout
from datetime import datetime
from io import StringIO
from subprocess import check_output, PIPE, CalledProcessError
import llvmlite.binding as llvmbind
from llvmlite import __version__ as llvmlite_version
from numba import cuda as cu, __version__ as version_number
from numba.cuda import cudadrv
from numba.cuda.cudadrv.driver import driver as cudriver
from numba.cuda.cudadrv.runtime import runtime as curuntime
from numba.core import config
# psutil is optional; record whether it imported so the probes below can
# degrade gracefully to shell/procfs fallbacks.
_psutil_import = False
try:
    import psutil
except ImportError:
    pass
else:
    _psutil_import = True

__all__ = ['get_sysinfo', 'display_sysinfo']

# Keys of a `sysinfo` dictionary
# Time info
_start, _start_utc, _runtime = 'Start', 'Start UTC', 'Runtime'
_numba_version = 'Numba Version'
# Hardware info
_machine = 'Machine'
_cpu_name, _cpu_count = 'CPU Name', 'CPU Count'
_cpus_allowed, _cpus_list = 'CPUs Allowed', 'List CPUs Allowed'
_cpu_features = 'CPU Features'
_cfs_quota, _cfs_period = 'CFS Quota', 'CFS Period',
_cfs_restrict = 'CFS Restriction'
_mem_total, _mem_available = 'Mem Total', 'Mem Available'
# OS info
_platform_name, _platform_release = 'Platform Name', 'Platform Release'
_os_name, _os_version = 'OS Name', 'OS Version'
_os_spec_version = 'OS Specific Version'
_libc_version = 'Libc Version'
# Python info
_python_comp = 'Python Compiler'
_python_impl = 'Python Implementation'
_python_version = 'Python Version'
_python_locale = 'Python Locale'
# LLVM info
_llvmlite_version = 'llvmlite Version'
_llvm_version = 'LLVM Version'
# CUDA info
_cu_dev_init = 'CUDA Device Init'
_cu_drv_ver = 'CUDA Driver Version'
_cu_rt_ver = 'CUDA Runtime Version'
_cu_nvidia_bindings = 'NVIDIA CUDA Bindings'
_cu_nvidia_bindings_used = 'NVIDIA CUDA Bindings In Use'
_cu_detect_out, _cu_lib_test = 'CUDA Detect Output', 'CUDA Lib Test'
# SVML info
_svml_state, _svml_loaded = 'SVML State', 'SVML Lib Loaded'
_llvm_svml_patched = 'LLVM SVML Patched'
_svml_operational = 'SVML Operational'
# Threading layer info
_tbb_thread, _tbb_error = 'TBB Threading', 'TBB Threading Error'
_openmp_thread, _openmp_error = 'OpenMP Threading', 'OpenMP Threading Error'
_openmp_vendor = 'OpenMP vendor'
_wkq_thread, _wkq_error = 'Workqueue Threading', 'Workqueue Threading Error'
# Numba info
_numba_env_vars = 'Numba Env Vars'
# Conda info
_conda_build_ver, _conda_env_ver = 'Conda Build', 'Conda Env'
_conda_platform, _conda_python_ver = 'Conda Platform', 'Conda Python Version'
_conda_root_writable = 'Conda Root Writable'
# Packages info
_inst_pkg = 'Installed Packages'
# Psutil info
_psutil = 'Psutil Available'
# Errors and warnings
_errors = 'Errors'
_warnings = 'Warnings'
# Error and warning log
# NOTE: module-level mutable logs, appended to by get_sysinfo() and
# get_os_spec_info() and surfaced under the 'Errors'/'Warnings' keys.
_error_log = []
_warning_log = []
def get_os_spec_info(os_name):
    """Gather OS-specific details (memory sizes, CPU affinity, CFS quota,
    OS version strings) for *os_name* ('Linux', 'Windows' or 'Darwin') and
    return them as a dict keyed by the module-level key constants.

    Uses psutil when importable; otherwise falls back to reading procfs
    files or spawning shell commands and scraping their output.  Failures
    are appended to the module-level error/warning logs, not raised.
    """
    # Linux man page for `/proc`:
    # http://man7.org/linux/man-pages/man5/proc.5.html
    # Windows documentation for `wmic OS`:
    # https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-operatingsystem
    # MacOS man page for `sysctl`:
    # https://www.unix.com/man-page/osx/3/sysctl/
    # MacOS man page for `vm_stat`:
    # https://www.unix.com/man-page/osx/1/vm_stat/

    # Marker subclasses let the gathering loop below distinguish commands
    # whose output must be joined onto one line, and files to read directly.
    class CmdBufferOut(tuple):
        buffer_output_flag = True

    class CmdReadFile(tuple):
        read_file_flag = True

    shell_params = {
        'Linux': {
            'cmd': (
                CmdReadFile(('/sys/fs/cgroup/cpuacct/cpu.cfs_quota_us',)),
                CmdReadFile(('/sys/fs/cgroup/cpuacct/cpu.cfs_period_us',)),
            ),
            'cmd_optional': (
                CmdReadFile(('/proc/meminfo',)),
                CmdReadFile(('/proc/self/status',)),
            ),
            'kwds': {
                # output string fragment -> result dict key
                'MemTotal:': _mem_total,
                'MemAvailable:': _mem_available,
                'Cpus_allowed:': _cpus_allowed,
                'Cpus_allowed_list:': _cpus_list,
                '/sys/fs/cgroup/cpuacct/cpu.cfs_quota_us': _cfs_quota,
                '/sys/fs/cgroup/cpuacct/cpu.cfs_period_us': _cfs_period,
            },
        },
        'Windows': {
            'cmd': (),
            'cmd_optional': (
                CmdBufferOut(('wmic', 'OS', 'get', 'TotalVirtualMemorySize')),
                CmdBufferOut(('wmic', 'OS', 'get', 'FreeVirtualMemory')),
            ),
            'kwds': {
                # output string fragment -> result dict key
                'TotalVirtualMemorySize': _mem_total,
                'FreeVirtualMemory': _mem_available,
            },
        },
        'Darwin': {
            'cmd': (),
            'cmd_optional': (
                ('sysctl', 'hw.memsize'),
                # NOTE(review): ('vm_stat') is a plain str, not a 1-tuple.
                # check_output accepts a bare program name on POSIX, so this
                # works, but ('vm_stat',) was probably intended -- confirm
                # before changing.
                ('vm_stat'),
            ),
            'kwds': {
                # output string fragment -> result dict key
                'hw.memsize:': _mem_total,
                'free:': _mem_available,
            },
            'units': {
                _mem_total: 1,  # Size is given in bytes.
                _mem_available: 4096,  # Size is given in 4kB pages.
            },
        },
    }

    os_spec_info = {}
    params = shell_params.get(os_name, {})
    cmd_selected = params.get('cmd', ())

    if _psutil_import:
        vm = psutil.virtual_memory()
        os_spec_info.update({
            _mem_total: vm.total,
            _mem_available: vm.available,
        })
        p = psutil.Process()
        cpus_allowed = p.cpu_affinity() if hasattr(p, 'cpu_affinity') else []
        if cpus_allowed:
            os_spec_info[_cpus_allowed] = len(cpus_allowed)
            os_spec_info[_cpus_list] = ' '.join(str(n) for n in cpus_allowed)
    else:
        _warning_log.append(
            "Warning (psutil): psutil cannot be imported. "
            "For more accuracy, consider installing it.")
        # Fallback to internal heuristics
        cmd_selected += params.get('cmd_optional', ())

    # Assuming the shell cmd returns a unique (k, v) pair per line
    # or a unique (k, v) pair spread over several lines:
    # Gather output in a list of strings containing a keyword and some value.
    output = []
    for cmd in cmd_selected:
        if hasattr(cmd, 'read_file_flag'):
            # Open file within Python
            if os.path.exists(cmd[0]):
                try:
                    with open(cmd[0], 'r') as f:
                        out = f.readlines()
                    if out:
                        # Prefix the first line with the file path so the
                        # keyword matching below can key on the path itself.
                        out[0] = ' '.join((cmd[0], out[0]))
                        output.extend(out)
                except OSError as e:
                    _error_log.append(f'Error (file read): {e}')
                    continue
            else:
                _warning_log.append('Warning (no file): {}'.format(cmd[0]))
                continue
        else:
            # Spawn a subprocess
            try:
                out = check_output(cmd, stderr=PIPE)
            except (OSError, CalledProcessError) as e:
                _error_log.append(f'Error (subprocess): {e}')
                continue
            if hasattr(cmd, 'buffer_output_flag'):
                # Collapse multi-line output onto a single line so the
                # keyword and its value end up in the same string.
                out = b' '.join(line for line in out.splitlines()) + b'\n'
            output.extend(out.decode().splitlines())

    # Extract (k, output) pairs by searching for keywords in output
    kwds = params.get('kwds', {})
    for line in output:
        match = kwds.keys() & line.split()
        if match and len(match) == 1:
            k = kwds[match.pop()]
            os_spec_info[k] = line
        elif len(match) > 1:
            print(f'Ambiguous output: {line}')

    # Try to extract something meaningful from output string
    # (Renamed from `format` to avoid shadowing the builtin.)
    def format_cfs():
        # CFS restrictions
        split = os_spec_info.get(_cfs_quota, '').split()
        if split:
            os_spec_info[_cfs_quota] = float(split[-1])
        split = os_spec_info.get(_cfs_period, '').split()
        if split:
            os_spec_info[_cfs_period] = float(split[-1])
        if os_spec_info.get(_cfs_quota, -1) != -1:
            cfs_quota = os_spec_info.get(_cfs_quota, '')
            cfs_period = os_spec_info.get(_cfs_period, '')
            runtime_amount = cfs_quota / cfs_period
            os_spec_info[_cfs_restrict] = runtime_amount

    def format_optional():
        # Memory
        units = {_mem_total: 1024, _mem_available: 1024}
        units.update(params.get('units', {}))
        for k in (_mem_total, _mem_available):
            digits = ''.join(d for d in os_spec_info.get(k, '') if d.isdigit())
            os_spec_info[k] = int(digits or 0) * units[k]
        # Accessible CPUs
        split = os_spec_info.get(_cpus_allowed, '').split()
        if split:
            n = split[-1]
            n = n.split(',')[-1]
            # Cpus_allowed is a hex mask; the popcount is the CPU count.
            os_spec_info[_cpus_allowed] = str(bin(int(n or 0, 16))).count('1')
        split = os_spec_info.get(_cpus_list, '').split()
        if split:
            os_spec_info[_cpus_list] = split[-1]

    try:
        format_cfs()
        # Memory/CPU parsing is only needed when psutil did not already
        # provide the values directly.
        if not _psutil_import:
            format_optional()
    except Exception as e:
        _error_log.append(f'Error (format shell output): {e}')

    # Call OS specific functions
    os_specific_funcs = {
        'Linux': {
            _libc_version: lambda: ' '.join(platform.libc_ver())
        },
        'Windows': {
            _os_spec_version: lambda: ' '.join(
                s for s in platform.win32_ver()),
        },
        'Darwin': {
            _os_spec_version: lambda: ''.join(
                i or ' ' for s in tuple(platform.mac_ver()) for i in s),
        },
    }
    key_func = os_specific_funcs.get(os_name, {})
    os_spec_info.update({k: f() for k, f in key_func.items()})

    return os_spec_info
def get_sysinfo():
    """Collect hardware, OS, Python, LLVM, CUDA, SVML, threading-layer and
    package information and return it as a dict keyed by the module-level
    key constants.

    Every probe is best-effort: failures are appended to the module-level
    error/warning logs (returned under the 'Errors'/'Warnings' keys)
    rather than raised.
    """
    # Gather the information that shouldn't raise exceptions
    sys_info = {
        _start: datetime.now(),
        _start_utc: datetime.utcnow(),
        _machine: platform.machine(),
        _cpu_name: llvmbind.get_host_cpu_name(),
        _cpu_count: multiprocessing.cpu_count(),
        _platform_name: platform.platform(aliased=True),
        _platform_release: platform.release(),
        _os_name: platform.system(),
        _os_version: platform.version(),
        _python_comp: platform.python_compiler(),
        _python_impl: platform.python_implementation(),
        _python_version: platform.python_version(),
        _numba_env_vars: {k: v for (k, v) in os.environ.items()
                          if k.startswith('NUMBA_')},
        _numba_version: version_number,
        _llvm_version: '.'.join(str(i) for i in llvmbind.llvm_version_info),
        _llvmlite_version: llvmlite_version,
        _psutil: _psutil_import,
    }

    # CPU features
    try:
        feature_map = llvmbind.get_host_cpu_features()
    except RuntimeError as e:
        _error_log.append(f'Error (CPU features): {e}')
    else:
        features = sorted([key for key, value in feature_map.items() if value])
        sys_info[_cpu_features] = ' '.join(features)

    # Python locale
    # On MacOSX, getdefaultlocale can raise. Check again if Py > 3.7.5
    try:
        # If $LANG is unset, getdefaultlocale() can return (None, None), make
        # sure we can encode this as strings by casting explicitly.
        sys_info[_python_locale] = '.'.join([str(i) for i in
                                             locale.getdefaultlocale()])
    except Exception as e:
        _error_log.append(f'Error (locale): {e}')

    # CUDA information
    try:
        cu.list_devices()[0]  # will a device initialise?
    except Exception as e:
        sys_info[_cu_dev_init] = False
        msg_not_found = "CUDA driver library cannot be found"
        msg_disabled_by_user = "CUDA is disabled"
        msg_end = " or no CUDA enabled devices are present."
        # Typo fixed: was "intialisation".
        msg_generic_problem = "CUDA device initialisation problem."
        msg = getattr(e, 'msg', None)
        if msg is not None:
            if msg_not_found in msg:
                err_msg = msg_not_found + msg_end
            elif msg_disabled_by_user in msg:
                err_msg = msg_disabled_by_user + msg_end
            else:
                err_msg = msg_generic_problem + " Message:" + msg
        else:
            err_msg = msg_generic_problem + " " + str(e)

        # Best effort error report
        _warning_log.append("Warning (cuda): %s\nException class: %s" %
                            (err_msg, str(type(e))))
    else:
        try:
            sys_info[_cu_dev_init] = True

            # Capture the textual device report printed by cu.detect().
            output = StringIO()
            with redirect_stdout(output):
                cu.detect()
            sys_info[_cu_detect_out] = output.getvalue()
            output.close()

            sys_info[_cu_drv_ver] = cudriver.get_version()
            rtver = ctypes.c_int(0)
            curuntime.cudaRuntimeGetVersion(ctypes.byref(rtver))
            sys_info[_cu_rt_ver] = rtver.value

            output = StringIO()
            with redirect_stdout(output):
                cudadrv.libs.test(sys.platform, print_paths=False)
            sys_info[_cu_lib_test] = output.getvalue()
            output.close()

            try:
                from cuda import cuda  # noqa: F401
                nvidia_bindings_available = True
            except ImportError:
                nvidia_bindings_available = False
            sys_info[_cu_nvidia_bindings] = nvidia_bindings_available

            nv_binding_used = bool(cudadrv.driver.USE_NV_BINDING)
            sys_info[_cu_nvidia_bindings_used] = nv_binding_used
        except Exception as e:
            _warning_log.append(
                "Warning (cuda): Probing CUDA failed "
                "(device and driver present, runtime problem?)\n"
                f"(cuda) {type(e)}: {e}")

    # SVML information
    # Replicate some SVML detection logic from numba.__init__ here.
    # If SVML load fails in numba.__init__ the splitting of the logic
    # here will help diagnosing the underlying issue.
    svml_lib_loaded = True
    try:
        if sys.platform.startswith('linux'):
            llvmbind.load_library_permanently("libsvml.so")
        elif sys.platform.startswith('darwin'):
            llvmbind.load_library_permanently("libsvml.dylib")
        elif sys.platform.startswith('win'):
            llvmbind.load_library_permanently("svml_dispmd")
        else:
            svml_lib_loaded = False
    except Exception:
        svml_lib_loaded = False
    func = getattr(llvmbind.targets, "has_svml", None)
    sys_info[_llvm_svml_patched] = func() if func else False
    sys_info[_svml_state] = config.USING_SVML
    sys_info[_svml_loaded] = svml_lib_loaded
    sys_info[_svml_operational] = all((
        sys_info[_svml_state],
        sys_info[_svml_loaded],
        sys_info[_llvm_svml_patched],
    ))

    # Check which threading backends are available.
    def parse_error(e, backend):
        # parses a linux based error message, this is to provide feedback
        # and hide user paths etc
        try:
            path, problem, symbol = [x.strip() for x in e.msg.split(':')]
            extn_dso = os.path.split(path)[1]
            if backend in extn_dso:
                return "%s: %s" % (problem, symbol)
        except Exception:
            pass
        return "Unknown import problem."

    try:
        # check import is ok, this means the DSO linkage is working
        from numba.np.ufunc import tbbpool  # NOQA
        # check that the version is compatible, this is a check performed at
        # runtime (well, compile time), it will also ImportError if there's
        # a problem.
        from numba.np.ufunc.parallel import _check_tbb_version_compatible
        _check_tbb_version_compatible()
        sys_info[_tbb_thread] = True
    except ImportError as e:
        # might be a missing symbol due to e.g. tbb libraries missing
        sys_info[_tbb_thread] = False
        sys_info[_tbb_error] = parse_error(e, 'tbbpool')

    try:
        from numba.np.ufunc import omppool
        sys_info[_openmp_thread] = True
        sys_info[_openmp_vendor] = omppool.openmp_vendor
    except ImportError as e:
        sys_info[_openmp_thread] = False
        sys_info[_openmp_error] = parse_error(e, 'omppool')

    try:
        from numba.np.ufunc import workqueue  # NOQA
        sys_info[_wkq_thread] = True
    except ImportError as e:
        # BUG FIX: this branch previously set True on ImportError, wrongly
        # reporting the workqueue backend as available when it failed to
        # import (now mirrors the tbb/openmp branches above).
        sys_info[_wkq_thread] = False
        sys_info[_wkq_error] = parse_error(e, 'workqueue')

    # Look for conda and installed packages information
    cmd = ('conda', 'info', '--json')
    try:
        conda_out = check_output(cmd)
    except Exception as e:
        _warning_log.append(f'Warning: Conda not available.\n Error was {e}\n')
        # Conda is not available, try pip list to list installed packages
        cmd = (sys.executable, '-m', 'pip', 'list')
        try:
            reqs = check_output(cmd)
        except Exception as e:
            _error_log.append(f'Error (pip): {e}')
        else:
            sys_info[_inst_pkg] = reqs.decode().splitlines()
    else:
        jsond = json.loads(conda_out.decode())
        keys = {
            'conda_build_version': _conda_build_ver,
            'conda_env_version': _conda_env_ver,
            'platform': _conda_platform,
            'python_version': _conda_python_ver,
            'root_writable': _conda_root_writable,
        }
        for conda_k, sysinfo_k in keys.items():
            sys_info[sysinfo_k] = jsond.get(conda_k, 'N/A')

        # Get info about packages in current environment
        cmd = ('conda', 'list')
        try:
            conda_out = check_output(cmd)
        except CalledProcessError as e:
            _error_log.append(f'Error (conda): {e}')
        else:
            data = conda_out.decode().splitlines()
            sys_info[_inst_pkg] = [l for l in data if not l.startswith('#')]

    sys_info.update(get_os_spec_info(sys_info[_os_name]))
    sys_info[_errors] = _error_log
    sys_info[_warnings] = _warning_log
    sys_info[_runtime] = (datetime.now() - sys_info[_start]).total_seconds()

    return sys_info
def display_sysinfo(info=None, sep_pos=45):
    """Pretty-print the system information report to stdout.

    info: dict from get_sysinfo(); collected on demand when None.
    sep_pos: column at which the ':' separator is aligned.
    """
    # Marker subclasses tell the rendering loop at the bottom how to print
    # each template entry (sequence of lines, key/value map, ...).
    class DisplayMap(dict):
        display_map_flag = True

    class DisplaySeq(tuple):
        display_seq_flag = True

    class DisplaySeqMaps(tuple):
        display_seqmaps_flag = True

    if info is None:
        info = get_sysinfo()

    fmt = f'%-{sep_pos}s : %-s'
    MB = 1024**2
    # Each entry is either a 1-tuple (printed verbatim), a 2-tuple
    # (printed through `fmt`), or one of the Display* markers above.
    template = (
        ("-" * 80,),
        ("__Time Stamp__",),
        ("Report started (local time)", info.get(_start, '?')),
        ("UTC start time", info.get(_start_utc, '?')),
        ("Running time (s)", info.get(_runtime, '?')),
        ("",),
        ("__Hardware Information__",),
        ("Machine", info.get(_machine, '?')),
        ("CPU Name", info.get(_cpu_name, '?')),
        ("CPU Count", info.get(_cpu_count, '?')),
        ("Number of accessible CPUs", info.get(_cpus_allowed, '?')),
        ("List of accessible CPUs cores", info.get(_cpus_list, '?')),
        ("CFS Restrictions (CPUs worth of runtime)",
         info.get(_cfs_restrict, 'None')),
        ("",),
        # Wrap the feature string; continuation lines are indented past the
        # separator column so they line up with the first value line.
        ("CPU Features", '\n'.join(
            ' ' * (sep_pos + 3) + l if i else l
            for i, l in enumerate(
                textwrap.wrap(
                    info.get(_cpu_features, '?'),
                    width=79 - sep_pos
                )
            )
        )),
        ("",),
        ("Memory Total (MB)", info.get(_mem_total, 0) // MB or '?'),
        ("Memory Available (MB)"
         if info.get(_os_name, '') != 'Darwin' or info.get(_psutil, False)
         else "Free Memory (MB)", info.get(_mem_available, 0) // MB or '?'),
        ("",),
        ("__OS Information__",),
        ("Platform Name", info.get(_platform_name, '?')),
        ("Platform Release", info.get(_platform_release, '?')),
        ("OS Name", info.get(_os_name, '?')),
        ("OS Version", info.get(_os_version, '?')),
        ("OS Specific Version", info.get(_os_spec_version, '?')),
        ("Libc Version", info.get(_libc_version, '?')),
        ("",),
        ("__Python Information__",),
        DisplayMap({k: v for k, v in info.items() if k.startswith('Python')}),
        ("",),
        ("__Numba Toolchain Versions__",),
        ("Numba Version", info.get(_numba_version, '?')),
        ("llvmlite Version", info.get(_llvmlite_version, '?')),
        ("",),
        ("__LLVM Information__",),
        ("LLVM Version", info.get(_llvm_version, '?')),
        ("",),
        ("__CUDA Information__",),
        ("CUDA Device Initialized", info.get(_cu_dev_init, '?')),
        ("CUDA Driver Version", info.get(_cu_drv_ver, '?')),
        ("CUDA Runtime Version", info.get(_cu_rt_ver, '?')),
        ("CUDA NVIDIA Bindings Available", info.get(_cu_nvidia_bindings, '?')),
        ("CUDA NVIDIA Bindings In Use",
         info.get(_cu_nvidia_bindings_used, '?')),
        ("CUDA Detect Output:",),
        (info.get(_cu_detect_out, "None"),),
        ("CUDA Libraries Test Output:",),
        (info.get(_cu_lib_test, "None"),),
        ("",),
        ("__SVML Information__",),
        ("SVML State, config.USING_SVML", info.get(_svml_state, '?')),
        ("SVML Library Loaded", info.get(_svml_loaded, '?')),
        ("llvmlite Using SVML Patched LLVM", info.get(_llvm_svml_patched, '?')),
        ("SVML Operational", info.get(_svml_operational, '?')),
        ("",),
        ("__Threading Layer Information__",),
        ("TBB Threading Layer Available", info.get(_tbb_thread, '?')),
        ("+-->TBB imported successfully." if info.get(_tbb_thread, '?')
         else f"+--> Disabled due to {info.get(_tbb_error, '?')}",),
        ("OpenMP Threading Layer Available", info.get(_openmp_thread, '?')),
        (f"+-->Vendor: {info.get(_openmp_vendor, '?')}"
         if info.get(_openmp_thread, False)
         else f"+--> Disabled due to {info.get(_openmp_error, '?')}",),
        ("Workqueue Threading Layer Available", info.get(_wkq_thread, '?')),
        ("+-->Workqueue imported successfully." if info.get(_wkq_thread, False)
         else f"+--> Disabled due to {info.get(_wkq_error, '?')}",),
        ("",),
        ("__Numba Environment Variable Information__",),
        (DisplayMap(info.get(_numba_env_vars, {})) or ('None found.',)),
        ("",),
        ("__Conda Information__",),
        (DisplayMap({k: v for k, v in info.items()
                     if k.startswith('Conda')}) or ("Conda not available.",)),
        ("",),
        ("__Installed Packages__",),
        DisplaySeq(info.get(_inst_pkg, ("Couldn't retrieve packages info.",))),
        ("",),
        ("__Error log__" if info.get(_errors, [])
         else "No errors reported.",),
        DisplaySeq(info.get(_errors, [])),
        ("",),
        ("__Warning log__" if info.get(_warnings, [])
         else "No warnings reported.",),
        DisplaySeq(info.get(_warnings, [])),
        ("-" * 80,),
        ("If requested, please copy and paste the information between\n"
         "the dashed (----) lines, or from a given specific section as\n"
         "appropriate.\n\n"
         "=============================================================\n"
         "IMPORTANT: Please ensure that you are happy with sharing the\n"
         "contents of the information present, any information that you\n"
         "wish to keep private you should remove before sharing.\n"
         "=============================================================\n",),
    )
    # Render each template entry according to its marker/arity.
    for t in template:
        if hasattr(t, 'display_seq_flag'):
            print(*t, sep='\n')
        elif hasattr(t, 'display_map_flag'):
            print(*tuple(fmt % (k, v) for (k, v) in t.items()), sep='\n')
        elif hasattr(t, 'display_seqmaps_flag'):
            for d in t:
                print(*tuple(fmt % ('\t' + k, v) for (k, v) in d.items()),
                      sep='\n', end='\n')
        elif len(t) == 2:
            print(fmt % t)
        else:
            print(*t)
if __name__ == '__main__':
    # Allow running this module directly to print the full system report.
    display_sysinfo()
| {
"content_hash": "22eaf1c66e867d1ee229bba22c376a2d",
"timestamp": "",
"source": "github",
"line_count": 633,
"max_line_length": 85,
"avg_line_length": 37.96998420221169,
"alnum_prop": 0.5492822966507177,
"repo_name": "IntelLabs/numba",
"id": "abc74538de358d689e3f37c7a66c20d08f931433",
"size": "24035",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "numba/misc/numba_sysinfo.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6984"
},
{
"name": "C",
"bytes": "639446"
},
{
"name": "C++",
"bytes": "93702"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "8764393"
},
{
"name": "Shell",
"bytes": "13542"
}
],
"symlink_target": ""
} |
"""
This script is responsible for generating .gclient-xwalk in the top-level
source directory from DEPS.xwalk.
User-configurable values such as |cache_dir| are fetched from .gclient instead.
"""
import logging
import optparse
import os
import pprint
CROSSWALK_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
GCLIENT_ROOT = os.path.dirname(os.path.dirname(CROSSWALK_ROOT))
def ParseGClientConfig():
    """
    Parses the top-level .gclient file (NOT .gclient-xwalk) and returns the
    values set there as a dictionary.
    """
    config = {}
    with open(os.path.join(GCLIENT_ROOT, '.gclient')) as dot_gclient:
        # BUG FIX: exec() on Python 3 only accepts a string/code object;
        # passing the file object directly (as before) raises TypeError
        # there.  Reading the contents first works on Python 2 and 3.
        exec(dot_gclient.read(), config)
    return config
def GenerateGClientXWalk(options):
    """Build .gclient-xwalk from DEPS.xwalk plus user configuration.

    Appends an Android target when XWALK_OS_ANDROID is set, and a
    cache_dir taken from --cache-dir (deprecated) or from .gclient.
    """
    deps_path = os.path.join(CROSSWALK_ROOT, 'DEPS.xwalk')
    with open(deps_path) as deps_file:
        contents = deps_file.read()

    if 'XWALK_OS_ANDROID' in os.environ:
        contents += 'target_os = [\'android\']\n'

    gclient_config = ParseGClientConfig()
    cache_dir = options.cache_dir
    if cache_dir:
        logging.warning('--cache_dir is deprecated and will be removed in '
                        'Crosswalk 8. You should set cache_dir in .gclient '
                        'instead.')
    else:
        cache_dir = gclient_config.get('cache_dir')
    contents += 'cache_dir = %s\n' % pprint.pformat(cache_dir)

    out_path = os.path.join(GCLIENT_ROOT, '.gclient-xwalk')
    with open(out_path, 'w') as gclient_file:
        gclient_file.write(contents)
def main():
    """Parse command-line options and regenerate .gclient-xwalk."""
    parser = optparse.OptionParser()
    # TODO(rakuco): Remove in Crosswalk 8.
    parser.add_option('--cache-dir',
                      help='DEPRECATED Set "cache_dir" in .gclient-xwalk '
                           'to this directory, so that all git '
                           'repositories are cached there.')
    options, _ = parser.parse_args()
    GenerateGClientXWalk(options)
if __name__ == '__main__':
    # Script entry point: regenerate .gclient-xwalk.
    main()
| {
"content_hash": "6962c099f84d390afda9c7c6960b8fa9",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 79,
"avg_line_length": 30.56451612903226,
"alnum_prop": 0.6559366754617414,
"repo_name": "TheDirtyCalvinist/spacewalk",
"id": "340f56f8651c7b864746f1ba25a4a1deacb578c0",
"size": "2082",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tools/generate_gclient-xwalk.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "30701"
},
{
"name": "C++",
"bytes": "1841615"
},
{
"name": "CSS",
"bytes": "485"
},
{
"name": "Java",
"bytes": "986368"
},
{
"name": "JavaScript",
"bytes": "65123"
},
{
"name": "Objective-C",
"bytes": "688"
},
{
"name": "Objective-C++",
"bytes": "16628"
},
{
"name": "Python",
"bytes": "255925"
},
{
"name": "Shell",
"bytes": "5080"
}
],
"symlink_target": ""
} |
import glass
# Import app configs
import configs
# Instantiate the Glass application, reading the OAuth client credentials
# from the local configs module.
app = glass.Application(
    name="Hello",
    client_id=configs.CLIENT_ID,
    client_secret=configs.CLIENT_SECRET
)
@app.subscriptions.login
def login(user):
print "user : %s" % user.token
user.timeline.post(html="Hello <b>World</b>")
if __name__ == '__main__':
    # Serve the app locally on port 8080.
    app.run(port=8080)
| {
"content_hash": "84cc72d139e8c30b1e3f2fc2c72580f9",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 49,
"avg_line_length": 18.526315789473685,
"alnum_prop": 0.6534090909090909,
"repo_name": "SamyPesse/glass.py",
"id": "6aa376cab65dad09a7fc66801ae522aade4f08d5",
"size": "375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/html.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "414"
},
{
"name": "Python",
"bytes": "35455"
}
],
"symlink_target": ""
} |
import plugins
import subprocess
class Plugin(plugins.BasePlugin):
    """Collect Asterisk PBX statistics (active calls, incoming channels,
    registered SIP peers) by shelling out to the asterisk CLI."""
    __name__ = 'asterisk'

    def _shell(self, command):
        # Run a shell pipeline and return its stdout, decoded and with
        # newlines stripped (factored out of the three duplicated
        # Popen/communicate blocks).
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
        return proc.communicate()[0].decode('utf-8').replace("\n", "")

    def run(self, config):
        """Return a dict with 'calls', 'incomingcalls' and 'devices'
        counters scraped from the asterisk CLI output."""
        ip = config.get(__name__, 'sbcip')
        # NOTE(security): `ip` comes from the config file and is
        # interpolated into a shell=True command below; a malicious value
        # could inject shell syntax. Kept as-is to preserve behavior --
        # consider wrapping it with shlex.quote().
        calls = self._shell(
            "sudo asterisk -rx 'core show calls' | grep 'active' | cut -f1 -d ' '")
        incoming = self._shell(
            "sudo asterisk -rx 'core show channels verbose' | cut -c1-15 | grep 'pstn_' | wc -l")
        devices = self._shell(
            "sudo asterisk -rx 'sip show peers' | grep '%s' | wc -l" % (ip))
        return {"calls": calls, "incomingcalls": incoming, "devices": devices}
if __name__ == '__main__':
    # Allow running the plugin standalone for debugging.
    Plugin().execute()
| {
"content_hash": "efb09ace863aa6a2abdc71a93256d955",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 157,
"avg_line_length": 43.90909090909091,
"alnum_prop": 0.6055900621118012,
"repo_name": "vfuse/nixstatsagent",
"id": "b6317f472ebab9ccafe5bec2a548f6f0bf0683e8",
"size": "1012",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nixstatsagent/plugins/asterisk.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "31298"
}
],
"symlink_target": ""
} |
import array
import corepy.arch.x86.isa as x86
import corepy.spre.spe as spe
import corepy.arch.x86.lib.util as util
__doc__ = """
"""
# ------------------------------------------------------------
# 'Type' Classes
# ------------------------------------------------------------
# Type classes implement the operator overloads for a type and hold
# other type-specific information, such as register types and valid
# literal types.
# They are separate from the type so they can be used as mix-ins in
# different contexts, e.g. Variables and Expressions subclasses can
# both share operator semantics by subclassing the same operator
# class.
# Operator classes also provide static interfaces to typed versions of
# the operations.
# Operator methods return an Expression of an appropriate type for the
# operation.
# To always return the same type:
# return SignedWord.expr_class(inst, *(self, other))
# To upcast to the type of the first operand:
# return self.expr_class(inst, *(self, other))
# To upcast to the type of the second operand:
# return other.expr_class(inst, *(self, other))
# Upcasting can be useful when two types of different specificity are
# used in expressions and the more specific type should be
# preserved through the expressions. For instance, the logical
# operators are the base classes of all integer-like types. A logical
# operation, e.g. (a & b), should preserve the most specific type of a
# and b.
def _most_specific(a, b, default = None):
"""
If a and b are from the same hierarcy, return the more specific of
[type(a), type(b)], or the default type if they are from different
hierarchies. If default is None, return type(a), or type(b) if a
does not have a type_cls
"""
if (hasattr(a, 'type_cls') and hasattr(a, 'type_cls')):
if issubclass(b.type_cls, a.type_cls):
return type(b)
elif issubclass(a.type_cls, b.type_cls):
return type(a)
elif default is None:
if hasattr(a, 'type_cls'):
return type(a)
elif hasattr(b, 'type_cls'):
return type(b)
return default
_int_literals = (spe.Immediate, int, long)
class PPCType(spe.Type):
    # Base class for the synthetic-programming types in this module.
    # NOTE(review): the class is named PPCType but delegates active-code
    # management to the corepy x86 ISA package imported at the top of the
    # file -- looks like a port in progress; confirm the intended ISA.
    def _get_active_code(self):
        return x86.get_active_code()

    def _set_active_code(self, code):
        return x86.set_active_code(code)

    # Expose get/set of the active InstructionStream as a property.
    active_code = property(_get_active_code, _set_active_code)
# ------------------------------------------------------------
# General Purpose Register Types
# ------------------------------------------------------------
class BitType(PPCType):
    """Base logical/bitwise type; the operator overloads build typed
    expression nodes from the corresponding ISA instructions."""
    # Lives in a general-purpose register; literals are plain ints.
    register_type_id = 'gp'
    literal_types = (int,long)

    def _upcast(self, other, inst):
        # Build an expression whose result type is the more specific of the
        # two operand types (see _most_specific above).
        return inst.ex(self, other, type_cls = _most_specific(self, other))

    def __and__(self, other):
        if isinstance(other, BitType):
            return self._upcast(other, x86.andx)
        elif isinstance(other, _int_literals):
            # Immediate form of AND.
            return x86.andi.ex(self, other, type_cls = self.var_cls)
        raise Exception('__and__ not implemented for %s and %s' % (type(self), type(other)))
    and_ = staticmethod(__and__)

    def __lshift__(self, other):
        if isinstance(other, BitType):
            return x86.slwx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__lshift__ not implemented for %s and %s' % (type(self), type(other)))
    lshift = staticmethod(__lshift__)

    def __rshift__(self, other):
        if isinstance(other, BitType):
            return x86.srwx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__rshift__ not implemented for %s and %s' % (type(self), type(other)))
    rshift = staticmethod(__rshift__)

    def __or__(self, other):
        if isinstance(other, BitType):
            return self._upcast(other, x86.orx)
        elif isinstance(other, _int_literals):
            return x86.ori.ex(self, other, type_cls = self.var_cls)
        raise Exception('__or__ not implemented for %s and %s' % (type(self), type(other)))
    or_ = staticmethod(__or__)

    def __xor__(self, other):
        if isinstance(other, BitType):
            return self._upcast(other, x86.xorx)
        elif isinstance(other, _int_literals):
            return x86.xori.ex(self, other, type_cls = self.var_cls)
        raise Exception('__xor__ not implemented for %s and %s' % (type(self), type(other)))
    xor = staticmethod(__xor__)

    def _set_literal_value(self, value):
        # Load a 32-bit literal into self.reg in (up to) two instructions.
        # Put the lower 16 bits into r-temp
        self.code.add(x86.addi(self.reg, 0, value))

        # Addis r-temp with the upper 16 bits (shifted add immediate) and
        # put the result in r-target
        if (value & 0x7FFF) != value:
            self.code.add(x86.addis(self.reg, self.reg, ((value + 32768) >> 16)))
        return

    def copy_register(self, other):
        # Register-to-register copy via add-immediate-zero.
        return self.code.add(x86.addi(self, other, 0))
# ------------------------------
# Integer Types
# ------------------------------
class UnsignedWordType(BitType):
    """Unsigned word (32-bit integer) arithmetic built on BitType."""

    def __add__(self, other):
        if isinstance(other, UnsignedWordType):
            return x86.addx.ex(self, other, type_cls = self.var_cls)
        elif isinstance(other, (spe.Immediate, int)):
            return self.expr_cls(x86.addi, *(self, other))
        raise Exception('__add__ not implemented for %s and %s' % (type(self), type(other)))
    add = staticmethod(__add__)

    def __div__(self, other):
        # BUG FIX: the isinstance check previously tested SignedWordType
        # (copy-paste from SignedWordType.__div__) even though divwux is
        # the unsigned divide; and `raise NotImplemented` raised a
        # TypeError (NotImplemented is not an exception).  Both corrected,
        # matching the error-raising style used throughout this file.
        if isinstance(other, UnsignedWordType):
            return self.expr_cls(x86.divwux, *(self, other))
        raise Exception('__div__ not implemented for %s and %s' % (type(self), type(other)))
    div = staticmethod(__div__)

    def __mul__(self, other):
        if isinstance(other, UnsignedWordType):
            return self.expr_cls(x86.mullwx, *(self, other))
        elif isinstance(other, (spe.Immediate, int)):
            return self.expr_cls(x86.mulli, *(self, other))
        raise Exception('__mul__ not implemented for %s and %s' % (type(self), type(other)))
    # BUG FIX: was a second `div = staticmethod(__div__)` which redundantly
    # rebound div and never defined a `mul` alias.
    mul = staticmethod(__mul__)
class SignedWordType(BitType):
    """Signed word (32-bit integer) arithmetic built on BitType."""

    def __add__(self, other):
        if isinstance(other, SignedWordType):
            return x86.addx.ex(self, other, type_cls = self.var_cls)
        elif isinstance(other, (spe.Immediate, int)):
            return self.expr_cls(x86.addi, *(self, other))
        raise Exception('__add__ not implemented for %s and %s' % (type(self), type(other)))
    add = staticmethod(__add__)

    def __div__(self, other):
        if isinstance(other, SignedWordType):
            return self.expr_cls(x86.divwx, *(self, other))
        raise Exception('__div__ not implemented for %s and %s' % (type(self), type(other)))
    div = staticmethod(__div__)

    def __mul__(self, other):
        if isinstance(other, SignedWordType):
            return self.expr_cls(x86.mullwx, *(self, other))
        elif isinstance(other, (spe.Immediate, int)):
            return self.expr_cls(x86.mulli, *(self, other))
        raise Exception('__mul__ not implemented for %s and %s' % (type(self), type(other)))
    # BUG FIX: was a duplicate `div = staticmethod(__div__)`; the multiply
    # alias was never defined.
    mul = staticmethod(__mul__)

    def __neg__(self):
        # NOTE(review): every other operator in this file builds the
        # expression via `.ex(...)`; calling x86.negx directly here may be
        # an oversight -- confirm before changing.
        return x86.negx(self, type_cls = self.var_cls)

    def __sub__(self, other):
        if isinstance(other, SignedWordType):
            return self.expr_cls(x86.subfx, other, self)  # swap a and b
        # BUG FIX: the error message previously said '__add__'.
        raise Exception('__sub__ not implemented for %s and %s' % (type(self), type(other)))
    sub = staticmethod(__sub__)
# ------------------------------------------------------------
# Floating Point Register Types
# ------------------------------------------------------------
class SingleFloatType(PPCType):
    """Single-precision floating-point arithmetic (floating-point
    registers; literals are Python floats)."""
    register_type_id = 'fp'
    literal_types = (float,)

    def __abs__(self):
        return x86.fabsx.ex(self, type_cls = self.var_cls)
    abs = staticmethod(__abs__)

    def __add__(self, other):
        if isinstance(other, SingleFloatType):
            return x86.faddsx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__add__ not implemented for %s and %s' % (type(self), type(other)))
    add = staticmethod(__add__)

    def __div__(self, other):
        if isinstance(other, SingleFloatType):
            return x86.fdivsx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__div__ not implemented for %s and %s' % (type(self), type(other)))
    div = staticmethod(__div__)

    def __mul__(self, other):
        if isinstance(other, SingleFloatType):
            return x86.fmulsx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__mul__ not implemented for %s and %s' % (type(self), type(other)))
    mul = staticmethod(__mul__)

    def __neg__(self):
        return x86.fnegx.ex(self, type_cls = self.var_cls)
    neg = staticmethod(__neg__)

    def __sub__(self, other):
        if isinstance(other, SingleFloatType):
            return x86.fsubsx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__sub__ not implemented for %s and %s' % (type(self), type(other)))
    sub = staticmethod(__sub__)

    def _set_literal_value(self, value):
        # BUG FIX: previously read `self.value` instead of the `value`
        # parameter, inconsistent with BitType/DoubleFloatType and wrong
        # when the attribute does not match the requested literal.
        storage = array.array('f', (float(value),))
        self.code.add_storage(storage)

        # Load the literal's address into a temporary register, then load
        # the single-precision value from memory.
        r_storage = self.code.acquire_register()
        addr = Bits(storage.buffer_info()[0], reg = r_storage)
        self.code.add(x86.lfs(self.reg, addr.reg, 0))
        self.code.release_register(r_storage)
        return

    def copy_register(self, other):
        return self.code.add(x86.fmr(self, other))
class DoubleFloatType(PPCType):
    """Double-precision floating point type: arithmetic operators plus
    explicit load/store helpers (lfd/lfdx, stfd/stfdx)."""
    register_type_id = 'fp'
    literal_types = (float,)
    def __abs__(self):
        return x86.fabsx.ex(self, type_cls = self.var_cls)
    abs = staticmethod(__abs__)
    def __add__(self, other):
        if isinstance(other, DoubleFloatType):
            return x86.faddx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__add__ not implemented for %s and %s' % (type(self), type(other)))
    add = staticmethod(__add__)
    def __div__(self, other):
        if isinstance(other, DoubleFloatType):
            return x86.fdivx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__div__ not implemented for %s and %s' % (type(self), type(other)))
    div = staticmethod(__div__)
    def __mul__(self, other):
        if isinstance(other, DoubleFloatType):
            return x86.fmulx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__mul__ not implemented for %s and %s' % (type(self), type(other)))
    mul = staticmethod(__mul__)
    def __neg__(self):
        return x86.fnegx.ex(self, type_cls = self.var_cls)
    neg = staticmethod(__neg__)
    def __sub__(self, other):
        if isinstance(other, DoubleFloatType):
            return x86.fsubx.ex(self, other, type_cls = self.var_cls)
        raise Exception('__sub__ not implemented for %s and %s' % (type(self), type(other)))
    sub = staticmethod(__sub__)
    def _set_literal_value(self, value):
        # Place the literal in a double array and load it via self.load().
        storage = array.array('d', (float(value),))
        self.code.add_storage(storage)
        self.load(storage.buffer_info()[0])
        # r_storage = self.code.acquire_register()
        # addr = Bits(storage.buffer_info()[0], reg = r_storage)
        # self.code.add(x86.lfd(self.reg, addr.reg, 0))
        # self.code.release_register(r_storage)
        return
    def copy_register(self, other):
        # Register-to-register move for floating point values.
        return self.code.add(x86.fmrx(self, other))
    def load(self, addr, offset = 0):
        """Load a double from memory at addr + offset into this register.

        addr/offset may be constants or register-backed values.
        """
        # If addr is a constant, create a variable and store the value
        if not issubclass(type(addr), spe.Type):
            r_storage = self.code.acquire_register()
            addr = Bits(addr, reg = r_storage)
        else:
            r_storage = None
        # If offset is a constant, use lfd, otherwise use lfdx
        if issubclass(type(offset), spe.Type):
            self.code.add(x86.lfdx(self, addr, offset))
        else:
            # TODO: Check size of offset to ensure it fits in the immediate field
            self.code.add(x86.lfd(self, addr, offset))
        if r_storage is not None:
            self.code.release_register(r_storage)
        return
    def store(self, addr, offset = 0):
        """Store this register's double to memory at addr + offset.

        addr/offset may be constants or register-backed values.
        """
        # If addr is a constant, create a variable and store the value
        if not issubclass(type(addr), spe.Type):
            r_storage = self.code.acquire_register()
            addr = Bits(addr, reg = r_storage)
        else:
            r_storage = None
        # If offset is a constant, use stfd, otherwise use stfdx
        if issubclass(type(offset), spe.Type):
            self.code.add(x86.stfdx(self, addr, offset))
        else:
            # TODO: Check size of offset to ensure it fits in the immediate field
            self.code.add(x86.stfd(self, addr, offset))
        if r_storage is not None:
            self.code.release_register(r_storage)
        return
# ------------------------------
# Floating Point Free Functions
# ------------------------------
class _float_function(object):
"""
Callable object that performs basic type checking and dispatch for
floating point operations.
"""
def __init__(self, name, single_func, double_func):
self.name = name
self.single_func = single_func
self.double_func = double_func
return
def __call__(self, *operands, **koperands):
a = operands[0]
for op in operands[1:]:
if op.var_cls != a.var_cls:
raise Exception('Types for all operands must be the same')
if isinstance(a, SingleFloatType):
return self.single_func.ex(*operands, **{'type_cls': SingleFloat})
elif isinstance(a, DoubleFloatType):
return self.double_func.ex(*operands, **{'type_cls': DoubleFloat})
raise Exception(self.name + ' is not implemeneted for ' + str(type(a)))
# Free functions dispatching fused multiply-add/sub and sqrt to the
# single- or double-precision instruction based on operand type.
fmadd = _float_function('fmadd', x86.fmaddsx, x86.fmaddx)
fmsub = _float_function('fmsub', x86.fmsubsx, x86.fmsubx)
fnmadd = _float_function('fnmadd', x86.fnmaddsx, x86.fnmaddx)
fnmsub = _float_function('fnmsub', x86.fnmsubsx, x86.fnmsubx)
fsqrt = _float_function('fsqrt', x86.fsqrtsx, x86.fsqrtx)
# ------------------------------------------------------------
# User Types
# ------------------------------------------------------------
# Type classes are mixed-in with Variables and Expressions to form the
# final user types.
def make_user_type(name, type_cls, g = None):
    """
    Create a Variable class and an Expression class for a type class.

    This is equivalent to creating two classes and updating the type
    class (except that the Expression class is not added to the global
    namespace):

        class [name](spe.Variable, type_cls):
            type_cls = type_cls
        class [name]Ex(spe.Expression, type_cls):
            type_cls = type_cls
        type_class.var_cls = [name]
        type_class.expr_cls = [name]Ex

    type_cls is added to help determine type precedence among Variables
    and Expressions.

    (note: there's probably a better way to model these hierarchies that
    avoids the type_cls, var_cls, expr_cls references.  But, this works
    and keeping explicit references avoids tricky introspection
    operations)
    """
    # Create the subclasses of Variable and Expression
    var_cls = type(name, (spe.Variable, type_cls), {'type_cls': type_cls})
    expr_cls = type(name + 'Ex', (spe.Expression, type_cls), {'type_cls': type_cls})
    # Update the type class with references to the variable and
    # expression classes
    type_cls.var_cls = var_cls
    type_cls.expr_cls = expr_cls
    # Add the Variable class to the global namespace (caller's namespace
    # if g is supplied, this module's otherwise)
    if g is None: g = globals()
    g[name] = var_cls
    return
_user_types = ( # name, type class
    ('Bits', BitType),
    ('UnsignedWord', UnsignedWordType),
    ('SignedWord', SignedWordType),
    ('SingleFloat', SingleFloatType),
    ('DoubleFloat', DoubleFloatType)
    )

# Generate the public Variable/Expression classes for each type above and
# install the Variable class into this module's namespace.
for t in _user_types:
    make_user_type(*(t + (globals(),)))
# ------------------------------------------------------------
# Unit Tests
# ------------------------------------------------------------
def SimpleTest():
    """
    Just make sure things are working...

    Builds a small instruction stream with and without active code and
    checks the executed results.
    """
    from corepy.arch.x86.platform import Processor, InstructionStream

    code = InstructionStream()
    proc = Processor()

    # Without active code
    a = SignedWord(11, code)
    b = SignedWord(31, reg = code.acquire_register())
    c = SignedWord(reg = code.gp_return)

    byte_mask = Bits(0xFF, code)

    # Set the return register to 31.
    code.add(x86.addi(code.gp_return, 0, 31))

    # c.v = a + SignedWord.cast(b & byte_mask) + 12
    c.v = a + (byte_mask & b) + 12

    if True:
        r = proc.execute(code)
        assert(r == (42 + 12))

    # With active code
    code.reset()

    x86.set_active_code(code)

    a = SignedWord(11)
    b = SignedWord(31)
    c = SignedWord(reg = code.gp_return)

    byte_mask = Bits(0xFF)

    c.v = a + (b & byte_mask)

    x86.set_active_code(None)
    r = proc.execute(code)
    # code.print_code()
    assert(r == 42)
    return
def TestBits():
    """Exercise the Bits shift/and/or/xor operators; result must be 0xBEAF."""
    from corepy.arch.x86.platform import Processor, InstructionStream

    code = InstructionStream()
    proc = Processor()

    x86.set_active_code(code)

    b = Bits(0xB0)
    e = Bits(0xE0000)
    a = Bits(0xCA)
    f = Bits(0x5)

    x = Bits(0, reg = code.gp_return)

    mask = Bits(0xF)
    byte = Bits(8) # 8 bits
    halfbyte = Bits(4)

    f.v = (a & mask) ^ f
    # Assemble the nibbles of 0xBEAF from the pieces above.
    x.v = (b << byte) | (e >> byte) | ((a & mask) << halfbyte) | (f | mask)
    r = proc.execute(code)
    assert(r == 0xBEAF)
    return
def TestFloatingPoint(float_type):
    """Exercise arithmetic, fused ops, and load/store for a float type.

    float_type is SingleFloat or DoubleFloat; the arithmetic result must
    cancel to 0.0 and the stores must overwrite the data array in place.
    """
    from corepy.arch.x86.platform import Processor, InstructionStream

    code = InstructionStream()
    proc = Processor()
    x86.set_active_code(code)

    x = float_type(1.0)
    y = float_type(2.0)
    z = float_type(3.0)

    a = float_type()
    b = float_type()
    c = float_type()
    d = float_type()

    # Create some data
    data = array.array('d', (1.0, 2.0, 3.0, 4.0))
    addr = data.buffer_info()[0]

    # Load from addr
    a.load(addr)

    # Load from addr with idx in register
    offset = Bits(8)
    b.load(data.buffer_info()[0], offset)

    # Load from addr with constant idx
    c.load(data.buffer_info()[0], 8*2)

    # Load from addr with addr as a register
    reg_addr = Bits(addr)
    d.load(reg_addr)

    r = float_type(reg = code.fp_return)

    r.v = (x + y) / y
    # Fused multiply ops cancel pairwise...
    r.v = fmadd(a, y, z + z) + fnmadd(a, y, z + z) + fmsub(x, y, z) + fnmsub(x, y, z)

    x.v = -x
    # ...and so do the remaining terms, leaving 0.0 in the return register.
    r.v = r + x - x + a + b - c + d - d

    # Store from addr
    a.v = 11.0
    a.store(addr)

    # Store from addr with idx in register
    offset = Bits(8)
    b.v = 12.0
    b.store(data.buffer_info()[0], offset)

    # Store from addr with constant idx
    c.v = 13.0
    c.store(data.buffer_info()[0], 8*2)

    # Store from addr with addr as a register
    d.v = 14.0
    reg_addr = UnsignedWord(addr)
    reg_addr.v = reg_addr + 8 * 3
    d.store(reg_addr)

    r = proc.execute(code, mode='fp')
    assert(r == 0.0)
    assert(data[0] == 11.0)
    assert(data[1] == 12.0)
    assert(data[2] == 13.0)
    assert(data[3] == 14.0)
    return
if __name__=='__main__':
    # Run the unit tests above through the shared RunTest harness.
    from corepy.arch.x86.lib.util import RunTest
    RunTest(SimpleTest)
    RunTest(TestFloatingPoint, SingleFloat)
    RunTest(TestFloatingPoint, DoubleFloat)
    RunTest(TestBits)
| {
"content_hash": "9bce1a3132f7975434559255221c48e3",
"timestamp": "",
"source": "github",
"line_count": 592,
"max_line_length": 98,
"avg_line_length": 30.846283783783782,
"alnum_prop": 0.620338426154099,
"repo_name": "matthiaskramm/corepy",
"id": "b26b4a74b177a2b80626fe31de605fed37731e9d",
"size": "20483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corepy/arch/x86/types/x86_types.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "291898"
},
{
"name": "C++",
"bytes": "2256"
},
{
"name": "Python",
"bytes": "2114744"
},
{
"name": "Shell",
"bytes": "145"
}
],
"symlink_target": ""
} |
import pytest
from raiden.tests.utils.transfer import (
direct_transfer,
)
from raiden.transfer.state_change import (
ActionTransferDirect,
ReceiveTransferDirect,
)
from raiden.transfer.events import (
EventTransferReceivedSuccess,
EventTransferSentSuccess,
)
from raiden.tests.utils.log import (
get_all_state_changes,
get_all_state_events,
)
@pytest.mark.parametrize('blockchain_type', ['tester'])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('number_of_nodes', [2])
@pytest.mark.parametrize('privatekey_seed', ['test_initiator_log_directransfer_action:{}'])
def test_initiator_log_directransfer_action(
        raiden_chain,
        token_addresses,
        deposit):
    """ The action that start a direct transfer must be logged in the WAL. """
    token_address = token_addresses[0]
    amount = int(deposit / 2.)
    identifier = 13

    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    direct_transfer(
        app0,
        app1,
        token_address,
        amount,
        identifier,
    )

    # The initiator's transaction log must contain the ActionTransferDirect
    # state change that started the transfer.
    app0_state_changes = get_all_state_changes(app0.raiden.transaction_log)
    direct_transfers = [
        state_change
        for _, state_change in app0_state_changes
        if isinstance(state_change, ActionTransferDirect)
    ]

    assert direct_transfers[0] == ActionTransferDirect(
        identifier,
        amount,
        token_address,
        app1.raiden.address,
    )
@pytest.mark.parametrize('blockchain_type', ['tester'])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('number_of_nodes', [2])
@pytest.mark.parametrize('privatekey_seed', ['test_initiator_log_directransfer_success:{}'])
def test_initiator_log_directransfer_success(
        raiden_chain,
        token_addresses,
        deposit):
    """A completed direct transfer must log EventTransferSentSuccess in the
    initiator's WAL."""
    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    token_address = token_addresses[0]
    identifier = 7
    amount = int(deposit / 2.)

    direct_transfer(
        app0,
        app1,
        token_address,
        amount,
        identifier,
    )

    # Collect the success events recorded by the initiator.
    initiator_events = get_all_state_events(app0.raiden.transaction_log)
    sent_success_events = [
        event.event_object
        for event in initiator_events
        if isinstance(event.event_object, EventTransferSentSuccess)
    ]

    assert sent_success_events[0] == EventTransferSentSuccess(
        identifier,
        amount,
        app1.raiden.address,
    )
@pytest.mark.parametrize('blockchain_type', ['tester'])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('number_of_nodes', [2])
@pytest.mark.parametrize('privatekey_seed', ['test_target_log_directransfer_message:{}'])
def test_target_log_directransfer_message(
        raiden_chain,
        token_addresses,
        deposit):
    """The target must log a ReceiveTransferDirect state change in its WAL."""
    token_address = token_addresses[0]
    amount = int(deposit / 2.)
    identifier = 21

    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    direct_transfer(
        app0,
        app1,
        token_address,
        amount,
        identifier,
    )

    # The receiver's transaction log must contain the ReceiveTransferDirect
    # state change for the incoming transfer.
    app1_state_changes = get_all_state_changes(app1.raiden.transaction_log)
    received_transfers = [
        state_change
        for _, state_change in app1_state_changes
        if isinstance(state_change, ReceiveTransferDirect)
    ]

    assert received_transfers[0] == ReceiveTransferDirect(
        identifier,
        amount,
        token_address,
        app0.raiden.address,
    )
@pytest.mark.parametrize('blockchain_type', ['tester'])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('number_of_nodes', [2])
@pytest.mark.parametrize('privatekey_seed', ['test_target_log_directransfer_success:{}'])
def test_target_log_directransfer_successevent(
        raiden_chain,
        token_addresses,
        deposit):
    """A completed direct transfer must log EventTransferReceivedSuccess in
    the target's WAL."""
    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    token_address = token_addresses[0]
    identifier = 23
    amount = int(deposit / 2.)

    direct_transfer(
        app0,
        app1,
        token_address,
        amount,
        identifier,
    )

    # Collect the received-success events recorded by the target.
    target_events = get_all_state_events(app1.raiden.transaction_log)
    received_success_events = [
        event.event_object
        for event in target_events
        if isinstance(event.event_object, EventTransferReceivedSuccess)
    ]

    assert received_success_events[0] == EventTransferReceivedSuccess(
        identifier,
        amount,
        app0.raiden.address,
    )
| {
"content_hash": "1de9396e38ea0d79730524aeb4e920cf",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 92,
"avg_line_length": 28.79746835443038,
"alnum_prop": 0.66,
"repo_name": "tomashaber/raiden",
"id": "667247fc8243875300b230a44256b8b52988b6a4",
"size": "4574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raiden/tests/unit/functional/test_directtransferlog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4536"
},
{
"name": "HTML",
"bytes": "21998"
},
{
"name": "JavaScript",
"bytes": "1996"
},
{
"name": "Makefile",
"bytes": "5184"
},
{
"name": "Python",
"bytes": "1222610"
},
{
"name": "Shell",
"bytes": "4570"
},
{
"name": "TypeScript",
"bytes": "75150"
}
],
"symlink_target": ""
} |
# Auto-generated regression test: run the generic harness for the
# SVR_poly model on the freidman1 dataset against the db2 backend.
from sklearn2sql_heroku.tests.regression import generic as reg_gen


reg_gen.test_model("SVR_poly" , "freidman1" , "db2")
| {
"content_hash": "4f499368807995b7ce49e052d4411792",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 66,
"avg_line_length": 30.5,
"alnum_prop": 0.7459016393442623,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "97e3e48986897a2550317575c2d2112ebf209457",
"size": "122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regression/freidman1/ws_freidman1_SVR_poly_db2_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
} |
""" Sahana Eden Automated Test - HRM001 Create Volunteer Certificate
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from gluon import current
from tests.web2unittest import SeleniumUnitTest
class CreateVolunteerCertificate(SeleniumUnitTest):
    def test_hrm001_create_volunteer_certificate(self):
        """
        @case: HRM001
        @description: Create Volunteer Certificate

        @TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
        @Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
        """
        print "\n"

        # Log in as admin and open the certificate-creation form.
        self.login(account="admin", nexturl="vol/certificate/create")

        self.create("hrm_certificate",
                    [( "name",
                       "Advance First Aid ATest"
                       ),
                     ( "organisation_id",
                       "Timor-Leste Red Cross Society",
                       ),
                     ( "expiry",
                       "12"
                       ),
                     ]
                    )

        # Check if add button is present on the page. Click it if found.
        add_btn = self.browser.find_elements_by_id("show-add-btn")
        if len(add_btn) > 0:
            add_btn[0].click()

        # Only attach a skill requirement when the deployment tracks skills.
        if current.deployment_settings.get_hrm_use_skills():
            self.create("hrm_certificate_skill",
                        [( "skill_id",
                           "Hazmat"),
                         ( "competency_id",
                           "Level 2"),
                         ]
                        )
| {
"content_hash": "0f577d119b30dd494f5ce42357848c28",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 110,
"avg_line_length": 40.838235294117645,
"alnum_prop": 0.594526467410875,
"repo_name": "waidyanatha/sambro-eden",
"id": "f4e8ac5cbec1cdf841190c10a312e3b2e0346271",
"size": "2777",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "modules/tests/volunteer/create_volunteer_certificate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1162174"
},
{
"name": "JavaScript",
"bytes": "15697414"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "25187444"
},
{
"name": "Shell",
"bytes": "697"
},
{
"name": "XSLT",
"bytes": "1966322"
}
],
"symlink_target": ""
} |
import yyafl.fields
from yyafl.util import flatatt, smart_unicode
from yyafl.exception import IncompatibleWidget
class Widget(object):
    """Base class for all form widgets.

    Holds the widget's HTML attributes and an optional reference to the
    Field it renders for.
    """
    is_hidden = False

    def __init__(self, attrs = None):
        self.field = None
        # Copy caller-supplied attributes so later mutation is isolated.
        self.attrs = {} if attrs is None else attrs.copy()

    def render(self, name, value, attrs = None):
        """
        Returns this widget as an HTML entry.  Subclasses must override.
        """
        raise NotImplementedError

    def set_field(self, field):
        """Store *field* if given; always return the current field."""
        if field:
            self.field = field
        return self.field

    def build_attrs(self, extra_attrs=None, **kwargs):
        """Merge self.attrs, keyword attrs, and extra_attrs (highest priority last)."""
        merged = dict(self.attrs, **kwargs)
        if extra_attrs:
            merged.update(extra_attrs)
        return merged
class Input(Widget):
    """Base class for ``<input>``-style widgets."""
    input_type = None # Subclasses must define this.

    def render(self, name, value, attrs=None):
        # Render as an <input type=... name=... [value=...] /> element;
        # the value attribute is omitted entirely when empty.
        if value is None: value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '': final_attrs['value'] = smart_unicode(value)
        return u'<input%s />' % flatatt(final_attrs)
class TextInput(Input):
    """An ``<input type="text">`` widget."""
    input_type = "text"
class TextArea(Widget):
    """A ``<textarea>`` widget; the value becomes the element body."""
    def render(self, name, value, attrs = None):
        if value is None: value = '' # default value
        final_attrs = self.build_attrs(attrs, name=name)
        return u'<textarea' + flatatt(final_attrs) + u'>' + smart_unicode(value) + u'</textarea>'
class Select(Widget):
    """A ``<select>`` widget; requires the bound field to be a ChoiceField."""
    def _get_value(self, value):
        # Fetch the display value from the list or dict of allowed values:
        # lists use the value itself, dicts map value -> display text.
        values = self.field.allowed_values
        if isinstance(values, list):
            return smart_unicode(value)
        else:
            return smart_unicode(values[value])

    def render(self, name, value, attrs = None):
        if value is None: value = '' # default value
        final_attrs = self.build_attrs(attrs, name=name)
        options = []
        # Only ChoiceField provides the allowed_values this widget needs.
        if not isinstance(self.field, yyafl.fields.ChoiceField):
            raise IncompatibleWidget(u'Widget named %s is incompatible with select' % name)
        for option in self.field.allowed_values:
            option_attrs = {'value' : option }
            # Mark the current value as selected.
            if value == option:
                option_attrs['selected'] = u'selected'
            options.append(u'<option %s>' % flatatt(option_attrs) + self._get_value(option) + u'</option>')
        return u'<select' + flatatt(final_attrs) + u'>' + u''.join(options) + u'</select>'
class HiddenInput(Input):
    """An ``<input type="hidden">`` widget."""
    input_type = "hidden"
    # Flag consumers can check to skip visible rendering (labels, rows).
    is_hidden = True
| {
"content_hash": "04ac7b16182078198f15ffe88b1c0fbe",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 108,
"avg_line_length": 30.476744186046513,
"alnum_prop": 0.5978634109118657,
"repo_name": "miracle2k/yyafl",
"id": "64f0246a33d79966feb1e86a5ccd94e92610d074",
"size": "3981",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "yyafl/widgets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "39539"
}
],
"symlink_target": ""
} |
import os, base64
from PIL import Image
from six import BytesIO
from indicoio.utils.preprocessing import data_preprocess
from .indico_image_base import ImageTest, DIR
class ResizeTests(ImageTest):
    """
    test image resizing
    """
    def test_min_axis_resize(self):
        # Resize so the *shortest* axis becomes 360px while preserving
        # aspect ratio, then decode the base64 result and check dimensions.
        test_image = os.path.normpath(os.path.join(DIR, "data/fear.png"))
        resized_image = data_preprocess(test_image, size=360, min_axis=True)
        image_string = BytesIO(base64.b64decode(resized_image))
        image = Image.open(image_string)
        # Expected (540, 360) implies a 3:2 source image — confirm fixture.
        self.assertEqual(image.size, (540, 360))
| {
"content_hash": "b0bc40ce6ac5d1c8a359f5987b396cad",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 76,
"avg_line_length": 29.15,
"alnum_prop": 0.6946826758147513,
"repo_name": "IndicoDataSolutions/IndicoIo-python",
"id": "ef5377cab3134d230c22eb023a255013cb7d1118",
"size": "583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/image/test_resize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "736"
},
{
"name": "Python",
"bytes": "132663"
}
],
"symlink_target": ""
} |
import json
import webob
from nova import test
from nova.api.openstack import accounts
from nova.auth.manager import User
from nova.tests.api.openstack import fakes
def fake_init(self):
    # Stub for accounts.Controller.__init__: install the fake auth
    # manager instead of constructing real dependencies.
    self.manager = fakes.FakeAuthManager()
def fake_admin_check(self, req):
    # Stub for accounts.Controller._check_admin: always authorize.
    return True
class AccountsTest(test.TestCase):
    """Tests for the v1.0 accounts admin API (get/create/update/delete)."""
    def setUp(self):
        super(AccountsTest, self).setUp()
        self.flags(verbose=True, allow_admin_api=True)
        # Bypass the real controller constructor and admin check.
        self.stubs.Set(accounts.Controller, '__init__',
                       fake_init)
        self.stubs.Set(accounts.Controller, '_check_admin',
                       fake_admin_check)
        fakes.FakeAuthManager.clear_fakes()
        fakes.FakeAuthDatabase.data = {}
        fakes.stub_out_networking(self.stubs)
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_auth(self.stubs)

        # Seed two users and one project ("account") managed by each.
        fakemgr = fakes.FakeAuthManager()
        joeuser = User('id1', 'guy1', 'acc1', 'secret1', False)
        superuser = User('id2', 'guy2', 'acc2', 'secret2', True)
        fakemgr.add_user(joeuser)
        fakemgr.add_user(superuser)
        fakemgr.create_project('test1', joeuser)
        fakemgr.create_project('test2', superuser)

    def test_get_account(self):
        # GET returns the account's id, name, and manager user id.
        req = webob.Request.blank('/v1.0/accounts/test1')
        res = req.get_response(fakes.wsgi_app())
        res_dict = json.loads(res.body)

        self.assertEqual(res_dict['account']['id'], 'test1')
        self.assertEqual(res_dict['account']['name'], 'test1')
        self.assertEqual(res_dict['account']['manager'], 'id1')
        self.assertEqual(res.status_int, 200)

    def test_account_delete(self):
        # DELETE removes the project from the fake auth manager.
        req = webob.Request.blank('/v1.0/accounts/test1')
        req.method = 'DELETE'
        res = req.get_response(fakes.wsgi_app())
        self.assertTrue('test1' not in fakes.FakeAuthManager.projects)
        self.assertEqual(res.status_int, 200)

    def test_account_create(self):
        # PUT on a new account name creates it with the given manager.
        body = dict(account=dict(description='test account',
                                 manager='id1'))
        req = webob.Request.blank('/v1.0/accounts/newacct')
        req.headers["Content-Type"] = "application/json"
        req.method = 'PUT'
        req.body = json.dumps(body)

        res = req.get_response(fakes.wsgi_app())
        res_dict = json.loads(res.body)

        self.assertEqual(res.status_int, 200)
        self.assertEqual(res_dict['account']['id'], 'newacct')
        self.assertEqual(res_dict['account']['name'], 'newacct')
        self.assertEqual(res_dict['account']['description'], 'test account')
        self.assertEqual(res_dict['account']['manager'], 'id1')
        self.assertTrue('newacct' in
                        fakes.FakeAuthManager.projects)
        self.assertEqual(len(fakes.FakeAuthManager.projects.values()), 3)

    def test_account_update(self):
        # PUT on an existing account updates description and manager.
        body = dict(account=dict(description='test account',
                                 manager='id2'))
        req = webob.Request.blank('/v1.0/accounts/test1')
        req.headers["Content-Type"] = "application/json"
        req.method = 'PUT'
        req.body = json.dumps(body)

        res = req.get_response(fakes.wsgi_app())
        res_dict = json.loads(res.body)

        self.assertEqual(res.status_int, 200)
        self.assertEqual(res_dict['account']['id'], 'test1')
        self.assertEqual(res_dict['account']['name'], 'test1')
        self.assertEqual(res_dict['account']['description'], 'test account')
        self.assertEqual(res_dict['account']['manager'], 'id2')
        self.assertEqual(len(fakes.FakeAuthManager.projects.values()), 2)
| {
"content_hash": "a908c51d6ecc47c3bed358ab1a798e7a",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 76,
"avg_line_length": 38.255319148936174,
"alnum_prop": 0.6170745272525028,
"repo_name": "30loops/nova",
"id": "707a2599fef4c78f2873cb213f932d938c0b8b42",
"size": "4227",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/api/openstack/test_accounts.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""BIG-IP® auth module
REST URI
``http://localhost/mgmt/tm/auth/``
GUI Path
``System --> Users``
REST Kind
``tm:auth:*``
"""
from f5.bigip.resource import OrganizingCollection
from f5.bigip.tm.auth.password_policy import Password_Policy
from f5.bigip.tm.auth.user import Users
class Auth(OrganizingCollection):
    """Organizing collection for the BIG-IP ``tm/auth`` endpoint."""
    def __init__(self, tm):
        super(Auth, self).__init__(tm)
        # Sub-collections that may be lazily loaded from this collection.
        self._meta_data['allowed_lazy_attributes'] = [
            Password_Policy,
            Users
        ]
| {
"content_hash": "b6fda063bcddbf95383b6a4722aa2955",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 60,
"avg_line_length": 20.36,
"alnum_prop": 0.6247544204322201,
"repo_name": "wojtek0806/f5-common-python",
"id": "9ce3ee526ca9704cc66d7fef66632cba914cf054",
"size": "1114",
"binary": false,
"copies": "1",
"ref": "refs/heads/0.1",
"path": "f5/bigip/tm/auth/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "554081"
}
],
"symlink_target": ""
} |
import getopt
import os
import re
import sys
def usage():
    # Print command-line help to stderr (Python 2 print-chevron syntax).
    print >> sys.stderr, """
Usage:
python test_harness-script.py [ -c config_file ] [ -o scriptfile ] [ -t gtestfile ] [ -p path_to_gtestfile ]
where:
config_file: test_harness config file; default: test_harness.conf
scriptfile: script file to write; default: gtest.sh
gtestfile: name of gtest executable file; default: ajtest
path_to_gtestfile: optional path to directory containing gtestfile (when scriptfile runs)
"""
def main(argv=None):
    """Parse options and the test_harness config, then emit a shell script
    that runs the gtest executable with the selected --gtest_filter.

    Returns 0 on success, 2 on option/config/file errors.
    """
    # get commandline options
    conffile='test_harness.conf'
    scriptfile='gtest.sh'
    gtestfileopt=''
    testpath=''
    if argv is None:
        argv=[]
    if len(argv) > 0:
        try:
            opts, junk = getopt.getopt(argv, 'c:o:p:t:')
            if junk:
                print >> sys.stderr, 'error, unrecognized arguments ' + str(junk)
                usage()
                return 2
            for opt, val in opts:
                if opt == '-c':
                    conffile = val
                elif opt == '-o':
                    scriptfile = val
                elif opt == '-p':
                    testpath = val
                elif opt == '-t':
                    gtestfileopt = val
        except getopt.GetoptError, err:
            print >> sys.stderr, 'error, ' + str(err)
            usage()
            return 2

    # initialize
    # NOTE(review): 'dict' and 'filter' shadow Python builtins here.
    dict = []
    filter = ''
    negfilter = ''
    gtestfile = ''
    part = ''
    text = ''

    re_comment = re.compile( r'\s*#.*$' )
    re_equals = re.compile( r'\s*=\s*' )
    re_lastcolon = re.compile( r':$' )
    # Section headers, matched case-insensitively with optional whitespace.
    re_TestCases = re.compile( r'^\[\s*Test\s*Cases\s*\]', re.I )
    re_Environment = re.compile( r'^\[\s*Environment\s*\]', re.I )
    re_GTestFile = re.compile( r'^\[\s*GTest\s*File\s*\]', re.I )

    # read config file one line at a time
    try:
        with open( conffile, 'r' ) as fileptr:
            for line in fileptr:
                # strip leading and trailing whitespace
                line = line.strip()
                line = line.strip('\n')
                # strip trailing comment (and preceding whitespace), if any
                line = re_comment.sub( '', line )

                # search line for part header
                if re_TestCases.search( line ):
                    # line ~= [ TestCases ]
                    part = 'TestCases'
                    continue
                elif re_Environment.search( line ):
                    # line ~= [ Environment ]
                    part = 'Environment'
                    print '[Environment]'
                    continue
                elif re_GTestFile.search( line ):
                    # line ~= [ GTestFile ]
                    part = 'GTestFile'
                    continue
                else:
                    # line is none of the above
                    # split line around equals sign (and surrounding whitespace), if any
                    dict = re_equals.split( line, 1 )
                    if (len(dict) > 1):
                        # line ~= something = something
                        if part == 'TestCases':
                            # Can select individual tests as well as groups.
                            # That is, TestCase selection can look like Foo.Bar=YES, not just Foo=YES.
                            # You can also used negative selection, like *=YES followed by Foo.Bar=NO.
                            d0 = dict[0].split('.',1)
                            if (dict[1].upper() == 'YES' or dict[1].upper() == 'Y'):
                                if (len(d0) > 1):
                                    filter = filter + dict[0] + ':'
                                else:
                                    filter = filter + dict[0] + '.*' + ':'
                            elif (dict[1].upper() == 'NO' or dict[1].upper() == 'N'):
                                if (len(d0) > 1):
                                    negfilter = negfilter + dict[0] + ':'
                                else:
                                    negfilter = negfilter + dict[0] + '.*' + ':'
                        elif part == 'Environment':
                            # Echo and export each environment assignment.
                            print '\t%s="%s"' % ( dict[0], dict[1] )
                            text = text + '\n%s="%s"' % ( dict[0], dict[1] )
                            text = text + '\nexport %s\n' % ( dict[0] )
                        elif part == 'GTestFile':
                            # the file name might contain = character
                            gtestfile = line
                    elif part == 'GTestFile' and line != '':
                        gtestfile = line
                    else:
                        # line is unusable
                        continue
    except IOError:
        print >> sys.stderr, 'error opening config file "%s"' % conffile
        return 2

    # assemble the path to gtestfile to execute;
    # command-line -t overrides the config, default is 'ajtest'
    command = gtestfile
    if gtestfileopt != '':
        command = gtestfileopt
    if command == '':
        command = 'ajtest'
    if testpath != '':
        text = text + '\ncd "%s" || exit 2\n' % testpath
        command = os.path.join( testpath, command )
    else:
        command = os.path.join( '.', command )
    print '[GTestFile]\n\t%s' % command
    text = text + '\nls "%s" || exit 2\n' % command

    # assemble the gtest filter, if any
    # (positive selections first, negatives after a '-', per gtest syntax)
    if filter == '' and negfilter == '':
        pass
    elif filter != '' and negfilter == '':
        filter = re_lastcolon.sub( '', filter )
    elif filter == '' and negfilter != '':
        filter = '*' + '-' + re_lastcolon.sub( '', negfilter )
    else:
        filter = re_lastcolon.sub( '', filter ) + '-' + re_lastcolon.sub( '', negfilter)
    if filter != '':
        print '[TestCases]\n\t%s' % filter
        command = command + ' --gtest_filter="' + filter + '"'

    # script ends by executing the gtestfile with filter argument, if any
    text = text + '\n' + command + ' || exit 1'

    # write the scriptfile
    try:
        with open( scriptfile, 'w' ) as fileptr:
            print >> fileptr, text
    except IOError:
        print >> sys.stderr, 'error opening script file "%s"' % scriptfile
        return 2
    return 0
if __name__ == '__main__':
    # Forward CLI args (minus the program name) and exit with main's status.
    if len(sys.argv) > 1:
        sys.exit(main(sys.argv[1:]))
    else:
        sys.exit(main())
| {
"content_hash": "88bdd38b1270a3e1a7a46fc269482437",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 112,
"avg_line_length": 33.53125,
"alnum_prop": 0.4507611059335197,
"repo_name": "Vovkasquid/compassApp",
"id": "c0462ec73180f9c6dde75b060423b3424f298cdc",
"size": "7243",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "alljoyn/alljoyn_core/unit_test/test_report/test_harness-android.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "37177"
},
{
"name": "Batchfile",
"bytes": "5209"
},
{
"name": "C",
"bytes": "2487701"
},
{
"name": "C#",
"bytes": "98407"
},
{
"name": "C++",
"bytes": "11424962"
},
{
"name": "CSS",
"bytes": "19287"
},
{
"name": "Groff",
"bytes": "3146"
},
{
"name": "HTML",
"bytes": "36175"
},
{
"name": "Java",
"bytes": "2602308"
},
{
"name": "JavaScript",
"bytes": "646500"
},
{
"name": "Makefile",
"bytes": "43413"
},
{
"name": "Objective-C",
"bytes": "1395199"
},
{
"name": "Objective-C++",
"bytes": "679757"
},
{
"name": "Python",
"bytes": "439743"
},
{
"name": "Shell",
"bytes": "47261"
},
{
"name": "TeX",
"bytes": "789"
},
{
"name": "Visual Basic",
"bytes": "1285"
},
{
"name": "XSLT",
"bytes": "103689"
}
],
"symlink_target": ""
} |
__author__ = "Osman Baskaya"
"""
Preprocess and create a file that contains all information to be
needed for constructing different test sets
"""
import sys
import os
from bs4 import BeautifulSoup
from collections import defaultdict as dd
from nltk.corpus.reader import BracketParseCorpusReader
from itertools import count
import gzip
from nlp_utils import find_files
# Validate command-line arguments up front.  The original printed the
# usage message but fell through, so a bad invocation crashed on
# sys.argv[1] with an IndexError; exit cleanly instead.
if len(sys.argv) != 3:
    msg = "Usage: {} annotation_path sense_inventory_path"
    print >> sys.stderr, msg.format(sys.argv[0])
    sys.exit(1)
annotations_path = sys.argv[1]
inventory_path = sys.argv[2]
NONE_OF_ABOVE_SENSE = "none of the above"
# Penn Treebank escapes bracket characters as -LRB-/-RRB-/-LCB-/-RCB-/
# -LSB-/-RSB- and its tokenizer splits contractions ("can't" -> "ca" +
# "n't").  This table maps those tokens back to plain text.
# Fix: the closing-brace token is '-RCB-' (with a trailing dash, matching
# '-LCB-'); the previous '-RCB' key could never match a real treebank token.
fix = {'-LCB-': '{', '-RCB-': '}', "n't": 'not', 'ca': 'can', 'wo': 'will',
        "-LRB-": "(", "-RRB-": ")", "-RSB-": "]", "-LSB-": "[" }
def get_inventory_info():
    """Parse every sense-inventory XML file into a nested lookup dict.

    Returns a dict mapping lemma file name (basename without '.xml') ->
    OntoNotes sense number -> [wordnet sense string, wordnet version,
    inter-annotator agreement score].
    """
    d = dd(dict)
    files = find_files(inventory_path, "*.xml")
    for num_processed, f in enumerate(files):
        fn = os.path.basename(f).replace('.xml', '')
        # Progress heartbeat every 1000 files.
        if num_processed % 1000 == 0:
            print >> sys.stderr, "{} files processed".format(num_processed)
        soup = BeautifulSoup(open(f), 'xml')
        senses = soup.findAll('sense')
        for sense in senses:
            onto_key = str(sense['n'])
            sense_name = str(sense['name'])
            # Take the first <mappings>/<wn> element as the WordNet mapping.
            mapping = sense.findAll('mappings')[0]
            wn = mapping.findAll('wn')[0]
            version = wn['version']
            wn_senses = wn.text.strip()
            #FIXME: None of above sense should be mapped to 3.0 first!
            # "none of the above" has no lexicon mapping; use a sentinel.
            if sense_name == NONE_OF_ABOVE_SENSE:
                wn_senses = "no_lexicon_sense"
                version = "3.0"
            # NOTE(review): the ITA element is file-level but re-queried per
            # sense; every sense of a lemma shares the same score.
            ita = soup.findAll('ita') # inter-annotator agreement
            ita_score = "ITA_UNDEFINED"
            if len(ita) != 0:
                ita_score = ita[0]['ann_1_2_agreement']
            d[fn][onto_key] = [wn_senses, version, ita_score]
    print >> sys.stderr, "{} files processed".format(num_processed)
    return d
def annotation_process():
    """Join .sense annotation files with their parse trees and the sense
    inventory, printing one SemEval-style line per annotated token to
    stdout.

    Side effects: writes per-sentence POS tag lists to 'on.pos.gz' and
    progress messages to stderr.
    """
    d = get_inventory_info()
    annotated_files = find_files(annotations_path, "*.sense")
    pos_file = gzip.open('on.pos.gz', 'w')
    # Per-lemma instance counter used to build unique instance ids.
    inst_num_dict = dd(lambda: count(1))
    for num_processed, fn in enumerate(annotated_files):
        if num_processed % 1000 == 0:
            print >> sys.stderr, "{} files processed".format(num_processed)
        directory = os.path.dirname(fn)
        basename = os.path.basename(fn)
        # The parse file lives next to the .sense file, same stem.
        reader = BracketParseCorpusReader(directory, basename.replace('.sense', '.parse'))
        fileid = reader.fileids()[0]
        # Cache of sent_id -> (clean tokens, clean POS tags).
        sentences = dict()
        parsed_sents = reader.parsed_sents(fileid)
        for line in open(fn):
            # assumed columns: doc-id, sentence #, token #, lemma, ...,
            # sense id last -- TODO confirm against the OntoNotes format.
            line = line.split()
            tw = line[3]
            onto_sense = line[-1]
            sent_id, tok_id = int(line[1]), int(line[2])
            stuple = sentences.setdefault(sent_id, None)
            if stuple is None:
                # First annotation in this sentence: clean the parse by
                # dropping trace tokens and un-escaping treebank tokens.
                sentence = parsed_sents[sent_id]
                clean_sent = []
                clean_pos = []
                for word, p in sentence.pos():
                    if p != '-NONE-':
                        if word in fix:
                            word = fix[word]
                        clean_sent.append(word)
                        clean_pos.append(p)
                sentences[sent_id] = (clean_sent, clean_pos)
            else:
                clean_sent, clean_pos = stuple
            lexicon_senses, version, ita = d[tw][onto_sense]
            w = tw.replace('-', '.') # following the convention of SemEval
            m = "{}\t{}.on.{}\t{}-{}-{}\t{}-{}\t{}\t{}\t{}\t{}\t{}"
            print m.format(w, w, inst_num_dict[tw].next(), line[0], sent_id, tok_id,
                    w, onto_sense, lexicon_senses, version, ita, tok_id, " ".join(clean_sent))
            pos_file.write("{}\n".format(clean_pos))
    print >> sys.stderr, "{} files processed".format(num_processed)
annotation_process()
| {
"content_hash": "f5dd6aa55d571b31e4a0e8a301b1f7de",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 90,
"avg_line_length": 39.09803921568628,
"alnum_prop": 0.5509027081243731,
"repo_name": "osmanbaskaya/induction-disambiguation-dataset-generator",
"id": "7dd2caebdbadde0a74a9a78cec45f85011252b79",
"size": "4031",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ontonotes-preprocess.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2369"
},
{
"name": "Python",
"bytes": "12543"
}
],
"symlink_target": ""
} |
from sqlalchemy import exc
from sqlalchemy import FLOAT
from sqlalchemy import ForeignKey
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import INTEGER
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Numeric
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import Unicode
from sqlalchemy import UniqueConstraint
from sqlalchemy.dialects.oracle.base import BINARY_DOUBLE
from sqlalchemy.dialects.oracle.base import BINARY_FLOAT
from sqlalchemy.dialects.oracle.base import DOUBLE_PRECISION
from sqlalchemy.dialects.oracle.base import NUMBER
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
    """Reflection tests across two Oracle schemas (the default one and
    ``testing.config.test_schema``), exercising cross-schema foreign keys
    and synonym resolution in both directions.
    """
    __only_on__ = "oracle"
    __backend__ = True
    @classmethod
    def setup_class(cls):
        """Create parent/child tables plus synonyms in the alternate schema."""
        # currently assuming full DBA privs for the user.
        # don't really know how else to go here unless
        # we connect as the other user.
        for stmt in (
            """
        create table %(test_schema)s.parent(
            id integer primary key,
            data varchar2(50)
        );
        create table %(test_schema)s.child(
            id integer primary key,
            data varchar2(50),
            parent_id integer references %(test_schema)s.parent(id)
        );
        create table local_table(
            id integer primary key,
            data varchar2(50)
        );
        create synonym %(test_schema)s.ptable for %(test_schema)s.parent;
        create synonym %(test_schema)s.ctable for %(test_schema)s.child;
        create synonym %(test_schema)s_pt for %(test_schema)s.parent;
        create synonym %(test_schema)s.local_table for local_table;
        -- can't make a ref from local schema to the
        -- remote schema's table without this,
        -- *and* cant give yourself a grant !
        -- so we give it to public. ideas welcome.
        grant references on %(test_schema)s.parent to public;
        grant references on %(test_schema)s.child to public;
        """
            % {"test_schema": testing.config.test_schema}
        ).split(";"):
            if stmt.strip():
                testing.db.execute(stmt)
    @classmethod
    def teardown_class(cls):
        """Drop everything created in setup_class, child table first."""
        for stmt in (
            """
        drop table %(test_schema)s.child;
        drop table %(test_schema)s.parent;
        drop table local_table;
        drop synonym %(test_schema)s.ctable;
        drop synonym %(test_schema)s.ptable;
        drop synonym %(test_schema)s_pt;
        drop synonym %(test_schema)s.local_table;
        """
            % {"test_schema": testing.config.test_schema}
        ).split(";"):
            if stmt.strip():
                testing.db.execute(stmt)
    @testing.provide_metadata
    def test_create_same_names_explicit_schema(self):
        """Tables named like the alt-schema ones, created with an explicit
        schema= equal to the default schema, round-trip correctly."""
        schema = testing.db.dialect.default_schema_name
        meta = self.metadata
        parent = Table(
            "parent",
            meta,
            Column("pid", Integer, primary_key=True),
            schema=schema,
        )
        child = Table(
            "child",
            meta,
            Column("cid", Integer, primary_key=True),
            Column("pid", Integer, ForeignKey("%s.parent.pid" % schema)),
            schema=schema,
        )
        meta.create_all()
        parent.insert().execute({"pid": 1})
        child.insert().execute({"cid": 1, "pid": 1})
        eq_(child.select().execute().fetchall(), [(1, 1)])
    def test_reflect_alt_table_owner_local_synonym(self):
        """A local synonym pointing at the alt schema reflects under the
        synonym's own name."""
        meta = MetaData(testing.db)
        parent = Table(
            "%s_pt" % testing.config.test_schema,
            meta,
            autoload=True,
            oracle_resolve_synonyms=True,
        )
        self.assert_compile(
            parent.select(),
            "SELECT %(test_schema)s_pt.id, "
            "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
            % {"test_schema": testing.config.test_schema},
        )
        select([parent]).execute().fetchall()
    def test_reflect_alt_synonym_owner_local_table(self):
        """A synonym owned by the alt schema that targets a local table
        reflects under the alt schema's name."""
        meta = MetaData(testing.db)
        parent = Table(
            "local_table",
            meta,
            autoload=True,
            oracle_resolve_synonyms=True,
            schema=testing.config.test_schema,
        )
        self.assert_compile(
            parent.select(),
            "SELECT %(test_schema)s.local_table.id, "
            "%(test_schema)s.local_table.data "
            "FROM %(test_schema)s.local_table"
            % {"test_schema": testing.config.test_schema},
        )
        select([parent]).execute().fetchall()
    @testing.provide_metadata
    def test_create_same_names_implicit_schema(self):
        """Same table names, no schema= — created in the default schema
        without clashing with the alt-schema versions."""
        meta = self.metadata
        parent = Table(
            "parent", meta, Column("pid", Integer, primary_key=True)
        )
        child = Table(
            "child",
            meta,
            Column("cid", Integer, primary_key=True),
            Column("pid", Integer, ForeignKey("parent.pid")),
        )
        meta.create_all()
        parent.insert().execute({"pid": 1})
        child.insert().execute({"cid": 1, "pid": 1})
        eq_(child.select().execute().fetchall(), [(1, 1)])
    def test_reflect_alt_owner_explicit(self):
        meta = MetaData(testing.db)
        parent = Table(
            "parent", meta, autoload=True, schema=testing.config.test_schema
        )
        child = Table(
            "child", meta, autoload=True, schema=testing.config.test_schema
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.parent JOIN %(test_schema)s.child ON "
            "%(test_schema)s.parent.id = %(test_schema)s.child.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
        select([parent, child]).select_from(
            parent.join(child)
        ).execute().fetchall()
    def test_reflect_local_to_remote(self):
        """A local table whose FK targets the alt schema pulls the remote
        parent into the MetaData during reflection."""
        testing.db.execute(
            "CREATE TABLE localtable (id INTEGER "
            "PRIMARY KEY, parent_id INTEGER REFERENCES "
            "%(test_schema)s.parent(id))"
            % {"test_schema": testing.config.test_schema}
        )
        try:
            meta = MetaData(testing.db)
            lcl = Table("localtable", meta, autoload=True)
            parent = meta.tables["%s.parent" % testing.config.test_schema]
            self.assert_compile(
                parent.join(lcl),
                "%(test_schema)s.parent JOIN localtable ON "
                "%(test_schema)s.parent.id = "
                "localtable.parent_id"
                % {"test_schema": testing.config.test_schema},
            )
            select([parent, lcl]).select_from(
                parent.join(lcl)
            ).execute().fetchall()
        finally:
            testing.db.execute("DROP TABLE localtable")
    def test_reflect_alt_owner_implicit(self):
        meta = MetaData(testing.db)
        parent = Table(
            "parent", meta, autoload=True, schema=testing.config.test_schema
        )
        child = Table(
            "child", meta, autoload=True, schema=testing.config.test_schema
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.parent JOIN %(test_schema)s.child "
            "ON %(test_schema)s.parent.id = "
            "%(test_schema)s.child.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
        select([parent, child]).select_from(
            parent.join(child)
        ).execute().fetchall()
    def test_reflect_alt_owner_synonyms(self):
        """FK from a local table to the alt schema via a synonym."""
        testing.db.execute(
            "CREATE TABLE localtable (id INTEGER "
            "PRIMARY KEY, parent_id INTEGER REFERENCES "
            "%s.ptable(id))" % testing.config.test_schema
        )
        try:
            meta = MetaData(testing.db)
            lcl = Table(
                "localtable", meta, autoload=True, oracle_resolve_synonyms=True
            )
            parent = meta.tables["%s.ptable" % testing.config.test_schema]
            self.assert_compile(
                parent.join(lcl),
                "%(test_schema)s.ptable JOIN localtable ON "
                "%(test_schema)s.ptable.id = "
                "localtable.parent_id"
                % {"test_schema": testing.config.test_schema},
            )
            select([parent, lcl]).select_from(
                parent.join(lcl)
            ).execute().fetchall()
        finally:
            testing.db.execute("DROP TABLE localtable")
    def test_reflect_remote_synonyms(self):
        meta = MetaData(testing.db)
        parent = Table(
            "ptable",
            meta,
            autoload=True,
            schema=testing.config.test_schema,
            oracle_resolve_synonyms=True,
        )
        child = Table(
            "ctable",
            meta,
            autoload=True,
            schema=testing.config.test_schema,
            oracle_resolve_synonyms=True,
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.ptable JOIN "
            "%(test_schema)s.ctable "
            "ON %(test_schema)s.ptable.id = "
            "%(test_schema)s.ctable.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
        select([parent, child]).select_from(
            parent.join(child)
        ).execute().fetchall()
class ConstraintTest(fixtures.TablesTest):
    """Constraint behavior specific to Oracle: no ON UPDATE CASCADE, and
    check-constraint reflection semantics."""
    __only_on__ = "oracle"
    __backend__ = True
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table("foo", metadata, Column("id", Integer, primary_key=True))
    def test_oracle_has_no_on_update_cascade(self):
        """Oracle does not support ON UPDATE CASCADE; the dialect warns
        both for inline FKs and for table-level FK constraints."""
        bar = Table(
            "bar",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column(
                "foo_id", Integer, ForeignKey("foo.id", onupdate="CASCADE")
            ),
        )
        assert_raises(exc.SAWarning, bar.create)
        bat = Table(
            "bat",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("foo_id", Integer),
            ForeignKeyConstraint(["foo_id"], ["foo.id"], onupdate="CASCADE"),
        )
        assert_raises(exc.SAWarning, bat.create)
    def test_reflect_check_include_all(self):
        """Implicit NOT NULL checks appear only with include_all=True."""
        insp = inspect(testing.db)
        eq_(insp.get_check_constraints("foo"), [])
        eq_(
            [
                rec["sqltext"]
                for rec in insp.get_check_constraints("foo", include_all=True)
            ],
            ['"ID" IS NOT NULL'],
        )
class SystemTableTablenamesTest(fixtures.TestBase):
    """get_table_names() should skip tables in the SYSTEM tablespace by
    default, include temp tables only via get_temp_table_names(), and
    honor the exclude_tablespaces engine option."""
    __only_on__ = "oracle"
    __backend__ = True
    def setup(self):
        testing.db.execute("create table my_table (id integer)")
        testing.db.execute(
            "create global temporary table my_temp_table (id integer)"
        )
        testing.db.execute(
            "create table foo_table (id integer) tablespace SYSTEM"
        )
    def teardown(self):
        testing.db.execute("drop table my_temp_table")
        testing.db.execute("drop table my_table")
        testing.db.execute("drop table foo_table")
    def test_table_names_no_system(self):
        insp = inspect(testing.db)
        eq_(insp.get_table_names(), ["my_table"])
    def test_temp_table_names_no_system(self):
        insp = inspect(testing.db)
        eq_(insp.get_temp_table_names(), ["my_temp_table"])
    def test_table_names_w_system(self):
        # With SYSTEM no longer excluded, foo_table becomes visible.
        engine = testing_engine(options={"exclude_tablespaces": ["FOO"]})
        insp = inspect(engine)
        eq_(
            set(insp.get_table_names()).intersection(
                ["my_table", "foo_table"]
            ),
            set(["my_table", "foo_table"]),
        )
class DontReflectIOTTest(fixtures.TestBase):
    """test that index overflow tables aren't included in
    table_names."""
    __only_on__ = "oracle"
    __backend__ = True
    def setup(self):
        # Index-organized table; Oracle creates a hidden overflow segment
        # which must NOT show up during reflection.
        testing.db.execute(
            """
        CREATE TABLE admin_docindex(
                token char(20),
                doc_id NUMBER,
                token_frequency NUMBER,
                token_offsets VARCHAR2(2000),
                CONSTRAINT pk_admin_docindex PRIMARY KEY (token, doc_id))
            ORGANIZATION INDEX
            TABLESPACE users
            PCTTHRESHOLD 20
            OVERFLOW TABLESPACE users
        """
        )
    def teardown(self):
        testing.db.execute("drop table admin_docindex")
    def test_reflect_all(self):
        m = MetaData(testing.db)
        m.reflect()
        eq_(set(t.name for t in m.tables.values()), set(["admin_docindex"]))
class UnsupportedIndexReflectTest(fixtures.TestBase):
    """Functional (expression-based) indexes can't be reflected as column
    indexes; reflection should warn rather than fail."""
    __only_on__ = "oracle"
    __backend__ = True
    @testing.emits_warning("No column names")
    @testing.provide_metadata
    def test_reflect_functional_index(self):
        metadata = self.metadata
        Table(
            "test_index_reflect",
            metadata,
            Column("data", String(20), primary_key=True),
        )
        metadata.create_all()
        testing.db.execute(
            "CREATE INDEX DATA_IDX ON " "TEST_INDEX_REFLECT (UPPER(DATA))"
        )
        m2 = MetaData(testing.db)
        Table("test_index_reflect", m2, autoload=True)
def all_tables_compression_missing():
    """Skip-predicate: True when the COMPRESSION column of ALL_TABLES is
    unusable (column absent, or not an Enterprise Edition server)."""
    try:
        testing.db.execute("SELECT compression FROM all_tables")
        version_banner = testing.db.scalar("select * from v$version")
        return "Enterprise Edition" not in version_banner
    except Exception:
        return True
def all_tables_compress_for_missing():
    """Skip-predicate: True when the COMPRESS_FOR column of ALL_TABLES is
    unusable (column absent, or not an Enterprise Edition server)."""
    try:
        testing.db.execute("SELECT compress_for FROM all_tables")
        version_banner = testing.db.scalar("select * from v$version")
        return "Enterprise Edition" not in version_banner
    except Exception:
        return True
class TableReflectionTest(fixtures.TestBase):
    """Round-trip of the oracle_compress table option through reflection."""
    __only_on__ = "oracle"
    __backend__ = True
    @testing.provide_metadata
    @testing.fails_if(all_tables_compression_missing)
    def test_reflect_basic_compression(self):
        metadata = self.metadata
        tbl = Table(
            "test_compress",
            metadata,
            Column("data", Integer, primary_key=True),
            oracle_compress=True,
        )
        metadata.create_all()
        m2 = MetaData(testing.db)
        tbl = Table("test_compress", m2, autoload=True)
        # Don't hardcode the exact value, but it must be non-empty
        assert tbl.dialect_options["oracle"]["compress"]
    @testing.provide_metadata
    @testing.fails_if(all_tables_compress_for_missing)
    def test_reflect_oltp_compression(self):
        metadata = self.metadata
        tbl = Table(
            "test_compress",
            metadata,
            Column("data", Integer, primary_key=True),
            oracle_compress="OLTP",
        )
        metadata.create_all()
        m2 = MetaData(testing.db)
        tbl = Table("test_compress", m2, autoload=True)
        assert tbl.dialect_options["oracle"]["compress"] == "OLTP"
class RoundTripIndexTest(fixtures.TestBase):
    """Create a table with several indexes/constraints, reflect it,
    re-create from the reflection, reflect again, and verify everything
    (names, uniqueness, oracle_compress levels) survived the round trip."""
    __only_on__ = "oracle"
    __backend__ = True
    @testing.provide_metadata
    def test_basic(self):
        metadata = self.metadata
        s_table = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True, unique=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            UniqueConstraint("col", "group"),
        )
        # "group" is a keyword, so lower case
        normalind = Index("tableind", s_table.c.id_b, s_table.c.group)
        Index(
            "compress1", s_table.c.id_a, s_table.c.id_b, oracle_compress=True
        )
        Index(
            "compress2",
            s_table.c.id_a,
            s_table.c.id_b,
            s_table.c.col,
            oracle_compress=1,
        )
        metadata.create_all()
        # Round trip: reflect, drop, re-create from the reflection, then
        # reflect once more into 'inspect' for the assertions below.
        mirror = MetaData(testing.db)
        mirror.reflect()
        metadata.drop_all()
        mirror.create_all()
        inspect = MetaData(testing.db)
        inspect.reflect()
        def obj_definition(obj):
            # Identity key for an index/constraint: class, columns, unique.
            return (
                obj.__class__,
                tuple([c.name for c in obj.columns]),
                getattr(obj, "unique", None),
            )
        # find what the primary k constraint name should be
        primaryconsname = testing.db.scalar(
            text(
                """SELECT constraint_name
               FROM all_constraints
               WHERE table_name = :table_name
               AND owner = :owner
               AND constraint_type = 'P' """
            ),
            table_name=s_table.name.upper(),
            owner=testing.db.dialect.default_schema_name.upper(),
        )
        reflectedtable = inspect.tables[s_table.name]
        # make a dictionary of the reflected objects:
        reflected = dict(
            [
                (obj_definition(i), i)
                for i in reflectedtable.indexes | reflectedtable.constraints
            ]
        )
        # assert we got primary key constraint and its name, Error
        # if not in dict
        assert (
            reflected[
                (PrimaryKeyConstraint, ("id_a", "id_b", "group"), None)
            ].name.upper()
            == primaryconsname.upper()
        )
        # Error if not in dict
        eq_(reflected[(Index, ("id_b", "group"), False)].name, normalind.name)
        assert (Index, ("id_b",), True) in reflected
        assert (Index, ("col", "group"), True) in reflected
        idx = reflected[(Index, ("id_a", "id_b"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 2
        idx = reflected[(Index, ("id_a", "id_b", "col"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 1
        eq_(len(reflectedtable.constraints), 1)
        eq_(len(reflectedtable.indexes), 5)
class DBLinkReflectionTest(fixtures.TestBase):
    """Reflection through a synonym that targets a table across a
    database link (configured via [sqla_testing] oracle_db_link)."""
    __requires__ = ("oracle_test_dblink",)
    __only_on__ = "oracle"
    __backend__ = True
    @classmethod
    def setup_class(cls):
        from sqlalchemy.testing import config
        cls.dblink = config.file_config.get("sqla_testing", "oracle_db_link")
        # note that the synonym here is still not totally functional
        # when accessing via a different username as we do with the
        # multiprocess test suite, so testing here is minimal
        with testing.db.connect() as conn:
            conn.execute(
                "create table test_table "
                "(id integer primary key, data varchar2(50))"
            )
            conn.execute(
                "create synonym test_table_syn "
                "for test_table@%s" % cls.dblink
            )
    @classmethod
    def teardown_class(cls):
        with testing.db.connect() as conn:
            conn.execute("drop synonym test_table_syn")
            conn.execute("drop table test_table")
    def test_reflection(self):
        """test the resolution of the synonym/dblink. """
        m = MetaData()
        t = Table(
            "test_table_syn",
            m,
            autoload=True,
            autoload_with=testing.db,
            oracle_resolve_synonyms=True,
        )
        eq_(list(t.c.keys()), ["id", "data"])
        eq_(list(t.primary_key), [t.c.id])
class TypeReflectionTest(fixtures.TestBase):
    """Verify the concrete type class (and selected attributes) that each
    Oracle column type reflects back as."""
    __only_on__ = "oracle"
    __backend__ = True
    @testing.provide_metadata
    def _run_test(self, specs, attributes):
        """Create a table with the given (declared, expected) type pairs,
        reflect it, and compare reflected types and *attributes*."""
        columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
        m = self.metadata
        Table("oracle_types", m, *columns)
        m.create_all()
        m2 = MetaData(testing.db)
        table = Table("oracle_types", m2, autoload=True)
        for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
            expected_spec = spec[1]
            reflected_type = reflected_col.type
            is_(type(reflected_type), type(expected_spec))
            for attr in attributes:
                eq_(
                    getattr(reflected_type, attr),
                    getattr(expected_spec, attr),
                    "Column %s: Attribute %s value of %s does not "
                    "match %s for type %s"
                    % (
                        "c%i" % (i + 1),
                        attr,
                        getattr(reflected_type, attr),
                        getattr(expected_spec, attr),
                        spec[0],
                    ),
                )
    def test_integer_types(self):
        specs = [(Integer, INTEGER()), (Numeric, INTEGER())]
        self._run_test(specs, [])
    def test_number_types(self):
        specs = [(Numeric(5, 2), NUMBER(5, 2)), (NUMBER, NUMBER())]
        self._run_test(specs, ["precision", "scale"])
    def test_float_types(self):
        specs = [
            (DOUBLE_PRECISION(), FLOAT()),
            # when binary_precision is supported
            # (DOUBLE_PRECISION(), oracle.FLOAT(binary_precision=126)),
            (BINARY_DOUBLE(), BINARY_DOUBLE()),
            (BINARY_FLOAT(), BINARY_FLOAT()),
            (FLOAT(5), FLOAT()),
            # when binary_precision is supported
            # (FLOAT(5), oracle.FLOAT(binary_precision=5),),
            (FLOAT(), FLOAT()),
            # when binary_precision is supported
            # (FLOAT(5), oracle.FLOAT(binary_precision=126),),
        ]
        self._run_test(specs, ["precision"])
| {
"content_hash": "596ce099a58a78f706df9e94e1893b2c",
"timestamp": "",
"source": "github",
"line_count": 680,
"max_line_length": 79,
"avg_line_length": 32.188235294117646,
"alnum_prop": 0.5598044590643275,
"repo_name": "wujuguang/sqlalchemy",
"id": "6c36c0a6bb7f57c41d93c37936ac4f908fdb5ac9",
"size": "21906",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/dialect/oracle/test_reflection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "45930"
},
{
"name": "Python",
"bytes": "11287383"
}
],
"symlink_target": ""
} |
"""Support for Gogogate2 garage Doors."""
from __future__ import annotations
import logging
from ismartgate.common import (
AbstractDoor,
DoorStatus,
TransitionDoorStatus,
get_configured_doors,
)
from homeassistant.components.cover import (
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_GATE,
SUPPORT_CLOSE,
SUPPORT_OPEN,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .common import (
DeviceDataUpdateCoordinator,
GoGoGate2Entity,
cover_unique_id,
get_data_update_coordinator,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Create one cover entity per door configured on the device."""
    coordinator = get_data_update_coordinator(hass, config_entry)
    covers = [
        DeviceCover(config_entry, coordinator, door)
        for door in get_configured_doors(coordinator.data)
    ]
    async_add_entities(covers)
class DeviceCover(GoGoGate2Entity, CoverEntity):
    """Cover entity for goggate2."""
    def __init__(
        self,
        config_entry: ConfigEntry,
        data_update_coordinator: DeviceDataUpdateCoordinator,
        door: AbstractDoor,
    ) -> None:
        """Initialize the object."""
        unique_id = cover_unique_id(config_entry, door)
        super().__init__(config_entry, data_update_coordinator, door, unique_id)
        self._api = data_update_coordinator.api
        # NOTE(review): _is_available is never read or updated in this
        # class -- looks vestigial; confirm before removing.
        self._is_available = True
    @property
    def name(self):
        """Return the name of the door."""
        return self._get_door().name
    @property
    def is_closed(self):
        """Return True if closed, False if open, None if state is unknown."""
        door_status = self._get_door_status()
        if door_status == DoorStatus.OPENED:
            return False
        if door_status == DoorStatus.CLOSED:
            return True
        return None
    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        if self._get_door().gate:
            return DEVICE_CLASS_GATE
        return DEVICE_CLASS_GARAGE
    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_OPEN | SUPPORT_CLOSE
    @property
    def is_closing(self):
        """Return if the cover is closing or not."""
        return self._get_door_status() == TransitionDoorStatus.CLOSING
    @property
    def is_opening(self):
        """Return if the cover is opening or not."""
        return self._get_door_status() == TransitionDoorStatus.OPENING
    async def async_open_cover(self, **kwargs):
        """Open the door."""
        await self._api.async_open_door(self._get_door().door_id)
        await self.coordinator.async_refresh()
    async def async_close_cover(self, **kwargs):
        """Close the door."""
        await self._api.async_close_door(self._get_door().door_id)
        await self.coordinator.async_refresh()
    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return {"door_id": self._get_door().door_id}
    def _get_door_status(self) -> DoorStatus | TransitionDoorStatus:
        # Annotation corrected: this looks up the door's *status* in the
        # coordinator's latest snapshot (it is compared against DoorStatus /
        # TransitionDoorStatus members above), not an AbstractDoor.
        return self._api.async_get_door_statuses_from_info(self.coordinator.data)[
            self._door.door_id
        ]
| {
"content_hash": "7d991e3af92335988ee687a31547f9b7",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 82,
"avg_line_length": 28.59016393442623,
"alnum_prop": 0.6427752293577982,
"repo_name": "FreekingDean/home-assistant",
"id": "073c48e55b836d1fb72b518cb0078b17427169be",
"size": "3488",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/gogogate2/cover.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2335"
},
{
"name": "Python",
"bytes": "36746639"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Default to the local settings module unless the environment overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gfysound.settings.local")
    # Imported lazily so the settings env var is set before Django configures.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| {
"content_hash": "83d8a8df552c31d292d8bc241b6f9641",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 78,
"avg_line_length": 26.11111111111111,
"alnum_prop": 0.7148936170212766,
"repo_name": "alex-jerez/gfysound",
"id": "6c866ef6b11889fe1996290186c6edc28a5c7a4b",
"size": "257",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4378"
},
{
"name": "Awk",
"bytes": "38258"
},
{
"name": "C",
"bytes": "20607912"
},
{
"name": "C++",
"bytes": "237496"
},
{
"name": "CSS",
"bytes": "167300"
},
{
"name": "Emacs Lisp",
"bytes": "60444"
},
{
"name": "Erlang",
"bytes": "55192"
},
{
"name": "IDL",
"bytes": "9704"
},
{
"name": "JavaScript",
"bytes": "322972"
},
{
"name": "Makefile",
"bytes": "734622"
},
{
"name": "Mathematica",
"bytes": "69256"
},
{
"name": "Objective-C",
"bytes": "10346"
},
{
"name": "Perl",
"bytes": "110298"
},
{
"name": "Prolog",
"bytes": "330732"
},
{
"name": "Python",
"bytes": "94834"
},
{
"name": "Ruby",
"bytes": "57904"
},
{
"name": "Shell",
"bytes": "129485"
},
{
"name": "Smalltalk",
"bytes": "51354"
},
{
"name": "SystemVerilog",
"bytes": "55596"
},
{
"name": "VimL",
"bytes": "17312306"
}
],
"symlink_target": ""
} |
import os
from restlib.response import Response
from catalogService import errors
from catalogService.rest.models import userData
from catalogService import storage
from catalogService.rest.api.base import BaseController
from catalogService.rest.middleware.response import XmlStringResponse, XmlResponse
class UsersController(BaseController):
    """REST controller exposing a per-user key/value blob store.

    Keys are slash-separated paths; '.' and '..' components are stripped
    before touching disk so callers cannot escape their own directory.
    Responses are small XML documents wrapping either the entry's URL
    or its stored data.
    """
    modelName = 'userId'
    processSuburls = True

    def _getUserDataStore(self, request):
        # Each authenticated user gets an isolated DiskStorage rooted at
        # <storagePath>/userData/<sanitized user name>.
        path = os.sep.join([self.storageCfg.storagePath, 'userData',
            self._sanitizeKey(request.auth[0])])
        cfg = storage.StorageConfig(storagePath = path)
        return storage.DiskStorage(cfg)

    @classmethod
    def _sanitizeKey(cls, key):
        # Drop '.' and '..' path components to prevent directory traversal.
        return '/'.join(x for x in key.split('/') if x not in ('.', '..'))

    def _checkUser(self, request, userId):
        # Users may only touch their own store: the userId from the URL
        # must match the authenticated user.  (Previously duplicated in
        # update/get/destroy/process.)
        if userId != request.auth[0]:
            raise errors.ParameterError("Mismatching users %s, %s" %
                (userId, request.auth[0]))

    def _idResponse(self, url):
        # Canonical XML envelope returned by all mutating operations.
        data = '<?xml version="1.0" encoding="UTF-8"?><id>%s</id>' % (url, )
        return XmlStringResponse(data)

    def index(self, request):
        "enumerate the users"
        raise NotImplementedError

    def update(self, request, userId):
        "update a key"
        self._checkUser(request, userId)
        dataLen = request.getContentLength()
        data = request.read(dataLen)
        key = self._sanitizeKey(request.unparsedPath)
        store = self._getUserDataStore(request)
        store.set(key, data)
        return self._idResponse(
            self.url(request, 'users', '%s/%s' % (userId, key)))

    def get(self, request, userId):
        "retrieve a key's contents, or enumerate a collection"
        self._checkUser(request, userId)
        keyPath = request.unparsedPath
        key = self._sanitizeKey(keyPath)
        prefix = self.url(request, 'users', '%s/' % (userId))
        store = self._getUserDataStore(request)
        xmlHeader = '<?xml version="1.0" encoding="UTF-8"?>'
        key = key.rstrip('/')
        if key != keyPath:
            # A trailing / means retrieving the contents from a collection
            if not store.isCollection(key):
                data = xmlHeader + '<list></list>'
                return XmlStringResponse(data)
        if store.isCollection(key):
            snodes = store.enumerate(keyPrefix = key)
            if key == keyPath:
                # No trailing /: list the children's ids.
                node = userData.IdsNode()
                children = [ userData.IdNode().characters("%s%s" % (prefix, x))
                             for x in snodes ]
                node.extend(children)
                return XmlResponse(node)
            # Trailing /: grab contents and wrap them in some XML
            data = [ store.get(x) for x in snodes ]
            data = xmlHeader + '<list>%s</list>' % ''.join(data)
            return XmlStringResponse(data)
        data = store.get(key)
        if data is None:
            raise NotImplementedError
        return XmlStringResponse(data)

    def destroy(self, request, userId):
        "delete a key"
        self._checkUser(request, userId)
        store = self._getUserDataStore(request)
        key = self._sanitizeKey(request.unparsedPath)
        store.delete(key)
        return self._idResponse(
            self.url(request, 'users', '%s/%s' % (userId, key)))

    def process(self, request, userId):
        "create a new key entry in the store"
        self._checkUser(request, userId)
        dataLen = request.getContentLength()
        data = request.read(dataLen)
        store = self._getUserDataStore(request)
        # Sanitize key; a trailing slash only denotes the collection prefix.
        keyPrefix = self._sanitizeKey(request.unparsedPath.rstrip('/'))
        newId = store.store(data, keyPrefix = keyPrefix)
        return self._idResponse(
            self.url(request, 'users', '%s/%s' % (userId, newId)))
| {
"content_hash": "003e039c8a68f7b900103f851ac35da6",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 122,
"avg_line_length": 36.93103448275862,
"alnum_prop": 0.5849673202614379,
"repo_name": "sassoftware/catalog-service",
"id": "fd08fd3c6f7f1f371d0754bc3762763c305afa15",
"size": "4889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "catalogService/rest/api/users.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "9398"
},
{
"name": "Makefile",
"bytes": "26985"
},
{
"name": "Python",
"bytes": "8543839"
},
{
"name": "Shell",
"bytes": "329"
}
],
"symlink_target": ""
} |
__all__ = ['nitro_service', 'options']
| {
"content_hash": "013325a8b9c28b526a3cfea0c63a91af",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 38,
"avg_line_length": 20,
"alnum_prop": 0.55,
"repo_name": "mahabs/nitro",
"id": "51a721cb6e5bf018f568b989b5fc2fab54f290ca",
"size": "40",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/service/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "498"
},
{
"name": "Python",
"bytes": "10647176"
}
],
"symlink_target": ""
} |
import load_synth_extract
from microbes import studytreelist
if __name__ == "__main__":
    # Pulls configuration globals (dott, dload, studyloc, javapre, treemloc,
    # dsynth, treefn, ...) into scope via star import -- TODO confirm which
    # names the config module actually defines.
    from stephen_desktop_conf_TEMP import *
    synthottolid="372049"
    #studytreelist = ["713_1287"]
    print "loading synthottolid:",synthottolid
    print "loading studytreelist:",studytreelist
    # Load each study tree individually, writing per-study log/tree/
    # inferred-monophyly files under microbeslog_inf_mono/.
    for i in studytreelist:
        tstudy_list = [i]
        generallogfileloc = "microbeslog_inf_mono/"+i+".log"
        ttfntreefn = "microbeslog_inf_mono/"+i+".tre"
        infmonofn = "microbeslog_inf_mono/"+i+".inf_mono"
        load_synth_extract.run_load_single_ttfn_inf_mono(dott,dload,studyloc,tstudy_list,javapre,
            treemloc,generallogfileloc,dsynth,synthottolid,treefn,ttfntreefn,infmonofn)
| {
"content_hash": "4f960524aa7fa756194c8718429009ee",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 123,
"avg_line_length": 34.18181818181818,
"alnum_prop": 0.6449468085106383,
"repo_name": "OpenTreeOfLife/gcmdr",
"id": "fdb347c0f59a52117409369c336dcc703f3dff75",
"size": "752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "load_studies_inf_mono.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "293388"
}
],
"symlink_target": ""
} |
import matplotlib
import socket
# Use the non-interactive Agg backend everywhere except on the 'arch'
# development machine, so figures render without a display server.
# Must run before pyplot is imported, since the backend is fixed then.
if socket.gethostname() != 'arch':
    matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import io
import tensorflow as tf
from matplotlib.patches import Circle
from matplotlib.patches import Patch
# Keep colors consistent
class_colors = [None, '#ff0000', '#00ff00', '#0000ff', '#ffff00', '#ff00ff', '#00ffff', '#000000', '#80ff80', '#b0bc32', '#d65111', '#615562', '#ef8bd4', '#83bc8c', '#726800', '#40d93e', '#54692c', '#6fd4f1', '#e2d978', '#ff8000', '#1dcceb', '#7a58f7', '#1aaa91', '#ba60b0', '#76191f']
class_labels = [None, 'LoRa 1 ', 'LoRa 2 ', 'LoRa 3 ', 'LoRa 4 ', 'LoRa 5 ', 'LoRa 6 ', 'LoRa 7 ', 'LoRa 8 ', 'LoRa 9 ', 'LoRa 10', 'LoRa 11', 'LoRa 12', 'LoRa 13', 'LoRa 14', 'LoRa 15', 'LoRa 16', 'Lora 17', 'LoRa 18', 'LoRa 19', 'LoRa 20', 'LoRa 21', 'LoRa 22', 'LoRa 23', 'LoRa 24']
def dbg_plot(y, title=''):
    """Debug helper: line-plot a real-valued sequence against its sample index."""
    plt.figure()
    axis = plt.gca()
    axis.set_title(title)
    # Plot first, then clamp the x-range (autoscale would otherwise win).
    axis.plot(np.arange(len(y)), y)
    axis.set_xlim([0, len(y)])
    axis.set_xlabel("samples")
    plt.show()
def dbg_plot_complex(y, title=''):
    """Debug helper: plot real (blue) and imaginary (green) parts of a complex sequence."""
    plt.figure()
    axis = plt.gca()
    axis.set_title(title)
    sample_idx = np.arange(len(y))
    axis.plot(sample_idx, np.real(y), "b", sample_idx, np.imag(y), "g")
    axis.set_xlim([0, len(y)])
    axis.set_xlabel("samples")
    plt.show()
def _plt_to_tf(plot, tag):
    """Convert the current matplotlib plot into a TensorBoard image summary.

    Also dumps a vector copy of the figure to /tmp/tf_<tag>.pdf as a side effect.
    """
    png_buffer = io.BytesIO()
    plot.savefig(png_buffer, format='png')
    plot.savefig("/tmp/tf_" + tag + ".pdf", format='pdf')
    png_buffer.seek(0)
    # Decode the PNG bytes and add a leading batch dimension, as required
    # by tf.summary.image.
    decoded = tf.image.decode_png(png_buffer.getvalue(), channels=4)
    batched = tf.expand_dims(decoded, 0)
    return tf.summary.image(tag, batched, 1)
def plot_values(values, instances_mapping, height=800, width=800, tag="", title="", label=None, backdrop=None):
    """Plot a 1D value series (or a per-class backdrop scatter) as a TF image summary.

    Technique based on
    https://stackoverflow.com/questions/38543850/tensorflow-how-to-display-custom-images-in-tensorboard-e-g-matplotlib-plots

    values: 1D array-like; min-max normalised and drawn as a line when
        `backdrop` is None.
    instances_mapping: object providing map_to_lora_id() to translate class
        indices/labels into LoRa device ids for colors and titles.
    label: optional class label; when given, the line is drawn in that class'
        color and the LoRa id is appended to the title.
    backdrop: optional 2D array (samples x classes); when given, each class
        column is min-max normalised and scattered in the class' color
        instead of plotting `values`.

    Returns the tf.summary.image produced by _plt_to_tf().
    """
    # Configure figure
    dpi = 96
    plt.figure(figsize=(width/dpi, height/dpi), dpi=dpi)
    plt.title(title)
    plot_color = 'gray'
    if label is not None:  # idiom fix: was `if not label is None`
        title += " (LoRa " + str(instances_mapping.map_to_lora_id(label)) + ")"
        plt.title(title)
        plot_color = class_colors[instances_mapping.map_to_lora_id(label)]

    # Plot main values
    if backdrop is None:
        xvalues = range(0, len(values))
        values_normed = (values - values.min(0)) / values.ptp(0)
        plt.plot(xvalues, values_normed, plot_color, alpha=0.7)
    # Plot weights backdrop?
    else:
        num_classes = backdrop.shape[1]
        props = dict(alpha=0.5, edgecolors='none')
        for i in range(0, num_classes):
            class_backdrop = backdrop[0:, i]
            class_backdrop_normed = (class_backdrop - class_backdrop.min(0)) / class_backdrop.ptp(0)
            xvalues = range(0, len(class_backdrop_normed))
            # Get correct color for backdrop
            color = class_colors[instances_mapping.map_to_lora_id(i)]
            plt.scatter(xvalues, class_backdrop_normed, c=color, **props)

    # Organize plot tightly and clamp the x-range to the data length.
    plt.tight_layout()
    ax = plt.gca()
    ax.set_xlim([0, len(xvalues)])
    return _plt_to_tf(plt, tag)
def plot_kernels(kernels, kernel_size, height, width, tag="", title=""):
    """Render learned 1D kernels as a grid of single-row heat-map strips.

    kernels: sequence of 1D kernel weight arrays; len(kernels) must be an
        even multiple of the (currently fixed) column count.
    kernel_size: unused; kept for interface compatibility.
    height, width: figure size in pixels.

    Returns the tf.summary.image produced by _plt_to_tf().
    """
    dpi = 96
    cols = 2  # TODO: Make user definable
    # Bug fix: `/` yields a float on Python 3 and plt.subplots rejects it;
    # `//` keeps the Python 2 integer-division behavior everywhere.
    rows = len(kernels) // cols  # Should be round number
    fig, axes = plt.subplots(rows, cols, sharex='col', sharey='row', figsize=(width/dpi, height/dpi), dpi=dpi)
    # Bug fix: the original called plt.title(title) *before* plt.subplots(),
    # which put the title on a throwaway figure so it never appeared.
    fig.suptitle(title)
    kernel_idx = 0
    for axis_rows in axes:
        for axis_col in axis_rows:
            kernel = kernels[kernel_idx]
            # Show each kernel as a 1-row image strip instead of a line plot.
            kernel_image = kernel.reshape((1, len(kernel)))
            axis_col.imshow(kernel_image, extent=(0, width, 0, 64), interpolation='nearest', cmap=plt.get_cmap('Blues'))
            kernel_idx += 1
    plt.tight_layout()
    return _plt_to_tf(plt, tag)
def plot_weights(weights, real_labels, predictions, expected_values, thresholds, instances_mapping, height=600, width=800, tag="", title="", xlabel="Class A", ylabel="Class B", metrics=None, equal_aspect=False, tf=True):
    """Scatter-plot 2D sample embeddings: solid fill = true class, hollow
    outline = predicted class, red 'x' = sample predicted as adversary (-1).

    weights: (num_points, 2) array of 2D embedding coordinates.
    real_labels, predictions: per-point true / predicted LoRa class ids
        (prediction -1 marks an adversary).
    expected_values, thresholds: per-class cluster centers / radii
        (currently unused -- the circle overlay below is disabled).
    metrics: optional dict with 'accuracy', 'precision', 'recall' (percent)
        overlaid as text on the axes.
    tf: when True return a TensorBoard image summary, otherwise show the
        plot interactively and return None.
    """
    # Configure figure TODO duplicate code fix me
    dpi = 96
    plt.figure(figsize=(width/dpi, height/dpi), dpi=dpi)
    plt.title(title)

    num_points = len(weights)
    num_real_labels = len(real_labels)
    num_predictions = len(predictions)
    # Bug fix: the original chained comparison ended in `!= predictions`,
    # comparing an int against the *list*, so a prediction-count mismatch
    # was never caught.
    if not (num_points == num_real_labels == num_predictions):
        print("[-] Number of points != number of real_labels. That's not good. plot_weights exiting.")
        exit(1)
    if weights.shape[1] != 2:
        print("[-] Can't plot other-than 2D data, continuing without plot.")
        return

    # Draw weights: one filled dot per point (true class), plus either a
    # colored outline (predicted class) or a red cross (adversary).
    real_props = dict(alpha=0.50, edgecolors='none')
    predicted_props = dict(alpha=1.00, facecolors='none')
    adversary_props = dict(alpha=0.50, facecolors='r', marker="x")
    for i in range(0, num_points):
        point = weights[i]
        real_lora_id = real_labels[i]
        predicted_lora_id = predictions[i]
        real_point_color = class_colors[real_lora_id]
        plt.scatter(point[0], point[1], c=real_point_color, **real_props)
        if predicted_lora_id == -1:
            plt.scatter(point[0], point[1], **adversary_props)
        else:
            predicted_point_color = class_colors[predicted_lora_id]
            plt.scatter(point[0], point[1], edgecolors=predicted_point_color, **predicted_props)

    # Draw expected values (per-class threshold circles).
    # TODO: temp disabled until I figure out what to do with this
    """
    for i in range(0, len(expected_values)):
        circle_x = expected_values[i][0]
        circle_y = expected_values[i][1]
        circle = Circle((circle_x, circle_y), thresholds[i], edgecolor=class_colors[instances_mapping.map_to_lora_id(i)], facecolor='none', linewidth=2, alpha=0.5)
        plt.gca().add_patch(circle)
    """

    # Draw legend: one entry per distinct true class, in sorted order.
    patches = []
    seen_labels = []
    for rlabel in sorted(real_labels):
        lora_id = rlabel
        real_color = class_colors[lora_id]
        real_label = class_labels[lora_id]
        if real_label not in seen_labels:
            patches.append(Patch(color=real_color, label=real_label))
            seen_labels.append(real_label)
    plt.legend(loc='upper right', ncol=4, fancybox=True, shadow=True, fontsize=8, handles=patches)

    # Overlay the metrics text on the axes (axes-fraction coordinates).
    if metrics is not None:  # idiom fix: was `if not metrics is None`
        ax = plt.gca()
        metrics_text = 'accuracy: %.2f%%\nprecision: %.2f%%\nrecall: %.2f%%' % (metrics['accuracy'], metrics['precision'], metrics['recall'])
        ax.text(0.01, 0.80, metrics_text,
                verticalalignment='bottom', horizontalalignment='left',
                transform=ax.transAxes, fontsize=10)

    # Set labels
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    # Fix axis aspect ratio.
    if equal_aspect:
        # Bug fix: plt.axes() creates a *new* Axes in modern matplotlib;
        # operate on the current Axes instead.
        plt.gca().set_aspect('equal', 'datalim')
    if tf:
        return _plt_to_tf(plt, tag)
    else:
        plt.show()
| {
"content_hash": "619d08c97b5be9c94f2dfc48f5ac3738",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 285,
"avg_line_length": 37.53092783505155,
"alnum_prop": 0.6162615025408598,
"repo_name": "rpp0/lora-phy-fingerprinting",
"id": "ad6327421fc7a5cc721f6ef9fcd3a08727a208bd",
"size": "7328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "visualization.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "89607"
},
{
"name": "Shell",
"bytes": "6017"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.