repo_name | ref | path | copies | content |
|---|---|---|---|---|
JaDogg/__py_playground | refs/heads/master | reference/parsley/ometa/tube.py | 3 | from ometa.interp import TrampolinedGrammarInterpreter, _feed_me
class TrampolinedParser:
"""
A parser that incrementally parses incoming data.
"""
def __init__(self, grammar, receiver, bindings):
"""
Initializes the parser.
@param grammar: The grammar used to parse the incoming data.
        @param receiver: Responsible for acting on the parsed data.
            Typically, the receiver's methods are invoked from inside the
            grammar, e.g., rule = expr1 expr2 (-> receiver.doSomeStuff())
@param bindings: The namespace that can be accessed inside the grammar.
"""
self.grammar = grammar
self.bindings = dict(bindings)
self.bindings['receiver'] = self.receiver = receiver
self._setupInterp()
def _setupInterp(self):
"""
        Resets the parser. The parser will begin parsing with the rule named
        by the receiver's currentRule attribute.
"""
self._interp = TrampolinedGrammarInterpreter(
grammar=self.grammar, rule=self.receiver.currentRule,
callback=None, globals=self.bindings)
def receive(self, data):
"""
        Receive the incoming data and begin parsing. The parser parses the
        data incrementally according to the receiver's current rule.
@param data: The raw data received.
"""
while data:
status = self._interp.receive(data)
if status is _feed_me:
return
data = ''.join(self._interp.input.data[self._interp.input.position:])
self._setupInterp()
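# Illustrative usage sketch (the grammar object and receiver class below are
# made up, not part of this module): the receiver exposes a currentRule
# attribute plus whatever methods the grammar invokes.
#
#     class Receiver(object):
#         currentRule = 'initial'
#         def doSomeStuff(self):
#             pass
#
#     parser = TrampolinedParser(grammar, Receiver(), bindings={})
#     parser.receive('incoming bytes')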
|
marmarek/pykickstart | refs/heads/master | pykickstart/handlers/rhel7.py | 5 | #
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2012, 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
__all__ = ["RHEL7Handler"]
from pykickstart import commands
from pykickstart.base import BaseHandler
from pykickstart.version import RHEL7
class RHEL7Handler(BaseHandler):
version = RHEL7
commandMap = {
"auth": commands.authconfig.FC3_Authconfig,
"authconfig": commands.authconfig.FC3_Authconfig,
"autopart": commands.autopart.F20_AutoPart,
"autostep": commands.autostep.FC3_AutoStep,
"bootloader": commands.bootloader.RHEL7_Bootloader,
"btrfs": commands.btrfs.RHEL7_BTRFS,
"cdrom": commands.cdrom.FC3_Cdrom,
"clearpart": commands.clearpart.F21_ClearPart,
"cmdline": commands.displaymode.FC3_DisplayMode,
"device": commands.device.F8_Device,
"deviceprobe": commands.deviceprobe.FC3_DeviceProbe,
"dmraid": commands.dmraid.FC6_DmRaid,
"driverdisk": commands.driverdisk.F14_DriverDisk,
"eula": commands.eula.F20_Eula,
"fcoe": commands.fcoe.RHEL7_Fcoe,
"firewall": commands.firewall.F20_Firewall,
"firstboot": commands.firstboot.FC3_Firstboot,
"graphical": commands.displaymode.FC3_DisplayMode,
"group": commands.group.F12_Group,
"halt": commands.reboot.F18_Reboot,
"harddrive": commands.harddrive.FC3_HardDrive,
"ignoredisk": commands.ignoredisk.F14_IgnoreDisk,
"install": commands.upgrade.F11_Upgrade,
"iscsi": commands.iscsi.F17_Iscsi,
"iscsiname": commands.iscsiname.FC6_IscsiName,
"keyboard": commands.keyboard.F18_Keyboard,
"lang": commands.lang.F19_Lang,
"liveimg": commands.liveimg.F19_Liveimg,
"logging": commands.logging.FC6_Logging,
"logvol": commands.logvol.RHEL7_LogVol,
"mediacheck": commands.mediacheck.FC4_MediaCheck,
"method": commands.method.F19_Method,
"multipath": commands.multipath.FC6_MultiPath,
"network": commands.network.RHEL7_Network,
"nfs": commands.nfs.FC6_NFS,
"ostreesetup": commands.ostreesetup.RHEL7_OSTreeSetup,
"part": commands.partition.RHEL7_Partition,
"partition": commands.partition.RHEL7_Partition,
"poweroff": commands.reboot.F18_Reboot,
"raid": commands.raid.RHEL7_Raid,
"realm": commands.realm.F19_Realm,
"reboot": commands.reboot.F18_Reboot,
"repo": commands.repo.RHEL7_Repo,
"reqpart": commands.reqpart.RHEL7_ReqPart,
"rescue": commands.rescue.F10_Rescue,
"rootpw": commands.rootpw.F18_RootPw,
"selinux": commands.selinux.FC3_SELinux,
"services": commands.services.FC6_Services,
"shutdown": commands.reboot.F18_Reboot,
"skipx": commands.skipx.FC3_SkipX,
"sshpw": commands.sshpw.F13_SshPw,
"text": commands.displaymode.FC3_DisplayMode,
"timezone": commands.timezone.F18_Timezone,
"unsupported_hardware": commands.unsupported_hardware.RHEL6_UnsupportedHardware,
"updates": commands.updates.F7_Updates,
"upgrade": commands.upgrade.F20_Upgrade,
"url": commands.url.F18_Url,
"user": commands.user.F19_User,
"vnc": commands.vnc.F9_Vnc,
"volgroup": commands.volgroup.RHEL7_VolGroup,
"xconfig": commands.xconfig.F14_XConfig,
"zerombr": commands.zerombr.F9_ZeroMbr,
"zfcp": commands.zfcp.F14_ZFCP,
}
dataMap = {
"BTRFSData": commands.btrfs.RHEL7_BTRFSData,
"DriverDiskData": commands.driverdisk.F14_DriverDiskData,
"DeviceData": commands.device.F8_DeviceData,
"DmRaidData": commands.dmraid.FC6_DmRaidData,
"FcoeData": commands.fcoe.F13_FcoeData,
"GroupData": commands.group.F12_GroupData,
"IscsiData": commands.iscsi.F17_IscsiData,
"LogVolData": commands.logvol.RHEL7_LogVolData,
"MultiPathData": commands.multipath.FC6_MultiPathData,
"NetworkData": commands.network.RHEL7_NetworkData,
"PartData": commands.partition.RHEL7_PartData,
"RaidData": commands.raid.RHEL7_RaidData,
"RepoData": commands.repo.RHEL7_RepoData,
"SshPwData": commands.sshpw.F13_SshPwData,
"UserData": commands.user.F19_UserData,
"VolGroupData": commands.volgroup.RHEL7_VolGroupData,
"ZFCPData": commands.zfcp.F14_ZFCPData,
}
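# Illustrative usage sketch (the kickstart file path is hypothetical): the
# handler instance is handed to pykickstart's parser, which dispatches each
# command line through commandMap and each data object through dataMap.
#
#     from pykickstart.parser import KickstartParser
#     parser = KickstartParser(RHEL7Handler())
#     parser.readKickstart('/tmp/ks.cfg')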
|
40223141/0505 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/token.py | 743 | """Token constants (from "token.h")."""
__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# ./python Lib/token.py
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
LBRACE = 25
RBRACE = 26
EQEQUAL = 27
NOTEQUAL = 28
LESSEQUAL = 29
GREATEREQUAL = 30
TILDE = 31
CIRCUMFLEX = 32
LEFTSHIFT = 33
RIGHTSHIFT = 34
DOUBLESTAR = 35
PLUSEQUAL = 36
MINEQUAL = 37
STAREQUAL = 38
SLASHEQUAL = 39
PERCENTEQUAL = 40
AMPEREQUAL = 41
VBAREQUAL = 42
CIRCUMFLEXEQUAL = 43
LEFTSHIFTEQUAL = 44
RIGHTSHIFTEQUAL = 45
DOUBLESTAREQUAL = 46
DOUBLESLASH = 47
DOUBLESLASHEQUAL = 48
AT = 49
RARROW = 50
ELLIPSIS = 51
OP = 52
ERRORTOKEN = 53
N_TOKENS = 54
NT_OFFSET = 256
#--end constants--
tok_name = {value: name
for name, value in globals().items()
if isinstance(value, int) and not name.startswith('_')}
__all__.extend(tok_name.values())
def ISTERMINAL(x):
return x < NT_OFFSET
def ISNONTERMINAL(x):
return x >= NT_OFFSET
def ISEOF(x):
return x == ENDMARKER
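# Illustrative check of the helpers above: NAME (1) is below NT_OFFSET so it
# is terminal, NT_OFFSET + 1 is nonterminal, and ENDMARKER (0) marks EOF.
#
#     >>> ISTERMINAL(NAME), ISNONTERMINAL(NT_OFFSET + 1), ISEOF(ENDMARKER)
#     (True, True, True)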
def _main():
import re
import sys
args = sys.argv[1:]
inFileName = args and args[0] or "Include/token.h"
outFileName = "Lib/token.py"
if len(args) > 1:
outFileName = args[1]
try:
fp = open(inFileName)
except IOError as err:
sys.stdout.write("I/O error: %s\n" % str(err))
sys.exit(1)
lines = fp.read().split("\n")
fp.close()
prog = re.compile(
"#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
re.IGNORECASE)
tokens = {}
for line in lines:
match = prog.match(line)
if match:
name, val = match.group(1, 2)
val = int(val)
tokens[val] = name # reverse so we can sort them...
keys = sorted(tokens.keys())
# load the output skeleton from the target:
try:
fp = open(outFileName)
except IOError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(2)
format = fp.read().split("\n")
fp.close()
try:
start = format.index("#--start constants--") + 1
end = format.index("#--end constants--")
except ValueError:
sys.stderr.write("target does not contain format markers")
sys.exit(3)
lines = []
for val in keys:
lines.append("%s = %d" % (tokens[val], val))
format[start:end] = lines
try:
fp = open(outFileName, 'w')
except IOError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(4)
fp.write("\n".join(format))
fp.close()
if __name__ == "__main__":
_main()
|
xiangke/pycopia | refs/heads/master | mibs/pycopia/mibs/FR_MFR_MIB_OID.py | 1 | # python
# This file is generated by a program (mib2py).
import FR_MFR_MIB
OIDMAP = {
'1.3.6.1.2.1.10.47': FR_MFR_MIB.mfrMib,
'1.3.6.1.2.1.10.47.1': FR_MFR_MIB.mfrMibScalarObjects,
'1.3.6.1.2.1.10.47.2': FR_MFR_MIB.mfrMibBundleObjects,
'1.3.6.1.2.1.10.47.3': FR_MFR_MIB.mfrMibBundleLinkObjects,
'1.3.6.1.2.1.10.47.4': FR_MFR_MIB.mfrMibTraps,
'1.3.6.1.2.1.10.47.4.0': FR_MFR_MIB.mfrMibTrapsPrefix,
'1.3.6.1.2.1.10.47.5': FR_MFR_MIB.mfrMibConformance,
'1.3.6.1.2.1.10.47.5.1': FR_MFR_MIB.mfrMibGroups,
'1.3.6.1.2.1.10.47.5.2': FR_MFR_MIB.mfrMibCompliances,
'1.3.6.1.2.1.10.47.1.1': FR_MFR_MIB.mfrBundleMaxNumBundles,
'1.3.6.1.2.1.10.47.1.2': FR_MFR_MIB.mfrBundleNextIndex,
'1.3.6.1.2.1.10.47.2.3.1.1': FR_MFR_MIB.mfrBundleIndex,
'1.3.6.1.2.1.10.47.2.3.1.2': FR_MFR_MIB.mfrBundleIfIndex,
'1.3.6.1.2.1.10.47.2.3.1.3': FR_MFR_MIB.mfrBundleRowStatus,
'1.3.6.1.2.1.10.47.2.3.1.4': FR_MFR_MIB.mfrBundleNearEndName,
'1.3.6.1.2.1.10.47.2.3.1.5': FR_MFR_MIB.mfrBundleFragmentation,
'1.3.6.1.2.1.10.47.2.3.1.6': FR_MFR_MIB.mfrBundleMaxFragSize,
'1.3.6.1.2.1.10.47.2.3.1.7': FR_MFR_MIB.mfrBundleTimerHello,
'1.3.6.1.2.1.10.47.2.3.1.8': FR_MFR_MIB.mfrBundleTimerAck,
'1.3.6.1.2.1.10.47.2.3.1.9': FR_MFR_MIB.mfrBundleCountMaxRetry,
'1.3.6.1.2.1.10.47.2.3.1.10': FR_MFR_MIB.mfrBundleActivationClass,
'1.3.6.1.2.1.10.47.2.3.1.11': FR_MFR_MIB.mfrBundleThreshold,
'1.3.6.1.2.1.10.47.2.3.1.12': FR_MFR_MIB.mfrBundleMaxDiffDelay,
'1.3.6.1.2.1.10.47.2.3.1.13': FR_MFR_MIB.mfrBundleSeqNumSize,
'1.3.6.1.2.1.10.47.2.3.1.14': FR_MFR_MIB.mfrBundleMaxBundleLinks,
'1.3.6.1.2.1.10.47.2.3.1.15': FR_MFR_MIB.mfrBundleLinksConfigured,
'1.3.6.1.2.1.10.47.2.3.1.16': FR_MFR_MIB.mfrBundleLinksActive,
'1.3.6.1.2.1.10.47.2.3.1.17': FR_MFR_MIB.mfrBundleBandwidth,
'1.3.6.1.2.1.10.47.2.3.1.18': FR_MFR_MIB.mfrBundleFarEndName,
'1.3.6.1.2.1.10.47.2.3.1.19': FR_MFR_MIB.mfrBundleResequencingErrors,
'1.3.6.1.2.1.10.47.2.4.1.2': FR_MFR_MIB.mfrBundleIfIndexMappingIndex,
'1.3.6.1.2.1.10.47.3.1.1.1': FR_MFR_MIB.mfrBundleLinkRowStatus,
'1.3.6.1.2.1.10.47.3.1.1.2': FR_MFR_MIB.mfrBundleLinkConfigBundleIndex,
'1.3.6.1.2.1.10.47.3.1.1.3': FR_MFR_MIB.mfrBundleLinkNearEndName,
'1.3.6.1.2.1.10.47.3.1.1.4': FR_MFR_MIB.mfrBundleLinkState,
'1.3.6.1.2.1.10.47.3.1.1.5': FR_MFR_MIB.mfrBundleLinkFarEndName,
'1.3.6.1.2.1.10.47.3.1.1.6': FR_MFR_MIB.mfrBundleLinkFarEndBundleName,
'1.3.6.1.2.1.10.47.3.1.1.7': FR_MFR_MIB.mfrBundleLinkDelay,
'1.3.6.1.2.1.10.47.3.1.1.8': FR_MFR_MIB.mfrBundleLinkFramesControlTx,
'1.3.6.1.2.1.10.47.3.1.1.9': FR_MFR_MIB.mfrBundleLinkFramesControlRx,
'1.3.6.1.2.1.10.47.3.1.1.10': FR_MFR_MIB.mfrBundleLinkFramesControlInvalid,
'1.3.6.1.2.1.10.47.3.1.1.11': FR_MFR_MIB.mfrBundleLinkTimerExpiredCount,
'1.3.6.1.2.1.10.47.3.1.1.12': FR_MFR_MIB.mfrBundleLinkLoopbackSuspected,
'1.3.6.1.2.1.10.47.3.1.1.13': FR_MFR_MIB.mfrBundleLinkUnexpectedSequence,
'1.3.6.1.2.1.10.47.3.1.1.14': FR_MFR_MIB.mfrBundleLinkMismatch,
'1.3.6.1.2.1.10.47.4.0.1': FR_MFR_MIB.mfrMibTrapBundleLinkMismatch,
'1.3.6.1.2.1.10.47.5.1.1': FR_MFR_MIB.mfrMibBundleGroup,
'1.3.6.1.2.1.10.47.5.1.2': FR_MFR_MIB.mfrMibBundleLinkGroup,
'1.3.6.1.2.1.10.47.5.1.3': FR_MFR_MIB.mfrMibTrapGroup,
}
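# Illustrative lookup sketch: a dotted OID string read from an SNMP response
# can be resolved back to its MIB node object via this map.
#
#     node = OIDMAP.get('1.3.6.1.2.1.10.47.1.1')
#     # -> FR_MFR_MIB.mfrBundleMaxNumBundles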
|
paluby/Exercicios | refs/heads/master | backend/appengine/routes/login/passwordless.py | 67 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaecookie.decorator import no_csrf
from gaepermission import facade
from gaepermission.decorator import login_not_required, permissions
from permission_app.model import ADMIN
from tekton import router
from config.template_middleware import TemplateResponse
from tekton.gae.middleware.redirect import RedirectResponse
import settings
from routes import admin
@no_csrf
@login_not_required
def index():
return TemplateResponse()
@login_not_required
def send_email(email, ret_path='/'):
url = settings.APP_URL + router.to_path(check, ret_path=ret_path)
facade.send_passwordless_login_link(email, url).execute()
return RedirectResponse(index)
@no_csrf
@login_not_required
def check(_resp, ticket, ret_path='/'):
facade.login_passwordless(ticket, _resp).execute()
return RedirectResponse(ret_path)
@permissions(ADMIN)
@no_csrf
def form():
app = facade.get_passwordless_app_data().execute().result
dct = {'save_app_path': router.to_path(save), 'app': app}
return TemplateResponse(dct)
@permissions(ADMIN)
def save(app_id, token):
facade.save_or_update_passwordless_app_data(app_id, token).execute()
    return RedirectResponse(admin)
|
endlessm/chromium-browser | refs/heads/master | third_party/angle/third_party/VK-GL-CTS/src/scripts/testset.py | 6 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# drawElements Quality Program utilities
# --------------------------------------
#
# Copyright 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import sys
import random
import string
import subprocess
from optparse import OptionParser
def all (results, predicate):
for result in results:
if not predicate(result):
return False
return True
def any (results, predicate):
for result in results:
if predicate(result):
return True
return False
class FilterRule:
def __init__ (self, name, description, filters):
self.name = name
self.description = description
self.filters = filters
class TestCaseResult:
def __init__ (self, name, results):
self.name = name
self.results = results
class Group:
def __init__ (self, name):
self.name = name
self.cases = []
def readCaseList (filename):
f = open(filename, 'rb')
cases = []
for line in f:
if line[:6] == "TEST: ":
case = line[6:].strip()
if len(case) > 0:
cases.append(case)
return cases
def toResultList (caselist):
results = []
for case in caselist:
results.append(TestCaseResult(case, []))
return results
def addResultsToCaseList (caselist, results):
resultMap = {}
caseListRes = toResultList(caselist)
for res in caseListRes:
resultMap[res.name] = res
for result in results:
if result.name in resultMap:
resultMap[result.name].results += result.results
return caseListRes
def readTestResults (filename):
f = open(filename, 'rb')
csvData = f.read()
csvLines = csvData.splitlines()
results = []
f.close()
for line in csvLines[1:]:
args = line.split(',')
if len(args) == 1:
continue # Ignore
results.append(TestCaseResult(args[0], args[1:]))
if len(results) == 0:
raise Exception("Empty result list")
# Sanity check for results
numResultItems = len(results[0].results)
seenResults = set()
for result in results:
if result.name in seenResults:
raise Exception("Duplicate result row for test case '%s'" % result.name)
if len(result.results) != numResultItems:
raise Exception("Found %d results for test case '%s', expected %d" % (len(result.results), result.name, numResultItems))
seenResults.add(result.name)
return results
def readGroupList (filename):
f = open(filename, 'rb')
groups = []
for line in f:
group = line.strip()
if group != "":
groups.append(group)
return groups
def createGroups (results, groupNames):
groups = []
matched = set()
for groupName in groupNames:
group = Group(groupName)
groups.append(group)
prefix = groupName + "."
prefixLen = len(prefix)
for case in results:
if case.name[:prefixLen] == prefix:
if case in matched:
die("Case '%s' matched by multiple groups (when processing '%s')" % (case.name, group.name))
group.cases.append(case)
matched.add(case)
return groups
def createLeafGroups (results):
groups = []
groupMap = {}
for case in results:
parts = case.name.split('.')
groupName = string.join(parts[:-1], ".")
if not groupName in groupMap:
group = Group(groupName)
groups.append(group)
groupMap[groupName] = group
else:
group = groupMap[groupName]
group.cases.append(case)
return groups
def filterList (results, condition):
filtered = []
for case in results:
if condition(case.results):
filtered.append(case)
return filtered
def getFilter (list, name):
for filter in list:
if filter.name == name:
return filter
return None
def getNumCasesInGroups (groups):
numCases = 0
for group in groups:
numCases += len(group.cases)
return numCases
def getCasesInSet (results, caseSet):
filtered = []
for case in results:
if case in caseSet:
filtered.append(case)
return filtered
def selectCasesInGroups (results, groups):
casesInGroups = set()
for group in groups:
for case in group.cases:
casesInGroups.add(case)
return getCasesInSet(results, casesInGroups)
def selectRandomSubset (results, groups, limit, seed):
selectedCases = set()
numSelect = min(limit, getNumCasesInGroups(groups))
random.seed(seed)
random.shuffle(groups)
groupNdx = 0
while len(selectedCases) < numSelect:
group = groups[groupNdx]
if len(group.cases) == 0:
del groups[groupNdx]
if groupNdx == len(groups):
groupNdx -= 1
continue # Try next
selected = random.choice(group.cases)
selectedCases.add(selected)
group.cases.remove(selected)
groupNdx = (groupNdx + 1) % len(groups)
return getCasesInSet(results, selectedCases)
def die (msg):
print(msg)
sys.exit(-1)
# Named filter lists
FILTER_RULES = [
FilterRule("all", "No filtering", []),
FilterRule("all-pass", "All results must be 'Pass'", [lambda l: all(l, lambda r: r == 'Pass')]),
FilterRule("any-pass", "Any of results is 'Pass'", [lambda l: any(l, lambda r: r == 'Pass')]),
FilterRule("any-fail", "Any of results is not 'Pass' or 'NotSupported'", [lambda l: not all(l, lambda r: r == 'Pass' or r == 'NotSupported')]),
FilterRule("prev-failing", "Any except last result is failure", [lambda l: l[-1] == 'Pass' and not all(l[:-1], lambda r: r == 'Pass')]),
FilterRule("prev-passing", "Any except last result is 'Pass'", [lambda l: l[-1] != 'Pass' and any(l[:-1], lambda r: r == 'Pass')])
]
if __name__ == "__main__":
parser = OptionParser(usage = "usage: %prog [options] [caselist] [result csv file]")
parser.add_option("-f", "--filter", dest="filter", default="all", help="filter rule name")
parser.add_option("-l", "--list", action="store_true", dest="list", default=False, help="list available rules")
parser.add_option("-n", "--num", dest="limit", default=0, help="limit number of cases")
parser.add_option("-s", "--seed", dest="seed", default=0, help="use selected seed for random selection")
parser.add_option("-g", "--groups", dest="groups_file", default=None, help="select cases based on group list file")
(options, args) = parser.parse_args()
if options.list:
print("Available filter rules:")
for filter in FILTER_RULES:
print(" %s: %s" % (filter.name, filter.description))
sys.exit(0)
if len(args) == 0:
die("No input files specified")
elif len(args) > 2:
die("Too many arguments")
# Fetch filter
filter = getFilter(FILTER_RULES, options.filter)
if filter == None:
die("Unknown filter '%s'" % options.filter)
# Read case list
caselist = readCaseList(args[0])
if len(args) > 1:
results = readTestResults(args[1])
results = addResultsToCaseList(caselist, results)
else:
results = toResultList(caselist)
# Execute filters for results
for rule in filter.filters:
results = filterList(results, rule)
if options.limit != 0:
if options.groups_file != None:
groups = createGroups(results, readGroupList(options.groups_file))
else:
groups = createLeafGroups(results)
results = selectRandomSubset(results, groups, int(options.limit), int(options.seed))
elif options.groups_file != None:
groups = createGroups(results, readGroupList(options.groups_file))
results = selectCasesInGroups(results, groups)
# Print test set
for result in results:
print(result.name)
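# Illustrative invocation sketch (file names are hypothetical): select up to
# 100 random previously-failing cases, reproducibly via a fixed seed.
#
#     ./testset.py -f prev-failing -n 100 -s 123 caselist.txt results.csv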
|
Stanford-Online/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/tests/test_annotatable_module.py | 13 | """Module annotatable tests"""
import unittest
from lxml import etree
from mock import Mock
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from xmodule.annotatable_module import AnnotatableModule
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from . import get_test_system
class AnnotatableModuleTestCase(unittest.TestCase):
shard = 1
sample_xml = '''
<annotatable display_name="Iliad">
<instructions>Read the text.</instructions>
<p>
<annotation body="first">Sing</annotation>,
<annotation title="goddess" body="second">O goddess</annotation>,
<annotation title="anger" body="third" highlight="blue">the anger of Achilles son of Peleus</annotation>,
that brought <i>countless</i> ills upon the Achaeans. Many a brave soul did it send
hurrying down to Hades, and many a hero did it yield a prey to dogs and
<div style="font-weight:bold"><annotation body="fourth" problem="4">vultures</annotation>, for so were the counsels
of Jove fulfilled from the day on which the son of Atreus, king of men, and great
Achilles, first fell out with one another.</div>
</p>
<annotation title="footnote" body="the end">The Iliad of Homer by Samuel Butler</annotation>
</annotatable>
'''
def setUp(self):
super(AnnotatableModuleTestCase, self).setUp()
self.annotatable = AnnotatableModule(
Mock(),
get_test_system(),
DictFieldData({'data': self.sample_xml}),
ScopeIds(None, None, None, BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'category', 'name'))
)
def test_annotation_data_attr(self):
el = etree.fromstring('<annotation title="bar" body="foo" problem="0">test</annotation>')
expected_attr = {
'data-comment-body': {'value': 'foo', '_delete': 'body'},
'data-comment-title': {'value': 'bar', '_delete': 'title'},
'data-problem-id': {'value': '0', '_delete': 'problem'}
}
actual_attr = self.annotatable._get_annotation_data_attr(0, el)
self.assertIsInstance(actual_attr, dict)
self.assertDictEqual(expected_attr, actual_attr)
def test_annotation_class_attr_default(self):
xml = '<annotation title="x" body="y" problem="0">test</annotation>'
el = etree.fromstring(xml)
expected_attr = {'class': {'value': 'annotatable-span highlight'}}
actual_attr = self.annotatable._get_annotation_class_attr(0, el)
self.assertIsInstance(actual_attr, dict)
self.assertDictEqual(expected_attr, actual_attr)
def test_annotation_class_attr_with_valid_highlight(self):
xml = '<annotation title="x" body="y" problem="0" highlight="{highlight}">test</annotation>'
for color in self.annotatable.highlight_colors:
el = etree.fromstring(xml.format(highlight=color))
value = 'annotatable-span highlight highlight-{highlight}'.format(highlight=color)
expected_attr = {
'class': {
'value': value,
'_delete': 'highlight'
}
}
actual_attr = self.annotatable._get_annotation_class_attr(0, el)
self.assertIsInstance(actual_attr, dict)
self.assertDictEqual(expected_attr, actual_attr)
def test_annotation_class_attr_with_invalid_highlight(self):
xml = '<annotation title="x" body="y" problem="0" highlight="{highlight}">test</annotation>'
for invalid_color in ['rainbow', 'blink', 'invisible', '', None]:
el = etree.fromstring(xml.format(highlight=invalid_color))
expected_attr = {
'class': {
'value': 'annotatable-span highlight',
'_delete': 'highlight'
}
}
actual_attr = self.annotatable._get_annotation_class_attr(0, el)
self.assertIsInstance(actual_attr, dict)
self.assertDictEqual(expected_attr, actual_attr)
def test_render_annotation(self):
expected_html = '<span class="annotatable-span highlight highlight-yellow" data-comment-title="x" data-comment-body="y" data-problem-id="0">z</span>'
expected_el = etree.fromstring(expected_html)
actual_el = etree.fromstring('<annotation title="x" body="y" problem="0" highlight="yellow">z</annotation>')
self.annotatable._render_annotation(0, actual_el)
self.assertEqual(expected_el.tag, actual_el.tag)
self.assertEqual(expected_el.text, actual_el.text)
self.assertDictEqual(dict(expected_el.attrib), dict(actual_el.attrib))
def test_render_content(self):
content = self.annotatable._render_content()
el = etree.fromstring(content)
self.assertEqual('div', el.tag, 'root tag is a div')
expected_num_annotations = 5
actual_num_annotations = el.xpath('count(//span[contains(@class,"annotatable-span")])')
self.assertEqual(expected_num_annotations, actual_num_annotations, 'check number of annotations')
def test_get_html(self):
context = self.annotatable.get_html()
for key in ['display_name', 'element_id', 'content_html', 'instructions_html']:
self.assertIn(key, context)
def test_extract_instructions(self):
xmltree = etree.fromstring(self.sample_xml)
expected_xml = u"<div>Read the text.</div>"
actual_xml = self.annotatable._extract_instructions(xmltree)
self.assertIsNotNone(actual_xml)
self.assertEqual(expected_xml.strip(), actual_xml.strip())
xmltree = etree.fromstring('<annotatable>foo</annotatable>')
actual = self.annotatable._extract_instructions(xmltree)
self.assertIsNone(actual)
|
Cyan4973/zstd | refs/heads/master | build/meson/tests/valgrindTest.py | 6 | #!/usr/bin/env python3
# #############################################################################
# Copyright (c) 2018-present lzutao <taolzu(at)gmail.com>
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# #############################################################################
import os
import subprocess
import tempfile
def valgrindTest(valgrind, datagen, fuzzer, zstd, fullbench):
VALGRIND_ARGS = [valgrind, '--leak-check=full', '--show-leak-kinds=all', '--error-exitcode=1']
print('\n ---- valgrind tests : memory analyzer ----')
subprocess.check_call([*VALGRIND_ARGS, datagen, '-g50M'], stdout=subprocess.DEVNULL)
if subprocess.call([*VALGRIND_ARGS, zstd],
stdout=subprocess.DEVNULL) == 0:
        raise subprocess.CalledProcessError(0, zstd, 'zstd without argument should have failed')
with subprocess.Popen([datagen, '-g80'], stdout=subprocess.PIPE) as p1, \
subprocess.Popen([*VALGRIND_ARGS, zstd, '-', '-c'],
stdin=p1.stdout,
stdout=subprocess.DEVNULL) as p2:
p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.
p2.communicate()
if p2.returncode != 0:
            raise subprocess.CalledProcessError(p2.returncode, p2.args)
with subprocess.Popen([datagen, '-g16KB'], stdout=subprocess.PIPE) as p1, \
subprocess.Popen([*VALGRIND_ARGS, zstd, '-vf', '-', '-c'],
stdin=p1.stdout,
stdout=subprocess.DEVNULL) as p2:
p1.stdout.close()
p2.communicate()
if p2.returncode != 0:
            raise subprocess.CalledProcessError(p2.returncode, p2.args)
with tempfile.NamedTemporaryFile() as tmp_fd:
with subprocess.Popen([datagen, '-g2930KB'], stdout=subprocess.PIPE) as p1, \
subprocess.Popen([*VALGRIND_ARGS, zstd, '-5', '-vf', '-', '-o', tmp_fd.name],
stdin=p1.stdout) as p2:
p1.stdout.close()
p2.communicate()
if p2.returncode != 0:
                raise subprocess.CalledProcessError(p2.returncode, p2.args)
subprocess.check_call([*VALGRIND_ARGS, zstd, '-vdf', tmp_fd.name, '-c'],
stdout=subprocess.DEVNULL)
with subprocess.Popen([datagen, '-g64MB'], stdout=subprocess.PIPE) as p1, \
subprocess.Popen([*VALGRIND_ARGS, zstd, '-vf', '-', '-c'],
stdin=p1.stdout,
stdout=subprocess.DEVNULL) as p2:
p1.stdout.close()
p2.communicate()
if p2.returncode != 0:
            raise subprocess.CalledProcessError(p2.returncode, p2.args)
subprocess.check_call([*VALGRIND_ARGS, fuzzer, '-T1mn', '-t1'])
subprocess.check_call([*VALGRIND_ARGS, fullbench, '-i1'])
def main():
import argparse
parser = argparse.ArgumentParser(description='Valgrind tests : memory analyzer')
parser.add_argument('valgrind', help='valgrind path')
parser.add_argument('zstd', help='zstd path')
parser.add_argument('datagen', help='datagen path')
parser.add_argument('fuzzer', help='fuzzer path')
parser.add_argument('fullbench', help='fullbench path')
args = parser.parse_args()
valgrind = args.valgrind
zstd = args.zstd
datagen = args.datagen
fuzzer = args.fuzzer
fullbench = args.fullbench
valgrindTest(valgrind, datagen, fuzzer, zstd, fullbench)
if __name__ == '__main__':
main()
|
denys-duchier/django | refs/heads/master | tests/migrations/routers.py | 102 | class EmptyRouter:
pass
class TestRouter:
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
The Tribble model should be the only one to appear in the 'other' db.
"""
if model_name == 'tribble':
return db == 'other'
elif db == 'other':
return False
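# Illustrative wiring sketch (the dotted path is hypothetical): Django
# consults routers in order via the DATABASE_ROUTERS setting, e.g. in a
# settings module:
#
#     DATABASE_ROUTERS = ['migrations.routers.TestRouter']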
|
VasuAgrawal/tartanHacks2015 | refs/heads/master | site/flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py | 2039 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
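# Illustrative behaviour sketch: insertion order is preserved by iteration
# and popitem() pops in LIFO order by default.
#
#     >>> od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
#     >>> od.keys()
#     ['a', 'b', 'c']
#     >>> od.popitem()
#     ('c', 3)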
|
ajslater/picopt | refs/heads/develop | picopt/rc/settings_base.py | 1 | """Settings class for picopt."""
from abc import ABC
from argparse import Namespace
from pathlib import Path
from typing import Optional
from ruamel.yaml import YAML
class SettingsBase(Namespace, ABC):
"""Global settings base class."""
_YAML = YAML()
@property
@classmethod
def _RC_NAME(cls): # noqa: N802
"""Children must implement the _RC_NAME."""
raise NotImplementedError()
def __init__(
self,
arg_namespace: Optional[Namespace] = None,
rc_path: Optional[Path] = None,
check_programs: bool = False,
) -> None:
"""Initialize settings object with arguments namespace."""
self.arg_namespace = arg_namespace
if check_programs:
# only do this once for the whole system
self._config_program_reqs()
self.load_settings(rc_path)
def clone(self, path: Path):
"""Clone this settings for a new path."""
return type(self)(self.arg_namespace, path)
def _update(self, namespace: Optional[Namespace]) -> None:
"""Update settings with a dict."""
if not namespace:
return
for key, val in namespace.__dict__.items():
if key.startswith("_"):
continue
setattr(self, key, val)
def load_settings(self, path: Optional[Path]) -> None:
"""Load settings for a path."""
if path is not None:
rc_namespace = self.load_rc(path)
            # rc settings write over defaults
self._update(rc_namespace)
# passed in args overwrite rc
self._update(self.arg_namespace)
def load_rc(self, path: Path) -> Namespace:
"""Load an rc file, searching recursively upwards."""
if path.is_file():
path = path.parent
rc_path = path / self._RC_NAME
if rc_path.is_file():
try:
rc_settings = self._YAML.load(rc_path)
for attr in self._SET_ATTRS:
attr_list = rc_settings.get(attr)
if attr_list is not None:
rc_settings[attr] = set(attr_list)
return Namespace(**rc_settings)
except Exception as exc:
print(f"Error parsing {rc_path}")
print(exc)
if path == path.parent:
# at root /, no rc found.
res = Namespace()
else:
res = self.load_rc(path.parent)
return res
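# Minimal subclass sketch (the file and attribute names are illustrative, not
# picopt's actual values): a concrete settings class supplies the rc file
# name and the set-valued keys that load_rc() converts from lists.
#
#     class ExampleSettings(SettingsBase):
#         _RC_NAME = ".examplerc.yaml"
#         _SET_ATTRS = ("formats",)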
|
strint/tensorflow | refs/heads/master | tensorflow/python/kernel_tests/summary_audio_op_test.py | 134 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for summary sound op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import summary_pb2
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
from tensorflow.python.summary import summary
class SummaryAudioOpTest(test.TestCase):
def _AsSummary(self, s):
summ = summary_pb2.Summary()
summ.ParseFromString(s)
return summ
def _CheckProto(self, audio_summ, sample_rate, num_channels, length_frames):
"""Verify that the non-audio parts of the audio_summ proto match shape."""
# Only the first 3 sounds are returned.
for v in audio_summ.value:
v.audio.ClearField("encoded_audio_string")
expected = "\n".join("""
value {
tag: "snd/audio/%d"
audio { content_type: "audio/wav" sample_rate: %d
num_channels: %d length_frames: %d }
}""" % (i, sample_rate, num_channels, length_frames) for i in xrange(3))
self.assertProtoEquals(expected, audio_summ)
def testAudioSummary(self):
np.random.seed(7)
for channels in (1, 2, 5, 8):
with self.test_session(graph=ops.Graph()) as sess:
num_frames = 7
shape = (4, num_frames, channels)
# Generate random audio in the range [-1.0, 1.0).
const = 2.0 * np.random.random(shape) - 1.0
# Summarize
sample_rate = 8000
summ = summary.audio(
"snd", const, max_outputs=3, sample_rate=sample_rate)
value = sess.run(summ)
self.assertEqual([], summ.get_shape())
audio_summ = self._AsSummary(value)
# Check the rest of the proto
self._CheckProto(audio_summ, sample_rate, channels, num_frames)
if __name__ == "__main__":
test.main()
|
nitin-cherian/LifeLongLearning | refs/heads/master | Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/nbformat/v2/tests/test_nbpy.py | 28 | from unittest import TestCase
from ..nbbase import (
NotebookNode,
new_code_cell, new_text_cell, new_worksheet, new_notebook
)
from ..nbpy import reads, writes
from .nbexamples import nb0, nb0_py
class TestPy(TestCase):
def test_write(self):
s = writes(nb0)
self.assertEqual(s,nb0_py)
|
vladmm/intellij-community | refs/heads/master | python/testData/inspections/PyUnboundLocalVariableInspection/DefaultArgument.py | 83 | def f():
z = 2
def g(z=z): #pass
return z
return g
|
ghutchis/cclib | refs/heads/master | test/test_parser.py | 5 | # This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2015, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""Run parser unit tests for cclib."""
import sys
import unittest
sys.path.append('parser')
from testccio import *
from testdata import *
from testlogfileparser import *
from testutils import *
if __name__ == "__main__":
unittest.main()
|
yencarnacion/jaikuengine | refs/heads/master | common/profile.py | 33 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from django import template
from django.conf import settings
from django.template import loader
PROFILE_ALL_TESTS = False
enabled = False
def start():
global enabled
enabled = True
return
def stop():
global enabled
enabled = False
return
def clear():
global storage
storage = []
default_label = 'default'
current_label = 'general'
class Label(object):
""" convenience class for clearing a label """
name = None
previous = None
def __init__(self, name, previous=default_label):
self.name = name
self.previous = previous
def start(self):
global current_label
current_label = self.name
start()
def stop(self):
global current_label
current_label = self.previous
stop()
def label(name):
""" for labeling a section of profile data to associate it with
some specific call or whatever
"""
l = Label(name, current_label)
l.start()
return l
# decorators for timing individual calls
def _log_call(f, tag='general'):
call_name = f.func_name
def _wrap(self, *args, **kw):
if enabled:
call_self = self
start_time = time.time()
rv = f(self, *args, **kw)
if enabled:
time_diff = round(time.time() - start_time, 5) * 1000
store_call(call_self, call_name, tag=tag, time_ms=time_diff)
return rv
_wrap.func_name = call_name
return _wrap
def log_call(tag):
def _deco(f):
f = _log_call(f, tag=tag)
return f
return _deco
def log_write(f):
return _log_call(f, tag='write')
def log_read(f):
return _log_call(f, tag='read')
def profiled(f):
  def _wrap(*args, **kw):
    start()
    rv = f(*args, **kw)
    stop()
    return rv
_wrap.func_name = f.func_name
return _wrap
def _log_api_call(f, call_self, tag='api'):
call_name = f.func_name
def _wrap(*args, **kw):
if enabled:
start_time = time.time()
rv = f(*args, **kw)
if enabled:
time_diff = round(time.time() - start_time, 5) * 1000
store_call(call_self, call_name, tag=tag, time_ms=time_diff)
return rv
_wrap.func_name = call_name
return _wrap
def install_api_profiling():
from common import api
for k in dir(api):
f = getattr(api, k)
if type(f) != type(log_call):
continue
setattr(api, k, _log_api_call(f, api, tag='api'))
def flattened(header=False):
o = []
if header:
o.append(('tag', 'call', 'time_ms'))
o.extend(storage)
return o
def csv(header=False):
rv = flattened(header)
csv = '\n'.join([','.join([str(cell) for cell in row]) for row in rv])
return csv
def html():
# our storage looks like:
# label, tag, class_func_key, time_ms
# assumes a single label for now
o = {}
total_ms = 0.0
for (label, tag, class_func_key, time_ms) in storage:
o.setdefault(tag, {'sub': {}, 'time_ms': 0.0, 'count': 0})
o[tag]['sub'].setdefault(class_func_key, {'each': [], 'time_ms': 0.0})
o[tag]['sub'][class_func_key]['each'].append(time_ms)
o[tag]['sub'][class_func_key]['time_ms'] += time_ms
o[tag]['time_ms'] += time_ms
o[tag]['count'] += 1
total_ms += time_ms
for tag in o:
# sort the calls by name
o[tag]['sub'] = sorted(o[tag]['sub'].iteritems(), key=lambda x: x[0])
c = template.Context({'timing': o, 'total': total_ms})
t = loader.get_template('common/templates/profiling.html')
return t.render(c)
storage = []
def store_call(call_class, call_name, tag='general', time_ms=0.0):
global current_label
global enabled
if not enabled:
return
class_name = getattr(call_class,
'__name__',
getattr(call_class.__class__, '__name__')
)
call_class = class_name
class_func_key = "%s.%s" % (call_class, call_name)
storage.append((current_label, tag, class_func_key, time_ms))
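# Typical flow (sketch; all names refer to this module): call start(), run
# code whose methods are decorated with @log_read/@log_write, then dump the
# collected rows with csv(header=True) or html(), and call stop()/clear().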
|
40223236/2015examQ1 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/unittest/test/_test_warnings.py | 858 | # helper module for test_runner.Test_TextTestRunner.test_warnings
"""
This module has a number of tests that raise different kinds of warnings.
When the tests are run, the warnings are caught and their messages are printed
to stdout. This module also accepts an arg that is then passed to
unittest.main to affect the behavior of warnings.
Test_TextTestRunner.test_warnings executes this script with different
combinations of warnings args and -W flags and checks that the output is correct.
See #10535.
"""
import sys
import unittest
import warnings
def warnfun():
warnings.warn('rw', RuntimeWarning)
class TestWarnings(unittest.TestCase):
# unittest warnings will be printed at most once per type (max one message
# for the fail* methods, and one for the assert* methods)
def test_assert(self):
self.assertEquals(2+2, 4)
self.assertEquals(2*2, 4)
self.assertEquals(2**2, 4)
def test_fail(self):
self.failUnless(1)
self.failUnless(True)
def test_other_unittest(self):
self.assertAlmostEqual(2+2, 4)
self.assertNotAlmostEqual(4+4, 2)
# these warnings are normally silenced, but they are printed in unittest
def test_deprecation(self):
warnings.warn('dw', DeprecationWarning)
warnings.warn('dw', DeprecationWarning)
warnings.warn('dw', DeprecationWarning)
def test_import(self):
warnings.warn('iw', ImportWarning)
warnings.warn('iw', ImportWarning)
warnings.warn('iw', ImportWarning)
# user warnings should always be printed
def test_warning(self):
warnings.warn('uw')
warnings.warn('uw')
warnings.warn('uw')
# these warnings come from the same place; they will be printed
# only once by default or three times if the 'always' filter is used
def test_function(self):
warnfun()
warnfun()
warnfun()
if __name__ == '__main__':
with warnings.catch_warnings(record=True) as ws:
# if an arg is provided pass it to unittest.main as 'warnings'
if len(sys.argv) == 2:
unittest.main(exit=False, warnings=sys.argv.pop())
else:
unittest.main(exit=False)
# print all the warning messages collected
for w in ws:
print(w.message)
|
andmaj/rrr-size | refs/heads/master | sdsl-lite/lib/src/sdsl-lite/external/gtest-1.6.0/test/gtest_env_var_test.py | 2408 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
|
linkitspa/l10n-italy | refs/heads/10.0 | l10n_it_vat_registries_split_payment/models/account_tax.py | 1 | # -*- coding: utf-8 -*-
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import models
class AccountTax(models.Model):
_inherit = 'account.tax'
def _compute_totals_tax(self, data):
res = super(AccountTax, self)._compute_totals_tax(data)
if self.is_split_payment:
# res is (tax_name, base, tax, deductible, undeductible)
# so, in case of SP, SP VAT must not appear as deductible
return (res[0], res[1], res[2], 0.0, res[4])
return res
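# Illustrative effect (figures invented): with is_split_payment set, a totals
# tuple of ('22% SP', 100.0, 22.0, 22.0, 0.0) comes back as
# ('22% SP', 100.0, 22.0, 0.0, 0.0): the VAT amount is kept but reported as
# not deductible.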
|
MichaelDrogalis/ansible | refs/heads/devel | lib/ansible/executor/__init__.py | 7690 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
rusthon/Rusthon | refs/heads/master | regtests/loop/for_loop.py | 3 | from runtime import *
'''
for loop tests
'''
def main():
a = [1,2,3]
y = 0
for x in a:
y += x
assert( y==6 )
z = ''
arr = ['a', 'b', 'c']
for v in arr:
z += v
assert( z == 'abc' )
b = False
if 'a' in arr:
b = True
assert( b == True )
s = 'hello world'
z = ''
for char in iter(s):
z += char
assert( z == 'hello world' )
b = False
if 'hello' in s:
b = True
assert( b==True )
print 'testing for loop over dict'
ob = {'a' : 'A', 'b' : 'B'}
k = ''
v = ''
for key in iter(ob):
k += key
v += ob[key]
print k
print v
assert(k=='ab' or k=='ba')
assert(v=='AB' or v=='BA')
keys = []
values = []
for x,y in ob.items():
keys.append( x )
values.append( y )
assert( 'a' in keys )
assert( 'A' in values )
ob2 = {'c':'C', 'd':'D'}
e = 0
arr = []
for x,y in ob.items():
arr.append(x)
arr.append(y)
for w,z in ob2.items():
e += 1
arr.append(w)
arr.append(z)
assert( e==4 )
assert( 'a' in arr)
assert( 'b' in arr)
assert( 'A' in arr)
assert( 'B' in arr)
assert( 'c' in arr)
assert( 'C' in arr)
assert( 'd' in arr)
assert( 'D' in arr)
main()
|
qzxx-syzz/ernst | refs/heads/master | Lobby/Lobby/settings.py | 1 | """
Django settings for Lobby project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y@=#w7-evkk44!1obl0-4k04754r3g!etw8fy%r(hrpoey4tl8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'sudoku',
'gobang',
'main',
'chatroom',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'Lobby.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Lobby.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
chris-chris/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/datasets/base_test.py | 136 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.platform import test
mock = test.mock
_TIMEOUT = IOError(110, "timeout")
class BaseTest(test.TestCase):
"""Test load csv functions."""
def testUrlretrieveRetriesOnIOError(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
_TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, None
]
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert full backoff was tried
actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
expected_list = [1, 2, 4, 8, 16]
for actual, expected in zip(actual_list, expected_list):
self.assertLessEqual(abs(actual - expected), 0.25 * expected)
self.assertEqual(len(actual_list), len(expected_list))
def testUrlretrieveRaisesAfterRetriesAreExhausted(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
]
with self.assertRaises(IOError):
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert full backoff was tried
actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
expected_list = [1, 2, 4, 8, 16]
for actual, expected in zip(actual_list, expected_list):
self.assertLessEqual(abs(actual - expected), 0.25 * expected)
self.assertEqual(len(actual_list), len(expected_list))
def testUrlretrieveRaisesOnNonRetriableErrorWithoutRetry(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
IOError(2, "No such file or directory"),
]
with self.assertRaises(IOError):
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert no retries
self.assertFalse(mock_time.called)
if __name__ == "__main__":
test.main()
|
htranx/KivyMD | refs/heads/master | kivymd/ripplebehavior.py | 1 | # -*- coding: utf-8 -*-
from kivy.properties import ListProperty, NumericProperty, StringProperty, \
BooleanProperty
from kivy.animation import Animation
from kivy.graphics import Color, Ellipse, StencilPush, StencilPop, \
StencilUse, StencilUnUse, Rectangle
class CommonRipple(object):
ripple_rad = NumericProperty()
ripple_rad_default = NumericProperty(1)
ripple_pos = ListProperty()
ripple_color = ListProperty()
ripple_alpha = NumericProperty(.5)
ripple_scale = NumericProperty(None)
ripple_duration_in_fast = NumericProperty(.3)
# FIXME: These speeds should be calculated based on widget size in dp
ripple_duration_in_slow = NumericProperty(2)
ripple_duration_out = NumericProperty(.3)
ripple_func_in = StringProperty('out_quad')
ripple_func_out = StringProperty('out_quad')
doing_ripple = BooleanProperty(False)
finishing_ripple = BooleanProperty(False)
fading_out = BooleanProperty(False)
def on_touch_down(self, touch):
if touch.is_mouse_scrolling:
return False
if not self.collide_point(touch.x, touch.y):
return False
if not self.disabled:
if self.doing_ripple:
Animation.cancel_all(self, 'ripple_rad', 'ripple_color',
'rect_color')
self.anim_complete()
self.ripple_rad = self.ripple_rad_default
self.ripple_pos = (touch.x, touch.y)
if self.ripple_color != []:
pass
elif hasattr(self, 'theme_cls'):
self.ripple_color = self.theme_cls.ripple_color
else:
# If no theme, set Grey 300
self.ripple_color = [0.8784313725490196, 0.8784313725490196,
0.8784313725490196, self.ripple_alpha]
self.ripple_color[3] = self.ripple_alpha
self.lay_canvas_instructions()
self.finish_rad = max(self.width, self.height) * self.ripple_scale
self.start_ripple()
return super(CommonRipple, self).on_touch_down(touch)
def lay_canvas_instructions(self):
raise NotImplementedError
def on_touch_move(self, touch, *args):
if not self.collide_point(touch.x, touch.y):
if not self.finishing_ripple and self.doing_ripple:
self.finish_ripple()
return super(CommonRipple, self).on_touch_move(touch, *args)
def on_touch_up(self, touch):
if self.collide_point(touch.x, touch.y) and self.doing_ripple:
self.finish_ripple()
return super(CommonRipple, self).on_touch_up(touch)
def start_ripple(self):
if not self.doing_ripple:
anim = Animation(
ripple_rad=self.finish_rad,
t='linear',
duration=self.ripple_duration_in_slow)
anim.bind(on_complete=self.fade_out)
self.doing_ripple = True
anim.start(self)
def _set_ellipse(self, instance, value):
self.ellipse.size = (self.ripple_rad, self.ripple_rad)
# Adjust ellipse pos here
def _set_color(self, instance, value):
self.col_instruction.a = value[3]
def finish_ripple(self):
if self.doing_ripple and not self.finishing_ripple:
Animation.cancel_all(self, 'ripple_rad')
anim = Animation(ripple_rad=self.finish_rad,
t=self.ripple_func_in,
duration=self.ripple_duration_in_fast)
anim.bind(on_complete=self.fade_out)
self.finishing_ripple = True
anim.start(self)
def fade_out(self, *args):
rc = self.ripple_color
if not self.fading_out:
Animation.cancel_all(self, 'ripple_color')
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out,
duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
self.fading_out = True
anim.start(self)
def anim_complete(self, *args):
self.doing_ripple = False
self.finishing_ripple = False
self.fading_out = False
self.canvas.after.clear()
class RectangularRippleBehavior(CommonRipple):
ripple_scale = NumericProperty(2.75)
def lay_canvas_instructions(self):
with self.canvas.after:
StencilPush()
Rectangle(pos=self.pos, size=self.size)
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = \
Ellipse(size=(self.ripple_rad, self.ripple_rad),
pos=(self.ripple_pos[0] - self.ripple_rad / 2.,
self.ripple_pos[1] - self.ripple_rad / 2.))
StencilUnUse()
Rectangle(pos=self.pos, size=self.size)
StencilPop()
self.bind(ripple_color=self._set_color,
ripple_rad=self._set_ellipse)
def _set_ellipse(self, instance, value):
super(RectangularRippleBehavior, self)._set_ellipse(instance, value)
self.ellipse.pos = (self.ripple_pos[0] - self.ripple_rad / 2.,
self.ripple_pos[1] - self.ripple_rad / 2.)
class CircularRippleBehavior(CommonRipple):
ripple_scale = NumericProperty(1)
def lay_canvas_instructions(self):
with self.canvas.after:
StencilPush()
self.stencil = Ellipse(size=(self.width * self.ripple_scale,
self.height * self.ripple_scale),
pos=(self.center_x - (self.width * self.ripple_scale) / 2,
self.center_y - (self.height * self.ripple_scale) / 2))
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(self.ripple_rad, self.ripple_rad),
pos=(self.center_x - self.ripple_rad / 2.,
self.center_y - self.ripple_rad / 2.))
StencilUnUse()
Ellipse(pos=self.pos, size=self.size)
StencilPop()
self.bind(ripple_color=self._set_color,
ripple_rad=self._set_ellipse)
def _set_ellipse(self, instance, value):
super(CircularRippleBehavior, self)._set_ellipse(instance, value)
if self.ellipse.size[0] > self.width * .6 and not self.fading_out:
self.fade_out()
self.ellipse.pos = (self.center_x - self.ripple_rad / 2.,
self.center_y - self.ripple_rad / 2.)
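# A minimal usage sketch (illustrative only; RippleButton is an assumption, not
# part of this module): mix a ripple behavior into any touchable Kivy widget,
# listing the behavior first so its on_touch_* methods run before the widget's.
#
# from kivy.uix.button import Button
#
# class RippleButton(RectangularRippleBehavior, Button):
#     pass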
|
nevercast/home-assistant | refs/heads/dev | homeassistant/remote.py | 6 | """
homeassistant.remote
~~~~~~~~~~~~~~~~~~~~
A module containing drop in replacements for core parts that will interface
with a remote instance of Home Assistant.
If a connection error occurs while communicating with the API a
HomeAssistantError will be raised.
For more details about the Python API, please refer to the documentation at
https://home-assistant.io/developers/python_api/
"""
import threading
import logging
import json
import enum
import urllib.parse
import requests
import homeassistant.core as ha
from homeassistant.exceptions import HomeAssistantError
import homeassistant.bootstrap as bootstrap
from homeassistant.const import (
SERVER_PORT, HTTP_HEADER_HA_AUTH, URL_API, URL_API_STATES,
URL_API_STATES_ENTITY, URL_API_EVENTS, URL_API_EVENTS_EVENT,
URL_API_SERVICES, URL_API_SERVICES_SERVICE, URL_API_EVENT_FORWARD)
METHOD_GET = "get"
METHOD_POST = "post"
METHOD_DELETE = "delete"
_LOGGER = logging.getLogger(__name__)
class APIStatus(enum.Enum):
""" Represents API status. """
# pylint: disable=no-init,invalid-name,too-few-public-methods
OK = "ok"
INVALID_PASSWORD = "invalid_password"
CANNOT_CONNECT = "cannot_connect"
UNKNOWN = "unknown"
def __str__(self):
return self.value
class API(object):
""" Object to pass around Home Assistant API location and credentials. """
# pylint: disable=too-few-public-methods
def __init__(self, host, api_password=None, port=None, use_ssl=False):
self.host = host
self.port = port or SERVER_PORT
self.api_password = api_password
if use_ssl:
self.base_url = "https://{}:{}".format(host, self.port)
else:
self.base_url = "http://{}:{}".format(host, self.port)
self.status = None
self._headers = {}
if api_password is not None:
self._headers[HTTP_HEADER_HA_AUTH] = api_password
def validate_api(self, force_validate=False):
""" Tests if we can communicate with the API. """
if self.status is None or force_validate:
self.status = validate_api(self)
return self.status == APIStatus.OK
def __call__(self, method, path, data=None):
""" Makes a call to the Home Assistant API. """
if data is not None:
data = json.dumps(data, cls=JSONEncoder)
url = urllib.parse.urljoin(self.base_url, path)
try:
if method == METHOD_GET:
return requests.get(
url, params=data, timeout=5, headers=self._headers)
else:
return requests.request(
method, url, data=data, timeout=5, headers=self._headers)
except requests.exceptions.ConnectionError:
_LOGGER.exception("Error connecting to server")
raise HomeAssistantError("Error connecting to server")
except requests.exceptions.Timeout:
error = "Timeout when talking to {}".format(self.host)
_LOGGER.exception(error)
raise HomeAssistantError(error)
def __repr__(self):
return "API({}, {}, {})".format(
self.host, self.api_password, self.port)
class HomeAssistant(ha.HomeAssistant):
""" Home Assistant that forwards work. """
# pylint: disable=super-init-not-called,too-many-instance-attributes
def __init__(self, remote_api, local_api=None):
if not remote_api.validate_api():
raise HomeAssistantError(
"Remote API at {}:{} not valid: {}".format(
remote_api.host, remote_api.port, remote_api.status))
self.remote_api = remote_api
self.pool = pool = ha.create_worker_pool()
self.bus = EventBus(remote_api, pool)
self.services = ha.ServiceRegistry(self.bus, pool)
self.states = StateMachine(self.bus, self.remote_api)
self.config = ha.Config()
self.config.api = local_api
def start(self):
# Ensure a local API exists to connect with remote
if self.config.api is None:
if not bootstrap.setup_component(self, 'api'):
raise HomeAssistantError(
'Unable to setup local API to receive events')
ha.create_timer(self)
self.bus.fire(ha.EVENT_HOMEASSISTANT_START,
origin=ha.EventOrigin.remote)
# Setup that events from remote_api get forwarded to local_api
# Do this after we fire START, otherwise HTTP is not started
if not connect_remote_events(self.remote_api, self.config.api):
raise HomeAssistantError((
'Could not setup event forwarding from api {} to '
'local api {}').format(self.remote_api, self.config.api))
def stop(self):
""" Stops Home Assistant and shuts down all threads. """
_LOGGER.info("Stopping")
self.bus.fire(ha.EVENT_HOMEASSISTANT_STOP,
origin=ha.EventOrigin.remote)
# Disconnect master event forwarding
disconnect_remote_events(self.remote_api, self.config.api)
# Wait till all responses to homeassistant_stop are done
self.pool.block_till_done()
self.pool.stop()
class EventBus(ha.EventBus):
""" EventBus implementation that forwards fire_event to remote API. """
# pylint: disable=too-few-public-methods
def __init__(self, api, pool=None):
super().__init__(pool)
self._api = api
def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local):
""" Forward local events to remote target,
handles remote event as usual. """
# All local events that are not TIME_CHANGED are forwarded to API
if origin == ha.EventOrigin.local and \
event_type != ha.EVENT_TIME_CHANGED:
fire_event(self._api, event_type, event_data)
else:
super().fire(event_type, event_data, origin)
class EventForwarder(object):
""" Listens for events and forwards to specified APIs. """
def __init__(self, hass, restrict_origin=None):
self.hass = hass
self.restrict_origin = restrict_origin
# We use a tuple (host, port) as key to ensure
# that we do not forward to the same host twice
self._targets = {}
self._lock = threading.Lock()
def connect(self, api):
"""
Attach to a Home Assistant instance and forward events.
Will overwrite old target if one exists with same host/port.
"""
with self._lock:
if len(self._targets) == 0:
# First target we get, setup listener for events
self.hass.bus.listen(ha.MATCH_ALL, self._event_listener)
key = (api.host, api.port)
self._targets[key] = api
def disconnect(self, api):
""" Removes target from being forwarded to. """
with self._lock:
key = (api.host, api.port)
did_remove = self._targets.pop(key, None) is not None
if len(self._targets) == 0:
# Remove event listener if no forwarding targets present
self.hass.bus.remove_listener(ha.MATCH_ALL,
self._event_listener)
return did_remove
def _event_listener(self, event):
""" Listen and forwards all events. """
with self._lock:
# We don't forward time events or, if enabled, non-local events
if event.event_type == ha.EVENT_TIME_CHANGED or \
(self.restrict_origin and event.origin != self.restrict_origin):
return
for api in self._targets.values():
fire_event(api, event.event_type, event.data)
class StateMachine(ha.StateMachine):
"""
Fires set events to an API.
Uses state_change events to track states.
"""
def __init__(self, bus, api):
super().__init__(None)
self._api = api
self.mirror()
bus.listen(ha.EVENT_STATE_CHANGED, self._state_changed_listener)
def set(self, entity_id, new_state, attributes=None):
""" Calls set_state on remote API . """
set_state(self._api, entity_id, new_state, attributes)
def mirror(self):
""" Discards current data and mirrors the remote state machine. """
self._states = {state.entity_id: state for state
in get_states(self._api)}
def _state_changed_listener(self, event):
""" Listens for state changed events and applies them. """
self._states[event.data['entity_id']] = event.data['new_state']
class JSONEncoder(json.JSONEncoder):
""" JSONEncoder that supports Home Assistant objects. """
# pylint: disable=too-few-public-methods,method-hidden
def default(self, obj):
""" Converts Home Assistant objects and hands
other objects to the original method. """
if hasattr(obj, 'as_dict'):
return obj.as_dict()
try:
return json.JSONEncoder.default(self, obj)
except TypeError:
# If the JSON serializer couldn't serialize it
# it might be a generator, convert it to a list
try:
return [self.default(child_obj)
for child_obj in obj]
except TypeError:
# Ok, we're lost, cause the original error
return json.JSONEncoder.default(self, obj)
def validate_api(api):
""" Makes a call to validate API. """
try:
req = api(METHOD_GET, URL_API)
if req.status_code == 200:
return APIStatus.OK
elif req.status_code == 401:
return APIStatus.INVALID_PASSWORD
else:
return APIStatus.UNKNOWN
except HomeAssistantError:
return APIStatus.CANNOT_CONNECT
def connect_remote_events(from_api, to_api):
""" Sets up from_api to forward all events to to_api. """
data = {
'host': to_api.host,
'api_password': to_api.api_password,
'port': to_api.port
}
try:
req = from_api(METHOD_POST, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error setting up event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error setting up event forwarding")
return False
def disconnect_remote_events(from_api, to_api):
""" Disconnects forwarding events from from_api to to_api. """
data = {
'host': to_api.host,
'port': to_api.port
}
try:
req = from_api(METHOD_DELETE, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error removing event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error removing an event forwarder")
return False
def get_event_listeners(api):
""" List of events that is being listened for. """
try:
req = api(METHOD_GET, URL_API_EVENTS)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Unexpected result retrieving event listeners")
return {}
def fire_event(api, event_type, data=None):
""" Fire an event at remote API. """
try:
req = api(METHOD_POST, URL_API_EVENTS_EVENT.format(event_type), data)
if req.status_code != 200:
_LOGGER.error("Error firing event: %d - %d",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error firing event")
def get_state(api, entity_id):
""" Queries given API for state of entity_id. """
try:
req = api(METHOD_GET, URL_API_STATES_ENTITY.format(entity_id))
# req.status_code == 422 if entity does not exist
return ha.State.from_dict(req.json()) \
if req.status_code == 200 else None
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching state")
return None
def get_states(api):
""" Queries given API for all states. """
try:
req = api(METHOD_GET,
URL_API_STATES)
return [ha.State.from_dict(item) for
item in req.json()]
except (HomeAssistantError, ValueError, AttributeError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching states")
return []
def set_state(api, entity_id, new_state, attributes=None):
"""
Tells API to update state for entity_id.
Returns True if success.
"""
attributes = attributes or {}
data = {'state': new_state,
'attributes': attributes}
try:
req = api(METHOD_POST,
URL_API_STATES_ENTITY.format(entity_id),
data)
if req.status_code not in (200, 201):
_LOGGER.error("Error changing state: %d - %s",
req.status_code, req.text)
return False
else:
return True
except HomeAssistantError:
_LOGGER.exception("Error setting state")
return False
def is_state(api, entity_id, state):
""" Queries API to see if entity_id is specified state. """
cur_state = get_state(api, entity_id)
return cur_state and cur_state.state == state
def get_services(api):
"""
Returns a list of dicts. Each dict has a string "domain" and
a list of strings "services".
"""
try:
req = api(METHOD_GET, URL_API_SERVICES)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Got unexpected services result")
return {}
def call_service(api, domain, service, service_data=None):
""" Calls a service at the remote API. """
try:
req = api(METHOD_POST,
URL_API_SERVICES_SERVICE.format(domain, service),
service_data)
if req.status_code != 200:
_LOGGER.error("Error calling service: %d - %s",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error calling service")
|
lauosi/coworkok | refs/heads/master | cowork/migrations/0001_initial.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Company',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('name', models.CharField(max_length=100)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='companies')),
],
),
migrations.CreateModel(
name='Desk',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('rent_start_date', models.DateTimeField(null=True)),
('rent_end_date', models.DateTimeField(null=True)),
],
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('city', models.CharField(max_length=200)),
('total_desks', models.IntegerField(verbose_name='Total desks')),
('reserved_desks', models.IntegerField(verbose_name='Reserved desks')),
('price', models.DecimalField(decimal_places=2, verbose_name='Price per desk $', max_digits=12)),
('company', models.ForeignKey(to='cowork.Company', related_name='locations')),
],
),
migrations.AddField(
model_name='desk',
name='location',
field=models.OneToOneField(to='cowork.Location', related_name='desks'),
),
migrations.AddField(
model_name='desk',
name='owner',
field=models.OneToOneField(to=settings.AUTH_USER_MODEL, related_name='desks', null=True),
),
]
|
consulo/consulo-python | refs/heads/master | plugin/src/main/dist/helpers/pydev/pydevd.py | 1 | '''
Entry point module (keep at root):
This module starts the debugger.
'''
from __future__ import nested_scopes # Jython 2.1 support
import atexit
import os
import sys
import traceback
from _pydevd_bundle.pydevd_constants import IS_JYTH_LESS25, IS_PY3K, IS_PY34_OLDER, get_thread_id, dict_keys, dict_pop, dict_contains, \
dict_iter_items, DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame, xrange, \
clear_cached_thread_id
from _pydev_bundle import fix_getpass
from _pydev_bundle import pydev_imports, pydev_log
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
from _pydev_imps._pydev_saved_modules import threading
from _pydev_imps._pydev_saved_modules import time
from _pydev_imps._pydev_saved_modules import thread
from _pydevd_bundle import pydevd_io, pydevd_vm_type, pydevd_tracing
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo
from _pydevd_bundle.pydevd_breakpoints import ExceptionBreakpoint, update_exception_hook
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO, CMD_STEP_OVER, \
CMD_STEP_RETURN, CMD_STEP_INTO_MY_CODE, CMD_THREAD_SUSPEND, CMD_RUN_TO_LINE, \
CMD_ADD_EXCEPTION_BREAK, CMD_SMART_STEP_INTO, InternalConsoleExec, NetCommandFactory, \
PyDBDaemonThread, _queue, ReaderThread, GetGlobalDebugger, get_global_debugger, \
set_global_debugger, WriterThread, pydevd_find_thread_by_id, pydevd_log, \
start_client, start_server, InternalGetBreakpointException, InternalSendCurrExceptionTrace, \
InternalSendCurrExceptionTraceProceeded
from _pydevd_bundle.pydevd_custom_frames import CustomFramesContainer, custom_frames_container_init
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads
from _pydevd_bundle.pydevd_trace_dispatch import trace_dispatch as _trace_dispatch
from _pydevd_bundle.pydevd_utils import save_main_module
from pydevd_concurrency_analyser.pydevd_concurrency_logger import ThreadingLogger, AsyncioLogger, send_message, cur_time
from pydevd_concurrency_analyser.pydevd_thread_wrappers import wrap_threads
__version_info__ = (0, 0, 5)
__version_info_str__ = []
for v in __version_info__:
__version_info_str__.append(str(v))
__version__ = '.'.join(__version_info_str__)
#IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
SUPPORT_PLUGINS = not IS_JYTH_LESS25
PluginManager = None
if SUPPORT_PLUGINS:
from _pydevd_bundle.pydevd_plugin_utils import PluginManager
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.currentThread
try:
'dummy'.encode('utf-8') # Added because otherwise Jython 2.2.1 wasn't finding the encoding (if it wasn't loaded in the main thread).
except:
pass
connected = False
bufferStdOutToServer = False
bufferStdErrToServer = False
remote = False
file_system_encoding = getfilesystemencoding()
#=======================================================================================================================
# PyDBCommandThread
#=======================================================================================================================
class PyDBCommandThread(PyDBDaemonThread):
def __init__(self, py_db):
PyDBDaemonThread.__init__(self)
self._py_db_command_thread_event = py_db._py_db_command_thread_event
self.py_db = py_db
self.setName('pydevd.CommandThread')
def _on_run(self):
for i in xrange(1, 10):
time.sleep(0.5) #this one will only start later on (otherwise we may not have any non-daemon threads yet)
if self.killReceived:
return
if self.pydev_do_not_trace:
self.py_db.SetTrace(None) # no debugging on this thread
try:
while not self.killReceived:
try:
self.py_db.process_internal_commands()
except:
pydevd_log(0, 'Finishing debug communication...(2)')
self._py_db_command_thread_event.clear()
self._py_db_command_thread_event.wait(0.5)
except:
pydev_log.debug(sys.exc_info()[0])
#only got this error in interpreter shutdown
#pydevd_log(0, 'Finishing debug communication...(3)')
#=======================================================================================================================
# CheckOutputThread
# Non-daemonic thread guarantees that all data is written even if the program has finished
#=======================================================================================================================
class CheckOutputThread(PyDBDaemonThread):
def __init__(self, py_db):
PyDBDaemonThread.__init__(self)
self.py_db = py_db
self.setName('pydevd.CheckAliveThread')
self.daemon = False
py_db.output_checker = self
def _on_run(self):
if self.pydev_do_not_trace:
disable_tracing = True
if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
# don't run untraced threads if we're in jython 2.2.1 or lower
# jython bug: if we start a thread and another thread changes the tracing facility
# it affects other threads (it's not set only for the thread but globally)
# Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
disable_tracing = False
if disable_tracing:
pydevd_tracing.SetTrace(None) # no debugging on this thread
while not self.killReceived:
time.sleep(0.3)
if not self.py_db.has_threads_alive() and self.py_db.writer.empty() \
and not has_data_to_redirect():
try:
pydev_log.debug("No alive threads, finishing debug session")
self.py_db.finish_debugging_session()
kill_all_pydev_threads()
except:
traceback.print_exc()
self.killReceived = True
self.py_db.check_output_redirect()
def do_kill_pydev_thread(self):
self.killReceived = True
#=======================================================================================================================
# PyDB
#=======================================================================================================================
class PyDB:
""" Main debugging class
Lots of stuff going on here:
PyDB starts two threads on startup that connect to remote debugger (RDB)
The threads continuously read & write commands to RDB.
PyDB communicates with these threads through command queues.
Every RDB command is processed by calling process_net_command.
Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue
Some commands need to be executed on the right thread (suspend/resume & friends)
These are placed on the internal command queue.
"""
def __init__(self):
set_global_debugger(self)
pydevd_tracing.replace_sys_set_trace_func()
self.reader = None
self.writer = None
self.output_checker = None
self.quitting = None
self.cmd_factory = NetCommandFactory()
self._cmd_queue = {} # the hash of Queues. Key is thread id, value is the internal command queue for that thread
self.breakpoints = {}
self.file_to_id_to_line_breakpoint = {}
self.file_to_id_to_plugin_breakpoint = {}
# Note: breakpoints dict should not be mutated: a copy should be created
# and later it should be assigned back (to prevent concurrency issues).
self.break_on_uncaught_exceptions = {}
self.break_on_caught_exceptions = {}
self.ready_to_run = False
self._main_lock = thread.allocate_lock()
self._lock_running_thread_ids = thread.allocate_lock()
self._py_db_command_thread_event = threading.Event()
CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event
self._finish_debugging_session = False
self._termination_event_set = False
self.signature_factory = None
self.SetTrace = pydevd_tracing.SetTrace
self.break_on_exceptions_thrown_in_same_context = False
self.ignore_exceptions_thrown_in_lines_with_ignore_exception = True
# Suspend debugger even if breakpoint condition raises an exception
SUSPEND_ON_BREAKPOINT_EXCEPTION = True
self.suspend_on_breakpoint_exception = SUSPEND_ON_BREAKPOINT_EXCEPTION
# By default user can step into properties getter/setter/deleter methods
self.disable_property_trace = False
self.disable_property_getter_trace = False
self.disable_property_setter_trace = False
self.disable_property_deleter_trace = False
#this is a dict of thread ids pointing to thread objects. Whenever a command is passed to the java end that
#acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not
#find that thread alive anymore, we must remove it from this dict and let the java side know that the thread
#was killed.
self._running_thread_ids = {}
self._set_breakpoints_with_id = False
# This attribute holds the file-> lines which have an @IgnoreException.
self.filename_to_lines_where_exceptions_are_ignored = {}
#working with plugins (lazily initialized)
self.plugin = None
self.has_plugin_line_breaks = False
self.has_plugin_exception_breaks = False
self.thread_analyser = None
self.asyncio_analyser = None
# matplotlib support in debugger and debug console
self.mpl_in_use = False
self.mpl_hooks_in_debug_console = False
self.mpl_modules_for_patching = {}
self._filename_to_not_in_scope = {}
self.first_breakpoint_reached = False
self.is_filter_enabled = pydevd_utils.is_filter_enabled()
self.is_filter_libraries = pydevd_utils.is_filter_libraries()
self.show_return_values = False
self.remove_return_values_flag = False
def get_plugin_lazy_init(self):
if self.plugin is None and SUPPORT_PLUGINS:
self.plugin = PluginManager(self)
return self.plugin
def not_in_scope(self, filename):
return pydevd_utils.not_in_project_roots(filename)
def is_ignored_by_filters(self, filename):
return pydevd_utils.is_ignored_by_filter(filename)
def first_appearance_in_scope(self, trace):
if trace is None or self.not_in_scope(trace.tb_frame.f_code.co_filename):
return False
else:
trace = trace.tb_next
while trace is not None:
frame = trace.tb_frame
if not self.not_in_scope(frame.f_code.co_filename):
return False
trace = trace.tb_next
return True
def has_threads_alive(self):
for t in threadingEnumerate():
if getattr(t, 'is_pydev_daemon_thread', False):
#Important: Jython 2.5rc4 has a bug where a thread created with thread.start_new_thread won't be
#set as a daemon thread, so, we also have to check for the 'is_pydev_daemon_thread' flag.
#See: https://github.com/fabioz/PyDev.Debugger/issues/11
continue
if isinstance(t, PyDBDaemonThread):
pydev_log.error_once(
'Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n')
if is_thread_alive(t):
if not t.isDaemon() or hasattr(t, "__pydevd_main_thread"):
return True
return False
def finish_debugging_session(self):
self._finish_debugging_session = True
def initialize_network(self, sock):
try:
sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it
except:
pass
self.writer = WriterThread(sock)
self.reader = ReaderThread(sock)
self.writer.start()
self.reader.start()
time.sleep(0.1) # give threads time to start
def connect(self, host, port):
if host:
s = start_client(host, port)
else:
s = start_server(port)
self.initialize_network(s)
def get_internal_queue(self, thread_id):
""" returns internal command queue for a given thread.
if new queue is created, notify the RDB about it """
if thread_id.startswith('__frame__'):
thread_id = thread_id[thread_id.rfind('|') + 1:]
try:
return self._cmd_queue[thread_id]
except KeyError:
return self._cmd_queue.setdefault(thread_id, _queue.Queue()) #@UndefinedVariable
def post_internal_command(self, int_cmd, thread_id):
""" if thread_id is *, post to all """
if thread_id == "*":
threads = threadingEnumerate()
for t in threads:
thread_id = get_thread_id(t)
queue = self.get_internal_queue(thread_id)
queue.put(int_cmd)
else:
queue = self.get_internal_queue(thread_id)
queue.put(int_cmd)
def check_output_redirect(self):
global bufferStdOutToServer
global bufferStdErrToServer
if bufferStdOutToServer:
init_stdout_redirect()
self.check_output(sys.stdoutBuf, 1) #@UndefinedVariable
if bufferStdErrToServer:
init_stderr_redirect()
self.check_output(sys.stderrBuf, 2) #@UndefinedVariable
def check_output(self, out, outCtx):
'''Checks the output to see if we have to send some buffered output to the debug server
@param out: sys.stdout or sys.stderr
@param outCtx: the context indicating: 1=stdout and 2=stderr (to know the colors to write it)
'''
try:
v = out.getvalue()
if v:
self.cmd_factory.make_io_message(v, outCtx, self)
except:
traceback.print_exc()
def init_matplotlib_in_debug_console(self):
# import hook and patches for matplotlib support in debug console
from _pydev_bundle.pydev_import_hook import import_hook_manager
for module in dict_keys(self.mpl_modules_for_patching):
import_hook_manager.add_module_name(module, dict_pop(self.mpl_modules_for_patching, module))
def init_matplotlib_support(self):
# prepare debugger for integration with matplotlib GUI event loop
from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot, do_enable_gui
# enable_gui_function in activate_matplotlib should be called in main thread. Unlike integrated console,
# in the debug console we have no interpreter instance with exec_queue, but we run this code in the main
# thread and can call it directly.
class _MatplotlibHelper:
_return_control_osc = False
def return_control():
# Some of the input hooks (e.g. Qt4Agg) check return control without doing
# a single operation, so we don't return True on every
# call when the debug hook is in place to allow the GUI to run
_MatplotlibHelper._return_control_osc = not _MatplotlibHelper._return_control_osc
return _MatplotlibHelper._return_control_osc
from pydev_ipython.inputhook import set_return_control_callback
set_return_control_callback(return_control)
self.mpl_modules_for_patching = {"matplotlib": lambda: activate_matplotlib(do_enable_gui),
"matplotlib.pyplot": activate_pyplot,
"pylab": activate_pylab }
def suspend_all_other_threads(self, thread_suspended_at_bp):
all_threads = threadingEnumerate()
for t in all_threads:
if getattr(t, 'is_pydev_daemon_thread', False):
pass # I.e.: skip the DummyThreads created from pydev daemon threads
elif hasattr(t, 'pydev_do_not_trace'):
pass # skip some other threads, i.e. ipython history saving thread from debug console
else:
if t is thread_suspended_at_bp:
continue
additional_info = None
try:
additional_info = t.additional_info
except AttributeError:
pass # that's ok, no info currently set
if additional_info is not None:
for frame in additional_info.iter_frames(t):
self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True)
del frame
self.set_suspend(t, CMD_THREAD_SUSPEND)
else:
sys.stderr.write("Can't suspend thread: %s\n" % (t,))
def process_internal_commands(self):
'''This function processes internal commands
'''
self._main_lock.acquire()
try:
self.check_output_redirect()
curr_thread_id = get_thread_id(threadingCurrentThread())
program_threads_alive = {}
all_threads = threadingEnumerate()
program_threads_dead = []
self._lock_running_thread_ids.acquire()
try:
for t in all_threads:
if getattr(t, 'is_pydev_daemon_thread', False):
pass # I.e.: skip the DummyThreads created from pydev daemon threads
elif isinstance(t, PyDBDaemonThread):
pydev_log.error_once('Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n')
elif is_thread_alive(t):
if not self._running_thread_ids:
# Fix multiprocessing debug with breakpoints in both main and child processes
# (https://youtrack.jetbrains.com/issue/PY-17092) When the new process is created, the main
# thread in the new process already has the attribute 'pydevd_id', so the new thread doesn't
# get new id with its process number and the debugger loses access to both threads.
# Therefore we should update thread_id for every main thread in the new process.
# TODO: Investigate: should we do this for all threads in threading.enumerate()?
# (i.e.: if a fork happens on Linux, this seems likely).
old_thread_id = get_thread_id(t)
clear_cached_thread_id(t)
clear_cached_thread_id(threadingCurrentThread())
thread_id = get_thread_id(t)
curr_thread_id = get_thread_id(threadingCurrentThread())
if pydevd_vars.has_additional_frames_by_id(old_thread_id):
frames_by_id = pydevd_vars.get_additional_frames_by_id(old_thread_id)
pydevd_vars.add_additional_frame_by_id(thread_id, frames_by_id)
else:
thread_id = get_thread_id(t)
program_threads_alive[thread_id] = t
if not dict_contains(self._running_thread_ids, thread_id):
if not hasattr(t, 'additional_info'):
# see http://sourceforge.net/tracker/index.php?func=detail&aid=1955428&group_id=85796&atid=577329
# Let's create the additional info right away!
t.additional_info = PyDBAdditionalThreadInfo()
self._running_thread_ids[thread_id] = t
self.writer.add_command(self.cmd_factory.make_thread_created_message(t))
queue = self.get_internal_queue(thread_id)
cmdsToReadd = [] # some commands must be processed by the thread itself... if that's the case,
# we will re-add the commands to the queue after executing.
try:
while True:
int_cmd = queue.get(False)
if not self.mpl_hooks_in_debug_console and isinstance(int_cmd, InternalConsoleExec):
# add import hooks for matplotlib patches if only debug console was started
try:
self.init_matplotlib_in_debug_console()
self.mpl_in_use = True
except:
pydevd_log(2, "Matplotlib support in debug console failed", traceback.format_exc())
self.mpl_hooks_in_debug_console = True
if int_cmd.can_be_executed_by(curr_thread_id):
pydevd_log(2, "processing internal command ", str(int_cmd))
int_cmd.do_it(self)
else:
pydevd_log(2, "NOT processing internal command ", str(int_cmd))
cmdsToReadd.append(int_cmd)
except _queue.Empty: #@UndefinedVariable
for int_cmd in cmdsToReadd:
queue.put(int_cmd)
# this is how we exit
thread_ids = list(self._running_thread_ids.keys())
for tId in thread_ids:
if not dict_contains(program_threads_alive, tId):
program_threads_dead.append(tId)
finally:
self._lock_running_thread_ids.release()
for tId in program_threads_dead:
try:
self._process_thread_not_alive(tId)
except:
sys.stderr.write('Error iterating through %s (%s) - %s\n' % (
program_threads_alive, program_threads_alive.__class__, dir(program_threads_alive)))
raise
if len(program_threads_alive) == 0:
self.finish_debugging_session()
for t in all_threads:
if hasattr(t, 'do_kill_pydev_thread'):
t.do_kill_pydev_thread()
finally:
self._main_lock.release()
def set_tracing_for_untraced_contexts(self, ignore_frame=None, overwrite_prev_trace=False):
# Enable the tracing for existing threads (because there may be frames being executed that
# are currently untraced).
threads = threadingEnumerate()
try:
for t in threads:
if getattr(t, 'is_pydev_daemon_thread', False):
continue
# TODO: optimize so that we only actually add that tracing if it's in
# the new breakpoint context.
additional_info = None
try:
additional_info = t.additional_info
except AttributeError:
pass # that's ok, no info currently set
if additional_info is not None:
for frame in additional_info.iter_frames(t):
if frame is not ignore_frame:
self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=overwrite_prev_trace)
finally:
frame = None
t = None
threads = None
additional_info = None
def consolidate_breakpoints(self, file, id_to_breakpoint, breakpoints):
break_dict = {}
for breakpoint_id, pybreakpoint in dict_iter_items(id_to_breakpoint):
break_dict[pybreakpoint.line] = pybreakpoint
breakpoints[file] = break_dict
def add_break_on_exception(
self,
exception,
notify_always,
notify_on_terminate,
notify_on_first_raise_only,
ignore_libraries=False
):
try:
eb = ExceptionBreakpoint(
exception,
notify_always,
notify_on_terminate,
notify_on_first_raise_only,
ignore_libraries
)
except ImportError:
pydev_log.error("Error unable to add break on exception for: %s (exception could not be imported)\n" % (exception,))
return None
if eb.notify_on_terminate:
cp = self.break_on_uncaught_exceptions.copy()
cp[exception] = eb
if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
pydev_log.error("Exceptions to hook on terminate: %s\n" % (cp,))
self.break_on_uncaught_exceptions = cp
if eb.notify_always:
cp = self.break_on_caught_exceptions.copy()
cp[exception] = eb
if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
pydev_log.error("Exceptions to hook always: %s\n" % (cp,))
self.break_on_caught_exceptions = cp
return eb
def update_after_exceptions_added(self, added):
updated_on_caught = False
updated_on_uncaught = False
for eb in added:
if not updated_on_uncaught and eb.notify_on_terminate:
updated_on_uncaught = True
update_exception_hook(self)
if not updated_on_caught and eb.notify_always:
updated_on_caught = True
self.set_tracing_for_untraced_contexts()
def _process_thread_not_alive(self, threadId):
""" if thread is not alive, cancel trace_dispatch processing """
self._lock_running_thread_ids.acquire()
try:
thread = self._running_thread_ids.pop(threadId, None)
if thread is None:
return
wasNotified = thread.additional_info.pydev_notify_kill
if not wasNotified:
thread.additional_info.pydev_notify_kill = True
finally:
self._lock_running_thread_ids.release()
cmd = self.cmd_factory.make_thread_killed_message(threadId)
self.writer.add_command(cmd)
def set_suspend(self, thread, stop_reason):
thread.additional_info.suspend_type = PYTHON_SUSPEND
thread.additional_info.pydev_state = STATE_SUSPEND
thread.stop_reason = stop_reason
# If conditional breakpoint raises any exception during evaluation send details to Java
if stop_reason == CMD_SET_BREAK and self.suspend_on_breakpoint_exception:
self._send_breakpoint_condition_exception(thread)
def _send_breakpoint_condition_exception(self, thread):
"""If conditional breakpoint raises an exception during evaluation
send exception details to java
"""
thread_id = get_thread_id(thread)
conditional_breakpoint_exception_tuple = thread.additional_info.conditional_breakpoint_exception
# conditional_breakpoint_exception_tuple - should contain 2 values (exception_type, stacktrace)
if conditional_breakpoint_exception_tuple and len(conditional_breakpoint_exception_tuple) == 2:
exc_type, stacktrace = conditional_breakpoint_exception_tuple
int_cmd = InternalGetBreakpointException(thread_id, exc_type, stacktrace)
# Reset the conditional_breakpoint_exception details to None
thread.additional_info.conditional_breakpoint_exception = None
self.post_internal_command(int_cmd, thread_id)
def send_caught_exception_stack(self, thread, arg, curr_frame_id):
"""Sends details on the exception which was caught (and where we stopped) to the java side.
arg is: exception type, description, traceback object
"""
thread_id = get_thread_id(thread)
int_cmd = InternalSendCurrExceptionTrace(thread_id, arg, curr_frame_id)
self.post_internal_command(int_cmd, thread_id)
def send_caught_exception_stack_proceeded(self, thread):
"""Sends that some thread was resumed and is no longer showing an exception trace.
"""
thread_id = get_thread_id(thread)
int_cmd = InternalSendCurrExceptionTraceProceeded(thread_id)
self.post_internal_command(int_cmd, thread_id)
self.process_internal_commands()
def send_process_created_message(self):
"""Sends a message that a new process has been created.
"""
cmd = self.cmd_factory.make_process_created_message()
self.writer.add_command(cmd)
def do_wait_suspend(self, thread, frame, event, arg): #@UnusedVariable
""" busy waits until the thread state changes to RUN
it expects thread's state as attributes of the thread.
Upon running, processes any outstanding Stepping commands.
"""
self.process_internal_commands()
message = thread.additional_info.pydev_message
cmd = self.cmd_factory.make_thread_suspend_message(get_thread_id(thread), frame, thread.stop_reason, message)
self.writer.add_command(cmd)
CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable
try:
from_this_thread = []
for frame_id, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
if custom_frame.thread_id == thread.ident:
# print >> sys.stderr, 'Frame created: ', frame_id
self.writer.add_command(self.cmd_factory.make_custom_frame_created_message(frame_id, custom_frame.name))
self.writer.add_command(self.cmd_factory.make_thread_suspend_message(frame_id, custom_frame.frame, CMD_THREAD_SUSPEND, ""))
from_this_thread.append(frame_id)
finally:
CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable
imported = False
info = thread.additional_info
if info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session:
# before every stop check if matplotlib modules were imported inside script code
if len(self.mpl_modules_for_patching) > 0:
for module in dict_keys(self.mpl_modules_for_patching):
if module in sys.modules:
activate_function = dict_pop(self.mpl_modules_for_patching, module)
activate_function()
self.mpl_in_use = True
while info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session:
if self.mpl_in_use:
# call input hooks if only matplotlib is in use
try:
if not imported:
from pydev_ipython.inputhook import get_inputhook
imported = True
inputhook = get_inputhook()
if inputhook:
inputhook()
except:
pass
self.process_internal_commands()
time.sleep(0.01)
# process any stepping instructions
if info.pydev_step_cmd == CMD_STEP_INTO or info.pydev_step_cmd == CMD_STEP_INTO_MY_CODE:
info.pydev_step_stop = None
info.pydev_smart_step_stop = None
elif info.pydev_step_cmd == CMD_STEP_OVER:
info.pydev_step_stop = frame
info.pydev_smart_step_stop = None
self.set_trace_for_frame_and_parents(frame)
elif info.pydev_step_cmd == CMD_SMART_STEP_INTO:
self.set_trace_for_frame_and_parents(frame)
info.pydev_step_stop = None
info.pydev_smart_step_stop = frame
elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT:
self.set_trace_for_frame_and_parents(frame)
if event == 'line' or event == 'exception':
#If we're already in the correct context, we have to stop it now, because we can act only on
#line events -- if a return was the next statement it wouldn't work (so, we have this code
#repeated at pydevd_frame).
stop = False
curr_func_name = frame.f_code.co_name
#global context is set with an empty name
if curr_func_name in ('?', '<module>'):
curr_func_name = ''
if curr_func_name == info.pydev_func_name:
line = info.pydev_next_line
if frame.f_lineno == line:
stop = True
else:
if frame.f_trace is None:
frame.f_trace = self.trace_dispatch
frame.f_lineno = line
frame.f_trace = None
stop = True
if stop:
info.pydev_state = STATE_SUSPEND
self.do_wait_suspend(thread, frame, event, arg)
return
elif info.pydev_step_cmd == CMD_STEP_RETURN:
back_frame = frame.f_back
if back_frame is not None:
# steps back to the same frame (in a return call it will stop in the 'back frame' for the user)
info.pydev_step_stop = frame
self.set_trace_for_frame_and_parents(frame)
else:
# No back frame?!? -- this happens in jython when we have some frame created from an awt event
# (the previous frame would be the awt event, but this doesn't make part of 'jython', only 'java')
# so, if we're doing a step return in this situation, it's the same as just making it run
info.pydev_step_stop = None
info.pydev_step_cmd = -1
info.pydev_state = STATE_RUN
del frame
cmd = self.cmd_factory.make_thread_run_message(get_thread_id(thread), info.pydev_step_cmd)
self.writer.add_command(cmd)
CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable
try:
# The ones that remained on last_running must now be removed.
for frame_id in from_this_thread:
# print >> sys.stderr, 'Removing created frame: ', frame_id
self.writer.add_command(self.cmd_factory.make_thread_killed_message(frame_id))
finally:
CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable
def handle_post_mortem_stop(self, thread, frame, frames_byid, exception):
pydev_log.debug("We are stopping in post-mortem\n")
thread_id = get_thread_id(thread)
pydevd_vars.add_additional_frame_by_id(thread_id, frames_byid)
try:
try:
add_exception_to_frame(frame, exception)
self.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
self.do_wait_suspend(thread, frame, 'exception', None)
except:
pydev_log.error("We've got an error while stopping in post-mortem: %s\n"%sys.exc_info()[0])
finally:
pydevd_vars.remove_additional_frame_by_id(thread_id)
def set_trace_for_frame_and_parents(self, frame, also_add_to_passed_frame=True, overwrite_prev_trace=False, dispatch_func=None):
if dispatch_func is None:
dispatch_func = self.trace_dispatch
if also_add_to_passed_frame:
self.update_trace(frame, dispatch_func, overwrite_prev_trace)
frame = frame.f_back
while frame:
self.update_trace(frame, dispatch_func, overwrite_prev_trace)
frame = frame.f_back
del frame
def update_trace(self, frame, dispatch_func, overwrite_prev):
if frame.f_trace is None:
frame.f_trace = dispatch_func
else:
if overwrite_prev:
frame.f_trace = dispatch_func
else:
try:
#If it's the trace_exception, go back to the frame trace dispatch!
if frame.f_trace.im_func.__name__ == 'trace_exception':
frame.f_trace = frame.f_trace.im_self.trace_dispatch
except AttributeError:
pass
frame = frame.f_back
del frame
def prepare_to_run(self):
''' Shared code to prepare debugging by installing traces and registering threads '''
self.patch_threads()
pydevd_tracing.SetTrace(self.trace_dispatch)
PyDBCommandThread(self).start()
if self.signature_factory is not None or self.thread_analyser is not None:
# we need all data to be sent to IDE even after program finishes
CheckOutputThread(self).start()
def patch_threads(self):
try:
# not available in jython!
import threading
threading.settrace(self.trace_dispatch) # for all future threads
except:
pass
from _pydev_bundle.pydev_monkey import patch_thread_modules
patch_thread_modules()
def get_fullname(self, mod_name):
if IS_PY3K:
import pkgutil
else:
from _pydev_imps import _pydev_pkgutil_old as pkgutil
try:
loader = pkgutil.get_loader(mod_name)
except:
return None
if loader is not None:
for attr in ("get_filename", "_get_filename"):
meth = getattr(loader, attr, None)
if meth is not None:
return meth(mod_name)
return None
def run(self, file, globals=None, locals=None, module=False, set_trace=True):
if module:
filename = self.get_fullname(file)
if filename is None:
sys.stderr.write("No module named %s\n" % file)
return
else:
file = filename
if os.path.isdir(file):
new_target = os.path.join(file, '__main__.py')
if os.path.isfile(new_target):
file = new_target
if globals is None:
m = save_main_module(file, 'pydevd')
globals = m.__dict__
try:
globals['__builtins__'] = __builtins__
except NameError:
pass # Not there on Jython...
if locals is None:
locals = globals
if set_trace:
# Predefined (writable) attributes: __name__ is the module's name;
# __doc__ is the module's documentation string, or None if unavailable;
# __file__ is the pathname of the file from which the module was loaded,
# if it was loaded from a file. The __file__ attribute is not present for
# C modules that are statically linked into the interpreter; for extension modules
# loaded dynamically from a shared library, it is the pathname of the shared library file.
# I think this is an ugly hack, but it works (seems to) for the bug that says that sys.path should be the same in
# debug and run.
if m.__file__.startswith(sys.path[0]):
# print >> sys.stderr, 'Deleting: ', sys.path[0]
del sys.path[0]
# now, the local directory has to be added to the pythonpath
# sys.path.insert(0, os.getcwd())
# Changed: it's not the local directory, but the directory of the file launched
# The file being run must be in the pythonpath (even if it was not before)
sys.path.insert(0, os.path.split(file)[0])
self.prepare_to_run()
while not self.ready_to_run:
time.sleep(0.1) # busy wait until we receive run command
if self.thread_analyser is not None:
wrap_threads()
t = threadingCurrentThread()
self.thread_analyser.set_start_time(cur_time())
send_message("threading_event", 0, t.getName(), get_thread_id(t), "thread", "start", file, 1, None, parent=get_thread_id(t))
if self.asyncio_analyser is not None:
# we don't have main thread in asyncio graph, so we should add a fake event
send_message("asyncio_event", 0, "Task", "Task", "thread", "stop", file, 1, frame=None, parent=None)
try:
self.init_matplotlib_support()
except:
sys.stderr.write("Matplotlib support in debugger failed\n")
traceback.print_exc()
pydev_imports.execfile(file, globals, locals) # execute the script
return globals
def exiting(self):
sys.stdout.flush()
sys.stderr.flush()
self.check_output_redirect()
cmd = self.cmd_factory.make_exit_message()
self.writer.add_command(cmd)
def wait_for_commands(self, globals):
thread = threading.currentThread()
from _pydevd_bundle import pydevd_frame_utils
frame = pydevd_frame_utils.Frame(None, -1, pydevd_frame_utils.FCode("Console",
os.path.abspath(os.path.dirname(__file__))), globals, globals)
thread_id = get_thread_id(thread)
from _pydevd_bundle import pydevd_vars
pydevd_vars.add_additional_frame_by_id(thread_id, {id(frame): frame})
cmd = self.cmd_factory.make_show_console_message(thread_id, frame)
self.writer.add_command(cmd)
while True:
self.process_internal_commands()
time.sleep(0.01)
trace_dispatch = _trace_dispatch
def set_debug(setup):
setup['DEBUG_RECORD_SOCKET_READS'] = True
setup['DEBUG_TRACE_BREAKPOINTS'] = 1
setup['DEBUG_TRACE_LEVEL'] = 3
def enable_qt_support():
from _pydev_bundle import pydev_monkey_qt
pydev_monkey_qt.patch_qt()
def process_command_line(argv):
""" parses the arguments.
removes our arguments from the command line """
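# Example (a sketch; the script name and argument are placeholders): an
# invocation such as
#     pydevd.py --port 5678 --client 127.0.0.1 --file my_script.py arg1
# leaves argv == ['my_script.py', 'arg1'] and returns
#     {'port': 5678, 'client': '127.0.0.1', 'file': 'my_script.py', ...}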
setup = {}
setup['client'] = None
setup['server'] = False
setup['port'] = 0
setup['file'] = ''
setup['multiproc'] = False #Used by PyCharm (reuses connection: ssh tunneling)
setup['multiprocess'] = False # Used by PyDev (creates new connection to ide)
setup['save-signatures'] = False
setup['save-threading'] = False
setup['save-asyncio'] = False
setup['qt-support'] = False
setup['print-in-debugger-startup'] = False
setup['cmd-line'] = False
setup['module'] = False
i = 0
del argv[0]
while (i < len(argv)):
if argv[i] == '--port':
del argv[i]
setup['port'] = int(argv[i])
del argv[i]
elif argv[i] == '--vm_type':
del argv[i]
setup['vm_type'] = argv[i]
del argv[i]
elif argv[i] == '--client':
del argv[i]
setup['client'] = argv[i]
del argv[i]
elif argv[i] == '--server':
del argv[i]
setup['server'] = True
elif argv[i] == '--file':
del argv[i]
setup['file'] = argv[i]
i = len(argv) # pop out, file is our last argument
elif argv[i] == '--DEBUG_RECORD_SOCKET_READS':
del argv[i]
setup['DEBUG_RECORD_SOCKET_READS'] = True
elif argv[i] == '--DEBUG':
del argv[i]
set_debug(setup)
elif argv[i] == '--multiproc':
del argv[i]
setup['multiproc'] = True
elif argv[i] == '--multiprocess':
del argv[i]
setup['multiprocess'] = True
elif argv[i] == '--save-signatures':
del argv[i]
setup['save-signatures'] = True
elif argv[i] == '--save-threading':
del argv[i]
setup['save-threading'] = True
elif argv[i] == '--save-asyncio':
del argv[i]
setup['save-asyncio'] = True
elif argv[i] == '--qt-support':
del argv[i]
setup['qt-support'] = True
elif argv[i] == '--print-in-debugger-startup':
del argv[i]
setup['print-in-debugger-startup'] = True
elif (argv[i] == '--cmd-line'):
del argv[i]
setup['cmd-line'] = True
elif (argv[i] == '--module'):
del argv[i]
setup['module'] = True
else:
raise ValueError("unexpected option " + argv[i])
return setup
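# Hedged usage sketch (not part of the original module): how a command line
# is consumed by process_command_line(). The argument values below are
# illustrative assumptions; the helper is never called anywhere.
def _example_process_command_line():
    argv = ['pydevd.py', '--port', '5678', '--client', '127.0.0.1',
            '--file', 'my_script.py', '--user-arg']
    setup = process_command_line(argv)
    # Now: setup['port'] == 5678, setup['client'] == '127.0.0.1',
    # setup['file'] == 'my_script.py', and argv holds only what the
    # debugged script should see: ['my_script.py', '--user-arg']
    return setup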
def usage(doExit=0):
sys.stdout.write('Usage:\n')
sys.stdout.write('pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n')
if doExit:
sys.exit(0)
def init_stdout_redirect():
if not getattr(sys, 'stdoutBuf', None):
sys.stdoutBuf = pydevd_io.IOBuf()
sys.stdout_original = sys.stdout
sys.stdout = pydevd_io.IORedirector(sys.stdout, sys.stdoutBuf) #@UndefinedVariable
def init_stderr_redirect():
if not getattr(sys, 'stderrBuf', None):
sys.stderrBuf = pydevd_io.IOBuf()
sys.stderr_original = sys.stderr
sys.stderr = pydevd_io.IORedirector(sys.stderr, sys.stderrBuf) #@UndefinedVariable
def has_data_to_redirect():
if getattr(sys, 'stdoutBuf', None):
if not sys.stdoutBuf.empty():
return True
if getattr(sys, 'stderrBuf', None):
if not sys.stderrBuf.empty():
return True
return False
#=======================================================================================================================
# settrace
#=======================================================================================================================
def settrace(
host=None,
stdoutToServer=False,
stderrToServer=False,
port=5678,
suspend=True,
trace_only_current_thread=False,
overwrite_prev_trace=False,
patch_multiprocessing=False,
):
'''Sets the tracing function with the pydev debug function and initializes needed facilities.
@param host: the user may specify another host, if the debug server is not on the same machine (default is the local
host)
@param stdoutToServer: when this is true, the stdout is passed to the debug server
@param stderrToServer: when this is true, the stderr is passed to the debug server
so that they are printed in its console and not in this process console.
@param port: specifies which port to use for communicating with the server (note that the server must be started
in the same port). @note: currently it's hard-coded at 5678 in the client
@param suspend: whether a breakpoint should be emulated as soon as this function is called.
@param trace_only_current_thread: determines if only the current thread will be traced or all current and future
threads will also have the tracing enabled.
@param overwrite_prev_trace: if True we'll reset the frame.f_trace of frames which are already being traced
@param patch_multiprocessing: if True we'll patch the functions which create new processes so that launched
processes are debugged.
'''
_set_trace_lock.acquire()
try:
_locked_settrace(
host,
stdoutToServer,
stderrToServer,
port,
suspend,
trace_only_current_thread,
overwrite_prev_trace,
patch_multiprocessing,
)
finally:
_set_trace_lock.release()
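# Hedged usage sketch (not part of the original module): the typical remote
# debugging entry point from user code. Host and port are assumptions here;
# the IDE's debug server must already be listening on them. The helper is
# never called anywhere.
def _example_settrace():
    import pydevd
    pydevd.settrace('localhost', port=5678, stdoutToServer=True,
                    stderrToServer=True, suspend=True)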
_set_trace_lock = thread.allocate_lock()
def _locked_settrace(
host,
stdoutToServer,
stderrToServer,
port,
suspend,
trace_only_current_thread,
overwrite_prev_trace,
patch_multiprocessing,
):
if patch_multiprocessing:
try:
from _pydev_bundle import pydev_monkey
except:
pass
else:
pydev_monkey.patch_new_process_functions()
global connected
global bufferStdOutToServer
global bufferStdErrToServer
if not connected :
pydevd_vm_type.setup_type()
debugger = PyDB()
debugger.connect(host, port) # Note: connect can raise error.
# Mark connected only if it actually succeeded.
connected = True
bufferStdOutToServer = stdoutToServer
bufferStdErrToServer = stderrToServer
if bufferStdOutToServer:
init_stdout_redirect()
if bufferStdErrToServer:
init_stderr_redirect()
patch_stdin(debugger)
debugger.set_trace_for_frame_and_parents(get_frame(), False, overwrite_prev_trace=overwrite_prev_trace)
CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable
try:
for _frameId, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
debugger.set_trace_for_frame_and_parents(custom_frame.frame, False)
finally:
CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable
t = threadingCurrentThread()
try:
additional_info = t.additional_info
except AttributeError:
additional_info = PyDBAdditionalThreadInfo()
t.additional_info = additional_info
while not debugger.ready_to_run:
time.sleep(0.1) # busy wait until we receive run command
# note that we do that through pydevd_tracing.SetTrace so that the user
# is not warned about the tracing change!
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
# Trace future threads?
debugger.patch_threads()
# As this is the first connection, also set tracing for any untraced threads
debugger.set_tracing_for_untraced_contexts(ignore_frame=get_frame(), overwrite_prev_trace=overwrite_prev_trace)
# Stop the tracing as the last thing before the actual shutdown for a clean exit.
atexit.register(stoptrace)
PyDBCommandThread(debugger).start()
CheckOutputThread(debugger).start()
#Suspend as the last thing after all tracing is in place.
if suspend:
debugger.set_suspend(t, CMD_THREAD_SUSPEND)
else:
# ok, we're already in debug mode, with all set, so, let's just set the break
debugger = get_global_debugger()
debugger.set_trace_for_frame_and_parents(get_frame(), False)
t = threadingCurrentThread()
try:
additional_info = t.additional_info
except AttributeError:
additional_info = PyDBAdditionalThreadInfo()
t.additional_info = additional_info
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
# Trace future threads?
debugger.patch_threads()
if suspend:
debugger.set_suspend(t, CMD_THREAD_SUSPEND)
def stoptrace():
global connected
if connected:
pydevd_tracing.restore_sys_set_trace_func()
sys.settrace(None)
try:
#not available in jython!
threading.settrace(None) # for all future threads
except:
pass
from _pydev_bundle.pydev_monkey import undo_patch_thread_modules
undo_patch_thread_modules()
debugger = get_global_debugger()
if debugger:
debugger.set_trace_for_frame_and_parents(
get_frame(), also_add_to_passed_frame=True, overwrite_prev_trace=True, dispatch_func=lambda *args:None)
debugger.exiting()
kill_all_pydev_threads()
connected = False
class Dispatcher(object):
def __init__(self):
self.port = None
def connect(self, host, port):
self.host = host
self.port = port
self.client = start_client(self.host, self.port)
self.reader = DispatchReader(self)
self.reader.pydev_do_not_trace = False #we run reader in the same thread so we don't want to lose tracing
self.reader.run()
def close(self):
try:
self.reader.do_kill_pydev_thread()
except :
pass
class DispatchReader(ReaderThread):
def __init__(self, dispatcher):
self.dispatcher = dispatcher
ReaderThread.__init__(self, self.dispatcher.client)
def _on_run(self):
dummy_thread = threading.currentThread()
dummy_thread.is_pydev_daemon_thread = False
return ReaderThread._on_run(self)
def handle_except(self):
ReaderThread.handle_except(self)
def process_command(self, cmd_id, seq, text):
if cmd_id == 99:
self.dispatcher.port = int(text)
self.killReceived = True
DISPATCH_APPROACH_NEW_CONNECTION = 1 # Used by PyDev
DISPATCH_APPROACH_EXISTING_CONNECTION = 2 # Used by PyCharm
DISPATCH_APPROACH = DISPATCH_APPROACH_NEW_CONNECTION
def dispatch():
setup = SetupHolder.setup
host = setup['client']
port = setup['port']
if DISPATCH_APPROACH == DISPATCH_APPROACH_EXISTING_CONNECTION:
dispatcher = Dispatcher()
try:
dispatcher.connect(host, port)
port = dispatcher.port
finally:
dispatcher.close()
return host, port
def settrace_forked():
'''
When creating a fork from a process in the debugger, we need to reset the whole debugger environment!
'''
host, port = dispatch()
from _pydevd_bundle import pydevd_tracing
pydevd_tracing.restore_sys_set_trace_func()
if port is not None:
global connected
connected = False
custom_frames_container_init()
settrace(
host,
port=port,
suspend=False,
trace_only_current_thread=False,
overwrite_prev_trace=True,
patch_multiprocessing=True,
)
#=======================================================================================================================
# SetupHolder
#=======================================================================================================================
class SetupHolder:
setup = None
def apply_debugger_options(setup_options):
"""
:type setup_options: dict[str, bool]
"""
default_options = {'save-signatures': False, 'qt-support': False}
default_options.update(setup_options)
setup_options = default_options
debugger = GetGlobalDebugger()
if setup_options['save-signatures']:
if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON:
sys.stderr.write("Collecting run-time type information is not supported for Jython\n")
else:
# Only import it if we're going to use it!
from _pydevd_bundle.pydevd_signature import SignatureFactory
debugger.signature_factory = SignatureFactory()
if setup_options['qt-support']:
enable_qt_support()
def patch_stdin(debugger):
from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn
orig_stdin = sys.stdin
sys.stdin = DebugConsoleStdIn(debugger, orig_stdin)
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
# parse the command line. --file is our last argument that is required
try:
sys.original_argv = sys.argv[:]
setup = process_command_line(sys.argv)
SetupHolder.setup = setup
except ValueError:
traceback.print_exc()
usage(1)
if setup['print-in-debugger-startup']:
try:
pid = ' (pid: %s)' % os.getpid()
except:
pid = ''
sys.stderr.write("pydev debugger: starting%s\n" % pid)
fix_getpass.fix_getpass()
pydev_log.debug("Executing file %s" % setup['file'])
pydev_log.debug("arguments: %s"% str(sys.argv))
pydevd_vm_type.setup_type(setup.get('vm_type', None))
if os.getenv('PYCHARM_DEBUG') == 'True' or os.getenv('PYDEV_DEBUG') == 'True':
set_debug(setup)
DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', DebugInfoHolder.DEBUG_RECORD_SOCKET_READS)
DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = setup.get('DEBUG_TRACE_BREAKPOINTS', DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS)
DebugInfoHolder.DEBUG_TRACE_LEVEL = setup.get('DEBUG_TRACE_LEVEL', DebugInfoHolder.DEBUG_TRACE_LEVEL)
port = setup['port']
host = setup['client']
f = setup['file']
fix_app_engine_debug = False
debugger = PyDB()
try:
from _pydev_bundle import pydev_monkey
except:
pass #Not usable on jython 2.1
else:
if setup['multiprocess']: # PyDev
pydev_monkey.patch_new_process_functions()
elif setup['multiproc']: # PyCharm
pydev_log.debug("Started in multiproc mode\n")
# Note: we're not inside method, so, no need for 'global'
DISPATCH_APPROACH = DISPATCH_APPROACH_EXISTING_CONNECTION
dispatcher = Dispatcher()
try:
dispatcher.connect(host, port)
if dispatcher.port is not None:
port = dispatcher.port
pydev_log.debug("Received port %d\n" %port)
pydev_log.info("pydev debugger: process %d is connecting\n"% os.getpid())
try:
pydev_monkey.patch_new_process_functions()
except:
pydev_log.error("Error patching process functions\n")
traceback.print_exc()
else:
pydev_log.error("pydev debugger: couldn't get port for new debug process\n")
finally:
dispatcher.close()
else:
pydev_log.info("pydev debugger: starting\n")
try:
pydev_monkey.patch_new_process_functions_with_warning()
except:
pydev_log.error("Error patching process functions\n")
traceback.print_exc()
# Only do this patching if we're not running with multiprocess turned on.
if f.find('dev_appserver.py') != -1:
if os.path.basename(f).startswith('dev_appserver.py'):
appserver_dir = os.path.dirname(f)
version_file = os.path.join(appserver_dir, 'VERSION')
if os.path.exists(version_file):
try:
stream = open(version_file, 'r')
try:
for line in stream.read().splitlines():
line = line.strip()
if line.startswith('release:'):
line = line[8:].strip()
version = line.replace('"', '')
version = version.split('.')
if int(version[0]) > 1:
fix_app_engine_debug = True
elif int(version[0]) == 1:
if int(version[1]) >= 7:
# Only fix from 1.7 onwards
fix_app_engine_debug = True
break
finally:
stream.close()
except:
traceback.print_exc()
try:
# In the default run (i.e.: run directly on debug mode), we try to patch stackless as soon as possible
# on a run where we have a remote debug, we may have to be more careful because patching stackless means
# that if the user already had a stackless.set_schedule_callback installed, he'd lose it and would need
# to call it again (because stackless provides no way of getting the last function which was registered
# in set_schedule_callback).
#
# So, ideally, if there's an application using stackless and the application wants to use the remote debugger
# and benefit from stackless debugging, the application itself must call:
#
# import pydevd_stackless
# pydevd_stackless.patch_stackless()
#
# itself to be able to benefit from seeing the tasklets created before the remote debugger is attached.
from _pydevd_bundle import pydevd_stackless
pydevd_stackless.patch_stackless()
except:
# It's ok not having stackless there...
try:
sys.exc_clear() # the exception information should be cleaned in Python 2
except:
pass
is_module = setup['module']
patch_stdin(debugger)
if fix_app_engine_debug:
sys.stderr.write("pydev debugger: google app engine integration enabled\n")
curr_dir = os.path.dirname(__file__)
app_engine_startup_file = os.path.join(curr_dir, 'pydev_app_engine_debug_startup.py')
sys.argv.insert(1, '--python_startup_script=' + app_engine_startup_file)
import json
setup['pydevd'] = __file__
sys.argv.insert(2, '--python_startup_args=%s' % json.dumps(setup),)
sys.argv.insert(3, '--automatic_restart=no')
sys.argv.insert(4, '--max_module_instances=1')
# Run the dev_appserver
debugger.run(setup['file'], None, None, is_module, set_trace=False)
else:
if setup['save-threading']:
debugger.thread_analyser = ThreadingLogger()
if setup['save-asyncio']:
if IS_PY34_OLDER:
debugger.asyncio_analyser = AsyncioLogger()
apply_debugger_options(setup)
try:
debugger.connect(host, port)
except:
sys.stderr.write("Could not connect to %s: %s\n" % (host, port))
traceback.print_exc()
sys.exit(1)
connected = True # Mark that we're connected when started from inside ide.
globals = debugger.run(setup['file'], None, None, is_module)
if setup['cmd-line']:
debugger.wait_for_commands(globals)
|
miguelpalacio/python-for-android | refs/heads/master | python-modules/twisted/twisted/protocols/memcache.py | 60 | # -*- test-case-name: twisted.test.test_memcache -*-
# Copyright (c) 2007-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Memcache client protocol. Memcached is a caching server, storing data in the
form of key/value pairs, and memcache is the protocol used to talk to it.
To connect to a server, create a factory for L{MemCacheProtocol}::
from twisted.internet import reactor, protocol
from twisted.protocols.memcache import MemCacheProtocol, DEFAULT_PORT
d = protocol.ClientCreator(reactor, MemCacheProtocol
).connectTCP("localhost", DEFAULT_PORT)
def doSomething(proto):
# Here you call the memcache operations
return proto.set("mykey", "a lot of data")
d.addCallback(doSomething)
reactor.run()
All the operations of the memcache protocol are present, but
L{MemCacheProtocol.set} and L{MemCacheProtocol.get} are the more important.
See U{http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt} for
more information about the protocol.
"""
try:
from collections import deque
except ImportError:
class deque(list):
def popleft(self):
return self.pop(0)
from twisted.protocols.basic import LineReceiver
from twisted.protocols.policies import TimeoutMixin
from twisted.internet.defer import Deferred, fail, TimeoutError
from twisted.python import log
DEFAULT_PORT = 11211
class NoSuchCommand(Exception):
"""
Exception raised when a non-existent command is called.
"""
class ClientError(Exception):
"""
Error caused by an invalid client call.
"""
class ServerError(Exception):
"""
Problem happening on the server.
"""
class Command(object):
"""
Wrap a client action into an object that holds the values used in the
protocol.
@ivar _deferred: the L{Deferred} object that will be fired when the result
arrives.
@type _deferred: L{Deferred}
@ivar command: name of the command sent to the server.
@type command: C{str}
"""
def __init__(self, command, **kwargs):
"""
Create a command.
@param command: the name of the command.
@type command: C{str}
@param kwargs: these values will be stored as attributes of the object
for future use
"""
self.command = command
self._deferred = Deferred()
for k, v in kwargs.items():
setattr(self, k, v)
def success(self, value):
"""
Shortcut method to fire the underlying deferred.
"""
self._deferred.callback(value)
def fail(self, error):
"""
Make the underlying deferred fail.
"""
self._deferred.errback(error)
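# Hedged illustration (not part of the original module): how the protocol
# uses Command internally. The keyword attributes mirror the ones set in
# the _get() method further below; the helper is never called anywhere.
def _example_command():
    cmd = Command("get", key="mykey", value=None, flags=0, cas="",
                  multiple=False)
    cmd._deferred.addCallback(lambda result: result)
    cmd.success((0, "stored value"))  # fires the deferred with (flags, value)
    return cmd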
class MemCacheProtocol(LineReceiver, TimeoutMixin):
"""
MemCache protocol: connect to a memcached server to store/retrieve values.
@ivar persistentTimeOut: the timeout period used to wait for a response.
@type persistentTimeOut: C{int}
@ivar _current: current list of requests waiting for an answer from the
server.
@type _current: C{deque} of L{Command}
@ivar _lenExpected: amount of data expected in raw mode, when reading for
a value.
@type _lenExpected: C{int}
@ivar _getBuffer: current buffer of data, used to store temporary data
when reading in raw mode.
@type _getBuffer: C{list}
@ivar _bufferLength: the total amount of bytes in C{_getBuffer}.
@type _bufferLength: C{int}
@ivar _disconnected: indicate if the connectionLost has been called or not.
@type _disconnected: C{bool}
"""
MAX_KEY_LENGTH = 250
_disconnected = False
def __init__(self, timeOut=60):
"""
Create the protocol.
@param timeOut: the timeout to wait before detecting that the
connection is dead and close it. It's expressed in seconds.
@type timeOut: C{int}
"""
self._current = deque()
self._lenExpected = None
self._getBuffer = None
self._bufferLength = None
self.persistentTimeOut = self.timeOut = timeOut
def _cancelCommands(self, reason):
"""
Cancel all the outstanding commands, making them fail with C{reason}.
"""
while self._current:
cmd = self._current.popleft()
cmd.fail(reason)
def timeoutConnection(self):
"""
Close the connection in case of timeout.
"""
self._cancelCommands(TimeoutError("Connection timeout"))
self.transport.loseConnection()
def connectionLost(self, reason):
"""
Cause any outstanding commands to fail.
"""
self._disconnected = True
self._cancelCommands(reason)
LineReceiver.connectionLost(self, reason)
def sendLine(self, line):
"""
Override sendLine to add a timeout to response.
"""
if not self._current:
self.setTimeout(self.persistentTimeOut)
LineReceiver.sendLine(self, line)
def rawDataReceived(self, data):
"""
Collect data for a get.
"""
self.resetTimeout()
self._getBuffer.append(data)
self._bufferLength += len(data)
if self._bufferLength >= self._lenExpected + 2:
data = "".join(self._getBuffer)
buf = data[:self._lenExpected]
rem = data[self._lenExpected + 2:]
val = buf
self._lenExpected = None
self._getBuffer = None
self._bufferLength = None
cmd = self._current[0]
if cmd.multiple:
flags, cas = cmd.values[cmd.currentKey]
cmd.values[cmd.currentKey] = (flags, cas, val)
else:
cmd.value = val
self.setLineMode(rem)
def cmd_STORED(self):
"""
Manage a success response to a set operation.
"""
self._current.popleft().success(True)
def cmd_NOT_STORED(self):
"""
Manage a specific 'not stored' response to a set operation: this is not
an error, but some condition wasn't met.
"""
self._current.popleft().success(False)
def cmd_END(self):
"""
This is the end token of a get or a stat operation.
"""
cmd = self._current.popleft()
if cmd.command == "get":
if cmd.multiple:
values = dict([(key, val[::2]) for key, val in
cmd.values.iteritems()])
cmd.success(values)
else:
cmd.success((cmd.flags, cmd.value))
elif cmd.command == "gets":
if cmd.multiple:
cmd.success(cmd.values)
else:
cmd.success((cmd.flags, cmd.cas, cmd.value))
elif cmd.command == "stats":
cmd.success(cmd.values)
def cmd_NOT_FOUND(self):
"""
Manage error response for incr/decr/delete.
"""
self._current.popleft().success(False)
def cmd_VALUE(self, line):
"""
Prepare to read a value after a get.
"""
cmd = self._current[0]
if cmd.command == "get":
key, flags, length = line.split()
cas = ""
else:
key, flags, length, cas = line.split()
self._lenExpected = int(length)
self._getBuffer = []
self._bufferLength = 0
if cmd.multiple:
if key not in cmd.keys:
raise RuntimeError("Unexpected commands answer.")
cmd.currentKey = key
cmd.values[key] = [int(flags), cas]
else:
if cmd.key != key:
raise RuntimeError("Unexpected commands answer.")
cmd.flags = int(flags)
cmd.cas = cas
self.setRawMode()
def cmd_STAT(self, line):
"""
Reception of one stat line.
"""
cmd = self._current[0]
key, val = line.split(" ", 1)
cmd.values[key] = val
def cmd_VERSION(self, versionData):
"""
Read version token.
"""
self._current.popleft().success(versionData)
def cmd_ERROR(self):
"""
A non-existent command has been sent.
"""
log.err("Non-existent command sent.")
cmd = self._current.popleft()
cmd.fail(NoSuchCommand())
def cmd_CLIENT_ERROR(self, errText):
"""
An invalid input has been sent.
"""
log.err("Invalid input: %s" % (errText,))
cmd = self._current.popleft()
cmd.fail(ClientError(errText))
def cmd_SERVER_ERROR(self, errText):
"""
An error has happened server-side.
"""
log.err("Server error: %s" % (errText,))
cmd = self._current.popleft()
cmd.fail(ServerError(errText))
def cmd_DELETED(self):
"""
A delete command has completed successfully.
"""
self._current.popleft().success(True)
def cmd_OK(self):
"""
The last command has been completed.
"""
self._current.popleft().success(True)
def cmd_EXISTS(self):
"""
A C{checkAndSet} update has failed.
"""
self._current.popleft().success(False)
def lineReceived(self, line):
"""
Receive line commands from the server.
"""
self.resetTimeout()
token = line.split(" ", 1)[0]
# First manage standard commands without space
cmd = getattr(self, "cmd_%s" % (token,), None)
if cmd is not None:
args = line.split(" ", 1)[1:]
if args:
cmd(args[0])
else:
cmd()
else:
# Then manage commands with space in it
line = line.replace(" ", "_")
cmd = getattr(self, "cmd_%s" % (line,), None)
if cmd is not None:
cmd()
else:
# Increment/Decrement response
cmd = self._current.popleft()
val = int(line)
cmd.success(val)
if not self._current:
# No pending request, remove timeout
self.setTimeout(None)
def increment(self, key, val=1):
"""
Increment the value of C{key} by the given value (default 1).
The value stored under C{key} must be parsable as an int. Return the new value.
@param key: the key to modify.
@type key: C{str}
@param val: the value to increment.
@type val: C{int}
@return: a deferred which will be called back with the new value
associated with the key (after the increment).
@rtype: L{Deferred}
"""
return self._incrdecr("incr", key, val)
def decrement(self, key, val=1):
"""
Decrement the value of C{key} by the given value (default 1).
The value stored under C{key} must be parsable as an int. Return the new value,
coerced to 0 if negative.
@param key: the key to modify.
@type key: C{str}
@param val: the value to decrement.
@type val: C{int}
@return: a deferred which will be called back with the new value
associated with the key (after the decrement).
@rtype: L{Deferred}
"""
return self._incrdecr("decr", key, val)
def _incrdecr(self, cmd, key, val):
"""
Internal wrapper for incr/decr.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
fullcmd = "%s %s %d" % (cmd, key, int(val))
self.sendLine(fullcmd)
cmdObj = Command(cmd, key=key)
self._current.append(cmdObj)
return cmdObj._deferred
def replace(self, key, val, flags=0, expireTime=0):
"""
Replace the given C{key}. It must already exist in the server.
@param key: the key to replace.
@type key: C{str}
@param val: the new value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded, and C{False} if the key didn't previously exist.
@rtype: L{Deferred}
"""
return self._set("replace", key, val, flags, expireTime, "")
def add(self, key, val, flags=0, expireTime=0):
"""
Add the given C{key}. It must not exist in the server.
@param key: the key to add.
@type key: C{str}
@param val: the value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded, and C{False} if the key already exists.
@rtype: L{Deferred}
"""
return self._set("add", key, val, flags, expireTime, "")
def set(self, key, val, flags=0, expireTime=0):
"""
Set the given C{key}.
@param key: the key to set.
@type key: C{str}
@param val: the value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded.
@rtype: L{Deferred}
"""
return self._set("set", key, val, flags, expireTime, "")
def checkAndSet(self, key, val, cas, flags=0, expireTime=0):
"""
Change the content of C{key} only if the C{cas} value matches the
current one associated with the key. Use this to store a value which
hasn't been modified since last time you fetched it.
@param key: The key to set.
@type key: C{str}
@param val: The value associated with the key.
@type val: C{str}
@param cas: Unique 64-bit value returned by previous call of C{get}.
@type cas: C{str}
@param flags: The flags to store with the key.
@type flags: C{int}
@param expireTime: If different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
return self._set("cas", key, val, flags, expireTime, cas)
def _set(self, cmd, key, val, flags, expireTime, cas):
"""
Internal wrapper for setting values.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
if not isinstance(val, str):
return fail(ClientError(
"Invalid type for value: %s, expecting a string" %
(type(val),)))
if cas:
cas = " " + cas
length = len(val)
fullcmd = "%s %s %d %d %d%s" % (
cmd, key, flags, expireTime, length, cas)
self.sendLine(fullcmd)
self.sendLine(val)
cmdObj = Command(cmd, key=key, flags=flags, length=length)
self._current.append(cmdObj)
return cmdObj._deferred
def append(self, key, val):
"""
Append given data to the value of an existing key.
@param key: The key to modify.
@type key: C{str}
@param val: The value to append to the current value associated with
the key.
@type val: C{str}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
# Even if flags and expTime values are ignored, we have to pass them
return self._set("append", key, val, 0, 0, "")
def prepend(self, key, val):
"""
Prepend given data to the value of an existing key.
@param key: The key to modify.
@type key: C{str}
@param val: The value to prepend to the current value associated with
the key.
@type val: C{str}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
# Even if flags and expTime values are ignored, we have to pass them
return self._set("prepend", key, val, 0, 0, "")
def get(self, key, withIdentifier=False):
"""
Get the given C{key}. It doesn't support multiple keys. If
C{withIdentifier} is set to C{True}, the command issued is a C{gets},
that will return the current identifier associated with the value. This
identifier has to be used when issuing a C{checkAndSet} update later,
using the corresponding method.
@param key: The key to retrieve.
@type key: C{str}
@param withIdentifier: If set to C{True}, retrieve the current
identifier along with the value and the flags.
@type withIdentifier: C{bool}
@return: A deferred that will fire with the tuple (flags, value) if
C{withIdentifier} is C{False}, or (flags, cas identifier, value)
if C{True}. If the server indicates there is no value
associated with C{key}, the returned value will be C{None} and
the returned flags will be C{0}.
@rtype: L{Deferred}
"""
return self._get([key], withIdentifier, False)
def getMultiple(self, keys, withIdentifier=False):
"""
Get the given list of C{keys}. If C{withIdentifier} is set to C{True},
the command issued is a C{gets}, that will return the identifiers
associated with each value. This identifier has to be used when
issuing a C{checkAndSet} update later, using the corresponding method.
@param keys: The keys to retrieve.
@type keys: C{list} of C{str}
@param withIdentifier: If set to C{True}, retrieve the identifiers
along with the values and the flags.
@type withIdentifier: C{bool}
@return: A deferred that will fire with a dictionary with the elements
of C{keys} as keys and the tuples (flags, value) as values if
C{withIdentifier} is C{False}, or (flags, cas identifier, value) if
C{True}. If the server indicates there is no value associated with
C{key}, the returned values will be C{None} and the returned flags
will be C{0}.
@rtype: L{Deferred}
@since: 9.0
"""
return self._get(keys, withIdentifier, True)
def _get(self, keys, withIdentifier, multiple):
"""
Helper method for C{get} and C{getMultiple}.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
for key in keys:
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
if withIdentifier:
cmd = "gets"
else:
cmd = "get"
fullcmd = "%s %s" % (cmd, " ".join(keys))
self.sendLine(fullcmd)
if multiple:
values = dict([(key, (0, "", None)) for key in keys])
cmdObj = Command(cmd, keys=keys, values=values, multiple=True)
else:
cmdObj = Command(cmd, key=keys[0], value=None, flags=0, cas="",
multiple=False)
self._current.append(cmdObj)
return cmdObj._deferred
def stats(self, arg=None):
"""
Get some stats from the server. The result will be available as a dict.
@param arg: An optional additional string which will be sent along
with the I{stats} command. The interpretation of this value by
the server is left undefined by the memcache protocol
specification.
@type arg: L{NoneType} or L{str}
@return: a deferred that will fire with a C{dict} of the available
statistics.
@rtype: L{Deferred}
"""
if arg:
cmd = "stats " + arg
else:
cmd = "stats"
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine(cmd)
cmdObj = Command("stats", values={})
self._current.append(cmdObj)
return cmdObj._deferred
def version(self):
"""
Get the version of the server.
@return: a deferred that will fire with the string value of the
version.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine("version")
cmdObj = Command("version")
self._current.append(cmdObj)
return cmdObj._deferred
def delete(self, key):
"""
Delete an existing C{key}.
@param key: the key to delete.
@type key: C{str}
@return: a deferred that will be called back with C{True} if the key
was successfully deleted, or C{False} if not.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
self.sendLine("delete %s" % key)
cmdObj = Command("delete", key=key)
self._current.append(cmdObj)
return cmdObj._deferred
def flushAll(self):
"""
Flush all cached values.
@return: a deferred that will be called back with C{True} when the
operation has succeeded.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine("flush_all")
cmdObj = Command("flush_all")
self._current.append(cmdObj)
return cmdObj._deferred
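# Hedged usage sketch (not part of the original module): a gets/checkAndSet
# round trip followed by a multi-key fetch. `proto` is assumed to be a
# connected MemCacheProtocol whose "counter" key already exists; the key
# names are illustrative. The helper is never called anywhere.
def _example_cas_and_multiget(proto):
    d = proto.get("counter", withIdentifier=True)
    def _update(result):
        flags, cas, value = result
        # Only store the new value if nobody changed it in the meantime.
        return proto.checkAndSet("counter", value + "0", cas, flags=flags)
    d.addCallback(_update)
    # getMultiple fires with {"k1": (flags, value), "k2": (flags, value)};
    # value is None and flags is 0 for keys the server does not know.
    d.addCallback(lambda _: proto.getMultiple(["k1", "k2"]))
    return d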
__all__ = ["MemCacheProtocol", "DEFAULT_PORT", "NoSuchCommand", "ClientError",
"ServerError"]
|
hydroshare/hydroshare | refs/heads/develop | hs_core/tests/api/native/test_utils.py | 1 | from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from django.test import TestCase
from mezzanine.conf import settings
from hs_core.hydroshare import utils
from hs_core.models import GenericResource, BaseResource
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
class TestUtils(MockIRODSTestCaseMixin, TestCase):
def setUp(self):
super(TestUtils, self).setUp()
self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
self.user = hydroshare.create_account(
'user1@nowhere.com',
username='user1',
first_name='Creator_FirstName',
last_name='Creator_LastName',
superuser=False,
groups=[]
)
self.user2 = hydroshare.create_account(
'user2@nowhere.com',
username='user2',
first_name='user2_FirstName',
last_name='user2_LastName',
superuser=False,
groups=[]
)
self.res = hydroshare.create_resource(
'GenericResource',
self.user,
'test resource',
)
self.res.doi = 'doi1000100010001'
self.res.save()
def test_get_resource_types(self):
res_types = utils.get_resource_types()
self.assertIn(GenericResource, res_types)
for res_type in res_types:
self.assertTrue(issubclass(res_type, BaseResource))
def test_get_resource_instance(self):
self.assertEqual(
utils.get_resource_instance('hs_core', 'GenericResource', self.res.pk),
self.res
)
def test_get_resource_by_shortkey(self):
self.assertEqual(
utils.get_resource_by_shortkey(self.res.short_id),
self.res
)
def test_get_resource_by_doi(self):
self.assertEqual(
utils.get_resource_by_doi('doi1000100010001'),
self.res
)
def test_user_from_id(self):
self.assertEqual(
utils.user_from_id(self.user),
self.user,
msg='user passthrough failed'
)
self.assertEqual(
utils.user_from_id('user1@nowhere.com'),
self.user,
msg='lookup by email address failed'
)
self.assertEqual(
utils.user_from_id('user1'),
self.user,
msg='lookup by username failed'
)
def test_group_from_id(self):
self.assertEqual(
utils.group_from_id(self.group),
self.group,
msg='group passthrough failed'
)
self.assertEqual(
utils.group_from_id('Hydroshare Author'),
self.group,
msg='lookup by group name failed'
)
def test_get_user_profile(self):
self.assertEqual(self.user.userprofile, utils.get_profile(self.user))
def test_get_mime_type(self):
test_file = 'my_file.txt'
self.assertEqual(utils.get_file_mime_type(test_file), 'text/plain')
test_file = 'my_file.tif'
self.assertEqual(utils.get_file_mime_type(test_file), 'image/tiff')
test_file = 'my_file.abc'
self.assertEqual(utils.get_file_mime_type(test_file), 'application/abc')
def test_get_current_site_url(self):
current_site = Site.objects.get_current()
protocol = getattr(settings, 'MY_SITE_PROTOCOL', 'http')
url = '%s://%s' % (protocol, current_site.domain)
self.assertEqual(utils.current_site_url(), url)
def test_resource_modified(self):
modified_date1 = self.res.metadata.dates.filter(type='modified').first()
self.assertEqual(self.res.last_changed_by, self.user)
utils.resource_modified(self.res, self.user2)
modified_date2 = self.res.metadata.dates.filter(type='modified').first()
self.assertTrue((modified_date2.start_date - modified_date1.start_date).total_seconds() > 0)
self.assertEqual(self.res.last_changed_by, self.user2)
self.assertEqual(self.res.last_updated, modified_date2.start_date) |
hurrinico/sale-workflow | refs/heads/8.0 | sale_multi_picking/__init__.py | 74 | # -*- coding: utf-8 -*-
#
#
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
# Copyright (C) 2012 Domsense srl (<http://www.domsense.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from . import sale
|
Stanford-Online/edx-platform | refs/heads/master | openedx/core/djangoapps/content/block_structure/tests/test_block_structure.py | 10 | """
Tests for block_structure.py
"""
from datetime import datetime
# pylint: disable=protected-access
from collections import namedtuple
from copy import deepcopy
import ddt
import itertools
from nose.plugins.attrib import attr
from unittest import TestCase
from openedx.core.lib.graph_traversals import traverse_post_order
from ..block_structure import BlockStructure, BlockStructureModulestoreData
from ..exceptions import TransformerException
from .helpers import MockXBlock, MockTransformer, ChildrenMapTestMixin
@attr(shard=2)
@ddt.ddt
class TestBlockStructure(TestCase, ChildrenMapTestMixin):
"""
Tests for BlockStructure
"""
@ddt.data(
[],
ChildrenMapTestMixin.SIMPLE_CHILDREN_MAP,
ChildrenMapTestMixin.LINEAR_CHILDREN_MAP,
ChildrenMapTestMixin.DAG_CHILDREN_MAP,
)
def test_relations(self, children_map):
block_structure = self.create_block_structure(children_map, BlockStructure)
# get_children
for parent, children in enumerate(children_map):
self.assertSetEqual(set(block_structure.get_children(parent)), set(children))
# get_parents
for child, parents in enumerate(self.get_parents_map(children_map)):
self.assertSetEqual(set(block_structure.get_parents(child)), set(parents))
# __contains__
for node in range(len(children_map)):
self.assertIn(node, block_structure)
self.assertNotIn(len(children_map) + 1, block_structure)
@attr(shard=2)
@ddt.ddt
class TestBlockStructureData(TestCase, ChildrenMapTestMixin):
"""
Tests for BlockStructureBlockData and BlockStructureModulestoreData
"""
def test_non_versioned_transformer(self):
class TestNonVersionedTransformer(MockTransformer):
"""
Test transformer with default version number (0).
"""
WRITE_VERSION = 0
READ_VERSION = 0
block_structure = BlockStructureModulestoreData(root_block_usage_key=0)
with self.assertRaisesRegexp(TransformerException, "Version attributes are not set"):
block_structure._add_transformer(TestNonVersionedTransformer())
def test_transformer_data(self):
# transformer test cases
TransformerInfo = namedtuple("TransformerInfo", "transformer structure_wide_data block_specific_data") # pylint: disable=invalid-name
transformers_info = [
TransformerInfo(
transformer=MockTransformer(),
structure_wide_data=[("t1.global1", "t1.g.val1"), ("t1.global2", "t1.g.val2")],
block_specific_data={
"B1": [("t1.key1", "t1.b1.val1"), ("t1.key2", "t1.b1.val2")],
"B2": [("t1.key1", "t1.b2.val1"), ("t1.key2", "t1.b2.val2")],
"B3": [("t1.key1", True), ("t1.key2", False)],
"B4": [("t1.key1", None), ("t1.key2", False)],
},
),
TransformerInfo(
transformer=MockTransformer(),
structure_wide_data=[("t2.global1", "t2.g.val1"), ("t2.global2", "t2.g.val2")],
block_specific_data={
"B1": [("t2.key1", "t2.b1.val1"), ("t2.key2", "t2.b1.val2")],
"B2": [("t2.key1", "t2.b2.val1"), ("t2.key2", "t2.b2.val2")],
},
),
]
# create block structure
block_structure = BlockStructureModulestoreData(root_block_usage_key=0)
# set transformer data
for t_info in transformers_info:
block_structure._add_transformer(t_info.transformer)
for key, val in t_info.structure_wide_data:
block_structure.set_transformer_data(t_info.transformer, key, val)
for block, block_data in t_info.block_specific_data.iteritems():
for key, val in block_data:
block_structure.set_transformer_block_field(block, t_info.transformer, key, val)
# verify transformer data
for t_info in transformers_info:
self.assertEquals(
block_structure._get_transformer_data_version(t_info.transformer),
MockTransformer.WRITE_VERSION
)
for key, val in t_info.structure_wide_data:
self.assertEquals(
block_structure.get_transformer_data(t_info.transformer, key),
val,
)
for block, block_data in t_info.block_specific_data.iteritems():
for key, val in block_data:
self.assertEquals(
block_structure.get_transformer_block_field(block, t_info.transformer, key),
val,
)
def test_xblock_data(self):
# block test cases
blocks = [
MockXBlock("A", {}),
MockXBlock("B", {"field1": "B.val1"}),
MockXBlock("C", {"field1": "C.val1", "field2": "C.val2"}),
MockXBlock("D", {"field1": True, "field2": False}),
MockXBlock("E", {"field1": None, "field2": False}),
]
# add each block
block_structure = BlockStructureModulestoreData(root_block_usage_key=0)
for block in blocks:
block_structure._add_xblock(block.location, block)
block_structure._get_or_create_block(block.location)
# request fields
fields = ["field1", "field2", "field3"]
block_structure.request_xblock_fields(*fields)
# verify fields have not been collected yet
for block in blocks:
bs_block = block_structure[block.location]
for field in fields:
self.assertIsNone(getattr(bs_block, field, None))
# collect fields
block_structure._collect_requested_xblock_fields()
# verify values of collected fields
for block in blocks:
bs_block = block_structure[block.location]
for field in fields:
self.assertEquals(
getattr(bs_block, field, None),
block.field_map.get(field),
)
def test_xblock_field_override(self):
# block field override test cases
attribute = {
"start": datetime(2017, 3, 23, 16, 38, 46, 271475),
"display_name": "Foo block",
"due": datetime(2017, 5, 23, 16, 38, 46, 271475)
}
override_due_date = datetime(2018, 2, 23, 16, 38, 46, 271475)
block = MockXBlock("Foo", attribute)
# add each block
block_structure = BlockStructureModulestoreData(root_block_usage_key=0)
block_structure._add_xblock(block.location, block)
block_structure._get_or_create_block(block.location)
fields = attribute.keys()
block_structure.request_xblock_fields(*fields)
# collect fields
block_structure._collect_requested_xblock_fields()
# verify values of collected fields
bs_block = block_structure[block.location]
for field in fields:
self.assertEquals(
getattr(bs_block, field, None),
block.field_map.get(field),
)
block_structure.override_xblock_field(
block.location,
"due",
override_due_date
)
self.assertEquals(
block_structure.get_xblock_field(block.location, "due", None),
override_due_date
)
@ddt.data(
*itertools.product(
[True, False],
range(7),
[
ChildrenMapTestMixin.SIMPLE_CHILDREN_MAP,
ChildrenMapTestMixin.LINEAR_CHILDREN_MAP,
ChildrenMapTestMixin.DAG_CHILDREN_MAP,
],
)
)
@ddt.unpack
def test_remove_block(self, keep_descendants, block_to_remove, children_map):
### skip test if invalid
if (block_to_remove >= len(children_map)) or (keep_descendants and block_to_remove == 0):
return
### create structure
block_structure = self.create_block_structure(children_map)
parents_map = self.get_parents_map(children_map)
### verify blocks pre-exist
self.assert_block_structure(block_structure, children_map)
### remove block
block_structure.remove_block(block_to_remove, keep_descendants)
missing_blocks = [block_to_remove]
### compute and verify updated children_map
removed_children_map = deepcopy(children_map)
removed_children_map[block_to_remove] = []
for parent in parents_map[block_to_remove]:
removed_children_map[parent].remove(block_to_remove)
if keep_descendants:
# update the graph connecting the old parents to the old children
for child in children_map[block_to_remove]:
for parent in parents_map[block_to_remove]:
removed_children_map[parent].append(child)
self.assert_block_structure(block_structure, removed_children_map, missing_blocks)
### prune the structure
block_structure._prune_unreachable()
### compute and verify updated children_map
pruned_children_map = deepcopy(removed_children_map)
if not keep_descendants:
pruned_parents_map = self.get_parents_map(pruned_children_map)
# update all descendants
for child in children_map[block_to_remove]:
# if the child has another parent, continue
if pruned_parents_map[child]:
continue
for block in traverse_post_order(child, get_children=lambda block: pruned_children_map[block]):
# add descendant to missing blocks and empty its
# children
missing_blocks.append(block)
pruned_children_map[block] = []
self.assert_block_structure(block_structure, pruned_children_map, missing_blocks)
def test_remove_block_traversal(self):
block_structure = self.create_block_structure(ChildrenMapTestMixin.LINEAR_CHILDREN_MAP)
block_structure.remove_block_traversal(lambda block: block == 2)
self.assert_block_structure(block_structure, [[1], [], [], []], missing_blocks=[2])
def test_copy(self):
def _set_value(structure, value):
"""
Sets a test transformer block field to the given value in the given structure.
"""
structure.set_transformer_block_field(1, 'transformer', 'test_key', value)
def _get_value(structure):
"""
Returns the value of the test transformer block field in the given structure.
"""
return structure[1].transformer_data['transformer'].test_key
# create block structure and verify blocks pre-exist
block_structure = self.create_block_structure(ChildrenMapTestMixin.LINEAR_CHILDREN_MAP)
self.assert_block_structure(block_structure, [[1], [2], [3], []])
_set_value(block_structure, 'original_value')
# create a new copy of the structure and verify they are equivalent
new_copy = block_structure.copy()
self.assertEquals(block_structure.root_block_usage_key, new_copy.root_block_usage_key)
for block in block_structure:
self.assertIn(block, new_copy)
self.assertEquals(block_structure.get_parents(block), new_copy.get_parents(block))
self.assertEquals(block_structure.get_children(block), new_copy.get_children(block))
self.assertEquals(_get_value(block_structure), _get_value(new_copy))
# verify edits to original block structure do not affect the copy
block_structure.remove_block(2, keep_descendants=True)
self.assert_block_structure(block_structure, [[1], [3], [], []], missing_blocks=[2])
self.assert_block_structure(new_copy, [[1], [2], [3], []])
_set_value(block_structure, 'edit1')
self.assertEquals(_get_value(block_structure), 'edit1')
self.assertEquals(_get_value(new_copy), 'original_value')
# verify edits to copy do not affect the original
new_copy.remove_block(3, keep_descendants=True)
self.assert_block_structure(block_structure, [[1], [3], [], []], missing_blocks=[2])
self.assert_block_structure(new_copy, [[1], [2], [], []], missing_blocks=[3])
_set_value(new_copy, 'edit2')
self.assertEquals(_get_value(block_structure), 'edit1')
self.assertEquals(_get_value(new_copy), 'edit2')
|
nathanross/amiens | refs/heads/master | src/amiens/core/asserts.py | 1 | #!/usr/bin/python3
# Copyright 2015 Nathan Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from os import path
import amiens.core.util
def that(a, b, trace_distance=0):
# Using the assert keyword typically results in breaking
# linebreak conventions, hence this helper.
if (not a):
amiens.core.util.Log.fatal(b, trace_distance+1)
def dne(goal, l, trace_distance=0):
that(not (os.access(l, 0)),
goal+' but a file already exists there', trace_distance+1)
def exists(goal, l, trace_distance=0):
that(os.access(l, 0),
goal+' but no such file exists', trace_distance+1)
def canwrite(goal, l, trace_distance=0):
that(os.access(l, os.W_OK),
goal+' but dont have write permissions', trace_distance+1)
def canread(goal, l, trace_distance=0):
that(os.access(l, os.R_OK),
goal+' but dont have read permissions', trace_distance+1)
def isdir(goal, l, trace_distance=0):
that(path.isdir(l),
goal+" but it's not a directory", trace_distance+1)
def ispath(goal, l, trace_distance=0):
that(path.exists(l),
goal+' but no such path exists', trace_distance+1)
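# Hedged usage sketch (not part of the original module): trace_distance tells
# the logger how many stack frames to skip so a failure is attributed to the
# real caller. The path below is an illustrative assumption; the helper is
# never called anywhere.
def _example_asserts():
    goal = 'want to create the output directory'
    dne(goal, '/tmp/amiens-out')  # fail if something already exists there
    canwrite(goal, '/tmp')        # fail unless the parent is writable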
|
Microvellum/Fluid-Designer | refs/heads/master | win64-vc/2.78/python/lib/test/test_socketserver.py | 4 | """
Test suite for socketserver.
"""
import contextlib
import os
import select
import signal
import socket
import errno
import tempfile
import unittest
import socketserver
import test.support
from test.support import reap_children, reap_threads, verbose
try:
import threading
except ImportError:
threading = None
test.support.requires("network")
TEST_STR = b"hello world\n"
HOST = test.support.HOST
HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX")
requires_unix_sockets = unittest.skipUnless(HAVE_UNIX_SOCKETS,
'requires Unix sockets')
HAVE_FORKING = hasattr(os, "fork")
requires_forking = unittest.skipUnless(HAVE_FORKING, 'requires forking')
def signal_alarm(n):
"""Call signal.alarm when it exists (i.e. not on Windows)."""
if hasattr(signal, 'alarm'):
signal.alarm(n)
# Remember real select() to avoid interferences with mocking
_real_select = select.select
def receive(sock, n, timeout=20):
r, w, x = _real_select([sock], [], [], timeout)
if sock in r:
return sock.recv(n)
else:
raise RuntimeError("timed out on %r" % (sock,))
if HAVE_UNIX_SOCKETS:
class ForkingUnixStreamServer(socketserver.ForkingMixIn,
socketserver.UnixStreamServer):
pass
class ForkingUnixDatagramServer(socketserver.ForkingMixIn,
socketserver.UnixDatagramServer):
pass
@contextlib.contextmanager
def simple_subprocess(testcase):
pid = os.fork()
if pid == 0:
# Don't raise an exception; it would be caught by the test harness.
os._exit(72)
yield None
pid2, status = os.waitpid(pid, 0)
testcase.assertEqual(pid2, pid)
testcase.assertEqual(72 << 8, status)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SocketServerTest(unittest.TestCase):
"""Test all socket servers."""
def setUp(self):
signal_alarm(60) # Kill deadlocks after 60 seconds.
self.port_seed = 0
self.test_files = []
def tearDown(self):
signal_alarm(0) # Didn't deadlock.
reap_children()
for fn in self.test_files:
try:
os.remove(fn)
except OSError:
pass
self.test_files[:] = []
def pickaddr(self, proto):
if proto == socket.AF_INET:
return (HOST, 0)
else:
# XXX: We need a way to tell AF_UNIX to pick its own name
# like AF_INET provides port==0.
dir = None
fn = tempfile.mktemp(prefix='unix_socket.', dir=dir)
self.test_files.append(fn)
return fn
def make_server(self, addr, svrcls, hdlrbase):
class MyServer(svrcls):
def handle_error(self, request, client_address):
self.close_request(request)
self.server_close()
raise
class MyHandler(hdlrbase):
def handle(self):
line = self.rfile.readline()
self.wfile.write(line)
if verbose: print("creating server")
server = MyServer(addr, MyHandler)
self.assertEqual(server.server_address, server.socket.getsockname())
return server
@reap_threads
def run_server(self, svrcls, hdlrbase, testfunc):
server = self.make_server(self.pickaddr(svrcls.address_family),
svrcls, hdlrbase)
# We had the OS pick a port, so pull the real address out of
# the server.
addr = server.server_address
if verbose:
print("ADDR =", addr)
print("CLASS =", svrcls)
t = threading.Thread(
name='%s serving' % svrcls,
target=server.serve_forever,
# Short poll interval to make the test finish quickly.
# Time between requests is short enough that we won't wake
# up spuriously too many times.
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
t.start()
if verbose: print("server running")
for i in range(3):
if verbose: print("test client", i)
testfunc(svrcls.address_family, addr)
if verbose: print("waiting for server")
server.shutdown()
t.join()
server.server_close()
self.assertEqual(-1, server.socket.fileno())
if verbose: print("done")
def stream_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_STREAM)
s.connect(addr)
s.sendall(TEST_STR)
buf = data = receive(s, 100)
while data and b'\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEqual(buf, TEST_STR)
s.close()
def dgram_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_DGRAM)
s.sendto(TEST_STR, addr)
buf = data = receive(s, 100)
while data and b'\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEqual(buf, TEST_STR)
s.close()
def test_TCPServer(self):
self.run_server(socketserver.TCPServer,
socketserver.StreamRequestHandler,
self.stream_examine)
def test_ThreadingTCPServer(self):
self.run_server(socketserver.ThreadingTCPServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@requires_forking
def test_ForkingTCPServer(self):
with simple_subprocess(self):
self.run_server(socketserver.ForkingTCPServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
def test_UnixStreamServer(self):
self.run_server(socketserver.UnixStreamServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
def test_ThreadingUnixStreamServer(self):
self.run_server(socketserver.ThreadingUnixStreamServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
@requires_forking
def test_ForkingUnixStreamServer(self):
with simple_subprocess(self):
self.run_server(ForkingUnixStreamServer,
socketserver.StreamRequestHandler,
self.stream_examine)
def test_UDPServer(self):
self.run_server(socketserver.UDPServer,
socketserver.DatagramRequestHandler,
self.dgram_examine)
def test_ThreadingUDPServer(self):
self.run_server(socketserver.ThreadingUDPServer,
socketserver.DatagramRequestHandler,
self.dgram_examine)
@requires_forking
def test_ForkingUDPServer(self):
with simple_subprocess(self):
self.run_server(socketserver.ForkingUDPServer,
socketserver.DatagramRequestHandler,
self.dgram_examine)
# Alas, on Linux (at least) recvfrom() doesn't return a meaningful
# client address so this cannot work:
# @requires_unix_sockets
# def test_UnixDatagramServer(self):
# self.run_server(socketserver.UnixDatagramServer,
# socketserver.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# def test_ThreadingUnixDatagramServer(self):
# self.run_server(socketserver.ThreadingUnixDatagramServer,
# socketserver.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# @requires_forking
# def test_ForkingUnixDatagramServer(self):
# self.run_server(socketserver.ForkingUnixDatagramServer,
# socketserver.DatagramRequestHandler,
# self.dgram_examine)
@reap_threads
def test_shutdown(self):
# Issue #2302: shutdown() should always succeed in making an
# other thread leave serve_forever().
class MyServer(socketserver.TCPServer):
pass
class MyHandler(socketserver.StreamRequestHandler):
pass
threads = []
for i in range(20):
s = MyServer((HOST, 0), MyHandler)
t = threading.Thread(
name='MyServer serving',
target=s.serve_forever,
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
threads.append((t, s))
for t, s in threads:
t.start()
s.shutdown()
for t, s in threads:
t.join()
s.server_close()
def test_tcpserver_bind_leak(self):
# Issue #22435: the server socket wouldn't be closed if bind()/listen()
# failed.
# Create many servers for which bind() will fail, to see if this result
# in FD exhaustion.
for i in range(1024):
with self.assertRaises(OverflowError):
socketserver.TCPServer((HOST, -1),
socketserver.StreamRequestHandler)
class MiscTestCase(unittest.TestCase):
def test_all(self):
# objects defined in the module should be in __all__
expected = []
for name in dir(socketserver):
if not name.startswith('_'):
mod_object = getattr(socketserver, name)
if getattr(mod_object, '__module__', None) == 'socketserver':
expected.append(name)
self.assertCountEqual(socketserver.__all__, expected)
if __name__ == "__main__":
unittest.main()
|
mwobensmith/tls-canary | refs/heads/master | tlscanary/modes/sourceupdate.py | 1 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import csv
import datetime
import logging
import os
import sys
import zipfile
from basemode import BaseMode
from tlscanary.firefox_downloader import get_to_file
import tlscanary.progress as pr
import tlscanary.sources_db as sdb
logger = logging.getLogger(__name__)
class SourceUpdateMode(BaseMode):
"""
Mode to update host databases from publicly available top sites data
"""
name = "srcupdate"
help = "Update hosts databases used by other modes"
# There are various top sites databases that might be considered for querying here.
# The other notable database is the notorious `Alexa Top 1M` which is available at
# "http://s3.amazonaws.com/alexa-static/top-1m.csv.zip". It is based on usage data
# gathered from the equally notorious Alexa browser toolbar, while the `Umbrella top 1M`
# used below is DNS-based and its ranking is hence considered to be more representative.
# `Umbrella` and `Alexa` use precisely the same format and their links are thus
# interchangeable.
# For future reference, there is also Ulfr's database at
# "https://ulfr.io/f/top1m_has_tls_sorted.csv". It requires a different parser but
# has the advantage of clustering hosts by shared certificates.
top_sites_location = "http://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip"
def __init__(self, args, module_dir, tmp_dir):
super(SourceUpdateMode, self).__init__(args, module_dir, tmp_dir)
self.start_time = None
self.db = None
self.sources = None
self.app = None
self.profile = None
def setup(self):
global logger
self.app = self.get_test_candidate(self.args.base)
self.profile = self.make_profile("base_profile")
tmp_zip_name = os.path.join(self.tmp_dir, "top.zip")
logger.info("Fetching unfiltered top sites data from the `Umbrella Top 1M` online database")
get_to_file(self.top_sites_location, tmp_zip_name)
try:
zipped = zipfile.ZipFile(tmp_zip_name)
if len(zipped.filelist) != 1 or not zipped.filelist[0].orig_filename.lower().endswith(".csv"):
logger.critical("Top sites zip file has unexpected content")
sys.exit(5)
tmp_csv_name = zipped.extract(zipped.filelist[0], self.tmp_dir)
except zipfile.BadZipfile:
logger.critical("Error opening top sites zip archive")
sys.exit(5)
self.db = sdb.SourcesDB(self.args)
is_default = self.args.source == self.db.default
self.sources = sdb.Sources(self.args.source, is_default)
with open(tmp_csv_name) as f:
cr = csv.DictReader(f, fieldnames=["rank", "hostname"])
self.sources.rows = [row for row in cr]
# A mild sanity check to see whether the downloaded data is valid.
if len(self.sources) < 900000:
logger.warning("Top sites is surprisingly small, just %d hosts" % len(self.sources))
if self.sources.rows[0] != {"hostname": "google.com", "rank": "1"}:
logger.warning("Top sites data looks weird. First line: `%s`" % self.sources.rows[0])
def run(self):
"""
Perform the filter run. The objective is to filter out permanent errors so
we don't waste time on them during regular test runs.
The concept is:
Run top sites in chunks through Firefox and re-test all error URLs from that
chunk a number of times to weed out spurious network errors. Stop the process
once the required number of working hosts is collected.
"""
global logger
self.start_time = datetime.datetime.now()
limit = 500000
if self.args.limit is not None:
limit = self.args.limit
logger.info("There are %d hosts in the unfiltered host set" % len(self.sources))
logger.info("Compiling set of %d working hosts for `%s` database update" % (limit, self.sources.handle))
working_set = set()
# Chop unfiltered sources data into chunks and iterate over each
# .iter_chunks() returns a generator method to call for next chunk
next_chunk = self.sources.iter_chunks(chunk_size=1000)
chunk_size = self.sources.chunk_size
progress = pr.ProgressTracker(total=limit, unit="hosts", average=60 * 60.0)
try:
while True:
hosts_to_go = max(0, limit - len(working_set))
# Check if we're done
if hosts_to_go == 0:
break
logger.info("%d hosts to go to complete the working set" % hosts_to_go)
# Shrink chunk if it contains way more hosts than required to complete the working set
#
# CAVE: This assumes that this is the last chunk we require. The downsized chunk
# is still twice as large as required to complete the set, to compensate for
# broken hosts. If the error rate in the chunk is greater than 50%, another chunk will be
# consumed, resulting in a gap of untested hosts between the end of this downsized
# chunk and the beginning of the next. Not too bad, but important to be aware of.
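# For intuition, a hypothetical walk-through of the shrink below: with
# limit=500000 and len(working_set)=499800, hosts_to_go is 200; a full
# chunk_size of 1000 would overshoot, so it is cut to 200 * 2 = 400,
# leaving headroom for up to 50% of those hosts failing the scans.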
if chunk_size > hosts_to_go * 2:
chunk_size = min(chunk_size, hosts_to_go * 2)
pass_chunk = next_chunk(chunk_size, as_set=True)
# Check if we ran out of data for completing the set
if pass_chunk is None:
logger.warning("Ran out of hosts to complete the working set")
break
# Run chunk through multiple passes of Firefox, leaving only persistent errors in the
# error set.
pass_chunk_size = len(pass_chunk)
chunk_end = self.sources.chunk_offset
chunk_start = chunk_end - pass_chunk_size
logger.info("Processing chunk of %d hosts from the unfiltered set (#%d to #%d)"
% (chunk_end - chunk_start, chunk_start, chunk_end - 1))
pass_errors = pass_chunk
for i in xrange(self.args.scans):
logger.info("Pass %d with %d hosts" % (i + 1, len(pass_errors)))
# First run is regular, every other run is overhead
if i == 0:
report_callback = None
else:
report_callback = progress.log_overhead
pass_errors = self.run_test(self.app, pass_errors, profile=self.profile, get_info=False,
get_certs=False, return_only_errors=True,
report_callback=report_callback)
len_pass_errors = len(pass_errors)
# Log progress of first pass
if i == 0:
progress.log_completed(pass_chunk_size - len_pass_errors)
progress.log_overhead(len_pass_errors)
if len_pass_errors == 0:
break
logger.info("Error rate in chunk was %.1f%%"
% (100.0 * float(len_pass_errors) / float(chunk_end - chunk_start)))
# Add all non-errors to the working set
working_set.update(pass_chunk.difference(pass_errors))
# Log progress after every chunk
logger.info(str(progress))
except KeyboardInterrupt:
logger.critical("Ctrl-C received")
raise  # re-raise to preserve the original traceback
final_src = sdb.Sources(self.sources.handle, is_default=self.sources.is_default)
final_src.from_set(working_set)
final_src.sort()
final_src.trim(limit)
if len(final_src) < limit:
logger.warning("Ran out of hosts to complete the working set")
logger.info("Collected %d working hosts for the updated test set" % len(final_src))
logger.info("Writing updated `%s` host database" % final_src.handle)
self.db.write(final_src)
def teardown(self):
# Free some memory
self.db = None
self.sources = None
self.app = None
self.profile = None
|
pleaseproject/python-for-android | refs/heads/master | python-build/python-libs/gdata/build/lib/gdata/Crypto/PublicKey/__init__.py | 273 | """Public-key encryption and signature algorithms.
Public-key encryption uses two different keys, one for encryption and
one for decryption. The encryption key can be made public, and the
decryption key is kept private. Many public-key algorithms can also
be used to sign messages, and some can *only* be used for signatures.
Crypto.PublicKey.DSA Digital Signature Algorithm. (Signature only)
Crypto.PublicKey.ElGamal (Signing and encryption)
Crypto.PublicKey.RSA (Signing, encryption, and blinding)
Crypto.PublicKey.qNEW (Signature only)
"""
__all__ = ['RSA', 'DSA', 'ElGamal', 'qNEW']
__revision__ = "$Id: __init__.py,v 1.4 2003/04/03 20:27:13 akuchling Exp $"
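# A rough usage sketch (an assumption based on the classic PyCrypto
# interface; check the individual submodules for exact signatures):
# from Crypto.PublicKey import RSA
# key = RSA.generate(1024, randfunc) # randfunc supplies random bytes
# signature = key.sign(digest, K) # K is a per-signature random value
# assert key.verify(digest, signature)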
|
luisgg/iteexe | refs/heads/master | exe/webui/reflectionfpdmodifblock.py | 2 | # ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
ReflectionBlock can render and process ReflectionIdevices as XHTML
"""
import logging
from exe.webui.block import Block
from exe.webui import common
from exe.webui.element import TextAreaElement
log = logging.getLogger(__name__)
# ===========================================================================
class ReflectionfpdmodifBlock(Block):
"""
ReflectionBlock can render and process ReflectionIdevices as XHTML
"""
def __init__(self, parent, idevice):
"""
Initialize a new Block object
"""
Block.__init__(self, parent, idevice)
self.activityInstruc = idevice.activityInstruc
# self.answerInstruc = idevice.answerInstruc
# to compensate for the strange unpickling timing when objects are
# loaded from an elp, ensure that proper idevices are set:
if idevice.activityTextArea.idevice is None:
idevice.activityTextArea.idevice = idevice
# if idevice.answerTextArea.idevice is None:
# idevice.answerTextArea.idevice = idevice
self.activityElement = TextAreaElement(idevice.activityTextArea)
# self.answerElement = TextAreaElement(idevice.answerTextArea)
self.previewing = False # In view or preview render
if not hasattr(self.idevice,'undo'):
self.idevice.undo = True
def process(self, request):
"""
Process the request arguments from the web server
"""
Block.process(self, request)
is_cancel = common.requestHasCancel(request)
if not is_cancel:
self.activityElement.process(request)
# self.answerElement.process(request)
if "title"+self.id in request.args:
self.idevice.title = request.args["title"+self.id][0]
def renderEdit(self, style):
"""
Returns an XHTML string with the form element for editing this block
"""
html = "<div class=\"iDevice\"><br/>\n"
# JR
# Remove the "FPD - " prefix
if self.idevice.title.find("FPD - ") == 0:
self.idevice.title = x_(u"Reflexiona")
html += common.textInput("title"+self.id, self.idevice.title)
html += self.activityElement.renderEdit()
# html += self.answerElement.renderEdit()
html += "<br/>" + self.renderEditButtons()
html += "</div>\n"
return html
def renderPreview(self, style):
"""
Remembers if we're previewing or not,
then implicitly calls self.renderViewContent (via Block.renderPreview)
"""
self.previewing = True
return Block.renderPreview(self, style)
def renderView(self, style):
"""
Remembers if we're previewing or not,
then implicitly calls self.renderViewContent (via Block.renderPreview)
"""
self.previewing = False
return Block.renderView(self, style)
def renderViewContent(self):
"""
Returns an XHTML string for this block
"""
html = u'<script type="text/javascript" src="common.js"></script>\n'
html += u'<div class="iDevice_inner">\n'
if self.previewing:
html += self.activityElement.renderPreview()
else:
html += self.activityElement.renderView()
# html += '<div id="view%s" style="display:block;">' % self.id
# html += common.feedbackButton("btnshow"+self.id, _(u"Click here"),
# onclick="showAnswer('%s',1)" % self.id)
# html += '</div>\n'
# html += '<div id="hide%s" style="display:none;">' % self.id
# html += common.feedbackButton("btnshow"+self.id, _(u"Hide"),
# onclick="showAnswer('%s',0)" % self.id)
# html += '</div>\n'
# html += '<div id="s%s" class="feedback" style=" ' % self.id
# html += 'display: none;">'
# if self.previewing:
# html += self.answerElement.renderPreview()
# else:
# html += self.answerElement.renderView()
# html += "</div>\n"
html += "</div>\n"
return html
from exe.engine.reflectionfpdmodifidevice import ReflectionfpdmodifIdevice
from exe.webui.blockfactory import g_blockFactory
g_blockFactory.registerBlockType(ReflectionfpdmodifBlock, ReflectionfpdmodifIdevice)
# ===========================================================================
|
PokemonGoF/PokemonGo-Bot-Desktop | refs/heads/development | build/pywin/Lib/distutils/command/build_ext.py | 8 | """distutils.command.build_ext
Implements the Distutils 'build_ext' command, for building extension
modules (currently limited to C extensions, should accommodate C++
extensions ASAP)."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys, os, string, re
from types import *
from site import USER_BASE, USER_SITE
from distutils.core import Command
from distutils.errors import *
from distutils.sysconfig import customize_compiler, get_python_version
from distutils.dep_util import newer_group
from distutils.extension import Extension
from distutils.util import get_platform
from distutils import log
if os.name == 'nt':
from distutils.msvccompiler import get_build_version
MSVC_VERSION = int(get_build_version())
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
extension_name_re = re.compile \
(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
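# For illustration, the pattern accepts dotted names such as
# "foo.bar_baz2" and rejects names like "2foo", "foo..bar" or "foo.bar-baz".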
def show_compilers ():
from distutils.ccompiler import show_compilers
show_compilers()
class build_ext (Command):
description = "build C/C++ extensions (compile/link to build directory)"
# XXX thoughts on how to deal with complex command-line options like
# these, i.e. how to make it so fancy_getopt can suck them off the
# command line and make it look like setup.py defined the appropriate
# lists of tuples of what-have-you.
# - each command needs a callback to process its command-line options
# - Command.__init__() needs access to its share of the whole
# command line (must ultimately come from
# Distribution.parse_command_line())
# - it then calls the current command class' option-parsing
# callback to deal with weird options like -D, which have to
# parse the option text and churn out some custom data
# structure
# - that data structure (in this case, a list of 2-tuples)
# will then be present in the command object by the time
# we get to finalize_options() (i.e. the constructor
# takes care of both command-line and client options
# in between initialize_options() and finalize_options())
sep_by = " (separated by '%s')" % os.pathsep
user_options = [
('build-lib=', 'b',
"directory for compiled extension modules"),
('build-temp=', 't',
"directory for temporary files (build by-products)"),
('plat-name=', 'p',
"platform name to cross-compile for, if supported "
"(default: %s)" % get_platform()),
('inplace', 'i',
"ignore build-lib and put compiled extensions into the source " +
"directory alongside your pure Python modules"),
('include-dirs=', 'I',
"list of directories to search for header files" + sep_by),
('define=', 'D',
"C preprocessor macros to define"),
('undef=', 'U',
"C preprocessor macros to undefine"),
('libraries=', 'l',
"external C libraries to link with"),
('library-dirs=', 'L',
"directories to search for external C libraries" + sep_by),
('rpath=', 'R',
"directories to search for shared C libraries at runtime"),
('link-objects=', 'O',
"extra explicit link objects to include in the link"),
('debug', 'g',
"compile/link with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
('swig-cpp', None,
"make SWIG create C++ files (default is C)"),
('swig-opts=', None,
"list of SWIG command line options"),
('swig=', None,
"path to the SWIG executable"),
('user', None,
"add user include, library and rpath"),
]
boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options (self):
self.extensions = None
self.build_lib = None
self.plat_name = None
self.build_temp = None
self.inplace = 0
self.package = None
self.include_dirs = None
self.define = None
self.undef = None
self.libraries = None
self.library_dirs = None
self.rpath = None
self.link_objects = None
self.debug = None
self.force = None
self.compiler = None
self.swig = None
self.swig_cpp = None
self.swig_opts = None
self.user = None
def finalize_options(self):
from distutils import sysconfig
self.set_undefined_options('build',
('build_lib', 'build_lib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'),
('plat_name', 'plat_name'),
)
if self.package is None:
self.package = self.distribution.ext_package
self.extensions = self.distribution.ext_modules
# Make sure Python's include directories (for Python.h, pyconfig.h,
# etc.) are in the include search path.
py_include = sysconfig.get_python_inc()
plat_py_include = sysconfig.get_python_inc(plat_specific=1)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# Put the Python "system" include dir at the end, so that
# any local include dirs take precedence.
self.include_dirs.append(py_include)
if plat_py_include != py_include:
self.include_dirs.append(plat_py_include)
self.ensure_string_list('libraries')
# Life is easier if we're not forever checking for None, so
# simplify these options to empty lists if unset
if self.libraries is None:
self.libraries = []
if self.library_dirs is None:
self.library_dirs = []
elif type(self.library_dirs) is StringType:
self.library_dirs = string.split(self.library_dirs, os.pathsep)
if self.rpath is None:
self.rpath = []
elif type(self.rpath) is StringType:
self.rpath = string.split(self.rpath, os.pathsep)
# for extensions under windows use different directories
# for Release and Debug builds.
# also Python's library directory must be appended to library_dirs
if os.name == 'nt':
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
if self.debug:
self.build_temp = os.path.join(self.build_temp, "Debug")
else:
self.build_temp = os.path.join(self.build_temp, "Release")
# Append the source distribution include and library directories,
# this allows distutils on windows to work in the source tree
self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
if MSVC_VERSION == 9:
# Use the .lib files for the correct architecture
if self.plat_name == 'win32':
suffix = ''
else:
# win-amd64 or win-ia64
suffix = self.plat_name[4:]
# We could have been built in one of two places; add both
for d in ('PCbuild',), ('PC', 'VS9.0'):
new_lib = os.path.join(sys.exec_prefix, *d)
if suffix:
new_lib = os.path.join(new_lib, suffix)
self.library_dirs.append(new_lib)
elif MSVC_VERSION == 8:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS8.0'))
elif MSVC_VERSION == 7:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS7.1'))
else:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
# OS/2 (EMX) doesn't support Debug vs Release builds, but has the
# import libraries in its "Config" subdirectory
if os.name == 'os2':
self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
# for extensions under Cygwin and AtheOS, Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
self.library_dirs.append(os.path.join(sys.prefix, "lib",
"python" + get_python_version(),
"config"))
else:
# building python standard extensions
self.library_dirs.append('.')
# For building extensions with a shared Python library,
# Python's library directory must be appended to library_dirs
# See Issues: #1600860, #4366
if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
if not sysconfig.python_build:
# building third party extensions
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
else:
# building python standard extensions
self.library_dirs.append('.')
# The argument parsing will result in self.define being a string, but
# it has to be a list of 2-tuples. All the preprocessor symbols
# specified by the 'define' option will be set to '1'. Multiple
# symbols can be separated with commas.
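# e.g. --define=DEBUG,PYMALLOC arrives here as the string "DEBUG,PYMALLOC"
# and is turned into [('DEBUG', '1'), ('PYMALLOC', '1')] below.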
if self.define:
defines = self.define.split(',')
self.define = map(lambda symbol: (symbol, '1'), defines)
# The option for macros to undefine is also a string from the
# option parsing, but has to be a list. Multiple symbols can also
# be separated with commas here.
if self.undef:
self.undef = self.undef.split(',')
if self.swig_opts is None:
self.swig_opts = []
else:
self.swig_opts = self.swig_opts.split(' ')
# Finally add the user include and library directories if requested
if self.user:
user_include = os.path.join(USER_BASE, "include")
user_lib = os.path.join(USER_BASE, "lib")
if os.path.isdir(user_include):
self.include_dirs.append(user_include)
if os.path.isdir(user_lib):
self.library_dirs.append(user_lib)
self.rpath.append(user_lib)
def run(self):
from distutils.ccompiler import new_compiler
# 'self.extensions', as supplied by setup.py, is a list of
# Extension instances. See the documentation for Extension (in
# distutils.extension) for details.
#
# For backwards compatibility with Distutils 0.8.2 and earlier, we
# also allow the 'extensions' list to be a list of tuples:
# (ext_name, build_info)
# where build_info is a dictionary containing everything that
# Extension instances do except the name, with a few things being
# differently named. We convert these 2-tuples to Extension
# instances as needed.
if not self.extensions:
return
# If we were asked to build any C/C++ libraries, make sure that the
# directory where we put them is in the library search path for
# linking extensions.
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.libraries.extend(build_clib.get_library_names() or [])
self.library_dirs.append(build_clib.build_clib)
# Setup the CCompiler object that we'll use to do all the
# compiling and linking
self.compiler = new_compiler(compiler=self.compiler,
verbose=self.verbose,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
# If we are cross-compiling, init the compiler now (if we are not
# cross-compiling, init would not hurt, but people may rely on
# late initialization of compiler even if they shouldn't...)
if os.name == 'nt' and self.plat_name != get_platform():
self.compiler.initialize(self.plat_name)
# And make sure that any compile/link-related options (which might
# come from the command-line or from the setup script) are set in
# that CCompiler object -- that way, they automatically apply to
# all compiling and linking done here.
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name, value) in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
if self.libraries is not None:
self.compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
self.compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
self.compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
self.compiler.set_link_objects(self.link_objects)
# Now actually compile and link everything.
self.build_extensions()
def check_extensions_list(self, extensions):
"""Ensure that the list of extensions (presumably provided as a
command option 'extensions') is valid, i.e. it is a list of
Extension objects. We also support the old-style list of 2-tuples,
where the tuples are (ext_name, build_info), which are converted to
Extension instances here.
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(extensions, list):
raise DistutilsSetupError, \
"'ext_modules' option must be a list of Extension instances"
for i, ext in enumerate(extensions):
if isinstance(ext, Extension):
continue # OK! (assume type-checking done
# by Extension constructor)
if not isinstance(ext, tuple) or len(ext) != 2:
raise DistutilsSetupError, \
("each element of 'ext_modules' option must be an "
"Extension instance or 2-tuple")
ext_name, build_info = ext
log.warn(("old-style (ext_name, build_info) tuple found in "
"ext_modules for extension '%s'"
"-- please convert to Extension instance" % ext_name))
if not (isinstance(ext_name, str) and
extension_name_re.match(ext_name)):
raise DistutilsSetupError, \
("first element of each tuple in 'ext_modules' "
"must be the extension name (a string)")
if not isinstance(build_info, dict):
raise DistutilsSetupError, \
("second element of each tuple in 'ext_modules' "
"must be a dictionary (build info)")
# OK, the (ext_name, build_info) dict is type-safe: convert it
# to an Extension instance.
ext = Extension(ext_name, build_info['sources'])
# Easy stuff: one-to-one mapping from dict elements to
# instance attributes.
for key in ('include_dirs', 'library_dirs', 'libraries',
'extra_objects', 'extra_compile_args',
'extra_link_args'):
val = build_info.get(key)
if val is not None:
setattr(ext, key, val)
# Medium-easy stuff: same syntax/semantics, different names.
ext.runtime_library_dirs = build_info.get('rpath')
if 'def_file' in build_info:
log.warn("'def_file' element of build info dict "
"no longer supported")
# Non-trivial stuff: 'macros' split into 'define_macros'
# and 'undef_macros'.
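# e.g. macros=[('NDEBUG', '1'), ('DEBUG',)] yields
# define_macros=[('NDEBUG', '1')] and undef_macros=['DEBUG'].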
macros = build_info.get('macros')
if macros:
ext.define_macros = []
ext.undef_macros = []
for macro in macros:
if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
raise DistutilsSetupError, \
("'macros' element of build info dict "
"must be 1- or 2-tuple")
if len(macro) == 1:
ext.undef_macros.append(macro[0])
elif len(macro) == 2:
ext.define_macros.append(macro)
extensions[i] = ext
def get_source_files(self):
self.check_extensions_list(self.extensions)
filenames = []
# Wouldn't it be neat if we knew the names of header files too...
for ext in self.extensions:
filenames.extend(ext.sources)
return filenames
def get_outputs(self):
# Sanity check the 'extensions' list -- can't assume this is being
# done in the same run as a 'build_extensions()' call (in fact, we
# can probably assume that it *isn't*!).
self.check_extensions_list(self.extensions)
# And build the list of output (built) filenames. Note that this
# ignores the 'inplace' flag, and assumes everything goes in the
# "build" tree.
outputs = []
for ext in self.extensions:
outputs.append(self.get_ext_fullpath(ext.name))
return outputs
def build_extensions(self):
# First, sanity-check the 'extensions' list
self.check_extensions_list(self.extensions)
for ext in self.extensions:
self.build_extension(ext)
def build_extension(self, ext):
sources = ext.sources
if sources is None or type(sources) not in (ListType, TupleType):
raise DistutilsSetupError, \
("in 'ext_modules' option (extension '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % ext.name
sources = list(sources)
ext_path = self.get_ext_fullpath(ext.name)
depends = sources + ext.depends
if not (self.force or newer_group(depends, ext_path, 'newer')):
log.debug("skipping '%s' extension (up-to-date)", ext.name)
return
else:
log.info("building '%s' extension", ext.name)
# First, scan the sources for SWIG definition files (.i), run
# SWIG on 'em to create .c files, and modify the sources list
# accordingly.
sources = self.swig_sources(sources, ext)
# Next, compile the source code to object files.
# XXX not honouring 'define_macros' or 'undef_macros' -- the
# CCompiler API needs to change to accommodate this, and I
# want to do one thing at a time!
# Two possible sources for extra compiler arguments:
# - 'extra_compile_args' in Extension object
# - CFLAGS environment variable (not particularly
# elegant, but people seem to expect it and I
# guess it's useful)
# The environment variable should take precedence, and
# any sensible compiler will give precedence to later
# command line args. Hence we combine them in order:
extra_args = ext.extra_compile_args or []
macros = ext.define_macros[:]
for undef in ext.undef_macros:
macros.append((undef,))
objects = self.compiler.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=ext.include_dirs,
debug=self.debug,
extra_postargs=extra_args,
depends=ext.depends)
# XXX -- this is a Vile HACK!
#
# The setup.py script for Python on Unix needs to be able to
# get this list so it can perform all the clean up needed to
# avoid keeping object files around when cleaning out a failed
# build of an extension module. Since Distutils does not
# track dependencies, we have to get rid of intermediates to
# ensure all the intermediates will be properly re-built.
#
self._built_objects = objects[:]
# Now link the object files together into a "shared object" --
# of course, first we have to figure out all the other things
# that go into the mix.
if ext.extra_objects:
objects.extend(ext.extra_objects)
extra_args = ext.extra_link_args or []
# Detect target language, if not provided
language = ext.language or self.compiler.detect_language(sources)
self.compiler.link_shared_object(
objects, ext_path,
libraries=self.get_libraries(ext),
library_dirs=ext.library_dirs,
runtime_library_dirs=ext.runtime_library_dirs,
extra_postargs=extra_args,
export_symbols=self.get_export_symbols(ext),
debug=self.debug,
build_temp=self.build_temp,
target_lang=language)
def swig_sources (self, sources, extension):
"""Walk the list of source files in 'sources', looking for SWIG
interface (.i) files. Run SWIG on all that are found, and
return a modified 'sources' list with SWIG source files replaced
by the generated C (or C++) files.
"""
new_sources = []
swig_sources = []
swig_targets = {}
# XXX this drops generated C/C++ files into the source tree, which
# is fine for developers who want to distribute the generated
# source -- but there should be an option to put SWIG output in
# the temp dir.
if self.swig_cpp:
log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
if self.swig_cpp or ('-c++' in self.swig_opts) or \
('-c++' in extension.swig_opts):
target_ext = '.cpp'
else:
target_ext = '.c'
for source in sources:
(base, ext) = os.path.splitext(source)
if ext == ".i": # SWIG interface file
new_sources.append(base + '_wrap' + target_ext)
swig_sources.append(source)
swig_targets[source] = new_sources[-1]
else:
new_sources.append(source)
if not swig_sources:
return new_sources
swig = self.swig or self.find_swig()
swig_cmd = [swig, "-python"]
swig_cmd.extend(self.swig_opts)
if self.swig_cpp:
swig_cmd.append("-c++")
# Do not override commandline arguments
if not self.swig_opts:
for o in extension.swig_opts:
swig_cmd.append(o)
for source in swig_sources:
target = swig_targets[source]
log.info("swigging %s to %s", source, target)
self.spawn(swig_cmd + ["-o", target, source])
return new_sources
# swig_sources ()
def find_swig (self):
"""Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.
"""
if os.name == "posix":
return "swig"
elif os.name == "nt":
# Look for SWIG in its standard installation directory on
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
if os.path.isfile(fn):
return fn
else:
return "swig.exe"
elif os.name == "os2":
# assume swig available in the PATH.
return "swig.exe"
else:
raise DistutilsPlatformError, \
("I don't know how to find (much less run) SWIG "
"on platform '%s'") % os.name
# find_swig ()
# -- Name generators -----------------------------------------------
# (extension names, filenames, whatever)
def get_ext_fullpath(self, ext_name):
"""Returns the path of the filename for a given extension.
The file is located in `build_lib` or directly in the package
(inplace option).
"""
# makes sure the extension name is only using dots
all_dots = string.maketrans('/'+os.sep, '..')
ext_name = ext_name.translate(all_dots)
fullname = self.get_ext_fullname(ext_name)
modpath = fullname.split('.')
filename = self.get_ext_filename(ext_name)
filename = os.path.split(filename)[-1]
if not self.inplace:
# no further work needed
# returning :
# build_dir/package/path/filename
filename = os.path.join(*modpath[:-1]+[filename])
return os.path.join(self.build_lib, filename)
# the inplace option requires to find the package directory
# using the build_py command for that
package = '.'.join(modpath[0:-1])
build_py = self.get_finalized_command('build_py')
package_dir = os.path.abspath(build_py.get_package_dir(package))
# returning
# package_dir/filename
return os.path.join(package_dir, filename)
def get_ext_fullname(self, ext_name):
"""Returns the fullname of a given extension name.
Adds the `package.` prefix"""
if self.package is None:
return ext_name
else:
return self.package + '.' + ext_name
def get_ext_filename(self, ext_name):
r"""Convert the name of an extension (eg. "foo.bar") into the name
of the file from which it will be loaded (eg. "foo/bar.so", or
"foo\bar.pyd").
"""
from distutils.sysconfig import get_config_var
ext_path = string.split(ext_name, '.')
# OS/2 has an 8 character module (extension) limit :-(
if os.name == "os2":
ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
return os.path.join(*ext_path) + '_d' + so_ext
return os.path.join(*ext_path) + so_ext
def get_export_symbols (self, ext):
"""Return the list of symbols that a shared extension has to
export. This either uses 'ext.export_symbols' or, if it's not
provided, "init" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "init" function.
"""
initfunc_name = "init" + ext.name.split('.')[-1]
if initfunc_name not in ext.export_symbols:
ext.export_symbols.append(initfunc_name)
return ext.export_symbols
def get_libraries (self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
on Windows and OS/2, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
# pyconfig.h that MSVC groks. The other Windows compilers all seem
# to need it mentioned explicitly, though, so that's what we do.
# Append '_d' to the python import library on debug builds.
if sys.platform == "win32":
from distutils.msvccompiler import MSVCCompiler
if not isinstance(self.compiler, MSVCCompiler):
template = "python%d%d"
if self.debug:
template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
else:
return ext.libraries
elif sys.platform == "os2emx":
# EMX/GCC requires the python library explicitly, and I
# believe VACPP does as well (though not confirmed) - AIM Apr01
template = "python%d%d"
# debug versions of the main DLL aren't supported, at least
# not at this time - AIM Apr01
#if self.debug:
# template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "atheos":
from distutils import sysconfig
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# Get SHLIBS from Makefile
extra = []
for lib in sysconfig.get_config_var('SHLIBS').split():
if lib.startswith('-l'):
extra.append(lib[2:])
else:
extra.append(lib)
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib, "m"] + extra
elif sys.platform == 'darwin':
# Don't use the default code below
return ext.libraries
elif sys.platform[:3] == 'aix':
# Don't use the default code below
return ext.libraries
else:
from distutils import sysconfig
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
return ext.libraries + [pythonlib]
else:
return ext.libraries
# class build_ext
|
xindus40223115/2015cda_g1 | refs/heads/master | static/Brython3.1.0-20150301-090019/Lib/xml/etree/__init__.py | 1200 | # $Id: __init__.py 3375 2008-02-13 08:05:08Z fredrik $
# elementtree package
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
|
satterly/alerta5 | refs/heads/master | alerta/app/webhooks/newrelic.py | 1 |
from flask import request, g, jsonify
from flask_cors import cross_origin
from alerta.app.auth.utils import permission
from alerta.app.models.alert import Alert
from alerta.app.utils.api import process_alert, add_remote_ip
from alerta.app.exceptions import ApiError, RejectException
from . import webhooks
def parse_newrelic(alert):
if 'version' not in alert:
raise ValueError("New Relic Legacy Alerting is not supported")
status = alert['current_state'].lower()
if status == 'open':
severity = alert['severity'].lower()
elif status == 'acknowledged':
severity = alert['severity'].lower()
status = 'ack'
elif status == 'closed':
severity = 'ok'
else:
severity = alert['severity'].lower()
return Alert(
resource=alert['targets'][0]['name'],
event=alert['condition_name'],
environment='Production',
severity=severity,
status=status,
service=[alert['account_name']],
group=alert['targets'][0]['type'],
text=alert['details'],
tags=['%s:%s' % (key, value) for (key, value) in alert['targets'][0]['labels'].items()],
attributes={
'moreInfo': '<a href="%s" target="_blank">Incident URL</a>' % alert['incident_url'],
'runBook': '<a href="%s" target="_blank">Runbook URL</a>' % alert['runbook_url']
},
origin='New Relic/v%s' % alert['version'],
event_type=alert['event_type'].lower(),
raw_data=alert
)
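# For reference, a minimal illustrative payload shape for parse_newrelic()
# (values are made up; only the keys read above matter):
# {"version": "1.0", "current_state": "open", "severity": "critical",
# "condition_name": "CPU > 90%", "account_name": "acme", "details": "...",
# "incident_url": "...", "runbook_url": "...", "event_type": "INCIDENT",
# "targets": [{"name": "web01", "type": "host", "labels": {"env": "prod"}}]}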
@webhooks.route('/webhooks/newrelic', methods=['OPTIONS', 'POST'])
@cross_origin()
@permission('write:webhooks')
def newrelic():
try:
incomingAlert = parse_newrelic(request.json)
except ValueError as e:
return jsonify(status="error", message=str(e)), 400
if g.get('customer', None):
incomingAlert.customer = g.get('customer')
add_remote_ip(request, incomingAlert)
try:
alert = process_alert(incomingAlert)
except RejectException as e:
return jsonify(status="error", message=str(e)), 403
except Exception as e:
return jsonify(status="error", message=str(e)), 500
if alert:
return jsonify(status="ok", id=alert.id, alert=alert.serialize), 201
else:
raise ApiError("insert or update of New Relic alert failed", 500)
|
bonitadecker77/python-for-android | refs/heads/master | python-modules/twisted/twisted/internet/test/test_address.py | 56 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
import re
from twisted.trial import unittest
from twisted.internet.address import IPv4Address, UNIXAddress
class AddressTestCaseMixin(object):
def test_addressComparison(self):
"""
Test that two different address instances, sharing the same
properties are considered equal.
"""
self.assertEquals(self.buildAddress(), self.buildAddress())
def _stringRepresentation(self, stringFunction):
"""
Verify that the string representation of an address object conforms to a
simple pattern (the usual one for Python object reprs) and contains
values which accurately reflect the attributes of the address.
"""
addr = self.buildAddress()
pattern = "".join([
"^",
"([^\(]+Address)", # class name,
"\(", # opening bracket,
"([^)]+)", # arguments,
"\)", # closing bracket,
"$"
])
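# e.g. str(IPv4Address("TCP", "127.0.0.1", 0)) is expected to look like
# "IPv4Address(TCP, '127.0.0.1', 0)", which the regex above picks apart
# into the class name and the argument list.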
stringValue = stringFunction(addr)
m = re.match(pattern, stringValue)
self.assertNotEquals(
None, m,
"%s does not match the standard __str__ pattern "
"ClassName(arg1, arg2, etc)" % (stringValue,))
self.assertEquals(addr.__class__.__name__, m.group(1))
args = [x.strip() for x in m.group(2).split(",")]
self.assertEquals(
args,
[argSpec[1] % (getattr(addr, argSpec[0]),) for argSpec in self.addressArgSpec])
def test_str(self):
"""
C{str} can be used to get a string representation of an address instance
containing information about that address.
"""
self._stringRepresentation(str)
def test_repr(self):
"""
C{repr} can be used to get a string representation of an address
instance containing information about that address.
"""
self._stringRepresentation(repr)
class IPv4AddressTestCaseMixin(AddressTestCaseMixin):
addressArgSpec = (("type", "%s"), ("host", "%r"), ("port", "%d"))
class IPv4AddressTCPTestCase(unittest.TestCase, IPv4AddressTestCaseMixin):
def buildAddress(self):
return IPv4Address("TCP", "127.0.0.1", 0)
class IPv4AddressUDPTestCase(unittest.TestCase, IPv4AddressTestCaseMixin):
def buildAddress(self):
return IPv4Address("UDP", "127.0.0.1", 0)
class UNIXAddressTestCase(unittest.TestCase, AddressTestCaseMixin):
addressArgSpec = (("name", "%r"),)
def setUp(self):
self._socketAddress = self.mktemp()
def buildAddress(self):
return UNIXAddress(self._socketAddress)
|
leafclick/intellij-community | refs/heads/master | python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_1/_pkg0_0_0_1_0/_mod0_0_0_1_0_4.py | 30 | name0_0_0_1_0_4_0 = None
name0_0_0_1_0_4_1 = None
name0_0_0_1_0_4_2 = None
name0_0_0_1_0_4_3 = None
name0_0_0_1_0_4_4 = None |
ceramos/micropython | refs/heads/master | py/makeversionhdr.py | 7 | """
Generate header file with macros defining MicroPython version info.
This script works with Python 2.6, 2.7, 3.3 and 3.4.
"""
from __future__ import print_function
import sys
import os
import datetime
import subprocess
def get_version_info_from_git():
# Python 2.6 doesn't have check_output, so check for that
try:
subprocess.check_output
subprocess.check_call
except AttributeError:
return None
# Note: git describe doesn't work if no tag is available
try:
git_tag = subprocess.check_output(["git", "describe", "--dirty", "--always"], universal_newlines=True).strip()
except subprocess.CalledProcessError:
git_tag = ""
except OSError:
return None
try:
git_hash = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], stderr=subprocess.STDOUT, universal_newlines=True).strip()
except subprocess.CalledProcessError:
git_hash = "unknown"
except OSError:
return None
try:
# Check if there are any modified files.
subprocess.check_call(["git", "diff", "--no-ext-diff", "--quiet", "--exit-code"], stderr=subprocess.STDOUT)
# Check if there are any staged files.
subprocess.check_call(["git", "diff-index", "--cached", "--quiet", "HEAD", "--"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
git_hash += "-dirty"
except OSError:
return None
# Try to extract MicroPython version from git tag
if git_tag.startswith("v"):
ver = git_tag[1:].split("-")[0].split(".")
if len(ver) == 2:
ver.append("0")
else:
ver = ["0", "0", "1"]
return git_tag, git_hash, ver
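# e.g. a git_tag of "v1.4.6-dirty" yields ver == ["1", "4", "6"], and a
# two-part tag such as "v1.5" is padded to ["1", "5", "0"].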
def get_version_info_from_docs_conf():
with open("%s/docs/conf.py" % sys.argv[0].rsplit("/", 2)[0]) as f:
for line in f:
if line.startswith("release = '"):
ver = line.strip()[10:].strip("'")
git_tag = "v" + ver
ver = ver.split(".")
if len(ver) == 2:
ver.append("0")
return git_tag, "<no hash>", ver
return None
def make_version_header(filename):
# Get version info using git, with fallback to docs/conf.py
info = get_version_info_from_git()
if info is None:
info = get_version_info_from_docs_conf()
git_tag, git_hash, ver = info
# Generate the file with the git and version info
file_data = """\
// This file was generated by py/makeversionhdr.py
#define MICROPY_GIT_TAG "%s"
#define MICROPY_GIT_HASH "%s"
#define MICROPY_BUILD_DATE "%s"
#define MICROPY_VERSION_MAJOR (%s)
#define MICROPY_VERSION_MINOR (%s)
#define MICROPY_VERSION_MICRO (%s)
#define MICROPY_VERSION_STRING "%s.%s.%s"
""" % (git_tag, git_hash, datetime.date.today().strftime("%Y-%m-%d"),
ver[0], ver[1], ver[2], ver[0], ver[1], ver[2])
# Check if the file contents changed from last time
write_file = True
if os.path.isfile(filename):
with open(filename, 'r') as f:
existing_data = f.read()
if existing_data == file_data:
write_file = False
# Only write the file if we need to
if write_file:
print("Generating %s" % filename)
with open(filename, 'w') as f:
f.write(file_data)
if __name__ == "__main__":
make_version_header(sys.argv[1])
|
rafafigueroa/edison-quad | refs/heads/master | keyboard_test.py | 1 | #!/usr/bin/python
import curses
import shelve
from Adafruit_PWM_Servo_Driver import PWM
import time
import numpy as np
# Initialise the PWM device using the default address
pwm = PWM(0x40, debug=False)
pulseMin = 1735 # Min pulse length out of 4096
pulseLow = 2500
pulseMax = 3047
pulseStop = 0
motorChannel = 0
pwm.setPWMFreq(400) # Set frequency to 400 Hz
pwm.setPWM(motorChannel, 0, pulseMin) # Set to min (throttle down)
time.sleep(2) # Wait for motors to be armed
drone_vars = shelve.open('drone_vars')
stdscr = curses.initscr()
curses.cbreak()
stdscr.keypad(1)
stdscr.addstr(0, 10, "Hit 'q' to quit and 'w' to save")
stdscr.addstr(1, 10, "Calibrating High: Up/Down")
stdscr.addstr(2, 10, "Calibrating Low : Left/Right")
stdscr.refresh()
key = ''
pwm_pulse_low = 1735
pwm_pulse_high = 3047
def close_safely():
drone_vars.close()
pwm.setPWM(motorChannel, 0, pulseStop)
curses.endwin()
print('Stopping motor')
try:
while key != ord('q'):
key = stdscr.getch()
stdscr.refresh()
if key == curses.KEY_LEFT:
pwm_pulse_low = pwm_pulse_low - 1
stdscr.addstr(6, 20, "LOW 3000rpm: " + str(pwm_pulse_low))
pwm.setPWM(motorChannel, 0, pwm_pulse_low)
elif key == curses.KEY_RIGHT:
pwm_pulse_low = pwm_pulse_low + 1
stdscr.addstr(6, 20, "LOW 3000rpm: " + str(pwm_pulse_low))
pwm.setPWM(motorChannel, 0, pwm_pulse_low)
elif key == curses.KEY_DOWN:
pwm_pulse_high = pwm_pulse_high - 1
stdscr.addstr(5, 20, "HIGH 8000rpm: " + str(pwm_pulse_high))
pwm.setPWM(motorChannel, 0, pwm_pulse_high)
elif key == curses.KEY_UP:
pwm_pulse_high = pwm_pulse_high + 1
stdscr.addstr(5, 20, "HIGH 8000rpm: " + str(pwm_pulse_high))
pwm.setPWM(motorChannel, 0, pwm_pulse_high)
elif key == ord('w'):
stdscr.addstr(8, 20, 'Saving values to file')
drone_vars['low_pulse'] = pwm_pulse_low
drone_vars['high_pulse'] = pwm_pulse_high
break  # leave the loop; the single close_safely() below cleans up once
close_safely()
# Catch an interrupt and set the motor to stop
except KeyboardInterrupt:
close_safely()
|
mycodeday/crm-platform | refs/heads/master | stock/stock.py | 2 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import date, datetime
from dateutil import relativedelta
import json
import time
from openerp import models, fields as new_fields, api
from openerp.osv import fields, osv
from openerp.tools.float_utils import float_compare, float_round
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
from openerp import SUPERUSER_ID, api
import openerp.addons.decimal_precision as dp
from openerp.addons.procurement import procurement
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
#----------------------------------------------------------
class stock_incoterms(osv.osv):
_name = "stock.incoterms"
_description = "Incoterms"
_columns = {
'name': fields.char('Name', required=True, help="Incoterms are a series of sales terms. They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."),
'code': fields.char('Code', size=3, required=True, help="Incoterm Standard Code"),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM you will not use."),
}
_defaults = {
'active': True,
}
#----------------------------------------------------------
# Stock Location
#----------------------------------------------------------
class stock_location(osv.osv):
_name = "stock.location"
_description = "Inventory Locations"
_parent_name = "location_id"
_parent_store = True
_parent_order = 'name'
_order = 'parent_left'
_rec_name = 'complete_name'
def _location_owner(self, cr, uid, location, context=None):
''' Return the company owning the location if any '''
return location and (location.usage == 'internal') and location.company_id or False
def _complete_name(self, cr, uid, ids, name, args, context=None):
""" Forms complete name of location from parent location to child location.
@return: Dictionary of values
"""
res = {}
for m in self.browse(cr, uid, ids, context=context):
res[m.id] = m.name
parent = m.location_id
while parent:
res[m.id] = parent.name + ' / ' + res[m.id]
parent = parent.location_id
return res
def _get_sublocations(self, cr, uid, ids, context=None):
""" return all sublocations of the given stock locations (included) """
if context is None:
context = {}
context_with_inactive = context.copy()
context_with_inactive['active_test'] = False
return self.search(cr, uid, [('id', 'child_of', ids)], context=context_with_inactive)
def _name_get(self, cr, uid, location, context=None):
name = location.name
while location.location_id and location.usage != 'view':
location = location.location_id
name = location.name + '/' + name
return name
def name_get(self, cr, uid, ids, context=None):
res = []
for location in self.browse(cr, uid, ids, context=context):
res.append((location.id, self._name_get(cr, uid, location, context=context)))
return res
_columns = {
'name': fields.char('Location Name', required=True, translate=True),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."),
'usage': fields.selection([
('supplier', 'Supplier Location'),
('view', 'View'),
('internal', 'Internal Location'),
('customer', 'Customer Location'),
('inventory', 'Inventory'),
('procurement', 'Procurement'),
('production', 'Production'),
('transit', 'Transit Location')],
'Location Type', required=True,
help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers
\n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products
\n* Internal Location: Physical locations inside your own warehouses,
\n* Customer Location: Virtual location representing the destination location for products sent to your customers
\n* Inventory: Virtual location serving as counterpart for inventory operations used to correct stock levels (Physical inventories)
\n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running.
\n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products
\n* Transit Location: Counterpart location that should be used in inter-companies or inter-warehouses operations
""", select=True),
'complete_name': fields.function(_complete_name, type='char', string="Location Name",
store={'stock.location': (_get_sublocations, ['name', 'location_id', 'active'], 10)}),
'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'),
'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'),
'partner_id': fields.many2one('res.partner', 'Owner', help="Owner of the location if not internal"),
'comment': fields.text('Additional Information'),
'posx': fields.integer('Corridor (X)', help="Optional localization details, for information purpose only"),
'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"),
'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('Right Parent', select=1),
'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between companies'),
'scrap_location': fields.boolean('Is a Scrap Location?', help='Check this box to allow using this location to put scrapped/damaged goods.'),
'removal_strategy_id': fields.many2one('product.removal', 'Removal Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to take the products from, which lot etc. for this location. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."),
'putaway_strategy_id': fields.many2one('product.putaway', 'Put Away Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to store the products. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."),
'loc_barcode': fields.char('Location Barcode'),
}
_defaults = {
'active': True,
'usage': 'internal',
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c),
'posx': 0,
'posy': 0,
'posz': 0,
'scrap_location': False,
}
_sql_constraints = [('loc_barcode_company_uniq', 'unique (loc_barcode,company_id)', 'The barcode for a location must be unique per company !')]
def create(self, cr, uid, default, context=None):
if not default.get('loc_barcode', False):
default.update({'loc_barcode': default.get('complete_name', False)})
return super(stock_location, self).create(cr, uid, default, context=context)
def get_putaway_strategy(self, cr, uid, location, product, context=None):
''' Returns the location where the product has to be put, if any compliant putaway strategy is found. Otherwise returns None.'''
putaway_obj = self.pool.get('product.putaway')
loc = location
while loc:
if loc.putaway_strategy_id:
res = putaway_obj.putaway_apply(cr, uid, loc.putaway_strategy_id, product, context=context)
if res:
return res
loc = loc.location_id
def _default_removal_strategy(self, cr, uid, context=None):
return 'fifo'
def get_removal_strategy(self, cr, uid, location, product, context=None):
''' Returns the removal strategy to consider for the given product and location.
:param location: browse record (stock.location)
:param product: browse record (product.product)
:rtype: char
'''
if product.categ_id.removal_strategy_id:
return product.categ_id.removal_strategy_id.method
loc = location
while loc:
if loc.removal_strategy_id:
return loc.removal_strategy_id.method
loc = loc.location_id
return self._default_removal_strategy(cr, uid, context=context)
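    # Resolution order restated for clarity: the strategy set on the product category wins,
    # then the nearest ancestor location that defines one, then the 'fifo' default, e.g.:
    #     method = self.pool.get('stock.location').get_removal_strategy(cr, uid, location, product)
    #     # -> 'lifo' if the category or an ancestor location enforces it, else 'fifo'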
def get_warehouse(self, cr, uid, location, context=None):
"""
Returns warehouse id of warehouse that contains location
:param location: browse record (stock.location)
"""
wh_obj = self.pool.get("stock.warehouse")
whs = wh_obj.search(cr, uid, [('view_location_id.parent_left', '<=', location.parent_left),
('view_location_id.parent_right', '>=', location.parent_left)], context=context)
return whs and whs[0] or False
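    # Note: this lookup relies on the nested-set encoding of the location tree: a location
    # belongs to a warehouse when
    #     view_location.parent_left <= location.parent_left <= view_location.parent_right
    # which lets a single search replace a Python walk up the hierarchy.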
#----------------------------------------------------------
# Routes
#----------------------------------------------------------
class stock_location_route(osv.osv):
_name = 'stock.location.route'
_description = "Inventory Routes"
_order = 'sequence'
_columns = {
'name': fields.char('Route Name', required=True),
'sequence': fields.integer('Sequence'),
'pull_ids': fields.one2many('procurement.rule', 'route_id', 'Pull Rules', copy=True),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the route without removing it."),
'push_ids': fields.one2many('stock.location.path', 'route_id', 'Push Rules', copy=True),
'product_selectable': fields.boolean('Applicable on Product'),
'product_categ_selectable': fields.boolean('Applicable on Product Category'),
'warehouse_selectable': fields.boolean('Applicable on Warehouse'),
'supplied_wh_id': fields.many2one('stock.warehouse', 'Supplied Warehouse'),
'supplier_wh_id': fields.many2one('stock.warehouse', 'Supplier Warehouse'),
        'company_id': fields.many2one('res.company', 'Company', select=1, help='Leave this field empty if this route is shared between all companies'),
}
_defaults = {
'sequence': lambda self, cr, uid, ctx: 0,
'active': True,
'product_selectable': True,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location.route', context=c),
}
def write(self, cr, uid, ids, vals, context=None):
        '''When a route is activated or deactivated, also (de)activate its pull and push rules'''
if isinstance(ids, (int, long)):
ids = [ids]
res = super(stock_location_route, self).write(cr, uid, ids, vals, context=context)
if 'active' in vals:
push_ids = []
pull_ids = []
for route in self.browse(cr, uid, ids, context=context):
if route.push_ids:
push_ids += [r.id for r in route.push_ids if r.active != vals['active']]
if route.pull_ids:
pull_ids += [r.id for r in route.pull_ids if r.active != vals['active']]
if push_ids:
self.pool.get('stock.location.path').write(cr, uid, push_ids, {'active': vals['active']}, context=context)
if pull_ids:
self.pool.get('procurement.rule').write(cr, uid, pull_ids, {'active': vals['active']}, context=context)
return res
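    # Illustrative sketch (hypothetical `route_id`): archiving a route cascades to its rules, so
    #     self.pool.get('stock.location.route').write(cr, uid, [route_id], {'active': False})
    # also deactivates every stock.location.path and procurement.rule attached to the route
    # whose 'active' flag differs from the new value.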
#----------------------------------------------------------
# Quants
#----------------------------------------------------------
class stock_quant(osv.osv):
"""
    Quants are the smallest units of stock: each quant represents a quantity of a product at a specific location
"""
_name = "stock.quant"
_description = "Quants"
def _get_quant_name(self, cr, uid, ids, name, args, context=None):
""" Forms complete name of location from parent location to child location.
@return: Dictionary of values
"""
res = {}
for q in self.browse(cr, uid, ids, context=context):
res[q.id] = q.product_id.code or ''
if q.lot_id:
res[q.id] = q.lot_id.name
res[q.id] += ': ' + str(q.qty) + q.product_id.uom_id.name
return res
def _calc_inventory_value(self, cr, uid, ids, name, attr, context=None):
context = dict(context or {})
res = {}
uid_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
for quant in self.browse(cr, uid, ids, context=context):
context.pop('force_company', None)
if quant.company_id.id != uid_company_id:
#if the company of the quant is different than the current user company, force the company in the context
#then re-do a browse to read the property fields for the good company.
context['force_company'] = quant.company_id.id
quant = self.browse(cr, uid, quant.id, context=context)
res[quant.id] = self._get_inventory_value(cr, uid, quant, context=context)
return res
def _get_inventory_value(self, cr, uid, quant, context=None):
return quant.product_id.standard_price * quant.qty
_columns = {
'name': fields.function(_get_quant_name, type='char', string='Identifier'),
'product_id': fields.many2one('product.product', 'Product', required=True, ondelete="restrict", readonly=True, select=True),
'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="restrict", readonly=True, select=True, auto_join=True),
'qty': fields.float('Quantity', required=True, help="Quantity of products in this quant, in the default unit of measure of the product", readonly=True, select=True),
'package_id': fields.many2one('stock.quant.package', string='Package', help="The package containing this quant", readonly=True, select=True),
'packaging_type_id': fields.related('package_id', 'packaging_id', type='many2one', relation='product.packaging', string='Type of packaging', readonly=True, store=True),
'reservation_id': fields.many2one('stock.move', 'Reserved for Move', help="The move the quant is reserved for", readonly=True, select=True),
'lot_id': fields.many2one('stock.production.lot', 'Lot', readonly=True, select=True, ondelete="restrict"),
'cost': fields.float('Unit Cost'),
'owner_id': fields.many2one('res.partner', 'Owner', help="This is the owner of the quant", readonly=True, select=True),
'create_date': fields.datetime('Creation Date', readonly=True),
'in_date': fields.datetime('Incoming Date', readonly=True, select=True),
'history_ids': fields.many2many('stock.move', 'stock_quant_move_rel', 'quant_id', 'move_id', 'Moves', help='Moves that operate(d) on this quant', copy=False),
'company_id': fields.many2one('res.company', 'Company', help="The company to which the quants belong", required=True, readonly=True, select=True),
'inventory_value': fields.function(_calc_inventory_value, string="Inventory Value", type='float', readonly=True),
# Used for negative quants to reconcile after compensated by a new positive one
'propagated_from_id': fields.many2one('stock.quant', 'Linked Quant', help='The negative quant this is coming from', readonly=True, select=True),
'negative_move_id': fields.many2one('stock.move', 'Move Negative Quant', help='If this is a negative quant, this will be the move that caused this negative quant.', readonly=True),
'negative_dest_location_id': fields.related('negative_move_id', 'location_dest_id', type='many2one', relation='stock.location', string="Negative Destination Location", readonly=True,
help="Technical field used to record the destination location of a move that created a negative quant"),
}
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.quant', context=c),
}
def init(self, cr):
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_quant_product_location_index',))
if not cr.fetchone():
cr.execute('CREATE INDEX stock_quant_product_location_index ON stock_quant (product_id, location_id, company_id, qty, in_date, reservation_id)')
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
''' Overwrite the read_group in order to sum the function field 'inventory_value' in group by'''
res = super(stock_quant, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy)
if 'inventory_value' in fields:
for line in res:
if '__domain' in line:
lines = self.search(cr, uid, line['__domain'], context=context)
inv_value = 0.0
for line2 in self.browse(cr, uid, lines, context=context):
inv_value += line2.inventory_value
line['inventory_value'] = inv_value
return res
def action_view_quant_history(self, cr, uid, ids, context=None):
        '''
        This function returns an action that displays the history of the quant, i.e.
        all the stock moves that led to this quant's creation, with this quant's quantity.
        '''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_move_form2')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context={})[0]
move_ids = []
for quant in self.browse(cr, uid, ids, context=context):
move_ids += [move.id for move in quant.history_ids]
result['domain'] = "[('id','in',[" + ','.join(map(str, move_ids)) + "])]"
return result
def quants_reserve(self, cr, uid, quants, move, link=False, context=None):
        '''This function reserves quants for the given move (and optionally the given operation link). If the total quantity reserved is enough, the move's state
        is also set to 'assigned'
        :param quants: list of tuple(quant browse record or None, qty to reserve). If None is given as the first tuple element, the item will be ignored. Negative quants should not be received as arguments
:param move: browse record
:param link: browse record (stock.move.operation.link)
'''
toreserve = []
reserved_availability = move.reserved_availability
#split quants if needed
for quant, qty in quants:
if qty <= 0.0 or (quant and quant.qty <= 0.0):
raise osv.except_osv(_('Error!'), _('You can not reserve a negative quantity or a negative quant.'))
if not quant:
continue
self._quant_split(cr, uid, quant, qty, context=context)
toreserve.append(quant.id)
reserved_availability += quant.qty
#reserve quants
if toreserve:
self.write(cr, SUPERUSER_ID, toreserve, {'reservation_id': move.id}, context=context)
            #if the move has a picking_id, write on that picking that the pack operations may have changed and need to be recomputed
if move.picking_id:
self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context)
#check if move'state needs to be set as 'assigned'
rounding = move.product_id.uom_id.rounding
        if float_compare(reserved_availability, move.product_qty, precision_rounding=rounding) == 0 and move.state in ('confirmed', 'waiting'):
self.pool.get('stock.move').write(cr, uid, [move.id], {'state': 'assigned'}, context=context)
elif float_compare(reserved_availability, 0, precision_rounding=rounding) > 0 and not move.partially_available:
self.pool.get('stock.move').write(cr, uid, [move.id], {'partially_available': True}, context=context)
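    # Illustrative usage sketch (assumed caller context, with `move` a stock.move browse
    # record; this mirrors how a move assignment is expected to chain the two calls):
    #     quants = self.quants_get_prefered_domain(cr, uid, move.location_id,
    #         move.product_id, move.product_qty, context=context)
    #     self.quants_reserve(cr, uid, quants, move, context=context)
    # Tuples whose first element is None are skipped, so an unreservable remainder simply
    # leaves the move only partially reserved ('partially_available').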
def quants_move(self, cr, uid, quants, move, location_to, location_from=False, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, context=None):
"""Moves all given stock.quant in the given destination location. Unreserve from current move.
:param quants: list of tuple(browse record(stock.quant) or None, quantity to move)
:param move: browse record (stock.move)
:param location_to: browse record (stock.location) depicting where the quants have to be moved
        :param location_from: optional browse record (stock.location) indicating where the quants have to be taken from (may differ from the move source location in case a removal strategy was applied). This parameter is only passed to _quant_create if a negative quant must be created
:param lot_id: ID of the lot that must be set on the quants to move
:param owner_id: ID of the partner that must own the quants to move
:param src_package_id: ID of the package that contains the quants to move
:param dest_package_id: ID of the package that must be set on the moved quant
"""
quants_reconcile = []
to_move_quants = []
self._check_location(cr, uid, location_to, context=context)
for quant, qty in quants:
if not quant:
#If quant is None, we will create a quant to move (and potentially a negative counterpart too)
quant = self._quant_create(cr, uid, qty, move, lot_id=lot_id, owner_id=owner_id, src_package_id=src_package_id, dest_package_id=dest_package_id, force_location_from=location_from, force_location_to=location_to, context=context)
else:
self._quant_split(cr, uid, quant, qty, context=context)
to_move_quants.append(quant)
quants_reconcile.append(quant)
if to_move_quants:
to_recompute_move_ids = [x.reservation_id.id for x in to_move_quants if x.reservation_id and x.reservation_id.id != move.id]
self.move_quants_write(cr, uid, to_move_quants, move, location_to, dest_package_id, context=context)
self.pool.get('stock.move').recalculate_move_state(cr, uid, to_recompute_move_ids, context=context)
if location_to.usage == 'internal':
# Do manual search for quant to avoid full table scan (order by id)
cr.execute("""
SELECT 0 FROM stock_quant, stock_location WHERE product_id = %s AND stock_location.id = stock_quant.location_id AND
((stock_location.parent_left >= %s AND stock_location.parent_left < %s) OR stock_location.id = %s) AND qty < 0.0 LIMIT 1
""", (move.product_id.id, location_to.parent_left, location_to.parent_right, location_to.id))
if cr.fetchone():
for quant in quants_reconcile:
self._quant_reconcile_negative(cr, uid, quant, move, context=context)
def move_quants_write(self, cr, uid, quants, move, location_dest_id, dest_package_id, context=None):
context=context or {}
vals = {'location_id': location_dest_id.id,
'history_ids': [(4, move.id)],
'reservation_id': False}
if not context.get('entire_pack'):
vals.update({'package_id': dest_package_id})
self.write(cr, SUPERUSER_ID, [q.id for q in quants], vals, context=context)
def quants_get_prefered_domain(self, cr, uid, location, product, qty, domain=None, prefered_domain_list=[], restrict_lot_id=False, restrict_partner_id=False, context=None):
        ''' This function tries to find quants in the given location matching the given domain, first restricting
        the choice to the quants that also match the first item of prefered_domain_list. If the requested qty is not reached,
        it tries to find the remaining quantity by looping over prefered_domain_list (trying the second item, and so on).
        Make sure the quants aren't found twice => all the domains of prefered_domain_list should be orthogonal
        '''
if domain is None:
domain = []
quants = [(None, qty)]
#don't look for quants in location that are of type production, supplier or inventory.
if location.usage in ['inventory', 'production', 'supplier']:
return quants
res_qty = qty
if not prefered_domain_list:
return self.quants_get(cr, uid, location, product, qty, domain=domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context)
for prefered_domain in prefered_domain_list:
res_qty_cmp = float_compare(res_qty, 0, precision_rounding=product.uom_id.rounding)
if res_qty_cmp > 0:
#try to replace the last tuple (None, res_qty) with something that wasn't chosen at first because of the prefered order
quants.pop()
tmp_quants = self.quants_get(cr, uid, location, product, res_qty, domain=domain + prefered_domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context)
for quant in tmp_quants:
if quant[0]:
res_qty -= quant[1]
quants += tmp_quants
return quants
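    # Illustrative sketch of an orthogonal prefered_domain_list (assumed values; the caller
    # must keep the domains mutually exclusive so no quant is returned twice):
    #     prefered_domain_list = [
    #         [('reservation_id', '=', move.id)],                                   # quants already reserved for this move
    #         [('reservation_id', '=', False)],                                     # unreserved quants
    #         [('reservation_id', '!=', False), ('reservation_id', '!=', move.id)], # quants reserved for other moves
    #     ]
    # Each pass replaces the trailing (None, remaining_qty) tuple until the requested
    # quantity is covered or the list is exhausted.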
def quants_get(self, cr, uid, location, product, qty, domain=None, restrict_lot_id=False, restrict_partner_id=False, context=None):
"""
Use the removal strategies of product to search for the correct quants
If you inherit, put the super at the end of your method.
:location: browse record of the parent location where the quants have to be found
:product: browse record of the product to find
:qty in UoM of product
"""
result = []
domain = domain or [('qty', '>', 0.0)]
if restrict_partner_id:
domain += [('owner_id', '=', restrict_partner_id)]
if restrict_lot_id:
domain += [('lot_id', '=', restrict_lot_id)]
if location:
removal_strategy = self.pool.get('stock.location').get_removal_strategy(cr, uid, location, product, context=context)
result += self.apply_removal_strategy(cr, uid, location, product, qty, domain, removal_strategy, context=context)
return result
def apply_removal_strategy(self, cr, uid, location, product, quantity, domain, removal_strategy, context=None):
if removal_strategy == 'fifo':
order = 'in_date, id'
return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context)
elif removal_strategy == 'lifo':
order = 'in_date desc, id desc'
return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context)
        #keep the %s outside the translation call so the translated template is looked up before interpolation
        raise osv.except_osv(_('Error!'), _('Removal strategy %s not implemented.') % (removal_strategy,))
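    # Extension sketch (assumes a module registered a hypothetical 'closest' product.removal
    # method): new strategies plug in by overriding this method and deferring to super:
    #     class stock_quant_ext(osv.osv):
    #         _inherit = 'stock.quant'
    #         def apply_removal_strategy(self, cr, uid, location, product, quantity, domain, removal_strategy, context=None):
    #             if removal_strategy == 'closest':
    #                 return self._quants_get_order(cr, uid, location, product, quantity, domain, 'location_id, id', context=context)
    #             return super(stock_quant_ext, self).apply_removal_strategy(cr, uid, location, product, quantity, domain, removal_strategy, context=context)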
def _quant_create(self, cr, uid, qty, move, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False,
force_location_from=False, force_location_to=False, context=None):
'''Create a quant in the destination location and create a negative quant in the source location if it's an internal location.
'''
if context is None:
context = {}
price_unit = self.pool.get('stock.move').get_price_unit(cr, uid, move, context=context)
location = force_location_to or move.location_dest_id
rounding = move.product_id.uom_id.rounding
vals = {
'product_id': move.product_id.id,
'location_id': location.id,
'qty': float_round(qty, precision_rounding=rounding),
'cost': price_unit,
'history_ids': [(4, move.id)],
'in_date': datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT),
'company_id': move.company_id.id,
'lot_id': lot_id,
'owner_id': owner_id,
'package_id': dest_package_id,
}
if move.location_id.usage == 'internal':
#if we were trying to move something from an internal location and reach here (quant creation),
#it means that a negative quant has to be created as well.
negative_vals = vals.copy()
negative_vals['location_id'] = force_location_from and force_location_from.id or move.location_id.id
negative_vals['qty'] = float_round(-qty, precision_rounding=rounding)
negative_vals['cost'] = price_unit
negative_vals['negative_move_id'] = move.id
negative_vals['package_id'] = src_package_id
negative_quant_id = self.create(cr, SUPERUSER_ID, negative_vals, context=context)
vals.update({'propagated_from_id': negative_quant_id})
#create the quant as superuser, because we want to restrict the creation of quant manually: we should always use this method to create quants
quant_id = self.create(cr, SUPERUSER_ID, vals, context=context)
return self.browse(cr, uid, quant_id, context=context)
def _quant_split(self, cr, uid, quant, qty, context=None):
context = context or {}
rounding = quant.product_id.uom_id.rounding
if float_compare(abs(quant.qty), abs(qty), precision_rounding=rounding) <= 0: # if quant <= qty in abs, take it entirely
return False
qty_round = float_round(qty, precision_rounding=rounding)
new_qty_round = float_round(quant.qty - qty, precision_rounding=rounding)
# Fetch the history_ids manually as it will not do a join with the stock moves then (=> a lot faster)
cr.execute("""SELECT move_id FROM stock_quant_move_rel WHERE quant_id = %s""", (quant.id,))
res = cr.fetchall()
new_quant = self.copy(cr, SUPERUSER_ID, quant.id, default={'qty': new_qty_round, 'history_ids': [(4, x[0]) for x in res]}, context=context)
self.write(cr, SUPERUSER_ID, quant.id, {'qty': qty_round}, context=context)
return self.browse(cr, uid, new_quant, context=context)
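    # Worked example (hypothetical values): splitting a quant of qty 10 with qty=3 writes 3
    # on the original quant, copies the move history onto a new quant of qty 7 and returns
    # that new quant; if qty >= 10 the quant is taken whole and False is returned.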
def _get_latest_move(self, cr, uid, quant, context=None):
move = False
for m in quant.history_ids:
if not move or m.date > move.date:
move = m
return move
@api.cr_uid_ids_context
def _quants_merge(self, cr, uid, solved_quant_ids, solving_quant, context=None):
path = []
for move in solving_quant.history_ids:
path.append((4, move.id))
self.write(cr, SUPERUSER_ID, solved_quant_ids, {'history_ids': path}, context=context)
def _quant_reconcile_negative(self, cr, uid, quant, move, context=None):
"""
When new quant arrive in a location, try to reconcile it with
negative quants. If it's possible, apply the cost of the new
quant to the conter-part of the negative quant.
"""
solving_quant = quant
dom = [('qty', '<', 0)]
if quant.lot_id:
dom += [('lot_id', '=', quant.lot_id.id)]
dom += [('owner_id', '=', quant.owner_id.id)]
dom += [('package_id', '=', quant.package_id.id)]
dom += [('id', '!=', quant.propagated_from_id.id)]
quants = self.quants_get(cr, uid, quant.location_id, quant.product_id, quant.qty, dom, context=context)
product_uom_rounding = quant.product_id.uom_id.rounding
for quant_neg, qty in quants:
if not quant_neg or not solving_quant:
continue
to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id)], context=context)
if not to_solve_quant_ids:
continue
solving_qty = qty
solved_quant_ids = []
for to_solve_quant in self.browse(cr, uid, to_solve_quant_ids, context=context):
if float_compare(solving_qty, 0, precision_rounding=product_uom_rounding) <= 0:
continue
solved_quant_ids.append(to_solve_quant.id)
self._quant_split(cr, uid, to_solve_quant, min(solving_qty, to_solve_quant.qty), context=context)
solving_qty -= min(solving_qty, to_solve_quant.qty)
remaining_solving_quant = self._quant_split(cr, uid, solving_quant, qty, context=context)
remaining_neg_quant = self._quant_split(cr, uid, quant_neg, -qty, context=context)
#if the reconciliation was not complete, we need to link together the remaining parts
if remaining_neg_quant:
remaining_to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id), ('id', 'not in', solved_quant_ids)], context=context)
if remaining_to_solve_quant_ids:
self.write(cr, SUPERUSER_ID, remaining_to_solve_quant_ids, {'propagated_from_id': remaining_neg_quant.id}, context=context)
if solving_quant.propagated_from_id and solved_quant_ids:
self.write(cr, SUPERUSER_ID, solved_quant_ids, {'propagated_from_id': solving_quant.propagated_from_id.id}, context=context)
            #delete the reconciled quant, as it is replaced by the solved quants
self.unlink(cr, SUPERUSER_ID, [quant_neg.id], context=context)
if solved_quant_ids:
#price update + accounting entries adjustments
self._price_update(cr, uid, solved_quant_ids, solving_quant.cost, context=context)
#merge history (and cost?)
self._quants_merge(cr, uid, solved_quant_ids, solving_quant, context=context)
self.unlink(cr, SUPERUSER_ID, [solving_quant.id], context=context)
solving_quant = remaining_solving_quant
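    # Worked example (hypothetical quantities): a -5 quant left by an anticipated delivery is
    # matched against an incoming +5 quant; the propagated quants inherit the incoming cost
    # via _price_update, histories are merged and the matched pair is unlinked. A partial
    # match (+3 against -5) splits the negative quant and re-links the still-unsolved
    # propagated quants to the remaining -2 quant.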
def _price_update(self, cr, uid, ids, newprice, context=None):
self.write(cr, SUPERUSER_ID, ids, {'cost': newprice}, context=context)
def quants_unreserve(self, cr, uid, move, context=None):
related_quants = [x.id for x in move.reserved_quant_ids]
if related_quants:
            #if the move has a picking_id, write on that picking that the pack operations may have changed and need to be recomputed
if move.picking_id:
self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context)
if move.partially_available:
self.pool.get("stock.move").write(cr, uid, [move.id], {'partially_available': False}, context=context)
self.write(cr, SUPERUSER_ID, related_quants, {'reservation_id': False}, context=context)
    def _quants_get_order(self, cr, uid, location, product, quantity, domain=None, orderby='in_date', context=None):
        ''' Implementation of removal strategies.
            If it cannot reserve, it will return a tuple (None, qty)
        '''
        if context is None:
            context = {}
        #copy the domain so we never mutate the caller's list (the old mutable default [] was shared between calls)
        domain = list(domain or [])
        domain += location and [('location_id', 'child_of', location.id)] or []
domain += [('product_id', '=', product.id)]
if context.get('force_company'):
domain += [('company_id', '=', context.get('force_company'))]
else:
domain += [('company_id', '=', self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id)]
res = []
offset = 0
while float_compare(quantity, 0, precision_rounding=product.uom_id.rounding) > 0:
quants = self.search(cr, uid, domain, order=orderby, limit=10, offset=offset, context=context)
if not quants:
res.append((None, quantity))
break
for quant in self.browse(cr, uid, quants, context=context):
rounding = product.uom_id.rounding
if float_compare(quantity, abs(quant.qty), precision_rounding=rounding) >= 0:
res += [(quant, abs(quant.qty))]
quantity -= abs(quant.qty)
elif float_compare(quantity, 0.0, precision_rounding=rounding) != 0:
res += [(quant, quantity)]
quantity = 0
break
offset += 10
return res
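    # Result shape sketch (hypothetical stock): asking for 7 units when quants of 5 and 1
    # are on hand yields
    #     [(quant_5, 5.0), (quant_1, 1.0), (None, 1.0)]
    # i.e. the unreservable remainder comes back as a trailing (None, qty) tuple, exactly
    # the form quants_reserve() and quants_move() expect.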
def _check_location(self, cr, uid, location, context=None):
if location.usage == 'view':
raise osv.except_osv(_('Error'), _('You cannot move to a location of type view %s.') % (location.name))
return True
#----------------------------------------------------------
# Stock Picking
#----------------------------------------------------------
class stock_picking(osv.osv):
_name = "stock.picking"
_inherit = ['mail.thread']
_description = "Picking List"
_order = "priority desc, date asc, id desc"
def _set_min_date(self, cr, uid, id, field, value, arg, context=None):
move_obj = self.pool.get("stock.move")
if value:
move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines]
move_obj.write(cr, uid, move_ids, {'date_expected': value}, context=context)
def _set_priority(self, cr, uid, id, field, value, arg, context=None):
move_obj = self.pool.get("stock.move")
if value:
move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines]
move_obj.write(cr, uid, move_ids, {'priority': value}, context=context)
def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None):
""" Finds minimum and maximum dates for picking.
@return: Dictionary of values
"""
res = {}
for id in ids:
res[id] = {'min_date': False, 'max_date': False, 'priority': '1'}
if not ids:
return res
cr.execute("""select
picking_id,
min(date_expected),
max(date_expected),
max(priority)
from
stock_move
where
picking_id IN %s
group by
picking_id""", (tuple(ids),))
for pick, dt1, dt2, prio in cr.fetchall():
res[pick]['min_date'] = dt1
res[pick]['max_date'] = dt2
res[pick]['priority'] = prio
return res
def create(self, cr, user, vals, context=None):
context = context or {}
if ('name' not in vals) or (vals.get('name') in ('/', False)):
ptype_id = vals.get('picking_type_id', context.get('default_picking_type_id', False))
sequence_id = self.pool.get('stock.picking.type').browse(cr, user, ptype_id, context=context).sequence_id.id
vals['name'] = self.pool.get('ir.sequence').get_id(cr, user, sequence_id, 'id', context=context)
return super(stock_picking, self).create(cr, user, vals, context)
def _state_get(self, cr, uid, ids, field_name, arg, context=None):
        '''The state of a picking depends on the state of its related stock.moves:
        draft: the picking has no lines, or at least one of the lines is draft
        done / cancel: all lines are done / cancelled
        confirmed, waiting, assigned, partially_available: depends on move_type (all at once or partial)
        '''
res = {}
for pick in self.browse(cr, uid, ids, context=context):
if (not pick.move_lines) or any([x.state == 'draft' for x in pick.move_lines]):
res[pick.id] = 'draft'
continue
if all([x.state == 'cancel' for x in pick.move_lines]):
res[pick.id] = 'cancel'
continue
if all([x.state in ('cancel', 'done') for x in pick.move_lines]):
res[pick.id] = 'done'
continue
order = {'confirmed': 0, 'waiting': 1, 'assigned': 2}
order_inv = {0: 'confirmed', 1: 'waiting', 2: 'assigned'}
lst = [order[x.state] for x in pick.move_lines if x.state not in ('cancel', 'done')]
if pick.move_type == 'one':
res[pick.id] = order_inv[min(lst)]
else:
                #we are in the case of partial delivery, so if all moves are assigned, the picking
                #should be assigned too; if one of the moves is assigned or partially available, the picking should be
                #in the partially available state; otherwise the picking is in the waiting or confirmed state
res[pick.id] = order_inv[max(lst)]
if not all(x == 2 for x in lst):
if any(x == 2 for x in lst):
res[pick.id] = 'partially_available'
else:
#if all moves aren't assigned, check if we have one product partially available
for move in pick.move_lines:
if move.partially_available:
res[pick.id] = 'partially_available'
break
return res
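    # Worked example (hypothetical picking): with move_type 'direct' and moves in states
    # ('assigned', 'confirmed'), the max rule gives 'assigned' but, since not every move is
    # assigned, the picking is downgraded to 'partially_available'; with move_type 'one'
    # the min rule yields 'confirmed' instead.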
def _get_pickings(self, cr, uid, ids, context=None):
res = set()
for move in self.browse(cr, uid, ids, context=context):
if move.picking_id:
res.add(move.picking_id.id)
return list(res)
def _get_pack_operation_exist(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for pick in self.browse(cr, uid, ids, context=context):
res[pick.id] = False
if pick.pack_operation_ids:
res[pick.id] = True
return res
def _get_quant_reserved_exist(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for pick in self.browse(cr, uid, ids, context=context):
res[pick.id] = False
for move in pick.move_lines:
if move.reserved_quant_ids:
res[pick.id] = True
continue
return res
def check_group_lot(self, cr, uid, context=None):
""" This function will return true if we have the setting to use lots activated. """
return self.pool.get('res.users').has_group(cr, uid, 'stock.group_production_lot')
def check_group_pack(self, cr, uid, context=None):
""" This function will return true if we have the setting to use package activated. """
return self.pool.get('res.users').has_group(cr, uid, 'stock.group_tracking_lot')
def action_assign_owner(self, cr, uid, ids, context=None):
for picking in self.browse(cr, uid, ids, context=context):
packop_ids = [op.id for op in picking.pack_operation_ids]
self.pool.get('stock.pack.operation').write(cr, uid, packop_ids, {'owner_id': picking.owner_id.id}, context=context)
_columns = {
'name': fields.char('Reference', select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False),
'origin': fields.char('Source Document', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Reference of the document", select=True),
'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True, copy=False),
'note': fields.text('Notes', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
        'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="It specifies whether goods are to be delivered partially or all at once"),
'state': fields.function(_state_get, type="selection", copy=False,
store={
'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_type'], 20),
'stock.move': (_get_pickings, ['state', 'picking_id', 'partially_available'], 20)},
selection=[
('draft', 'Draft'),
('cancel', 'Cancelled'),
('waiting', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('partially_available', 'Partially Available'),
('assigned', 'Ready to Transfer'),
('done', 'Transferred'),
], string='Status', readonly=True, select=True, track_visibility='onchange',
help="""
* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Partially Available: some products are available and reserved\n
* Ready to Transfer: products reserved, simply waiting for confirmation.\n
* Transferred: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""
),
'priority': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_priority, type='selection', selection=procurement.PROCUREMENT_PRIORITIES, string='Priority',
            store={'stock.move': (_get_pickings, ['priority', 'picking_id'], 20)}, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, select=1, help="Priority for this picking. Manually setting a value here sets it as the priority for all the moves",
track_visibility='onchange', required=True),
'min_date': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_min_date,
            store={'stock.move': (_get_pickings, ['date_expected', 'picking_id'], 20)}, type='datetime', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Scheduled Date', select=1, help="Scheduled time for the first part of the shipment to be processed. Manually setting a value here sets it as the expected date for all the stock moves.", track_visibility='onchange'),
'max_date': fields.function(get_min_max_date, multi="min_max_date",
store={'stock.move': (_get_pickings, ['date_expected', 'picking_id'], 20)}, type='datetime', string='Max. Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"),
'date': fields.datetime('Creation Date', help="Creation Date, usually the time of the order", select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, track_visibility='onchange'),
'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False),
'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=True),
'quant_reserved_exist': fields.function(_get_quant_reserved_exist, type='boolean', string='Quant already reserved ?', help='technical field used to know if there is already at least one quant reserved on moves of a given picking'),
'partner_id': fields.many2one('res.partner', 'Partner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
'pack_operation_ids': fields.one2many('stock.pack.operation', 'picking_id', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Related Packing Operations'),
'pack_operation_exist': fields.function(_get_pack_operation_exist, type='boolean', string='Pack Operation Exists?', help='technical field for attrs in view'),
'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, required=True),
'picking_type_code': fields.related('picking_type_id', 'code', type='char', string='Picking Type Code', help="Technical field used to display the correct label on print button in the picking view"),
'owner_id': fields.many2one('res.partner', 'Owner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Default Owner"),
# Used to search on pickings
'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'),
        'recompute_pack_op': fields.boolean('Recompute pack operation?', help='True if reserved quants changed, which means we might need to recompute the package operations', copy=False),
'location_id': fields.related('move_lines', 'location_id', type='many2one', relation='stock.location', string='Location', readonly=True),
'location_dest_id': fields.related('move_lines', 'location_dest_id', type='many2one', relation='stock.location', string='Destination Location', readonly=True),
'group_id': fields.related('move_lines', 'group_id', type='many2one', relation='procurement.group', string='Procurement Group', readonly=True,
store={
'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_lines'], 10),
'stock.move': (_get_pickings, ['group_id', 'picking_id'], 10),
}),
}
_defaults = {
'name': '/',
'state': 'draft',
'move_type': 'direct',
'priority': '1', # normal
'date': fields.datetime.now,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c),
'recompute_pack_op': True,
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Reference must be unique per company!'),
]
def do_print_picking(self, cr, uid, ids, context=None):
'''This function prints the picking list'''
context = dict(context or {}, active_ids=ids)
return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_picking', context=context)
def action_confirm(self, cr, uid, ids, context=None):
todo = []
todo_force_assign = []
for picking in self.browse(cr, uid, ids, context=context):
if picking.location_id.usage in ('supplier', 'inventory', 'production'):
todo_force_assign.append(picking.id)
for r in picking.move_lines:
if r.state == 'draft':
todo.append(r.id)
        if todo:
self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context)
if todo_force_assign:
self.force_assign(cr, uid, todo_force_assign, context=context)
return True
def action_assign(self, cr, uid, ids, context=None):
""" Check availability of picking moves.
        This has the effect of changing the state and reserving quants on available moves, and may
        also impact the state of the picking, as it is computed based on the moves' states.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
if pick.state == 'draft':
self.action_confirm(cr, uid, [pick.id], context=context)
#skip the moves that don't need to be checked
move_ids = [x.id for x in pick.move_lines if x.state not in ('draft', 'cancel', 'done')]
if not move_ids:
raise osv.except_osv(_('Warning!'), _('Nothing to check the availability for.'))
self.pool.get('stock.move').action_assign(cr, uid, move_ids, context=context)
return True
def force_assign(self, cr, uid, ids, context=None):
""" Changes state of picking to available if moves are confirmed or waiting.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed', 'waiting']]
self.pool.get('stock.move').force_assign(cr, uid, move_ids, context=context)
#pack_operation might have changed and need to be recomputed
self.write(cr, uid, ids, {'recompute_pack_op': True}, context=context)
return True
def action_cancel(self, cr, uid, ids, context=None):
for pick in self.browse(cr, uid, ids, context=context):
ids2 = [move.id for move in pick.move_lines]
self.pool.get('stock.move').action_cancel(cr, uid, ids2, context)
return True
def action_done(self, cr, uid, ids, context=None):
"""Changes picking state to done by processing the Stock Moves of the Picking
Normally that happens when the button "Done" is pressed on a Picking view.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
todo = []
for move in pick.move_lines:
if move.state == 'draft':
todo.extend(self.pool.get('stock.move').action_confirm(cr, uid, [move.id], context=context))
elif move.state in ('assigned', 'confirmed'):
todo.append(move.id)
            if todo:
self.pool.get('stock.move').action_done(cr, uid, todo, context=context)
return True
def unlink(self, cr, uid, ids, context=None):
#on picking deletion, cancel its move then unlink them too
move_obj = self.pool.get('stock.move')
context = context or {}
for pick in self.browse(cr, uid, ids, context=context):
move_ids = [move.id for move in pick.move_lines]
move_obj.action_cancel(cr, uid, move_ids, context=context)
move_obj.unlink(cr, uid, move_ids, context=context)
return super(stock_picking, self).unlink(cr, uid, ids, context=context)
def write(self, cr, uid, ids, vals, context=None):
res = super(stock_picking, self).write(cr, uid, ids, vals, context=context)
#if we changed the move lines or the pack operations, we need to recompute the remaining quantities of both
if 'move_lines' in vals or 'pack_operation_ids' in vals:
self.do_recompute_remaining_quantities(cr, uid, ids, context=context)
return res
def _create_backorder(self, cr, uid, picking, backorder_moves=[], context=None):
""" Move all non-done lines into a new backorder picking. If the key 'do_only_split' is given in the context, then move all lines not in context.get('split', []) instead of all non-done lines.
"""
        if not backorder_moves:
            backorder_moves = picking.move_lines
        backorder_move_ids = [x.id for x in backorder_moves if x.state not in ('done', 'cancel')]
        #guard against a None context before reading the 'do_only_split' key
        if context and context.get('do_only_split'):
            backorder_move_ids = [x.id for x in backorder_moves if x.id not in context.get('split', [])]
if backorder_move_ids:
backorder_id = self.copy(cr, uid, picking.id, {
'name': '/',
'move_lines': [],
'pack_operation_ids': [],
'backorder_id': picking.id,
})
backorder = self.browse(cr, uid, backorder_id, context=context)
self.message_post(cr, uid, picking.id, body=_("Back order <em>%s</em> <b>created</b>.") % (backorder.name), context=context)
move_obj = self.pool.get("stock.move")
move_obj.write(cr, uid, backorder_move_ids, {'picking_id': backorder_id}, context=context)
self.write(cr, uid, [picking.id], {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
self.action_confirm(cr, uid, [backorder_id], context=context)
return backorder_id
return False
@api.cr_uid_ids_context
def recheck_availability(self, cr, uid, picking_ids, context=None):
self.action_assign(cr, uid, picking_ids, context=context)
self.do_prepare_partial(cr, uid, picking_ids, context=context)
def _get_top_level_packages(self, cr, uid, quants_suggested_locations, context=None):
"""This method searches for the higher level packages that can be moved as a single operation, given a list of quants
to move and their suggested destination, and returns the list of matching packages.
"""
        # Try to find as many top-level packages as possible that can be moved as a whole
pack_obj = self.pool.get("stock.quant.package")
quant_obj = self.pool.get("stock.quant")
top_lvl_packages = set()
quants_to_compare = quants_suggested_locations.keys()
for pack in list(set([x.package_id for x in quants_suggested_locations.keys() if x and x.package_id])):
loop = True
test_pack = pack
good_pack = False
pack_destination = False
while loop:
pack_quants = pack_obj.get_content(cr, uid, [test_pack.id], context=context)
all_in = True
for quant in quant_obj.browse(cr, uid, pack_quants, context=context):
# If the quant is not in the quants to compare and not in the common location
                    if quant not in quants_to_compare:
all_in = False
break
else:
#if putaway strat apply, the destination location of each quant may be different (and thus the package should not be taken as a single operation)
if not pack_destination:
pack_destination = quants_suggested_locations[quant]
elif pack_destination != quants_suggested_locations[quant]:
all_in = False
break
if all_in:
good_pack = test_pack
if test_pack.parent_id:
test_pack = test_pack.parent_id
else:
#stop the loop when there's no parent package anymore
loop = False
else:
#stop the loop when the package test_pack is not totally reserved for moves of this picking
#(some quants may be reserved for other picking or not reserved at all)
loop = False
if good_pack:
top_lvl_packages.add(good_pack)
return list(top_lvl_packages)
def _prepare_pack_ops(self, cr, uid, picking, quants, forced_qties, context=None):
""" returns a list of dict, ready to be used in create() of stock.pack.operation.
:param picking: browse record (stock.picking)
:param quants: browse record list (stock.quant). List of quants associated to the picking
:param forced_qties: dictionary showing for each product (keys) its corresponding quantity (value) that is not covered by the quants associated to the picking
"""
def _picking_putaway_apply(product):
location = False
# Search putaway strategy
if product_putaway_strats.get(product.id):
location = product_putaway_strats[product.id]
else:
location = self.pool.get('stock.location').get_putaway_strategy(cr, uid, picking.location_dest_id, product, context=context)
product_putaway_strats[product.id] = location
return location or picking.location_dest_id.id
# If we encounter an UoM that is smaller than the default UoM or the one already chosen, use the new one instead.
product_uom = {} # Determines UoM used in pack operations
for move in picking.move_lines:
if not product_uom.get(move.product_id.id):
product_uom[move.product_id.id] = move.product_id.uom_id
if move.product_uom.id != move.product_id.uom_id.id and move.product_uom.factor > product_uom[move.product_id.id].factor:
product_uom[move.product_id.id] = move.product_uom
pack_obj = self.pool.get("stock.quant.package")
quant_obj = self.pool.get("stock.quant")
vals = []
qtys_grouped = {}
#for each quant of the picking, find the suggested location
quants_suggested_locations = {}
product_putaway_strats = {}
for quant in quants:
if quant.qty <= 0:
continue
suggested_location_id = _picking_putaway_apply(quant.product_id)
quants_suggested_locations[quant] = suggested_location_id
        #find the packages we can move as a whole
top_lvl_packages = self._get_top_level_packages(cr, uid, quants_suggested_locations, context=context)
# and then create pack operations for the top-level packages found
for pack in top_lvl_packages:
pack_quant_ids = pack_obj.get_content(cr, uid, [pack.id], context=context)
pack_quants = quant_obj.browse(cr, uid, pack_quant_ids, context=context)
vals.append({
'picking_id': picking.id,
'package_id': pack.id,
'product_qty': 1.0,
'location_id': pack.location_id.id,
'location_dest_id': quants_suggested_locations[pack_quants[0]],
'owner_id': pack.owner_id.id,
})
#remove the quants inside the package so that they are excluded from the rest of the computation
for quant in pack_quants:
del quants_suggested_locations[quant]
# Go through all remaining reserved quants and group by product, package, lot, owner, source location and dest location
for quant, dest_location_id in quants_suggested_locations.items():
key = (quant.product_id.id, quant.package_id.id, quant.lot_id.id, quant.owner_id.id, quant.location_id.id, dest_location_id)
if qtys_grouped.get(key):
qtys_grouped[key] += quant.qty
else:
qtys_grouped[key] = quant.qty
        # Do the same for the forced quantities (in case of force_assign or incoming shipment, for example)
for product, qty in forced_qties.items():
if qty <= 0:
continue
suggested_location_id = _picking_putaway_apply(product)
key = (product.id, False, False, picking.owner_id.id, picking.location_id.id, suggested_location_id)
if qtys_grouped.get(key):
qtys_grouped[key] += qty
else:
qtys_grouped[key] = qty
# Create the necessary operations for the grouped quants and remaining qtys
uom_obj = self.pool.get('product.uom')
prevals = {}
for key, qty in qtys_grouped.items():
product = self.pool.get("product.product").browse(cr, uid, key[0], context=context)
uom_id = product.uom_id.id
qty_uom = qty
if product_uom.get(key[0]):
uom_id = product_uom[key[0]].id
qty_uom = uom_obj._compute_qty(cr, uid, product.uom_id.id, qty, uom_id)
val_dict = {
'picking_id': picking.id,
'product_qty': qty_uom,
'product_id': key[0],
'package_id': key[1],
'lot_id': key[2],
'owner_id': key[3],
'location_id': key[4],
'location_dest_id': key[5],
'product_uom_id': uom_id,
}
if key[0] in prevals:
prevals[key[0]].append(val_dict)
else:
prevals[key[0]] = [val_dict]
        # prevals holds the operations so they can be created in the same order as the picking's stock moves, when possible
processed_products = set()
for move in picking.move_lines:
if move.product_id.id not in processed_products:
vals += prevals.get(move.product_id.id, [])
processed_products.add(move.product_id.id)
return vals
@api.cr_uid_ids_context
def open_barcode_interface(self, cr, uid, picking_ids, context=None):
        final_url = "/barcode/web/#action=stock.ui&picking_id=" + str(picking_ids[0])
        return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'}
@api.cr_uid_ids_context
def do_partial_open_barcode(self, cr, uid, picking_ids, context=None):
self.do_prepare_partial(cr, uid, picking_ids, context=context)
return self.open_barcode_interface(cr, uid, picking_ids, context=context)
@api.cr_uid_ids_context
def do_prepare_partial(self, cr, uid, picking_ids, context=None):
context = context or {}
pack_operation_obj = self.pool.get('stock.pack.operation')
#used to avoid recomputing the remaining quantities at each new pack operation created
ctx = context.copy()
ctx['no_recompute'] = True
#get list of existing operations and delete them
existing_package_ids = pack_operation_obj.search(cr, uid, [('picking_id', 'in', picking_ids)], context=context)
if existing_package_ids:
pack_operation_obj.unlink(cr, uid, existing_package_ids, context)
for picking in self.browse(cr, uid, picking_ids, context=context):
forced_qties = {} # Quantity remaining after calculating reserved quants
picking_quants = []
#Calculate packages, reserved quants, qtys of this picking's moves
for move in picking.move_lines:
if move.state not in ('assigned', 'confirmed'):
continue
move_quants = move.reserved_quant_ids
picking_quants += move_quants
forced_qty = (move.state == 'assigned') and move.product_qty - sum([x.qty for x in move_quants]) or 0
#if we used force_assign() on the move, or if the move is incoming, forced_qty > 0
if float_compare(forced_qty, 0, precision_rounding=move.product_id.uom_id.rounding) > 0:
if forced_qties.get(move.product_id):
forced_qties[move.product_id] += forced_qty
else:
forced_qties[move.product_id] = forced_qty
for vals in self._prepare_pack_ops(cr, uid, picking, picking_quants, forced_qties, context=context):
pack_operation_obj.create(cr, uid, vals, context=ctx)
#recompute the remaining quantities all at once
self.do_recompute_remaining_quantities(cr, uid, picking_ids, context=context)
self.write(cr, uid, picking_ids, {'recompute_pack_op': False}, context=context)
@api.cr_uid_ids_context
def do_unreserve(self, cr, uid, picking_ids, context=None):
"""
Will remove all quants for picking in picking_ids
"""
moves_to_unreserve = []
pack_line_to_unreserve = []
for picking in self.browse(cr, uid, picking_ids, context=context):
moves_to_unreserve += [m.id for m in picking.move_lines if m.state not in ('done', 'cancel')]
pack_line_to_unreserve += [p.id for p in picking.pack_operation_ids]
if moves_to_unreserve:
if pack_line_to_unreserve:
self.pool.get('stock.pack.operation').unlink(cr, uid, pack_line_to_unreserve, context=context)
self.pool.get('stock.move').do_unreserve(cr, uid, moves_to_unreserve, context=context)
def recompute_remaining_qty(self, cr, uid, picking, context=None):
def _create_link_for_index(operation_id, index, product_id, qty_to_assign, quant_id=False):
move_dict = prod2move_ids[product_id][index]
qty_on_link = min(move_dict['remaining_qty'], qty_to_assign)
self.pool.get('stock.move.operation.link').create(cr, uid, {'move_id': move_dict['move'].id, 'operation_id': operation_id, 'qty': qty_on_link, 'reserved_quant_id': quant_id}, context=context)
if move_dict['remaining_qty'] == qty_on_link:
prod2move_ids[product_id].pop(index)
else:
move_dict['remaining_qty'] -= qty_on_link
return qty_on_link
def _create_link_for_quant(operation_id, quant, qty):
"""create a link for given operation and reserved move of given quant, for the max quantity possible, and returns this quantity"""
if not quant.reservation_id.id:
return _create_link_for_product(operation_id, quant.product_id.id, qty)
qty_on_link = 0
for i in range(0, len(prod2move_ids[quant.product_id.id])):
if prod2move_ids[quant.product_id.id][i]['move'].id != quant.reservation_id.id:
continue
qty_on_link = _create_link_for_index(operation_id, i, quant.product_id.id, qty, quant_id=quant.id)
break
return qty_on_link
def _create_link_for_product(operation_id, product_id, qty):
'''method that creates the link between a given operation and move(s) of given product, for the given quantity.
Returns True if it was possible to create links for the requested quantity (False if there was not enough quantity on stock moves)'''
qty_to_assign = qty
prod_obj = self.pool.get("product.product")
product = prod_obj.browse(cr, uid, product_id)
rounding = product.uom_id.rounding
qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding)
if prod2move_ids.get(product_id):
while prod2move_ids[product_id] and qtyassign_cmp > 0:
qty_on_link = _create_link_for_index(operation_id, 0, product_id, qty_to_assign, quant_id=False)
qty_to_assign -= qty_on_link
qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding)
return qtyassign_cmp == 0
uom_obj = self.pool.get('product.uom')
package_obj = self.pool.get('stock.quant.package')
quant_obj = self.pool.get('stock.quant')
link_obj = self.pool.get('stock.move.operation.link')
quants_in_package_done = set()
prod2move_ids = {}
still_to_do = []
#make a dictionary giving for each product, the moves and related quantity that can be used in operation links
for move in picking.move_lines:
if not prod2move_ids.get(move.product_id.id):
prod2move_ids[move.product_id.id] = [{'move': move, 'remaining_qty': move.product_qty}]
else:
prod2move_ids[move.product_id.id].append({'move': move, 'remaining_qty': move.product_qty})
need_rereserve = False
#sort the operations in order to give higher priority to those with a package, then a serial number
operations = picking.pack_operation_ids
operations = sorted(operations, key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
#delete existing operations to start again from scratch
links = link_obj.search(cr, uid, [('operation_id', 'in', [x.id for x in operations])], context=context)
if links:
link_obj.unlink(cr, uid, links, context=context)
#1) first, try to create links when quants can be identified without any doubt
for ops in operations:
#for each operation, create the links with the stock move by seeking on the matching reserved quants,
            #and defer the operation if there is any ambiguity about which move to select
if ops.package_id and not ops.product_id:
#entire package
quant_ids = package_obj.get_content(cr, uid, [ops.package_id.id], context=context)
for quant in quant_obj.browse(cr, uid, quant_ids, context=context):
remaining_qty_on_quant = quant.qty
if quant.reservation_id:
#avoid quants being counted twice
quants_in_package_done.add(quant.id)
qty_on_link = _create_link_for_quant(ops.id, quant, quant.qty)
remaining_qty_on_quant -= qty_on_link
if remaining_qty_on_quant:
still_to_do.append((ops, quant.product_id.id, remaining_qty_on_quant))
need_rereserve = True
elif ops.product_id.id:
#Check moves with same product
qty_to_assign = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context)
for move_dict in prod2move_ids.get(ops.product_id.id, []):
move = move_dict['move']
for quant in move.reserved_quant_ids:
if not qty_to_assign > 0:
break
if quant.id in quants_in_package_done:
continue
#check if the quant is matching the operation details
if ops.package_id:
                            flag = quant.package_id and bool(package_obj.search(cr, uid, [('id', 'child_of', [ops.package_id.id]), ('id', '=', quant.package_id.id)], context=context)) or False
else:
flag = not quant.package_id.id
flag = flag and ((ops.lot_id and ops.lot_id.id == quant.lot_id.id) or not ops.lot_id)
flag = flag and (ops.owner_id.id == quant.owner_id.id)
if flag:
max_qty_on_link = min(quant.qty, qty_to_assign)
qty_on_link = _create_link_for_quant(ops.id, quant, max_qty_on_link)
qty_to_assign -= qty_on_link
qty_assign_cmp = float_compare(qty_to_assign, 0, precision_rounding=ops.product_id.uom_id.rounding)
if qty_assign_cmp > 0:
                    #qty reserved is less than qty put in operations. We need to create a link, but it is deferred until after we have processed
                    #all the quants (because they leave no choice on their related move and need to be processed with higher priority)
still_to_do += [(ops, ops.product_id.id, qty_to_assign)]
need_rereserve = True
#2) then, process the remaining part
all_op_processed = True
for ops, product_id, remaining_qty in still_to_do:
all_op_processed = _create_link_for_product(ops.id, product_id, remaining_qty) and all_op_processed
return (need_rereserve, all_op_processed)
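    # Return semantics sketch: (need_rereserve, all_op_processed). need_rereserve is True
    # when an operation could not be matched against the currently reserved quants (the
    # caller should then unreserve and reassign), and all_op_processed is False when the
    # pack operations claim more quantity than the moves can provide (extra moves are then
    # created by _create_extra_moves during do_transfer).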
def picking_recompute_remaining_quantities(self, cr, uid, picking, context=None):
need_rereserve = False
all_op_processed = True
if picking.pack_operation_ids:
need_rereserve, all_op_processed = self.recompute_remaining_qty(cr, uid, picking, context=context)
return need_rereserve, all_op_processed
@api.cr_uid_ids_context
def do_recompute_remaining_quantities(self, cr, uid, picking_ids, context=None):
for picking in self.browse(cr, uid, picking_ids, context=context):
if picking.pack_operation_ids:
self.recompute_remaining_qty(cr, uid, picking, context=context)
def _prepare_values_extra_move(self, cr, uid, op, product, remaining_qty, context=None):
"""
Creates an extra move when there is no corresponding original move to be copied
"""
uom_obj = self.pool.get("product.uom")
uom_id = product.uom_id.id
qty = remaining_qty
if op.product_id and op.product_uom_id and op.product_uom_id.id != product.uom_id.id:
            if op.product_uom_id.factor > product.uom_id.factor: #if the pack operation's UoM is a smaller unit
uom_id = op.product_uom_id.id
                #HALF-UP rounding, as the only rounding errors come from the propagation of error from the default UoM
qty = uom_obj._compute_qty_obj(cr, uid, product.uom_id, remaining_qty, op.product_uom_id, rounding_method='HALF-UP')
picking = op.picking_id
res = {
'picking_id': picking.id,
'location_id': picking.location_id.id,
'location_dest_id': picking.location_dest_id.id,
'product_id': product.id,
'product_uom': uom_id,
'product_uom_qty': qty,
'name': _('Extra Move: ') + product.name,
'state': 'draft',
}
return res
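# A sketch of the dictionary returned above, with hypothetical ids and an
# illustrative product name (not taken from real data):
#
#   {
#       'picking_id': 42,
#       'location_id': 12,        # picking.location_id
#       'location_dest_id': 9,    # picking.location_dest_id
#       'product_id': 7,
#       'product_uom': 1,         # product default UoM, unless the operation
#                                 # uses a smaller UoM (handled above)
#       'product_uom_qty': 3.0,
#       'name': 'Extra Move: Widget',
#       'state': 'draft',
#   }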
def _create_extra_moves(self, cr, uid, picking, context=None):
'''This function creates move lines on a picking, at the time of do_transfer, based on
unexpected product transfers (or exceeding quantities) found in the pack operations.
'''
move_obj = self.pool.get('stock.move')
operation_obj = self.pool.get('stock.pack.operation')
moves = []
for op in picking.pack_operation_ids:
for product_id, remaining_qty in operation_obj._get_remaining_prod_quantities(cr, uid, op, context=context).items():
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
if float_compare(remaining_qty, 0, precision_rounding=product.uom_id.rounding) > 0:
vals = self._prepare_values_extra_move(cr, uid, op, product, remaining_qty, context=context)
moves.append(move_obj.create(cr, uid, vals, context=context))
if moves:
move_obj.action_confirm(cr, uid, moves, context=context)
return moves
def rereserve_pick(self, cr, uid, ids, context=None):
"""
This can be used to provide a button that re-reserves, taking into account the existing pack operations
"""
for pick in self.browse(cr, uid, ids, context=context):
self.rereserve_quants(cr, uid, pick, move_ids=[x.id for x in pick.move_lines], context=context)
def rereserve_quants(self, cr, uid, picking, move_ids=None, context=None):
""" Unreserve quants then try to reassign quants."""
stock_move_obj = self.pool.get('stock.move')
if not move_ids:
self.do_unreserve(cr, uid, [picking.id], context=context)
self.action_assign(cr, uid, [picking.id], context=context)
else:
stock_move_obj.do_unreserve(cr, uid, move_ids, context=context)
stock_move_obj.action_assign(cr, uid, move_ids, context=context)
@api.cr_uid_ids_context
def do_enter_transfer_details(self, cr, uid, picking, context=None):
if not context:
context = {}
context.update({
'active_model': self._name,
'active_ids': picking,
'active_id': len(picking) and picking[0] or False
})
created_id = self.pool['stock.transfer_details'].create(cr, uid, {'picking_id': len(picking) and picking[0] or False}, context)
return self.pool['stock.transfer_details'].wizard_view(cr, uid, created_id, context)
@api.cr_uid_ids_context
def do_transfer(self, cr, uid, picking_ids, context=None):
"""
If no pack operation, we do simple action_done of the picking
Otherwise, do the pack operations
"""
if not context:
context = {}
stock_move_obj = self.pool.get('stock.move')
for picking in self.browse(cr, uid, picking_ids, context=context):
if not picking.pack_operation_ids:
self.action_done(cr, uid, [picking.id], context=context)
continue
else:
need_rereserve, all_op_processed = self.picking_recompute_remaining_quantities(cr, uid, picking, context=context)
#create extra moves in the picking (unexpected product moves coming from pack operations)
todo_move_ids = []
if not all_op_processed:
todo_move_ids += self._create_extra_moves(cr, uid, picking, context=context)
#split move lines if needed
toassign_move_ids = []
for move in picking.move_lines:
remaining_qty = move.remaining_qty
if move.state in ('done', 'cancel'):
#ignore stock moves cancelled or already done
continue
elif move.state == 'draft':
toassign_move_ids.append(move.id)
if float_compare(remaining_qty, 0, precision_rounding=move.product_id.uom_id.rounding) == 0:
if move.state in ('draft', 'assigned', 'confirmed'):
todo_move_ids.append(move.id)
elif float_compare(remaining_qty, 0, precision_rounding=move.product_id.uom_id.rounding) > 0 and \
float_compare(remaining_qty, move.product_qty, precision_rounding=move.product_id.uom_id.rounding) < 0:
new_move = stock_move_obj.split(cr, uid, move, remaining_qty, context=context)
todo_move_ids.append(move.id)
#Assign move as it was assigned before
toassign_move_ids.append(new_move)
if need_rereserve or not all_op_processed:
if not picking.location_id.usage in ("supplier", "production", "inventory"):
self.rereserve_quants(cr, uid, picking, move_ids=todo_move_ids, context=context)
self.do_recompute_remaining_quantities(cr, uid, [picking.id], context=context)
if todo_move_ids and not context.get('do_only_split'):
self.pool.get('stock.move').action_done(cr, uid, todo_move_ids, context=context)
elif context.get('do_only_split'):
context = dict(context, split=todo_move_ids)
self._create_backorder(cr, uid, picking, context=context)
if toassign_move_ids:
stock_move_obj.action_assign(cr, uid, toassign_move_ids, context=context)
return True
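# A minimal usage sketch (hypothetical id): transferring a picking from code
# follows the same path as the "Transfer" button in the UI:
#
#   picking_obj = self.pool.get('stock.picking')
#   picking_obj.do_transfer(cr, uid, [picking_id], context=context)
#
# With context={'do_only_split': True}, the method only splits the moves and
# creates a backorder, which is exactly what do_split() below relies on.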
@api.cr_uid_ids_context
def do_split(self, cr, uid, picking_ids, context=None):
""" just split the picking (create a backorder) without making it 'done' """
if context is None:
context = {}
ctx = context.copy()
ctx['do_only_split'] = True
return self.do_transfer(cr, uid, picking_ids, context=ctx)
def get_next_picking_for_ui(self, cr, uid, context=None):
""" returns the next pickings to process. Used in the barcode scanner UI"""
if context is None:
context = {}
domain = [('state', 'in', ('assigned', 'partially_available'))]
if context.get('default_picking_type_id'):
domain.append(('picking_type_id', '=', context['default_picking_type_id']))
return self.search(cr, uid, domain, context=context)
def action_done_from_ui(self, cr, uid, picking_id, context=None):
""" called when button 'done' is pushed in the barcode scanner UI """
#write qty_done into field product_qty for every package_operation before doing the transfer
pack_op_obj = self.pool.get('stock.pack.operation')
for operation in self.browse(cr, uid, picking_id, context=context).pack_operation_ids:
pack_op_obj.write(cr, uid, operation.id, {'product_qty': operation.qty_done}, context=context)
self.do_transfer(cr, uid, [picking_id], context=context)
#return id of next picking to work on
return self.get_next_picking_for_ui(cr, uid, context=context)
@api.cr_uid_ids_context
def action_pack(self, cr, uid, picking_ids, operation_filter_ids=None, context=None):
""" Create a package with the current pack_operation_ids of the picking that aren't yet in a pack.
Used in the barcode scanner UI and the normal interface as well.
operation_filter_ids is used by the barcode scanner interface to specify a subset of operations to pack"""
if operation_filter_ids is None:
operation_filter_ids = []
stock_operation_obj = self.pool.get('stock.pack.operation')
package_obj = self.pool.get('stock.quant.package')
stock_move_obj = self.pool.get('stock.move')
package_id = False
for picking_id in picking_ids:
operation_search_domain = [('picking_id', '=', picking_id), ('result_package_id', '=', False)]
if operation_filter_ids:
operation_search_domain.append(('id', 'in', operation_filter_ids))
operation_ids = stock_operation_obj.search(cr, uid, operation_search_domain, context=context)
pack_operation_ids = []
if operation_ids:
for operation in stock_operation_obj.browse(cr, uid, operation_ids, context=context):
#If we haven't done all the qty in the operation, we have to split it into 2 operations
op = operation
if operation.qty_done < operation.product_qty:
new_operation = stock_operation_obj.copy(cr, uid, operation.id, {'product_qty': operation.qty_done, 'qty_done': operation.qty_done}, context=context)
stock_operation_obj.write(cr, uid, operation.id, {'product_qty': operation.product_qty - operation.qty_done, 'qty_done': 0, 'lot_id': False}, context=context)
op = stock_operation_obj.browse(cr, uid, new_operation, context=context)
pack_operation_ids.append(op.id)
if op.product_id and op.location_id and op.location_dest_id:
stock_move_obj.check_tracking_product(cr, uid, op.product_id, op.lot_id.id, op.location_id, op.location_dest_id, context=context)
package_id = package_obj.create(cr, uid, {}, context=context)
stock_operation_obj.write(cr, uid, pack_operation_ids, {'result_package_id': package_id}, context=context)
return package_id
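# A minimal usage sketch (hypothetical id): packing the not-yet-packed
# operations of a picking returns the id of the created package:
#
#   package_id = picking_obj.action_pack(cr, uid, [picking_id], context=context)
#
# Passing operation_filter_ids restricts the packing to a subset of
# operations, as the barcode scanner UI does.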
def process_product_id_from_ui(self, cr, uid, picking_id, product_id, op_id, increment=True, context=None):
return self.pool.get('stock.pack.operation')._search_and_increment(cr, uid, picking_id, [('product_id', '=', product_id),('id', '=', op_id)], increment=increment, context=context)
def process_barcode_from_ui(self, cr, uid, picking_id, barcode_str, visible_op_ids, context=None):
'''This function is called each time the barcode scanner reads an input'''
lot_obj = self.pool.get('stock.production.lot')
package_obj = self.pool.get('stock.quant.package')
product_obj = self.pool.get('product.product')
stock_operation_obj = self.pool.get('stock.pack.operation')
stock_location_obj = self.pool.get('stock.location')
answer = {'filter_loc': False, 'operation_id': False}
#check if the barcode corresponds to a location
matching_location_ids = stock_location_obj.search(cr, uid, [('loc_barcode', '=', barcode_str)], context=context)
if matching_location_ids:
#if we have a location, return immediately with the location name
location = stock_location_obj.browse(cr, uid, matching_location_ids[0], context=None)
answer['filter_loc'] = stock_location_obj._name_get(cr, uid, location, context=None)
answer['filter_loc_id'] = matching_location_ids[0]
return answer
#check if the barcode corresponds to a product
matching_product_ids = product_obj.search(cr, uid, ['|', ('ean13', '=', barcode_str), ('default_code', '=', barcode_str)], context=context)
if matching_product_ids:
op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', matching_product_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
answer['operation_id'] = op_id
return answer
#check if the barcode corresponds to a lot
matching_lot_ids = lot_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
if matching_lot_ids:
lot = lot_obj.browse(cr, uid, matching_lot_ids[0], context=context)
op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', lot.product_id.id), ('lot_id', '=', lot.id)], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
answer['operation_id'] = op_id
return answer
#check if the barcode corresponds to a package
matching_package_ids = package_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
if matching_package_ids:
op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('package_id', '=', matching_package_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
answer['operation_id'] = op_id
return answer
return answer
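# The barcode handler above always returns an 'answer' dictionary; a sketch of
# its possible shapes (illustrative values only):
#
#   {'filter_loc': 'WH/Stock', 'filter_loc_id': 12, 'operation_id': False}
#       -> the barcode matched a location: the UI filters on it
#   {'filter_loc': False, 'operation_id': 58}
#       -> the barcode matched a product, lot or package: the matching pack
#          operation was incremented (or created) and its id is returned
#   {'filter_loc': False, 'operation_id': False}
#       -> nothing matched the barcode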
class stock_production_lot(osv.osv):
_name = 'stock.production.lot'
_inherit = ['mail.thread']
_description = 'Lot/Serial'
_columns = {
'name': fields.char('Serial Number', required=True, help="Unique Serial Number"),
'ref': fields.char('Internal Reference', help="Internal reference number in case it differs from the manufacturer's serial number"),
'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]),
'quant_ids': fields.one2many('stock.quant', 'lot_id', 'Quants', readonly=True),
'create_date': fields.datetime('Creation Date'),
}
_defaults = {
'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
'product_id': lambda x, y, z, c: c.get('product_id', False),
}
_sql_constraints = [
('name_ref_uniq', 'unique (name, ref, product_id)', 'The combination of serial number, internal reference and product must be unique!'),
]
def action_traceability(self, cr, uid, ids, context=None):
""" It traces the information of lots
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return: A dictionary of values
"""
quant_obj = self.pool.get("stock.quant")
quants = quant_obj.search(cr, uid, [('lot_id', 'in', ids)], context=context)
moves = set()
for quant in quant_obj.browse(cr, uid, quants, context=context):
moves |= {move.id for move in quant.history_ids}
if moves:
return {
'domain': "[('id','in',[" + ','.join(map(str, list(moves))) + "])]",
'name': _('Traceability'),
'view_mode': 'tree,form',
'view_type': 'form',
'context': {'tree_view_ref': 'stock.view_move_tree'},
'res_model': 'stock.move',
'type': 'ir.actions.act_window',
}
return False
# ----------------------------------------------------
# Move
# ----------------------------------------------------
class stock_move(models.Model):
_name = "stock.move"
_description = "Stock Move"
_order = 'date_expected desc, id'
_log_create = False
def get_price_unit(self, cr, uid, move, context=None):
""" Returns the unit price to store on the quant """
return move.price_unit or move.product_id.standard_price
def name_get(self, cr, uid, ids, context=None):
res = []
for line in self.browse(cr, uid, ids, context=context):
name = line.location_id.name + ' > ' + line.location_dest_id.name
if line.product_id.code:
name = line.product_id.code + ': ' + name
if line.picking_id.origin:
name = line.picking_id.origin + '/ ' + name
res.append((line.id, name))
return res
@api.depends('product_id', 'product_uom_qty', 'product_uom', 'product_id.uom_id')
def _quantity_normalize(self):
uom_obj = self.pool.get('product.uom')
for m in self:
m.product_qty = uom_obj._compute_qty_obj(self.env.cr, self.env.uid, m.product_uom, m.product_uom_qty, m.product_id.uom_id, context=self.env.context)
def _get_remaining_qty(self, cr, uid, ids, field_name, args, context=None):
uom_obj = self.pool.get('product.uom')
res = {}
for move in self.browse(cr, uid, ids, context=context):
qty = move.product_qty
for record in move.linked_move_operation_ids:
qty -= record.qty
# Keeping in product default UoM
res[move.id] = float_round(qty, precision_rounding=move.product_id.uom_id.rounding)
return res
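# Worked example (hypothetical quantities): a move of 10.0 units linked to two
# operations of 4.0 and 3.0 units gets remaining_qty = 10.0 - 4.0 - 3.0 = 3.0,
# rounded to the precision of the product's default UoM.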
def _get_lot_ids(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, False)
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
res[move.id] = [q.lot_id.id for q in move.quant_ids if q.lot_id]
else:
res[move.id] = [q.lot_id.id for q in move.reserved_quant_ids if q.lot_id]
return res
def _get_product_availability(self, cr, uid, ids, field_name, args, context=None):
quant_obj = self.pool.get('stock.quant')
res = dict.fromkeys(ids, False)
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
res[move.id] = move.product_qty
else:
sublocation_ids = self.pool.get('stock.location').search(cr, uid, [('id', 'child_of', [move.location_id.id])], context=context)
quant_ids = quant_obj.search(cr, uid, [('location_id', 'in', sublocation_ids), ('product_id', '=', move.product_id.id), ('reservation_id', '=', False)], context=context)
availability = 0
for quant in quant_obj.browse(cr, uid, quant_ids, context=context):
availability += quant.qty
res[move.id] = min(move.product_qty, availability)
return res
def _get_string_qty_information(self, cr, uid, ids, field_name, args, context=None):
settings_obj = self.pool.get('stock.config.settings')
uom_obj = self.pool.get('product.uom')
res = dict.fromkeys(ids, '')
for move in self.browse(cr, uid, ids, context=context):
if move.state in ('draft', 'done', 'cancel') or move.location_id.usage != 'internal':
res[move.id] = '' # 'not applicable' or 'n/a' could work too
continue
total_available = min(move.product_qty, move.reserved_availability + move.availability)
total_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, total_available, move.product_uom, context=context)
info = str(total_available)
#look in the settings if we need to display the UoM name or not
config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context)
if config_ids:
stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context)
if stock_settings.group_uom:
info += ' ' + move.product_uom.name
if move.reserved_availability:
if move.reserved_availability != total_available:
#some of the available quantity is assigned and some are available but not reserved
reserved_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, move.reserved_availability, move.product_uom, context=context)
info += _(' (%s reserved)') % str(reserved_available)
else:
#all available quantity is assigned
info += _(' (reserved)')
res[move.id] = info
return res
def _get_reserved_availability(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, 0)
for move in self.browse(cr, uid, ids, context=context):
res[move.id] = sum([quant.qty for quant in move.reserved_quant_ids])
return res
def _get_move(self, cr, uid, ids, context=None):
res = set()
for quant in self.browse(cr, uid, ids, context=context):
if quant.reservation_id:
res.add(quant.reservation_id.id)
return list(res)
def _get_move_ids(self, cr, uid, ids, context=None):
res = []
for picking in self.browse(cr, uid, ids, context=context):
res += [x.id for x in picking.move_lines]
return res
def _set_product_qty(self, cr, uid, id, field, value, arg, context=None):
""" The meaning of product_qty field changed lately and is now a functional field computing the quantity
in the default product UoM. This code has been added to raise an error if a write is made given a value
for `product_qty`, where the same write should set the `product_uom_qty` field instead, in order to
detect errors.
"""
raise osv.except_osv(_('Programming Error!'), _('The requested operation cannot be processed because of a programming error setting the `product_qty` field instead of the `product_uom_qty`.'))
product_qty = new_fields.Float(compute='_quantity_normalize', inverse='_set_product_qty', digits=0, store=True, string='Quantity',
help='Quantity in the default UoM of the product')
_columns = {
'name': fields.char('Description', required=True, select=True),
'priority': fields.selection(procurement.PROCUREMENT_PRIORITIES, 'Priority'),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}),
'date_expected': fields.datetime('Expected Date', states={'done': [('readonly', True)]}, required=True, select=True, help="Scheduled date for the processing of this move"),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type', '<>', 'service')], states={'done': [('readonly', True)]}),
'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
required=True, states={'done': [('readonly', True)]},
help="This is the quantity of products from an inventory "
"point of view. For moves in the state 'done', this is the "
"quantity of products that were actually moved. For other "
"moves, this is the quantity of product that is planned to "
"be moved. Lowering this quantity does not generate a "
"backorder. Changing this quantity on assigned moves affects "
"the product reservation, and should be done with care."
),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True, states={'done': [('readonly', True)]}),
'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product Unit of Measure'), states={'done': [('readonly', True)]}),
'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}),
'product_tmpl_id': fields.related('product_id', 'product_tmpl_id', type='many2one', relation='product.template', string='Product Template'),
'product_packaging': fields.many2one('product.packaging', 'Preferred Packaging', help="It specifies attributes of packaging like type, quantity of packaging, etc."),
'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True, auto_join=True,
states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."),
'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True, states={'done': [('readonly', True)]}, select=True,
auto_join=True, help="Location where the system will stock the finished products."),
'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"),
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True, copy=False),
'move_orig_ids': fields.one2many('stock.move', 'move_dest_id', 'Original Move', help="Optional: previous stock move when chaining them", select=True),
'picking_id': fields.many2one('stock.picking', 'Reference', select=True, states={'done': [('readonly', True)]}),
'note': fields.text('Notes'),
'state': fields.selection([('draft', 'New'),
('cancel', 'Cancelled'),
('waiting', 'Waiting Another Move'),
('confirmed', 'Waiting Availability'),
('assigned', 'Available'),
('done', 'Done'),
], 'Status', readonly=True, select=True, copy=False,
help= "* New: When the stock move is created and not yet confirmed.\n"\
"* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\
"* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\
"* Available: When products are reserved, it is set to \'Available\'.\n"\
"* Done: When the shipment is processed, the state is \'Done\'."),
'partially_available': fields.boolean('Partially Available', readonly=True, help="Checks if the move has some stock reserved", copy=False),
'price_unit': fields.float('Unit Price', help="Technical field used to record the product cost set by the user during a picking confirmation (when costing method used is 'average price' or 'real'). Value given in company currency and in product uom."), # as it's a technical field, we intentionally don't provide the digits attribute
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug", copy=False),
'backorder_id': fields.related('picking_id', 'backorder_id', type='many2one', relation="stock.picking", string="Back Order of", select=True),
'origin': fields.char("Source"),
'procure_method': fields.selection([('make_to_stock', 'Default: Take From Stock'), ('make_to_order', 'Advanced: Apply Procurement Rules')], 'Supply Method', required=True,
help="""By default, the system will take from the stock in the source location and passively wait for availability. The other possibility allows you to directly create a procurement on the source location (and thus ignore its current stock) to gather products. If we want to chain moves and have this one to wait for the previous, this second option should be chosen."""),
# used for colors in tree views:
'scrapped': fields.related('location_dest_id', 'scrap_location', type='boolean', relation='stock.location', string='Scrapped', readonly=True),
'quant_ids': fields.many2many('stock.quant', 'stock_quant_move_rel', 'move_id', 'quant_id', 'Moved Quants', copy=False),
'reserved_quant_ids': fields.one2many('stock.quant', 'reservation_id', 'Reserved quants'),
'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'move_id', string='Linked Operations', readonly=True, help='Operations that impact this move for the computation of the remaining quantities'),
'remaining_qty': fields.function(_get_remaining_qty, type='float', string='Remaining Quantity', digits=0,
states={'done': [('readonly', True)]}, help="Remaining Quantity in default UoM according to operations matched with this move"),
'procurement_id': fields.many2one('procurement.order', 'Procurement'),
'group_id': fields.many2one('procurement.group', 'Procurement Group'),
'rule_id': fields.many2one('procurement.rule', 'Procurement Rule', help='The pull rule that created this stock move'),
'push_rule_id': fields.many2one('stock.location.path', 'Push Rule', help='The push rule that created this stock move'),
'propagate': fields.boolean('Propagate cancel and split', help='If checked, when this move is cancelled, cancel the linked move too'),
'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type'),
'inventory_id': fields.many2one('stock.inventory', 'Inventory'),
'lot_ids': fields.function(_get_lot_ids, type='many2many', relation='stock.production.lot', string='Lots'),
'origin_returned_move_id': fields.many2one('stock.move', 'Origin return move', help='move that created the return move', copy=False),
'returned_move_ids': fields.one2many('stock.move', 'origin_returned_move_id', 'All returned moves', help='Optional: all returned moves created from this move'),
'reserved_availability': fields.function(_get_reserved_availability, type='float', string='Quantity Reserved', readonly=True, help='Quantity that has already been reserved for this move'),
'availability': fields.function(_get_product_availability, type='float', string='Quantity Available', readonly=True, help='Quantity in stock that can still be reserved for this move'),
'string_availability_info': fields.function(_get_string_qty_information, type='text', string='Availability', readonly=True, help='Show various information on stock availability for this move'),
'restrict_lot_id': fields.many2one('stock.production.lot', 'Lot', help="Technical field used to depict a restriction on the lot of quants to consider when marking this move as 'done'"),
'restrict_partner_id': fields.many2one('res.partner', 'Owner ', help="Technical field used to depict a restriction on the ownership of quants to consider when marking this move as 'done'"),
'route_ids': fields.many2many('stock.location.route', 'stock_location_route_move', 'move_id', 'route_id', 'Destination route', help="Preferred route to be followed by the procurement order"),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Technical field depicting the warehouse to consider for the route selection on the next procurement (if any)."),
}
def _default_location_destination(self, cr, uid, context=None):
context = context or {}
if context.get('default_picking_type_id', False):
pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context)
return pick_type.default_location_dest_id and pick_type.default_location_dest_id.id or False
return False
def _default_location_source(self, cr, uid, context=None):
context = context or {}
if context.get('default_picking_type_id', False):
pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context)
return pick_type.default_location_src_id and pick_type.default_location_src_id.id or False
return False
def _default_destination_address(self, cr, uid, context=None):
return False
_defaults = {
'location_id': _default_location_source,
'location_dest_id': _default_location_destination,
'partner_id': _default_destination_address,
'state': 'draft',
'priority': '1',
'product_uom_qty': 1.0,
'scrapped': False,
'date': fields.datetime.now,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c),
'date_expected': fields.datetime.now,
'procure_method': 'make_to_stock',
'propagate': True,
'partially_available': False,
}
def _check_uom(self, cr, uid, ids, context=None):
for move in self.browse(cr, uid, ids, context=context):
if move.product_id.uom_id.category_id.id != move.product_uom.category_id.id:
return False
return True
_constraints = [
(_check_uom,
'You are trying to move a product using a UoM that is not compatible with the UoM of the product moved. Please use a UoM in the same UoM category.',
['product_uom']),
]
def init(self, cr):
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_move_product_location_index',))
if not cr.fetchone():
cr.execute('CREATE INDEX stock_move_product_location_index ON stock_move (product_id, location_id, location_dest_id, company_id, state)')
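# The index is created by hand because select=True on a column only creates a
# single-column index; this composite index speeds up the searches on
# (product, locations, company, state) performed throughout this model.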
@api.cr_uid_ids_context
def do_unreserve(self, cr, uid, move_ids, context=None):
quant_obj = self.pool.get("stock.quant")
for move in self.browse(cr, uid, move_ids, context=context):
if move.state in ('done', 'cancel'):
raise osv.except_osv(_('Operation Forbidden!'), _('Cannot unreserve a done move'))
quant_obj.quants_unreserve(cr, uid, move, context=context)
if self.find_move_ancestors(cr, uid, move, context=context):
self.write(cr, uid, [move.id], {'state': 'waiting'}, context=context)
else:
self.write(cr, uid, [move.id], {'state': 'confirmed'}, context=context)
def _prepare_procurement_from_move(self, cr, uid, move, context=None):
origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or "/")
group_id = move.group_id and move.group_id.id or False
if move.rule_id:
if move.rule_id.group_propagation_option == 'fixed' and move.rule_id.group_id:
group_id = move.rule_id.group_id.id
elif move.rule_id.group_propagation_option == 'none':
group_id = False
return {
'name': move.rule_id and move.rule_id.name or "/",
'origin': origin,
'company_id': move.company_id and move.company_id.id or False,
'date_planned': move.date,
'product_id': move.product_id.id,
'product_qty': move.product_uom_qty,
'product_uom': move.product_uom.id,
'product_uos_qty': (move.product_uos and move.product_uos_qty) or move.product_uom_qty,
'product_uos': (move.product_uos and move.product_uos.id) or move.product_uom.id,
'location_id': move.location_id.id,
'move_dest_id': move.id,
'group_id': group_id,
'route_ids': [(4, x.id) for x in move.route_ids],
'warehouse_id': move.warehouse_id.id or (move.picking_type_id and move.picking_type_id.warehouse_id.id or False),
'priority': move.priority,
}
def _push_apply(self, cr, uid, moves, context=None):
push_obj = self.pool.get("stock.location.path")
for move in moves:
#1) if the move is already chained, there is no need to check push rules
#2) if the move is a returned move, we don't want to check push rules, as returning a returned move is the only decent way
# to receive goods without triggering the push rules again (which would duplicate chained operations)
if not move.move_dest_id and not move.origin_returned_move_id:
domain = [('location_from_id', '=', move.location_dest_id.id)]
#priority goes to the route defined on the product and product category
route_ids = [x.id for x in move.product_id.route_ids + move.product_id.categ_id.total_route_ids]
rules = push_obj.search(cr, uid, domain + [('route_id', 'in', route_ids)], order='route_sequence, sequence', context=context)
if not rules:
#then we search on the warehouse if a rule can apply
wh_route_ids = []
if move.warehouse_id:
wh_route_ids = [x.id for x in move.warehouse_id.route_ids]
elif move.picking_type_id and move.picking_type_id.warehouse_id:
wh_route_ids = [x.id for x in move.picking_type_id.warehouse_id.route_ids]
if wh_route_ids:
rules = push_obj.search(cr, uid, domain + [('route_id', 'in', wh_route_ids)], order='route_sequence, sequence', context=context)
if not rules:
#if no specialized push rule has been found yet, we try to find a general one (without route)
rules = push_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
if rules:
rule = push_obj.browse(cr, uid, rules[0], context=context)
push_obj._apply(cr, uid, rule, move, context=context)
return True
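# Push rule selection above cascades through three domains of decreasing
# priority; a summary of the precedence:
#
#   1. rules on the routes of the product and its category
#   2. rules on the routes of the warehouse (from the move or its picking type)
#   3. rules bound to no route at all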
def _create_procurement(self, cr, uid, move, context=None):
""" This will create a procurement order """
return self.pool.get("procurement.order").create(cr, uid, self._prepare_procurement_from_move(cr, uid, move, context=context), context=context)
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
# Check that we do not modify a stock.move which is done
frozen_fields = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
if frozen_fields.intersection(vals):
raise osv.except_osv(_('Operation Forbidden!'),
_('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
propagated_changes_dict = {}
#propagation of quantity change
if vals.get('product_uom_qty'):
propagated_changes_dict['product_uom_qty'] = vals['product_uom_qty']
if vals.get('product_uom_id'):
propagated_changes_dict['product_uom_id'] = vals['product_uom_id']
#propagation of expected date:
propagated_date_field = False
if vals.get('date_expected'):
#propagate any manual change of the expected date
propagated_date_field = 'date_expected'
elif (vals.get('state', '') == 'done' and vals.get('date')):
#propagate also any delta observed when setting the move as done
propagated_date_field = 'date'
if not context.get('do_not_propagate', False) and (propagated_date_field or propagated_changes_dict):
#any propagation is (maybe) needed
for move in self.browse(cr, uid, ids, context=context):
if move.move_dest_id and move.propagate:
if 'date_expected' in propagated_changes_dict:
propagated_changes_dict.pop('date_expected')
if propagated_date_field:
current_date = datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
new_date = datetime.strptime(vals.get(propagated_date_field), DEFAULT_SERVER_DATETIME_FORMAT)
delta = new_date - current_date
if abs(delta.days) >= move.company_id.propagation_minimum_delta:
old_move_date = datetime.strptime(move.move_dest_id.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
new_move_date = (old_move_date + relativedelta.relativedelta(days=delta.days or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
propagated_changes_dict['date_expected'] = new_move_date
#For pushed moves as well as for pulled moves, propagate by recursive call of write().
#Note that, for pulled moves we intentionally don't propagate on the procurement.
if propagated_changes_dict:
self.write(cr, uid, [move.move_dest_id.id], propagated_changes_dict, context=context)
return super(stock_move, self).write(cr, uid, ids, vals, context=context)
def onchange_quantity(self, cr, uid, ids, product_id, product_qty, product_uom, product_uos):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_qty: Changed Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_uos_qty': 0.00
}
warning = {}
if (not product_id) or (product_qty <= 0.0):
result['product_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# Warn if the quantity was decreased
if ids:
for move in self.read(cr, uid, ids, ['product_qty']):
if product_qty < move['product_qty']:
warning.update({
'title': _('Information'),
'message': _("By changing this quantity here, you accept the "
"new quantity as complete: Odoo will not "
"automatically generate a back order.")})
break
if product_uos and product_uom and (product_uom != product_uos):
result['product_uos_qty'] = product_qty * uos_coeff['uos_coeff']
else:
result['product_uos_qty'] = product_qty
return {'value': result, 'warning': warning}
def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty,
product_uos, product_uom):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_uos_qty: Changed UoS Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_uom_qty': 0.00
}
if (not product_id) or (product_uos_qty <= 0.0):
result['product_uos_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# No warning if the quantity was decreased to avoid double warnings:
# The clients should call onchange_quantity too anyway
if product_uos and product_uom and (product_uom != product_uos):
result['product_uom_qty'] = product_uos_qty / uos_coeff['uos_coeff']
else:
result['product_uom_qty'] = product_uos_qty
return {'value': result}
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False, loc_dest_id=False, partner_id=False):
""" On change of product id, if finds UoM, UoS, quantity and UoS quantity.
@param prod_id: Changed Product id
@param loc_id: Source location id
@param loc_dest_id: Destination location id
@param partner_id: Address id of partner
@return: Dictionary of values
"""
if not prod_id:
return {}
user = self.pool.get('res.users').browse(cr, uid, uid)
lang = user and user.lang or False
if partner_id:
addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id)
if addr_rec:
lang = addr_rec and addr_rec.lang or False
ctx = {'lang': lang}
product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0]
uos_id = product.uos_id and product.uos_id.id or False
result = {
'name': product.partner_ref,
'product_uom': product.uom_id.id,
'product_uos': uos_id,
'product_uom_qty': 1.00,
'product_uos_qty': self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'],
}
if loc_id:
result['location_id'] = loc_id
if loc_dest_id:
result['location_dest_id'] = loc_dest_id
return {'value': result}
@api.cr_uid_ids_context
def _picking_assign(self, cr, uid, move_ids, procurement_group, location_from, location_to, context=None):
"""Assign a picking on the given move_ids, which is a list of move supposed to share the same procurement_group, location_from and location_to
(and company). Those attributes are also given as parameters.
"""
pick_obj = self.pool.get("stock.picking")
# Use a SQL query, as doing this with the ORM would split it into several queries with id IN (...)
# In the next version, the locations on the picking should be stored again.
query = """
SELECT stock_picking.id FROM stock_picking, stock_move
WHERE
stock_picking.state in ('draft', 'confirmed', 'waiting') AND
stock_move.picking_id = stock_picking.id AND
stock_move.location_id = %s AND
stock_move.location_dest_id = %s AND
"""
params = (location_from, location_to)
if not procurement_group:
query += "stock_picking.group_id IS NULL LIMIT 1"
else:
query += "stock_picking.group_id = %s LIMIT 1"
params += (procurement_group,)
cr.execute(query, params)
[pick] = cr.fetchone() or [None]
if not pick:
move = self.browse(cr, uid, move_ids, context=context)[0]
values = {
'origin': move.origin,
'company_id': move.company_id and move.company_id.id or False,
'move_type': move.group_id and move.group_id.move_type or 'direct',
'partner_id': move.partner_id.id or False,
'picking_type_id': move.picking_type_id and move.picking_type_id.id or False,
}
pick = pick_obj.create(cr, uid, values, context=context)
return self.write(cr, uid, move_ids, {'picking_id': pick}, context=context)
def onchange_date(self, cr, uid, ids, date, date_expected, context=None):
""" On change of Scheduled Date gives a Move date.
@param date_expected: Scheduled Date
@param date: Move Date
@return: Move Date
"""
if not date_expected:
date_expected = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
return {'value': {'date': date_expected}}
def attribute_price(self, cr, uid, move, context=None):
"""
Attribute price to move, important in inter-company moves or receipts with only one partner
"""
if not move.price_unit:
price = move.product_id.standard_price
self.write(cr, uid, [move.id], {'price_unit': price})
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms stock move or put it in waiting if it's linked to another move.
@return: List of ids.
"""
if isinstance(ids, (int, long)):
ids = [ids]
states = {
'confirmed': [],
'waiting': []
}
to_assign = {}
for move in self.browse(cr, uid, ids, context=context):
self.attribute_price(cr, uid, move, context=context)
state = 'confirmed'
#if the move is preceded, then it's waiting (if the preceding move is done, then action_assign has been called already and its state is already available)
if move.move_orig_ids:
state = 'waiting'
#if the move is split and some of its ancestors were preceded, then it's waiting as well
elif move.split_from:
move2 = move.split_from
while move2 and state != 'waiting':
if move2.move_orig_ids:
state = 'waiting'
move2 = move2.split_from
states[state].append(move.id)
if not move.picking_id and move.picking_type_id:
key = (move.group_id.id, move.location_id.id, move.location_dest_id.id)
if key not in to_assign:
to_assign[key] = []
to_assign[key].append(move.id)
for move in self.browse(cr, uid, states['confirmed'], context=context):
if move.procure_method == 'make_to_order':
self._create_procurement(cr, uid, move, context=context)
states['waiting'].append(move.id)
states['confirmed'].remove(move.id)
for state, write_ids in states.items():
if len(write_ids):
self.write(cr, uid, write_ids, {'state': state})
#assign pickings in batch for all confirmed moves that share the same details
for key, move_ids in to_assign.items():
procurement_group, location_from, location_to = key
self._picking_assign(cr, uid, move_ids, procurement_group, location_from, location_to, context=context)
moves = self.browse(cr, uid, ids, context=context)
self._push_apply(cr, uid, moves, context=context)
return ids
def force_assign(self, cr, uid, ids, context=None):
""" Changes the state to assigned.
@return: True
"""
return self.write(cr, uid, ids, {'state': 'assigned'}, context=context)
def check_tracking_product(self, cr, uid, product, lot_id, location, location_dest, context=None):
check = False
if product.track_all and not location_dest.usage == 'inventory':
check = True
elif product.track_incoming and location.usage in ('supplier', 'transit', 'inventory') and location_dest.usage == 'internal':
check = True
elif product.track_outgoing and location_dest.usage in ('customer', 'transit') and location.usage == 'internal':
check = True
if check and not lot_id:
raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (product.name))
def check_tracking(self, cr, uid, move, lot_id, context=None):
""" Checks if serial number is assigned to stock move or not and raise an error if it had to.
"""
self.check_tracking_product(cr, uid, move.product_id, lot_id, move.location_id, move.location_dest_id, context=context)
def action_assign(self, cr, uid, ids, context=None):
""" Checks the product type and accordingly writes the state.
"""
context = context or {}
quant_obj = self.pool.get("stock.quant")
to_assign_moves = []
main_domain = {}
todo_moves = []
operations = set()
for move in self.browse(cr, uid, ids, context=context):
if move.state not in ('confirmed', 'waiting', 'assigned'):
continue
if move.location_id.usage in ('supplier', 'inventory', 'production'):
to_assign_moves.append(move.id)
#in case the move is returned, we want to try to find quants before forcing the assignment
if not move.origin_returned_move_id:
continue
if move.product_id.type == 'consu':
to_assign_moves.append(move.id)
continue
else:
todo_moves.append(move)
#we always keep the quants already assigned and only look for the remaining quantity among unassigned quants
main_domain[move.id] = [('reservation_id', '=', False), ('qty', '>', 0)]
#if the move is preceded, restrict the choice of quants to the ones moved previously by the original move
ancestors = self.find_move_ancestors(cr, uid, move, context=context)
if move.state == 'waiting' and not ancestors:
#if the waiting move doesn't have any ancestor yet (PO/MO not confirmed yet), don't consider any quant in stock as available
main_domain[move.id] += [('id', '=', False)]
elif ancestors:
main_domain[move.id] += [('history_ids', 'in', ancestors)]
#if the move is returned from another, restrict the choice of quants to the ones that follow the returned move
if move.origin_returned_move_id:
main_domain[move.id] += [('history_ids', 'in', move.origin_returned_move_id.id)]
for link in move.linked_move_operation_ids:
operations.add(link.operation_id)
# Check all ops and sort them: we want to process the entire packages first, then the operations with a lot, then the rest
operations = list(operations)
operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
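# How the sort key above orders operations (lower score comes first):
#   entire package (package set, no product): -4 + -2 = -6 -> processed first
#   package + product + lot: -2 + -1 = -3
#   package + product: -2
#   lot only: -1
#   everything else: 0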
for ops in operations:
#first try to find quants based on specific domains given by linked operations
for record in ops.linked_move_operation_ids:
move = record.move_id
if move.id in main_domain:
domain = main_domain[move.id] + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context)
qty = record.qty
if qty:
quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, qty, domain=domain, prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
quant_obj.quants_reserve(cr, uid, quants, move, record, context=context)
for move in todo_moves:
if move.linked_move_operation_ids:
continue
#then if the move isn't totally assigned, try to find quants without any specific domain
if move.state != 'assigned':
qty_already_assigned = move.reserved_availability
qty = move.product_qty - qty_already_assigned
quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain[move.id], prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
quant_obj.quants_reserve(cr, uid, quants, move, context=context)
#force assignment of consumable products and of moves incoming from supplier/inventory/production
if to_assign_moves:
self.force_assign(cr, uid, to_assign_moves, context=context)
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels the moves and if all moves are cancelled it cancels the picking.
@return: True
"""
procurement_obj = self.pool.get('procurement.order')
context = context or {}
procs_to_check = []
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
raise osv.except_osv(_('Operation Forbidden!'),
_('You cannot cancel a stock move that has been set to \'Done\'.'))
if move.reserved_quant_ids:
self.pool.get("stock.quant").quants_unreserve(cr, uid, move, context=context)
if context.get('cancel_procurement'):
if move.propagate:
procurement_ids = procurement_obj.search(cr, uid, [('move_dest_id', '=', move.id)], context=context)
procurement_obj.cancel(cr, uid, procurement_ids, context=context)
else:
if move.move_dest_id:
if move.propagate:
self.action_cancel(cr, uid, [move.move_dest_id.id], context=context)
elif move.move_dest_id.state == 'waiting':
#If waiting, the chain will be broken and we are not sure if we can still wait for it (=> could take from stock instead)
self.write(cr, uid, [move.move_dest_id.id], {'state': 'confirmed'}, context=context)
if move.procurement_id:
# Does the same as procurement check, only eliminating a refresh
procs_to_check.append(move.procurement_id.id)
res = self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}, context=context)
if procs_to_check:
procurement_obj.check(cr, uid, procs_to_check, context=context)
return res
def _check_package_from_moves(self, cr, uid, ids, context=None):
pack_obj = self.pool.get("stock.quant.package")
packs = set()
for move in self.browse(cr, uid, ids, context=context):
packs |= set([q.package_id for q in move.quant_ids if q.package_id and q.qty > 0])
return pack_obj._check_location_constraint(cr, uid, list(packs), context=context)
def find_move_ancestors(self, cr, uid, move, context=None):
'''Find the first level ancestors of given move '''
ancestors = []
move2 = move
while move2:
ancestors += [x.id for x in move2.move_orig_ids]
#loop on split_from to find the ancestors of split moves, but only if the move has no direct ancestor (priority goes to them)
move2 = not move2.move_orig_ids and move2.split_from or False
return ancestors
@api.cr_uid_ids_context
def recalculate_move_state(self, cr, uid, move_ids, context=None):
'''Recompute the state of the given moves, whose reserved quants were used to fulfill another operation'''
for move in self.browse(cr, uid, move_ids, context=context):
vals = {}
reserved_quant_ids = move.reserved_quant_ids
if len(reserved_quant_ids) > 0 and not move.partially_available:
vals['partially_available'] = True
if len(reserved_quant_ids) == 0 and move.partially_available:
vals['partially_available'] = False
if move.state == 'assigned':
if self.find_move_ancestors(cr, uid, move, context=context):
vals['state'] = 'waiting'
else:
vals['state'] = 'confirmed'
if vals:
self.write(cr, uid, [move.id], vals, context=context)
def action_done(self, cr, uid, ids, context=None):
""" Process completely the moves given as ids and if all moves are done, it will finish the picking.
"""
context = context or {}
picking_obj = self.pool.get("stock.picking")
quant_obj = self.pool.get("stock.quant")
todo = [move.id for move in self.browse(cr, uid, ids, context=context) if move.state == "draft"]
if todo:
ids = self.action_confirm(cr, uid, todo, context=context)
pickings = set()
procurement_ids = []
#Search operations that are linked to the moves
operations = set()
move_qty = {}
for move in self.browse(cr, uid, ids, context=context):
move_qty[move.id] = move.product_qty
for link in move.linked_move_operation_ids:
operations.add(link.operation_id)
#Sort operations according to entire packages first, then package + lot, package only, lot only
operations = list(operations)
operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
for ops in operations:
if ops.picking_id:
pickings.add(ops.picking_id.id)
main_domain = [('qty', '>', 0)]
for record in ops.linked_move_operation_ids:
move = record.move_id
self.check_tracking(cr, uid, move, not ops.product_id and ops.package_id.id or ops.lot_id.id, context=context)
prefered_domain = [('reservation_id', '=', move.id)]
fallback_domain = [('reservation_id', '=', False)]
fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)]
prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2]
dom = main_domain + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context)
quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, record.qty, domain=dom, prefered_domain_list=prefered_domain_list,
restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
if ops.product_id:
#If a product is given, the result is always put immediately in the result package (if it is False, they are without package)
quant_dest_package_id = ops.result_package_id.id
ctx = context
else:
# When a pack is moved entirely, the quants should not get any destination package written on them
quant_dest_package_id = False
ctx = context.copy()
ctx['entire_pack'] = True
quant_obj.quants_move(cr, uid, quants, move, ops.location_dest_id, location_from=ops.location_id, lot_id=ops.lot_id.id, owner_id=ops.owner_id.id, src_package_id=ops.package_id.id, dest_package_id=quant_dest_package_id, context=ctx)
# Handle pack in pack
if not ops.product_id and ops.package_id and ops.result_package_id.id != ops.package_id.parent_id.id:
self.pool.get('stock.quant.package').write(cr, SUPERUSER_ID, [ops.package_id.id], {'parent_id': ops.result_package_id.id}, context=context)
if not move_qty.get(move.id):
raise osv.except_osv(_("Error"), _("The roundings of your Unit of Measures %s on the move vs. %s on the product don't allow to do these operations or you are not transferring the picking at once. ") % (move.product_uom.name, move.product_id.uom_id.name))
move_qty[move.id] -= record.qty
#Check for remaining qtys: unreserve leftovers and collect the move_dest_id moves to assign
move_dest_ids = set()
for move in self.browse(cr, uid, ids, context=context):
move_qty_cmp = float_compare(move_qty[move.id], 0, precision_rounding=move.product_id.uom_id.rounding)
if move_qty_cmp > 0: # (=In case no pack operations in picking)
main_domain = [('qty', '>', 0)]
prefered_domain = [('reservation_id', '=', move.id)]
fallback_domain = [('reservation_id', '=', False)]
fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)]
prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2]
self.check_tracking(cr, uid, move, move.restrict_lot_id.id, context=context)
qty = move_qty[move.id]
quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
quant_obj.quants_move(cr, uid, quants, move, move.location_dest_id, lot_id=move.restrict_lot_id.id, owner_id=move.restrict_partner_id.id, context=context)
# If the move has a destination, add it to the list to reserve
if move.move_dest_id and move.move_dest_id.state in ('waiting', 'confirmed'):
move_dest_ids.add(move.move_dest_id.id)
if move.procurement_id:
procurement_ids.append(move.procurement_id.id)
#unreserve the quants and make them available for other operations/moves
quant_obj.quants_unreserve(cr, uid, move, context=context)
# Check the packages have been placed in the correct locations
self._check_package_from_moves(cr, uid, ids, context=context)
#set the move as done
self.write(cr, uid, ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
self.pool.get('procurement.order').check(cr, uid, procurement_ids, context=context)
#assign destination moves
if move_dest_ids:
self.action_assign(cr, uid, list(move_dest_ids), context=context)
#check picking state to set the date_done if needed
done_picking = []
for picking in picking_obj.browse(cr, uid, list(pickings), context=context):
if picking.state == 'done' and not picking.date_done:
done_picking.append(picking.id)
if done_picking:
picking_obj.write(cr, uid, done_picking, {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
return True
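# Summary of the phases above: 1) confirm the remaining draft moves, 2) move
# the quants matched by pack operations, 3) move the quantities left without
# operations, 4) check package locations, 5) mark the moves as done, trigger
# the procurements, assign destination moves and stamp date_done on the
# pickings that are now fully done.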
def unlink(self, cr, uid, ids, context=None):
context = context or {}
for move in self.browse(cr, uid, ids, context=context):
if move.state not in ('draft', 'cancel'):
raise osv.except_osv(_('User Error!'), _('You can only delete draft moves.'))
return super(stock_move, self).unlink(cr, uid, ids, context=context)
def action_scrap(self, cr, uid, ids, quantity, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None):
""" Move the scrap/damaged product into scrap location
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be scrapped
@param quantity : specify scrap qty
@param location_id : specify scrap location
@param context: context arguments
@return: Scrapped lines
"""
#quantity should be given in MOVE UOM
if quantity <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
source_location = move.location_id
if move.state == 'done':
source_location = move.location_dest_id
#Previously used to prevent scrapping from a virtual location, but not necessary anymore
#if source_location.usage != 'internal':
#restrict to scrap from a virtual location because it's meaningless and it may introduce errors in stock ('creating' new products from nowhere)
#raise osv.except_osv(_('Error!'), _('Forbidden operation: it is not allowed to scrap products from a virtual location.'))
move_qty = move.product_qty
uos_qty = quantity / move_qty * move.product_uos_qty
default_val = {
'location_id': source_location.id,
'product_uom_qty': quantity,
'product_uos_qty': uos_qty,
'state': move.state,
'scrapped': True,
'location_dest_id': location_id,
'restrict_lot_id': restrict_lot_id,
'restrict_partner_id': restrict_partner_id,
}
new_move = self.copy(cr, uid, move.id, default_val)
res += [new_move]
product_obj = self.pool.get('product.product')
for product in product_obj.browse(cr, uid, [move.product_id.id], context=context):
if move.picking_id:
uom = product.uom_id.name if product.uom_id else ''
message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name)
move.picking_id.message_post(body=message)
self.action_done(cr, uid, res, context=context)
return res
def split(self, cr, uid, move, qty, restrict_lot_id=False, restrict_partner_id=False, context=None):
""" Splits qty from move move into a new move
:param move: browse record
:param qty: float. quantity to split (given in product UoM)
:param restrict_lot_id: optional production lot that can be given in order to force the new move to restrict its choice of quants to this lot.
:param restrict_partner_id: optional partner that can be given in order to force the new move to restrict its choice of quants to the ones belonging to this partner.
:param context: dictionay. can contains the special key 'source_location_id' in order to force the source location when copying the move
returns the ID of the backorder move created
"""
if move.state in ('done', 'cancel'):
raise osv.except_osv(_('Error'), _('You cannot split a move that is already done.'))
if move.state == 'draft':
#we restrict the split of a draft move because, if it is not confirmed yet, it may be replaced by several other moves in
#case of a phantom bom (with the mrp module), and we don't want to deal with that complexity by copying a move whose product will be exploded.
raise osv.except_osv(_('Error'), _('You cannot split a draft move. It needs to be confirmed first.'))
if move.product_qty <= qty or qty == 0:
return move.id
uom_obj = self.pool.get('product.uom')
context = context or {}
#HALF-UP rounding: the only rounding errors come from propagating the conversion from the default UoM
uom_qty = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, qty, move.product_uom, rounding_method='HALF-UP', context=context)
uos_qty = uom_qty * move.product_uos_qty / move.product_uom_qty
defaults = {
'product_uom_qty': uom_qty,
'product_uos_qty': uos_qty,
'procure_method': 'make_to_stock',
'restrict_lot_id': restrict_lot_id,
'restrict_partner_id': restrict_partner_id,
'split_from': move.id,
'procurement_id': move.procurement_id.id,
'move_dest_id': move.move_dest_id.id,
'origin_returned_move_id': move.origin_returned_move_id.id,
}
if context.get('source_location_id'):
defaults['location_id'] = context['source_location_id']
new_move = self.copy(cr, uid, move.id, defaults)
ctx = context.copy()
ctx['do_not_propagate'] = True
self.write(cr, uid, [move.id], {
'product_uom_qty': move.product_uom_qty - uom_qty,
'product_uos_qty': move.product_uos_qty - uos_qty,
}, context=ctx)
if move.move_dest_id and move.propagate and move.move_dest_id.state not in ('done', 'cancel'):
new_move_prop = self.split(cr, uid, move.move_dest_id, qty, context=context)
self.write(cr, uid, [new_move], {'move_dest_id': new_move_prop}, context=context)
#returning the first element of the list returned by action_confirm is ok because we checked it wouldn't be exploded (and
#thus the result of action_confirm should always be a list of length 1)
return self.action_confirm(cr, uid, [new_move], context=context)[0]
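#Illustrative usage (a sketch only; 'move' stands for a placeholder browse
#record of a confirmed 10-unit move): split 2 units off; the original move
#keeps 8 units and the returned id points to the new 2-unit move:
#   backorder_id = self.split(cr, uid, move, 2.0, context=context)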
def get_code_from_locs(self, cr, uid, move, location_id=False, location_dest_id=False, context=None):
"""
Returns the code ('incoming', 'outgoing' or 'internal') the picking type
should have, based on the usage of the source and destination locations.
This can easily be used to check whether a move is internal or not.
move, location_id and location_dest_id are browse records.
"""
code = 'internal'
src_loc = location_id or move.location_id
dest_loc = location_dest_id or move.location_dest_id
if src_loc.usage == 'internal' and dest_loc.usage != 'internal':
code = 'outgoing'
if src_loc.usage != 'internal' and dest_loc.usage == 'internal':
code = 'incoming'
return code
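#e.g. a move from a location with usage 'internal' to one with usage
#'customer' yields 'outgoing'; 'supplier' -> 'internal' yields 'incoming';
#'internal' -> 'internal' stays 'internal'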
class stock_inventory(osv.osv):
_name = "stock.inventory"
_description = "Inventory"
def _get_move_ids_exist(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for inv in self.browse(cr, uid, ids, context=context):
res[inv.id] = False
if inv.move_ids:
res[inv.id] = True
return res
def _get_available_filters(self, cr, uid, context=None):
"""
Returns the list of filters allowed according to the options checked
in 'Settings\Warehouse'.
:rtype: list of tuple
"""
#default available choices
res_filter = [('none', _('All products')), ('partial', _('Manual Selection of Products')), ('product', _('One product only'))]
settings_obj = self.pool.get('stock.config.settings')
config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context)
#If the configuration has never been saved, all fields default to False and the extra filters should not be displayed
if not config_ids:
return res_filter
stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context)
if stock_settings.group_stock_tracking_owner:
res_filter.append(('owner', _('One owner only')))
res_filter.append(('product_owner', _('One product for a specific owner')))
if stock_settings.group_stock_tracking_lot:
res_filter.append(('lot', _('One Lot/Serial Number')))
if stock_settings.group_stock_packaging:
res_filter.append(('pack', _('A Pack')))
return res_filter
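#e.g. with both tracking options enabled but packaging disabled, the result is:
#[('none', 'All products'), ('partial', 'Manual Selection of Products'),
# ('product', 'One product only'), ('owner', 'One owner only'),
# ('product_owner', 'One product for a specific owner'), ('lot', 'One Lot/Serial Number')]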
def _get_total_qty(self, cr, uid, ids, field_name, args, context=None):
res = {}
for inv in self.browse(cr, uid, ids, context=context):
res[inv.id] = sum([x.product_qty for x in inv.line_ids])
return res
INVENTORY_STATE_SELECTION = [
('draft', 'Draft'),
('cancel', 'Cancelled'),
('confirm', 'In Progress'),
('done', 'Validated'),
]
_columns = {
'name': fields.char('Inventory Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="Inventory Name."),
'date': fields.datetime('Inventory Date', required=True, readonly=True, help="The date that will be used for the stock level check of the products and the validation of the stock move related to this inventory."),
'line_ids': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=False, states={'done': [('readonly', True)]}, help="Inventory Lines.", copy=True),
'move_ids': fields.one2many('stock.move', 'inventory_id', 'Created Moves', help="Inventory Moves.", states={'done': [('readonly', True)]}),
'state': fields.selection(INVENTORY_STATE_SELECTION, 'Status', readonly=True, select=True, copy=False),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft': [('readonly', False)]}),
'location_id': fields.many2one('stock.location', 'Inventoried Location', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_id': fields.many2one('product.product', 'Inventoried Product', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Product to focus your inventory on a particular Product."),
'package_id': fields.many2one('stock.quant.package', 'Inventoried Pack', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Pack to focus your inventory on a particular Pack."),
'partner_id': fields.many2one('res.partner', 'Inventoried Owner', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Owner to focus your inventory on a particular Owner."),
'lot_id': fields.many2one('stock.production.lot', 'Inventoried Lot/Serial Number', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number.", copy=False),
'move_ids_exist': fields.function(_get_move_ids_exist, type='boolean', string='Stock Move Exists?', help='Technical field for attrs in view'),
'filter': fields.selection(_get_available_filters, 'Inventory of', required=True,
help="If you do an entire inventory, you can choose 'All Products' and it will prefill the inventory with the current stock. If you only do some products "\
"(e.g. Cycle Counting) you can choose 'Manual Selection of Products' and the system won't propose anything. You can also let the "\
"system propose for a single product / lot /... "),
'total_qty': fields.function(_get_total_qty, type="float"),
}
def _default_stock_location(self, cr, uid, context=None):
try:
warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0')
return warehouse.lot_stock_id.id
except Exception:
return False
_defaults = {
'date': fields.datetime.now,
'state': 'draft',
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
'location_id': _default_stock_location,
'filter': 'none',
}
def reset_real_qty(self, cr, uid, ids, context=None):
inventory = self.browse(cr, uid, ids[0], context=context)
line_ids = [line.id for line in inventory.line_ids]
self.pool.get('stock.inventory.line').write(cr, uid, line_ids, {'product_qty': 0})
return True
def action_done(self, cr, uid, ids, context=None):
""" Finish the inventory
@return: True
"""
for inv in self.browse(cr, uid, ids, context=context):
for inventory_line in inv.line_ids:
if inventory_line.product_qty < 0 and inventory_line.product_qty != inventory_line.theoretical_qty:
raise osv.except_osv(_('Warning'), _('You cannot set a negative product quantity in an inventory line:\n\t%s - qty: %s') % (inventory_line.product_id.name, inventory_line.product_qty))
self.action_check(cr, uid, [inv.id], context=context)
self.write(cr, uid, [inv.id], {'state': 'done'}, context=context)
self.post_inventory(cr, uid, inv, context=context)
return True
def post_inventory(self, cr, uid, inv, context=None):
#The inventory is posted as a single step, which means quants cannot be moved from one internal location to another using an inventory:
#they will be moved to the inventory loss location, and other quants will be created in the encoded quant location. This is normal behavior,
#as quants cannot be reused from the inventory location (users can still manually move the products before/after the inventory if they want).
move_obj = self.pool.get('stock.move')
move_obj.action_done(cr, uid, [x.id for x in inv.move_ids if x.state != 'done'], context=context)
def action_check(self, cr, uid, ids, context=None):
""" Checks the inventory and computes the stock move to do
@return: True
"""
inventory_line_obj = self.pool.get('stock.inventory.line')
stock_move_obj = self.pool.get('stock.move')
for inventory in self.browse(cr, uid, ids, context=context):
#first remove the existing stock moves linked to this inventory
move_ids = [move.id for move in inventory.move_ids]
stock_move_obj.unlink(cr, uid, move_ids, context=context)
for line in inventory.line_ids:
#compare the checked quantities on inventory lines to the theoretical ones
inventory_line_obj._resolve_inventory_line(cr, uid, line, context=context)
def action_cancel_draft(self, cr, uid, ids, context=None):
""" Cancels the stock move and change inventory state to draft.
@return: True
"""
for inv in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, [inv.id], {'line_ids': [(5,)]}, context=context)
self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
self.write(cr, uid, [inv.id], {'state': 'draft'}, context=context)
return True
def action_cancel_inventory(self, cr, uid, ids, context=None):
self.action_cancel_draft(cr, uid, ids, context=context)
def prepare_inventory(self, cr, uid, ids, context=None):
inventory_line_obj = self.pool.get('stock.inventory.line')
for inventory in self.browse(cr, uid, ids, context=context):
# If there are inventory lines already (e.g. from import), respect those and set their theoretical qty
line_ids = [line.id for line in inventory.line_ids]
if not line_ids and inventory.filter != 'partial':
#compute the inventory lines and create them
vals = self._get_inventory_lines(cr, uid, inventory, context=context)
for product_line in vals:
inventory_line_obj.create(cr, uid, product_line, context=context)
return self.write(cr, uid, ids, {'state': 'confirm', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
def _get_inventory_lines(self, cr, uid, inventory, context=None):
location_obj = self.pool.get('stock.location')
product_obj = self.pool.get('product.product')
location_ids = location_obj.search(cr, uid, [('id', 'child_of', [inventory.location_id.id])], context=context)
domain = ' location_id in %s'
args = (tuple(location_ids),)
if inventory.partner_id:
domain += ' and owner_id = %s'
args += (inventory.partner_id.id,)
if inventory.lot_id:
domain += ' and lot_id = %s'
args += (inventory.lot_id.id,)
if inventory.product_id:
domain += ' and product_id = %s'
args += (inventory.product_id.id,)
if inventory.package_id:
domain += ' and package_id = %s'
args += (inventory.package_id.id,)
cr.execute('''
SELECT product_id, sum(qty) as product_qty, location_id, lot_id as prod_lot_id, package_id, owner_id as partner_id
FROM stock_quant WHERE''' + domain + '''
GROUP BY product_id, location_id, lot_id, package_id, partner_id
''', args)
vals = []
for product_line in cr.dictfetchall():
#replace the None values in the dictionary by False, because falsy values are tested later on
for key, value in product_line.items():
if not value:
product_line[key] = False
product_line['inventory_id'] = inventory.id
product_line['theoretical_qty'] = product_line['product_qty']
if product_line['product_id']:
product = product_obj.browse(cr, uid, product_line['product_id'], context=context)
product_line['product_uom_id'] = product.uom_id.id
vals.append(product_line)
return vals
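#e.g. one element of the returned list (illustrative values only):
#{'product_id': 42, 'product_qty': 15.0, 'location_id': 12, 'prod_lot_id': False,
# 'package_id': False, 'partner_id': False, 'inventory_id': 7,
# 'theoretical_qty': 15.0, 'product_uom_id': 1}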
class stock_inventory_line(osv.osv):
_name = "stock.inventory.line"
_description = "Inventory Line"
_order = "inventory_id, location_name, product_code, product_name, prodlot_name"
def _get_product_name_change(self, cr, uid, ids, context=None):
return self.pool.get('stock.inventory.line').search(cr, uid, [('product_id', 'in', ids)], context=context)
def _get_location_change(self, cr, uid, ids, context=None):
return self.pool.get('stock.inventory.line').search(cr, uid, [('location_id', 'in', ids)], context=context)
def _get_prodlot_change(self, cr, uid, ids, context=None):
return self.pool.get('stock.inventory.line').search(cr, uid, [('prod_lot_id', 'in', ids)], context=context)
def _get_theoretical_qty(self, cr, uid, ids, name, args, context=None):
res = {}
quant_obj = self.pool["stock.quant"]
uom_obj = self.pool["product.uom"]
for line in self.browse(cr, uid, ids, context=context):
quant_ids = self._get_quants(cr, uid, line, context=context)
quants = quant_obj.browse(cr, uid, quant_ids, context=context)
tot_qty = sum([x.qty for x in quants])
if line.product_uom_id and line.product_id.uom_id.id != line.product_uom_id.id:
tot_qty = uom_obj._compute_qty_obj(cr, uid, line.product_id.uom_id, tot_qty, line.product_uom_id, context=context)
res[line.id] = tot_qty
return res
_columns = {
'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True),
'location_id': fields.many2one('stock.location', 'Location', required=True, select=True),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True),
'package_id': fields.many2one('stock.quant.package', 'Pack', select=True),
'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'product_qty': fields.float('Checked Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
'company_id': fields.related('inventory_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, select=True, readonly=True),
'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
'state': fields.related('inventory_id', 'state', type='char', string='Status', readonly=True),
'theoretical_qty': fields.function(_get_theoretical_qty, type='float', digits_compute=dp.get_precision('Product Unit of Measure'),
store={'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id', 'product_id', 'package_id', 'product_uom_id', 'company_id', 'prod_lot_id', 'partner_id'], 20),},
readonly=True, string="Theoretical Quantity"),
'partner_id': fields.many2one('res.partner', 'Owner'),
'product_name': fields.related('product_id', 'name', type='char', string='Product Name', store={
'product.product': (_get_product_name_change, ['name', 'default_code'], 20),
'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}),
'product_code': fields.related('product_id', 'default_code', type='char', string='Product Code', store={
'product.product': (_get_product_name_change, ['name', 'default_code'], 20),
'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}),
'location_name': fields.related('location_id', 'complete_name', type='char', string='Location Name', store={
'stock.location': (_get_location_change, ['name', 'location_id', 'active'], 20),
'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id'], 20),}),
'prodlot_name': fields.related('prod_lot_id', 'name', type='char', string='Serial Number Name', store={
'stock.production.lot': (_get_prodlot_change, ['name'], 20),
'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['prod_lot_id'], 20),}),
}
_defaults = {
'product_qty': 0,
}
def _get_quants(self, cr, uid, line, context=None):
quant_obj = self.pool["stock.quant"]
dom = [('company_id', '=', line.company_id.id), ('location_id', '=', line.location_id.id), ('lot_id', '=', line.prod_lot_id.id),
('product_id','=', line.product_id.id), ('owner_id', '=', line.partner_id.id), ('package_id', '=', line.package_id.id)]
quants = quant_obj.search(cr, uid, dom, context=context)
return quants
def onchange_createline(self, cr, uid, ids, location_id=False, product_id=False, uom_id=False, package_id=False, prod_lot_id=False, partner_id=False, company_id=False, context=None):
quant_obj = self.pool["stock.quant"]
uom_obj = self.pool["product.uom"]
res = {'value': {}}
# If no UoM already put the default UoM of the product
if product_id and not uom_id:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
res['value']['product_uom_id'] = product.uom_id.id
res['domain'] = {'product_uom_id': [('category_id','=',product.uom_id.category_id.id)]}
uom_id = product.uom_id.id
# Calculate theoretical quantity by searching the quants as in quants_get
if product_id and location_id:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
if not company_id:
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
dom = [('company_id', '=', company_id), ('location_id', '=', location_id), ('lot_id', '=', prod_lot_id),
('product_id','=', product_id), ('owner_id', '=', partner_id), ('package_id', '=', package_id)]
quants = quant_obj.search(cr, uid, dom, context=context)
th_qty = sum([x.qty for x in quant_obj.browse(cr, uid, quants, context=context)])
if product_id and uom_id and product.uom_id.id != uom_id:
th_qty = uom_obj._compute_qty(cr, uid, product.uom_id.id, th_qty, uom_id)
res['value']['theoretical_qty'] = th_qty
return res
def _resolve_inventory_line(self, cr, uid, inventory_line, context=None):
stock_move_obj = self.pool.get('stock.move')
quant_obj = self.pool.get('stock.quant')
diff = inventory_line.theoretical_qty - inventory_line.product_qty
if not diff:
return
#each line where the difference between the theoretical and checked quantities is not 0 requires a stock move to be created
vals = {
'name': _('INV:') + (inventory_line.inventory_id.name or ''),
'product_id': inventory_line.product_id.id,
'product_uom': inventory_line.product_uom_id.id,
'date': inventory_line.inventory_id.date,
'company_id': inventory_line.inventory_id.company_id.id,
'inventory_id': inventory_line.inventory_id.id,
'state': 'confirmed',
'restrict_lot_id': inventory_line.prod_lot_id.id,
'restrict_partner_id': inventory_line.partner_id.id,
}
inventory_location_id = inventory_line.product_id.property_stock_inventory.id
if diff < 0:
#found more than expected
vals['location_id'] = inventory_location_id
vals['location_dest_id'] = inventory_line.location_id.id
vals['product_uom_qty'] = -diff
else:
#found less than expected
vals['location_id'] = inventory_line.location_id.id
vals['location_dest_id'] = inventory_location_id
vals['product_uom_qty'] = diff
move_id = stock_move_obj.create(cr, uid, vals, context=context)
move = stock_move_obj.browse(cr, uid, move_id, context=context)
if diff > 0:
domain = [('qty', '>', 0.0), ('package_id', '=', inventory_line.package_id.id), ('lot_id', '=', inventory_line.prod_lot_id.id), ('location_id', '=', inventory_line.location_id.id)]
preferred_domain_list = [[('reservation_id', '=', False)], [('reservation_id.inventory_id', '!=', inventory_line.inventory_id.id)]]
quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, move.product_qty, domain=domain, prefered_domain_list=preferred_domain_list, restrict_partner_id=move.restrict_partner_id.id, context=context)
quant_obj.quants_reserve(cr, uid, quants, move, context=context)
elif inventory_line.package_id:
stock_move_obj.action_done(cr, uid, move_id, context=context)
quants = [x.id for x in move.quant_ids]
quant_obj.write(cr, uid, quants, {'package_id': inventory_line.package_id.id}, context=context)
res = quant_obj.search(cr, uid, [('qty', '<', 0.0), ('product_id', '=', move.product_id.id),
('location_id', '=', move.location_dest_id.id), ('package_id', '!=', False)], limit=1, context=context)
if res:
for quant in move.quant_ids:
if quant.location_id.id == move.location_dest_id.id: #avoid taking a quant that was already reconciled
quant_obj._quant_reconcile_negative(cr, uid, quant, move, context=context)
return move_id
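#e.g. theoretical_qty=10 and product_qty (counted)=8 give diff=2: 2 units are
#moved from the inventoried location to the inventory loss location; with a
#count of 12, diff=-2 and 2 units are moved the other way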
# Deprecated: to be removed in the next version
def restrict_change(self, cr, uid, ids, theoretical_qty, context=None):
return {}
# Deprecated: to be removed in the next version
def on_change_product_id(self, cr, uid, ids, product, uom, theoretical_qty, context=None):
""" Changes UoM
@param location_id: Location id
@param product: Changed product_id
@param uom: UoM product
@return: Dictionary of changed values
"""
if not product:
return {'value': {'product_uom_id': False}}
obj_product = self.pool.get('product.product').browse(cr, uid, product, context=context)
return {'value': {'product_uom_id': uom or obj_product.uom_id.id}}
#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
_name = "stock.warehouse"
_description = "Warehouse"
_columns = {
'name': fields.char('Warehouse Name', required=True, select=True),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, select=True),
'partner_id': fields.many2one('res.partner', 'Address'),
'view_location_id': fields.many2one('stock.location', 'View Location', required=True, domain=[('usage', '=', 'view')]),
'lot_stock_id': fields.many2one('stock.location', 'Location Stock', domain=[('usage', '=', 'internal')], required=True),
'code': fields.char('Short Name', size=5, required=True, help="Short name used to identify your warehouse"),
'route_ids': fields.many2many('stock.location.route', 'stock_route_warehouse', 'warehouse_id', 'route_id', 'Routes', domain="[('warehouse_selectable', '=', True)]", help='Default routes through the warehouse'),
'reception_steps': fields.selection([
('one_step', 'Receive goods directly in stock (1 step)'),
('two_steps', 'Unload in input location then go to stock (2 steps)'),
('three_steps', 'Unload in input location, go through a quality control before being admitted in stock (3 steps)')], 'Incoming Shipments',
help="Default incoming route to follow", required=True),
'delivery_steps': fields.selection([
('ship_only', 'Ship directly from stock (Ship only)'),
('pick_ship', 'Bring goods to output location before shipping (Pick + Ship)'),
('pick_pack_ship', 'Pack goods in a dedicated location, then bring them to the output location for shipping (Pick + Pack + Ship)')], 'Outgoing Shipments',
help="Default outgoing route to follow", required=True),
'wh_input_stock_loc_id': fields.many2one('stock.location', 'Input Location'),
'wh_qc_stock_loc_id': fields.many2one('stock.location', 'Quality Control Location'),
'wh_output_stock_loc_id': fields.many2one('stock.location', 'Output Location'),
'wh_pack_stock_loc_id': fields.many2one('stock.location', 'Packing Location'),
'mto_pull_id': fields.many2one('procurement.rule', 'MTO rule'),
'pick_type_id': fields.many2one('stock.picking.type', 'Pick Type'),
'pack_type_id': fields.many2one('stock.picking.type', 'Pack Type'),
'out_type_id': fields.many2one('stock.picking.type', 'Out Type'),
'in_type_id': fields.many2one('stock.picking.type', 'In Type'),
'int_type_id': fields.many2one('stock.picking.type', 'Internal Type'),
'crossdock_route_id': fields.many2one('stock.location.route', 'Crossdock Route'),
'reception_route_id': fields.many2one('stock.location.route', 'Receipt Route'),
'delivery_route_id': fields.many2one('stock.location.route', 'Delivery Route'),
'resupply_from_wh': fields.boolean('Resupply From Other Warehouses'),
'resupply_wh_ids': fields.many2many('stock.warehouse', 'stock_wh_resupply_table', 'supplied_wh_id', 'supplier_wh_id', 'Resupply Warehouses'),
'resupply_route_ids': fields.one2many('stock.location.route', 'supplied_wh_id', 'Resupply Routes',
help="Routes will be created for these resupply warehouses and you can select them on products and product categories"),
'default_resupply_wh_id': fields.many2one('stock.warehouse', 'Default Resupply Warehouse', help="Goods will always be resupplied from this warehouse"),
}
def onchange_filter_default_resupply_wh_id(self, cr, uid, ids, default_resupply_wh_id, resupply_wh_ids, context=None):
resupply_wh_ids = set([x['id'] for x in (self.resolve_2many_commands(cr, uid, 'resupply_wh_ids', resupply_wh_ids, ['id']))])
if default_resupply_wh_id: #when the default resupply warehouse is being removed, default_resupply_wh_id is falsy and there is nothing to add
resupply_wh_ids.add(default_resupply_wh_id)
resupply_wh_ids = list(resupply_wh_ids)
return {'value': {'resupply_wh_ids': resupply_wh_ids}}
def _get_external_transit_location(self, cr, uid, warehouse, context=None):
''' returns the browse record of the inter-company transit location, or False if not found'''
data_obj = self.pool.get('ir.model.data')
location_obj = self.pool.get('stock.location')
try:
inter_wh_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_inter_wh')[1]
except Exception:
return False
return location_obj.browse(cr, uid, inter_wh_loc, context=context)
def _get_inter_wh_route(self, cr, uid, warehouse, wh, context=None):
return {
'name': _('%s: Supply Product from %s') % (warehouse.name, wh.name),
'warehouse_selectable': False,
'product_selectable': True,
'product_categ_selectable': True,
'supplied_wh_id': warehouse.id,
'supplier_wh_id': wh.id,
}
def _create_resupply_routes(self, cr, uid, warehouse, supplier_warehouses, default_resupply_wh, context=None):
route_obj = self.pool.get('stock.location.route')
pull_obj = self.pool.get('procurement.rule')
#create route selectable on the product to resupply the warehouse from another one
external_transit_location = self._get_external_transit_location(cr, uid, warehouse, context=context)
internal_transit_location = warehouse.company_id.internal_transit_location_id
input_loc = warehouse.wh_input_stock_loc_id
if warehouse.reception_steps == 'one_step':
input_loc = warehouse.lot_stock_id
for wh in supplier_warehouses:
transit_location = wh.company_id.id == warehouse.company_id.id and internal_transit_location or external_transit_location
if transit_location:
output_loc = wh.wh_output_stock_loc_id
if wh.delivery_steps == 'ship_only':
output_loc = wh.lot_stock_id
# Create extra MTO rule (only for 'ship only' because in the other cases MTO rules already exists)
mto_pull_vals = self._get_mto_pull_rule(cr, uid, wh, [(output_loc, transit_location, wh.out_type_id.id)], context=context)[0]
pull_obj.create(cr, uid, mto_pull_vals, context=context)
inter_wh_route_vals = self._get_inter_wh_route(cr, uid, warehouse, wh, context=context)
inter_wh_route_id = route_obj.create(cr, uid, vals=inter_wh_route_vals, context=context)
values = [(output_loc, transit_location, wh.out_type_id.id, wh), (transit_location, input_loc, warehouse.in_type_id.id, warehouse)]
pull_rules_list = self._get_supply_pull_rules(cr, uid, wh.id, values, inter_wh_route_id, context=context)
for pull_rule in pull_rules_list:
pull_obj.create(cr, uid, vals=pull_rule, context=context)
#if the warehouse is also set as default resupply method, assign this route automatically to the warehouse
if default_resupply_wh and default_resupply_wh.id == wh.id:
self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]}, context=context)
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
'reception_steps': 'one_step',
'delivery_steps': 'ship_only',
}
_sql_constraints = [
('warehouse_name_uniq', 'unique(name, company_id)', 'The name of the warehouse must be unique per company!'),
('warehouse_code_uniq', 'unique(code, company_id)', 'The code of the warehouse must be unique per company!'),
]
def _get_partner_locations(self, cr, uid, ids, context=None):
''' returns a tuple made of the browse record of customer location and the browse record of supplier location'''
data_obj = self.pool.get('ir.model.data')
location_obj = self.pool.get('stock.location')
try:
customer_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_customers')[1]
supplier_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_suppliers')[1]
except Exception:
customer_loc = location_obj.search(cr, uid, [('usage', '=', 'customer')], context=context)
customer_loc = customer_loc and customer_loc[0] or False
supplier_loc = location_obj.search(cr, uid, [('usage', '=', 'supplier')], context=context)
supplier_loc = supplier_loc and supplier_loc[0] or False
if not (customer_loc and supplier_loc):
raise osv.except_osv(_('Error!'), _('Can\'t find any customer or supplier location.'))
return location_obj.browse(cr, uid, [customer_loc, supplier_loc], context=context)
def _location_used(self, cr, uid, location_id, warehouse, context=None):
pull_obj = self.pool['procurement.rule']
push_obj = self.pool['stock.location.path']
pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_src_id', '=', location_id), ('location_id', '=', location_id)], context=context)
pushs = push_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_from_id', '=', location_id), ('location_dest_id', '=', location_id)], context=context)
if pulls or pushs:
return True
return False
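#Illustrative check (a sketch; 'warehouse' stands for a placeholder browse record):
#   if self._location_used(cr, uid, warehouse.wh_input_stock_loc_id.id, warehouse, context=context):
#       pass  # the input location is referenced by rules outside the warehouse routes, so it must not be deactivated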
def switch_location(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
location_obj = self.pool.get('stock.location')
new_reception_step = new_reception_step or warehouse.reception_steps
new_delivery_step = new_delivery_step or warehouse.delivery_steps
if warehouse.reception_steps != new_reception_step:
if not self._location_used(cr, uid, warehouse.wh_input_stock_loc_id.id, warehouse, context=context):
location_obj.write(cr, uid, [warehouse.wh_input_stock_loc_id.id, warehouse.wh_qc_stock_loc_id.id], {'active': False}, context=context)
if new_reception_step != 'one_step':
location_obj.write(cr, uid, warehouse.wh_input_stock_loc_id.id, {'active': True}, context=context)
if new_reception_step == 'three_steps':
location_obj.write(cr, uid, warehouse.wh_qc_stock_loc_id.id, {'active': True}, context=context)
if warehouse.delivery_steps != new_delivery_step:
if not self._location_used(cr, uid, warehouse.wh_output_stock_loc_id.id, warehouse, context=context):
location_obj.write(cr, uid, [warehouse.wh_output_stock_loc_id.id], {'active': False}, context=context)
if not self._location_used(cr, uid, warehouse.wh_pack_stock_loc_id.id, warehouse, context=context):
location_obj.write(cr, uid, [warehouse.wh_pack_stock_loc_id.id], {'active': False}, context=context)
if new_delivery_step != 'ship_only':
location_obj.write(cr, uid, warehouse.wh_output_stock_loc_id.id, {'active': True}, context=context)
if new_delivery_step == 'pick_pack_ship':
location_obj.write(cr, uid, warehouse.wh_pack_stock_loc_id.id, {'active': True}, context=context)
return True
def _get_reception_delivery_route(self, cr, uid, warehouse, route_name, context=None):
return {
'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
'product_categ_selectable': True,
'product_selectable': False,
'sequence': 10,
}
def _get_supply_pull_rules(self, cr, uid, supply_warehouse, values, new_route_id, context=None):
pull_rules_list = []
for from_loc, dest_loc, pick_type_id, warehouse in values:
pull_rules_list.append({
'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
'location_src_id': from_loc.id,
'location_id': dest_loc.id,
'route_id': new_route_id,
'action': 'move',
'picking_type_id': pick_type_id,
'procure_method': warehouse.lot_stock_id.id != from_loc.id and 'make_to_order' or 'make_to_stock', # the first part of the resupply route is MTS
'warehouse_id': warehouse.id,
'propagate_warehouse_id': supply_warehouse,
})
return pull_rules_list
def _get_push_pull_rules(self, cr, uid, warehouse, active, values, new_route_id, context=None):
first_rule = True
push_rules_list = []
pull_rules_list = []
for from_loc, dest_loc, pick_type_id in values:
push_rules_list.append({
'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
'location_from_id': from_loc.id,
'location_dest_id': dest_loc.id,
'route_id': new_route_id,
'auto': 'manual',
'picking_type_id': pick_type_id,
'active': active,
'warehouse_id': warehouse.id,
})
pull_rules_list.append({
'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
'location_src_id': from_loc.id,
'location_id': dest_loc.id,
'route_id': new_route_id,
'action': 'move',
'picking_type_id': pick_type_id,
'procure_method': first_rule is True and 'make_to_stock' or 'make_to_order',
'active': active,
'warehouse_id': warehouse.id,
})
first_rule = False
return push_rules_list, pull_rules_list
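#e.g. for a two-step flow [(A, B, t1), (B, C, t2)] the generated pull rules
#are: A -> B with procure_method 'make_to_stock' (first rule) and B -> C with
#'make_to_order', so each later step is resupplied by the previous one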
def _get_mto_route(self, cr, uid, context=None):
route_obj = self.pool.get('stock.location.route')
data_obj = self.pool.get('ir.model.data')
try:
mto_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_mto')[1]
except Exception:
mto_route_id = route_obj.search(cr, uid, [('name', 'like', _('Make To Order'))], context=context)
mto_route_id = mto_route_id and mto_route_id[0] or False
if not mto_route_id:
raise osv.except_osv(_('Error!'), _('Can\'t find any generic Make To Order route.'))
return mto_route_id
def _check_remove_mto_resupply_rules(self, cr, uid, warehouse, context=None):
""" Checks that the moves from the different """
pull_obj = self.pool.get('procurement.rule')
mto_route_id = self._get_mto_route(cr, uid, context=context)
rules = pull_obj.search(cr, uid, ['&', ('location_src_id', '=', warehouse.lot_stock_id.id), ('location_id.usage', '=', 'transit')], context=context)
pull_obj.unlink(cr, uid, rules, context=context)
def _get_mto_pull_rule(self, cr, uid, warehouse, values, context=None):
mto_route_id = self._get_mto_route(cr, uid, context=context)
res = []
for value in values:
from_loc, dest_loc, pick_type_id = value
res += [{
'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context) + _(' MTO'),
'location_src_id': from_loc.id,
'location_id': dest_loc.id,
'route_id': mto_route_id,
'action': 'move',
'picking_type_id': pick_type_id,
'procure_method': 'make_to_order',
'active': True,
'warehouse_id': warehouse.id,
}]
return res
def _get_crossdock_route(self, cr, uid, warehouse, route_name, context=None):
return {
'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
'warehouse_selectable': False,
'product_selectable': True,
'product_categ_selectable': True,
'active': warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step',
'sequence': 20,
}
def create_routes(self, cr, uid, ids, warehouse, context=None):
wh_route_ids = []
route_obj = self.pool.get('stock.location.route')
pull_obj = self.pool.get('procurement.rule')
push_obj = self.pool.get('stock.location.path')
routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
#create reception route and rules
route_name, values = routes_dict[warehouse.reception_steps]
route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
reception_route_id = route_obj.create(cr, uid, route_vals, context=context)
wh_route_ids.append((4, reception_route_id))
push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, reception_route_id, context=context)
#create the push/pull rules
for push_rule in push_rules_list:
push_obj.create(cr, uid, vals=push_rule, context=context)
for pull_rule in pull_rules_list:
#all pull rules in reception route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location
pull_rule['procure_method'] = 'make_to_order'
pull_obj.create(cr, uid, vals=pull_rule, context=context)
#create MTS route and pull rules for delivery and a specific route MTO to be set on the product
route_name, values = routes_dict[warehouse.delivery_steps]
route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
#create the route and its pull rules
delivery_route_id = route_obj.create(cr, uid, route_vals, context=context)
wh_route_ids.append((4, delivery_route_id))
dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, delivery_route_id, context=context)
for pull_rule in pull_rules_list:
pull_obj.create(cr, uid, vals=pull_rule, context=context)
#create MTO pull rule and link it to the generic MTO route
mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0]
mto_pull_id = pull_obj.create(cr, uid, mto_pull_vals, context=context)
#create a route for cross dock operations, that can be set on products and product categories
route_name, values = routes_dict['crossdock']
crossdock_route_vals = self._get_crossdock_route(cr, uid, warehouse, route_name, context=context)
crossdock_route_id = route_obj.create(cr, uid, vals=crossdock_route_vals, context=context)
wh_route_ids.append((4, crossdock_route_id))
dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', values, crossdock_route_id, context=context)
for pull_rule in pull_rules_list:
# cross-dock rules are logically MTO
pull_rule['procure_method'] = 'make_to_order'
pull_obj.create(cr, uid, vals=pull_rule, context=context)
#create route selectable on the product to resupply the warehouse from another one
self._create_resupply_routes(cr, uid, warehouse, warehouse.resupply_wh_ids, warehouse.default_resupply_wh_id, context=context)
#return routes and mto pull rule to store on the warehouse
return {
'route_ids': wh_route_ids,
'mto_pull_id': mto_pull_id,
'reception_route_id': reception_route_id,
'delivery_route_id': delivery_route_id,
'crossdock_route_id': crossdock_route_id,
}
def change_route(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
picking_type_obj = self.pool.get('stock.picking.type')
pull_obj = self.pool.get('procurement.rule')
push_obj = self.pool.get('stock.location.path')
route_obj = self.pool.get('stock.location.route')
new_reception_step = new_reception_step or warehouse.reception_steps
new_delivery_step = new_delivery_step or warehouse.delivery_steps
#change the default source and destination location and (de)activate picking types
input_loc = warehouse.wh_input_stock_loc_id
if new_reception_step == 'one_step':
input_loc = warehouse.lot_stock_id
output_loc = warehouse.wh_output_stock_loc_id
if new_delivery_step == 'ship_only':
output_loc = warehouse.lot_stock_id
picking_type_obj.write(cr, uid, warehouse.in_type_id.id, {'default_location_dest_id': input_loc.id}, context=context)
picking_type_obj.write(cr, uid, warehouse.out_type_id.id, {'default_location_src_id': output_loc.id}, context=context)
picking_type_obj.write(cr, uid, warehouse.pick_type_id.id, {'active': new_delivery_step != 'ship_only'}, context=context)
picking_type_obj.write(cr, uid, warehouse.pack_type_id.id, {'active': new_delivery_step == 'pick_pack_ship'}, context=context)
routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
#update delivery route and rules: unlink the existing rules of the warehouse delivery route and recreate it
pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.delivery_route_id.pull_ids], context=context)
route_name, values = routes_dict[new_delivery_step]
route_obj.write(cr, uid, warehouse.delivery_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.delivery_route_id.id, context=context)
#create the pull rules
for pull_rule in pull_rules_list:
pull_obj.create(cr, uid, vals=pull_rule, context=context)
#update receipt route and rules: unlink the existing rules of the warehouse receipt route and recreate it
pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.pull_ids], context=context)
push_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.push_ids], context=context)
route_name, values = routes_dict[new_reception_step]
route_obj.write(cr, uid, warehouse.reception_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.reception_route_id.id, context=context)
#create the push/pull rules
for push_rule in push_rules_list:
push_obj.create(cr, uid, vals=push_rule, context=context)
for pull_rule in pull_rules_list:
#all pull rules in receipt route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location
pull_rule['procure_method'] = 'make_to_order'
pull_obj.create(cr, uid, vals=pull_rule, context=context)
route_obj.write(cr, uid, warehouse.crossdock_route_id.id, {'active': new_reception_step != 'one_step' and new_delivery_step != 'ship_only'}, context=context)
#change MTO rule
dummy, values = routes_dict[new_delivery_step]
mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0]
pull_obj.write(cr, uid, warehouse.mto_pull_id.id, mto_pull_vals, context=context)
return True
def create_sequences_and_picking_types(self, cr, uid, warehouse, context=None):
seq_obj = self.pool.get('ir.sequence')
picking_type_obj = self.pool.get('stock.picking.type')
#create new sequences
in_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence in'), 'prefix': warehouse.code + '/IN/', 'padding': 5}, context=context)
out_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence out'), 'prefix': warehouse.code + '/OUT/', 'padding': 5}, context=context)
pack_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence packing'), 'prefix': warehouse.code + '/PACK/', 'padding': 5}, context=context)
pick_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence picking'), 'prefix': warehouse.code + '/PICK/', 'padding': 5}, context=context)
int_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence internal'), 'prefix': warehouse.code + '/INT/', 'padding': 5}, context=context)
wh_stock_loc = warehouse.lot_stock_id
wh_input_stock_loc = warehouse.wh_input_stock_loc_id
wh_output_stock_loc = warehouse.wh_output_stock_loc_id
wh_pack_stock_loc = warehouse.wh_pack_stock_loc_id
#fetch customer and supplier locations, for references
customer_loc, supplier_loc = self._get_partner_locations(cr, uid, warehouse.id, context=context)
#create in, out, internal picking types for warehouse
input_loc = wh_input_stock_loc
if warehouse.reception_steps == 'one_step':
input_loc = wh_stock_loc
output_loc = wh_output_stock_loc
if warehouse.delivery_steps == 'ship_only':
output_loc = wh_stock_loc
#choose the next available color for the picking types of this warehouse
color = 0
available_colors = [c%9 for c in range(3, 12)] # put flashy colors first
all_used_colors = self.pool.get('stock.picking.type').search_read(cr, uid, [('warehouse_id', '!=', False), ('color', '!=', False)], ['color'], order='color')
#don't use sets to preserve the list order
for x in all_used_colors:
if x['color'] in available_colors:
available_colors.remove(x['color'])
if available_colors:
color = available_colors[0]
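#e.g. if colors 3 and 4 are already used by other warehouses, available_colors
#becomes [5, 6, 7, 8, 0, 1, 2] and the picking types of this warehouse get color 5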
#order the picking types with a sequence allowing to have the following order for each warehouse: reception, internal, pick, pack, ship.
max_sequence = self.pool.get('stock.picking.type').search_read(cr, uid, [], ['sequence'], order='sequence desc')
max_sequence = max_sequence and max_sequence[0]['sequence'] or 0
in_type_id = picking_type_obj.create(cr, uid, vals={
'name': _('Receipts'),
'warehouse_id': warehouse.id,
'code': 'incoming',
'sequence_id': in_seq_id,
'default_location_src_id': supplier_loc.id,
'default_location_dest_id': input_loc.id,
'sequence': max_sequence + 1,
'color': color}, context=context)
out_type_id = picking_type_obj.create(cr, uid, vals={
'name': _('Delivery Orders'),
'warehouse_id': warehouse.id,
'code': 'outgoing',
'sequence_id': out_seq_id,
'return_picking_type_id': in_type_id,
'default_location_src_id': output_loc.id,
'default_location_dest_id': customer_loc.id,
'sequence': max_sequence + 4,
'color': color}, context=context)
picking_type_obj.write(cr, uid, [in_type_id], {'return_picking_type_id': out_type_id}, context=context)
int_type_id = picking_type_obj.create(cr, uid, vals={
'name': _('Internal Transfers'),
'warehouse_id': warehouse.id,
'code': 'internal',
'sequence_id': int_seq_id,
'default_location_src_id': wh_stock_loc.id,
'default_location_dest_id': wh_stock_loc.id,
'active': True,
'sequence': max_sequence + 2,
'color': color}, context=context)
pack_type_id = picking_type_obj.create(cr, uid, vals={
'name': _('Pack'),
'warehouse_id': warehouse.id,
'code': 'internal',
'sequence_id': pack_seq_id,
'default_location_src_id': wh_pack_stock_loc.id,
'default_location_dest_id': output_loc.id,
'active': warehouse.delivery_steps == 'pick_pack_ship',
'sequence': max_sequence + 3,
'color': color}, context=context)
pick_type_id = picking_type_obj.create(cr, uid, vals={
'name': _('Pick'),
'warehouse_id': warehouse.id,
'code': 'internal',
'sequence_id': pick_seq_id,
'default_location_src_id': wh_stock_loc.id,
'default_location_dest_id': wh_pack_stock_loc.id,
'active': warehouse.delivery_steps != 'ship_only',
'sequence': max_sequence + 2,
'color': color}, context=context)
#write picking types on WH
vals = {
'in_type_id': in_type_id,
'out_type_id': out_type_id,
'pack_type_id': pack_type_id,
'pick_type_id': pick_type_id,
'int_type_id': int_type_id,
}
super(stock_warehouse, self).write(cr, uid, warehouse.id, vals=vals, context=context)
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals is None:
vals = {}
data_obj = self.pool.get('ir.model.data')
seq_obj = self.pool.get('ir.sequence')
picking_type_obj = self.pool.get('stock.picking.type')
location_obj = self.pool.get('stock.location')
#create view location for warehouse
loc_vals = {
'name': _(vals.get('code')),
'usage': 'view',
'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1],
}
if vals.get('company_id'):
loc_vals['company_id'] = vals.get('company_id')
wh_loc_id = location_obj.create(cr, uid, loc_vals, context=context)
vals['view_location_id'] = wh_loc_id
#create all location
def_values = self.default_get(cr, uid, ['reception_steps', 'delivery_steps'])
reception_steps = vals.get('reception_steps', def_values['reception_steps'])
delivery_steps = vals.get('delivery_steps', def_values['delivery_steps'])
context_with_inactive = context.copy()
context_with_inactive['active_test'] = False
sub_locations = [
{'name': _('Stock'), 'active': True, 'field': 'lot_stock_id'},
{'name': _('Input'), 'active': reception_steps != 'one_step', 'field': 'wh_input_stock_loc_id'},
{'name': _('Quality Control'), 'active': reception_steps == 'three_steps', 'field': 'wh_qc_stock_loc_id'},
{'name': _('Output'), 'active': delivery_steps != 'ship_only', 'field': 'wh_output_stock_loc_id'},
{'name': _('Packing Zone'), 'active': delivery_steps == 'pick_pack_ship', 'field': 'wh_pack_stock_loc_id'},
]
for values in sub_locations:
loc_vals = {
'name': values['name'],
'usage': 'internal',
'location_id': wh_loc_id,
'active': values['active'],
}
if vals.get('company_id'):
loc_vals['company_id'] = vals.get('company_id')
location_id = location_obj.create(cr, uid, loc_vals, context=context_with_inactive)
vals[values['field']] = location_id
#create WH
new_id = super(stock_warehouse, self).create(cr, uid, vals=vals, context=context)
warehouse = self.browse(cr, uid, new_id, context=context)
self.create_sequences_and_picking_types(cr, uid, warehouse, context=context)
#create routes and push/pull rules
new_objects_dict = self.create_routes(cr, uid, new_id, warehouse, context=context)
self.write(cr, uid, warehouse.id, new_objects_dict, context=context)
return new_id
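#Illustrative create call (a sketch only; the values are placeholders):
#   wh_id = self.pool.get('stock.warehouse').create(cr, uid, {
#       'name': 'Chicago Warehouse', 'code': 'CHIC',
#       'reception_steps': 'two_steps', 'delivery_steps': 'pick_ship',
#   }, context=context)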
def _format_rulename(self, cr, uid, obj, from_loc, dest_loc, context=None):
return obj.code + ': ' + from_loc.name + ' -> ' + dest_loc.name
def _format_routename(self, cr, uid, obj, name, context=None):
return obj.name + ': ' + name
def get_routes_dict(self, cr, uid, ids, warehouse, context=None):
#fetch customer and supplier locations, for references
customer_loc, supplier_loc = self._get_partner_locations(cr, uid, ids, context=context)
return {
'one_step': (_('Receipt in 1 step'), []),
'two_steps': (_('Receipt in 2 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]),
'three_steps': (_('Receipt in 3 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_qc_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_qc_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]),
'crossdock': (_('Cross-Dock'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
'ship_only': (_('Ship Only'), [(warehouse.lot_stock_id, customer_loc, warehouse.out_type_id.id)]),
'pick_ship': (_('Pick + Ship'), [(warehouse.lot_stock_id, warehouse.wh_output_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
'pick_pack_ship': (_('Pick + Pack + Ship'), [(warehouse.lot_stock_id, warehouse.wh_pack_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_pack_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.pack_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
}
def _handle_renaming(self, cr, uid, warehouse, name, code, context=None):
location_obj = self.pool.get('stock.location')
route_obj = self.pool.get('stock.location.route')
pull_obj = self.pool.get('procurement.rule')
push_obj = self.pool.get('stock.location.path')
#rename location
location_id = warehouse.lot_stock_id.location_id.id
location_obj.write(cr, uid, location_id, {'name': code}, context=context)
#rename route and push-pull rules
for route in warehouse.route_ids:
route_obj.write(cr, uid, route.id, {'name': route.name.replace(warehouse.name, name, 1)}, context=context)
for pull in route.pull_ids:
pull_obj.write(cr, uid, pull.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context)
for push in route.push_ids:
push_obj.write(cr, uid, push.id, {'name': push.name.replace(warehouse.name, name, 1)}, context=context)
#change the mto pull rule name
if warehouse.mto_pull_id.id:
pull_obj.write(cr, uid, warehouse.mto_pull_id.id, {'name': warehouse.mto_pull_id.name.replace(warehouse.name, name, 1)}, context=context)
def _check_delivery_resupply(self, cr, uid, warehouse, new_location, change_to_multiple, context=None):
""" Will check if the resupply routes from this warehouse follow the changes of number of delivery steps """
#Check routes that are being delivered by this warehouse and change the rule going to transit location
route_obj = self.pool.get("stock.location.route")
pull_obj = self.pool.get("procurement.rule")
routes = route_obj.search(cr, uid, [('supplier_wh_id','=', warehouse.id)], context=context)
pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_id.usage', '=', 'transit')], context=context)
if pulls:
pull_obj.write(cr, uid, pulls, {'location_src_id': new_location, 'procure_method': change_to_multiple and "make_to_order" or "make_to_stock"}, context=context)
# Create or clean MTO rules
mto_route_id = self._get_mto_route(cr, uid, context=context)
if not change_to_multiple:
# If single delivery we should create the necessary MTO rules for the resupply
# pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
pull_recs = pull_obj.browse(cr, uid, pulls, context=context)
transfer_locs = list(set([x.location_id for x in pull_recs]))
vals = [(warehouse.lot_stock_id, x, warehouse.out_type_id.id) for x in transfer_locs]
mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, vals, context=context)
for mto_pull_val in mto_pull_vals:
pull_obj.create(cr, uid, mto_pull_val, context=context)
else:
# We need to delete all the MTO pull rules, otherwise they risk being used by the system
pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
if pulls:
pull_obj.unlink(cr, uid, pulls, context=context)
def _check_reception_resupply(self, cr, uid, warehouse, new_location, context=None):
"""
Checks that the resupply routes to this warehouse follow the change in the number of receipt steps
"""
#Check the routes that resupply this warehouse and change the rules coming from the transit location
route_obj = self.pool.get("stock.location.route")
pull_obj = self.pool.get("procurement.rule")
routes = route_obj.search(cr, uid, [('supplied_wh_id','=', warehouse.id)], context=context)
pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_src_id.usage', '=', 'transit')], context=context)
if pulls:
pull_obj.write(cr, uid, pulls, {'location_id': new_location}, context=context)
def _check_resupply(self, cr, uid, warehouse, reception_new, delivery_new, context=None):
if reception_new:
old_val = warehouse.reception_steps
new_val = reception_new
change_to_one = (old_val != 'one_step' and new_val == 'one_step')
change_to_multiple = (old_val == 'one_step' and new_val != 'one_step')
if change_to_one or change_to_multiple:
new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_input_stock_loc_id.id
self._check_reception_resupply(cr, uid, warehouse, new_location, context=context)
if delivery_new:
old_val = warehouse.delivery_steps
new_val = delivery_new
change_to_one = (old_val != 'ship_only' and new_val == 'ship_only')
change_to_multiple = (old_val == 'ship_only' and new_val != 'ship_only')
if change_to_one or change_to_multiple:
new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_output_stock_loc_id.id
self._check_delivery_resupply(cr, uid, warehouse, new_location, change_to_multiple, context=context)
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
seq_obj = self.pool.get('ir.sequence')
route_obj = self.pool.get('stock.location.route')
context_with_inactive = context.copy()
context_with_inactive['active_test'] = False
for warehouse in self.browse(cr, uid, ids, context=context_with_inactive):
#first of all, check if we need to delete and recreate route
if vals.get('reception_steps') or vals.get('delivery_steps'):
#activate and deactivate location according to reception and delivery option
self.switch_location(cr, uid, warehouse.id, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context)
# switch between route
self.change_route(cr, uid, ids, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context_with_inactive)
# Check if we need to change something to resupply warehouses and associated MTO rules
self._check_resupply(cr, uid, warehouse, vals.get('reception_steps'), vals.get('delivery_steps'), context=context)
if vals.get('code') or vals.get('name'):
name = warehouse.name
#rename sequence
if vals.get('name'):
name = vals.get('name', warehouse.name)
self._handle_renaming(cr, uid, warehouse, name, vals.get('code', warehouse.code), context=context_with_inactive)
if warehouse.in_type_id:
#keep the sequence prefixes consistent with those created in create_sequences_and_picking_types
seq_obj.write(cr, uid, warehouse.in_type_id.sequence_id.id, {'name': name + _(' Sequence in'), 'prefix': vals.get('code', warehouse.code) + '/IN/'}, context=context)
seq_obj.write(cr, uid, warehouse.out_type_id.sequence_id.id, {'name': name + _(' Sequence out'), 'prefix': vals.get('code', warehouse.code) + '/OUT/'}, context=context)
seq_obj.write(cr, uid, warehouse.pack_type_id.sequence_id.id, {'name': name + _(' Sequence packing'), 'prefix': vals.get('code', warehouse.code) + '/PACK/'}, context=context)
seq_obj.write(cr, uid, warehouse.pick_type_id.sequence_id.id, {'name': name + _(' Sequence picking'), 'prefix': vals.get('code', warehouse.code) + '/PICK/'}, context=context)
seq_obj.write(cr, uid, warehouse.int_type_id.sequence_id.id, {'name': name + _(' Sequence internal'), 'prefix': vals.get('code', warehouse.code) + '/INT/'}, context=context)
if vals.get('resupply_wh_ids') and not vals.get('resupply_route_ids'):
for cmd in vals.get('resupply_wh_ids'):
if cmd[0] == 6:
new_ids = set(cmd[2])
old_ids = set([wh.id for wh in warehouse.resupply_wh_ids])
to_add_wh_ids = new_ids - old_ids
if to_add_wh_ids:
supplier_warehouses = self.browse(cr, uid, list(to_add_wh_ids), context=context)
self._create_resupply_routes(cr, uid, warehouse, supplier_warehouses, warehouse.default_resupply_wh_id, context=context)
to_remove_wh_ids = old_ids - new_ids
if to_remove_wh_ids:
to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', 'in', list(to_remove_wh_ids))], context=context)
if to_remove_route_ids:
route_obj.unlink(cr, uid, to_remove_route_ids, context=context)
else:
#not implemented
pass
if 'default_resupply_wh_id' in vals:
if vals.get('default_resupply_wh_id') == warehouse.id:
raise osv.except_osv(_('Warning'),_('The default resupply warehouse should be different than the warehouse itself!'))
if warehouse.default_resupply_wh_id:
#remove the existing resupplying route on the warehouse
to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', warehouse.default_resupply_wh_id.id)], context=context)
for inter_wh_route_id in to_remove_route_ids:
self.write(cr, uid, [warehouse.id], {'route_ids': [(3, inter_wh_route_id)]})
if vals.get('default_resupply_wh_id'):
#assign the new resupplying route on all products
to_assign_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', vals.get('default_resupply_wh_id'))], context=context)
for inter_wh_route_id in to_assign_route_ids:
self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]})
return super(stock_warehouse, self).write(cr, uid, ids, vals=vals, context=context)
def get_all_routes_for_wh(self, cr, uid, warehouse, context=None):
route_obj = self.pool.get("stock.location.route")
all_routes = [route.id for route in warehouse.route_ids]
all_routes += route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id)], context=context)
all_routes += [warehouse.mto_pull_id.route_id.id]
return all_routes
def view_all_routes_for_wh(self, cr, uid, ids, context=None):
all_routes = []
for wh in self.browse(cr, uid, ids, context=context):
all_routes += self.get_all_routes_for_wh(cr, uid, wh, context=context)
domain = [('id', 'in', all_routes)]
return {
'name': _('Warehouse\'s Routes'),
'domain': domain,
'res_model': 'stock.location.route',
'type': 'ir.actions.act_window',
'view_id': False,
'view_mode': 'tree,form',
'view_type': 'form',
'limit': 20
}
class stock_location_path(osv.osv):
_name = "stock.location.path"
_description = "Pushed Flows"
_order = "name"
def _get_rules(self, cr, uid, ids, context=None):
res = []
for route in self.browse(cr, uid, ids, context=context):
res += [x.id for x in route.push_ids]
return res
_columns = {
'name': fields.char('Operation Name', required=True),
'company_id': fields.many2one('res.company', 'Company'),
'route_id': fields.many2one('stock.location.route', 'Route'),
'location_from_id': fields.many2one('stock.location', 'Source Location', ondelete='cascade', select=1, required=True),
'location_dest_id': fields.many2one('stock.location', 'Destination Location', ondelete='cascade', select=1, required=True),
'delay': fields.integer('Delay (days)', help="Number of days to do this transition"),
'picking_type_id': fields.many2one('stock.picking.type', 'Type of the new Operation', required=True, help="This is the picking type associated with the different pickings"),
'auto': fields.selection(
[('auto','Automatic Move'), ('manual','Manual Operation'),('transparent','Automatic No Step Added')],
'Automatic Move',
required=True, select=1,
help="This is used to define paths the product has to follow within the location tree.\n" \
"The 'Automatic Move' value will create a stock move after the current one that will be "\
"validated automatically. With 'Manual Operation', the stock move has to be validated "\
"by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
),
'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move is cancelled or split, the move generated by this move will too'),
'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the rule without removing it."),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'),
'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
store={
'stock.location.route': (_get_rules, ['sequence'], 10),
'stock.location.path': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
}),
'sequence': fields.integer('Sequence'),
}
_defaults = {
'auto': 'auto',
'delay': 0,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'procurement.order', context=c),
'propagate': True,
'active': True,
}
def _prepare_push_apply(self, cr, uid, rule, move, context=None):
newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
return {
'location_id': move.location_dest_id.id,
'location_dest_id': rule.location_dest_id.id,
'date': newdate,
'company_id': rule.company_id and rule.company_id.id or False,
'date_expected': newdate,
'picking_id': False,
'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False,
'propagate': rule.propagate,
'push_rule_id': rule.id,
'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False,
}
def _apply(self, cr, uid, rule, move, context=None):
move_obj = self.pool.get('stock.move')
newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if rule.auto == 'transparent':
old_dest_location = move.location_dest_id.id
move_obj.write(cr, uid, [move.id], {
'date': newdate,
'date_expected': newdate,
'location_dest_id': rule.location_dest_id.id
})
#avoid looping if a push rule is not well configured
if rule.location_dest_id.id != old_dest_location:
#call again push_apply to see if a next step is defined
move_obj._push_apply(cr, uid, [move], context=context)
else:
vals = self._prepare_push_apply(cr, uid, rule, move, context=context)
move_id = move_obj.copy(cr, uid, move.id, vals, context=context)
move_obj.write(cr, uid, [move.id], {
'move_dest_id': move_id,
})
move_obj.action_confirm(cr, uid, [move_id], context=context)
# -------------------------
# Packaging related stuff
# -------------------------
from openerp.report import report_sxw
class stock_package(osv.osv):
"""
These are the packages, containing quants and/or other packages
"""
_name = "stock.quant.package"
_description = "Physical Packages"
_parent_name = "parent_id"
_parent_store = True
_parent_order = 'name'
_order = 'parent_left'
def name_get(self, cr, uid, ids, context=None):
res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
return res.items()
def _complete_name(self, cr, uid, ids, name, args, context=None):
""" Forms complete name of location from parent location to child location.
@return: Dictionary of values
"""
res = {}
for m in self.browse(cr, uid, ids, context=context):
res[m.id] = m.name
parent = m.parent_id
while parent:
res[m.id] = parent.name + ' / ' + res[m.id]
parent = parent.parent_id
return res
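# Illustrative sketch with hypothetical data: for a package named 'BOX-3'
# nested inside 'PALLET-1', the loop above walks up the parent chain and
# yields res[box.id] == 'PALLET-1 / BOX-3', i.e. parent names are prepended
# from the root down to the leaf.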
def _get_packages(self, cr, uid, ids, context=None):
"""Returns packages from quants for store"""
res = set()
for quant in self.browse(cr, uid, ids, context=context):
pack = quant.package_id
while pack:
res.add(pack.id)
pack = pack.parent_id
return list(res)
def _get_package_info(self, cr, uid, ids, name, args, context=None):
quant_obj = self.pool.get("stock.quant")
default_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
res = dict((res_id, {'location_id': False, 'company_id': default_company_id, 'owner_id': False}) for res_id in ids)
for pack in self.browse(cr, uid, ids, context=context):
quants = quant_obj.search(cr, uid, [('package_id', 'child_of', pack.id)], context=context)
if quants:
quant = quant_obj.browse(cr, uid, quants[0], context=context)
res[pack.id]['location_id'] = quant.location_id.id
res[pack.id]['owner_id'] = quant.owner_id.id
res[pack.id]['company_id'] = quant.company_id.id
else:
res[pack.id]['location_id'] = False
res[pack.id]['owner_id'] = False
res[pack.id]['company_id'] = False
return res
def _get_packages_to_relocate(self, cr, uid, ids, context=None):
res = set()
for pack in self.browse(cr, uid, ids, context=context):
res.add(pack.id)
if pack.parent_id:
res.add(pack.parent_id.id)
return list(res)
_columns = {
'name': fields.char('Package Reference', select=True, copy=False),
'complete_name': fields.function(_complete_name, type='char', string="Package Name",),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('Right Parent', select=1),
'packaging_id': fields.many2one('product.packaging', 'Packaging', help="This field should be completed only if everything inside the package shares the same product, otherwise it doesn't really make sense.", select=True),
'ul_id': fields.many2one('product.ul', 'Logistic Unit'),
'location_id': fields.function(_get_package_info, type='many2one', relation='stock.location', string='Location', multi="package",
store={
'stock.quant': (_get_packages, ['location_id'], 10),
'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
}, readonly=True, select=True),
'quant_ids': fields.one2many('stock.quant', 'package_id', 'Bulk Content', readonly=True),
'parent_id': fields.many2one('stock.quant.package', 'Parent Package', help="The package containing this item", ondelete='restrict', readonly=True),
'children_ids': fields.one2many('stock.quant.package', 'parent_id', 'Contained Packages', readonly=True),
'company_id': fields.function(_get_package_info, type="many2one", relation='res.company', string='Company', multi="package",
store={
'stock.quant': (_get_packages, ['company_id'], 10),
'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
}, readonly=True, select=True),
'owner_id': fields.function(_get_package_info, type='many2one', relation='res.partner', string='Owner', multi="package",
store={
'stock.quant': (_get_packages, ['owner_id'], 10),
'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
}, readonly=True, select=True),
}
_defaults = {
'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack')
}
def _check_location_constraint(self, cr, uid, packs, context=None):
'''checks that all quants in a package are stored in the same location. This function cannot be used
as a constraint because it needs to be checked on pack operations (they may not call write on the
package)
'''
quant_obj = self.pool.get('stock.quant')
for pack in packs:
parent = pack
while parent.parent_id:
parent = parent.parent_id
quant_ids = self.get_content(cr, uid, [parent.id], context=context)
quants = [x for x in quant_obj.browse(cr, uid, quant_ids, context=context) if x.qty > 0]
location_id = quants and quants[0].location_id.id or False
#a non-empty list is always truthy, so use all() to actually enforce the constraint
if not all(quant.location_id.id == location_id for quant in quants):
raise osv.except_osv(_('Error'), _('Everything inside a package should be in the same location'))
return True
def action_print(self, cr, uid, ids, context=None):
context = dict(context or {}, active_ids=ids)
return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_package_barcode_small', context=context)
def unpack(self, cr, uid, ids, context=None):
quant_obj = self.pool.get('stock.quant')
for package in self.browse(cr, uid, ids, context=context):
quant_ids = [quant.id for quant in package.quant_ids]
quant_obj.write(cr, uid, quant_ids, {'package_id': package.parent_id.id or False}, context=context)
children_package_ids = [child_package.id for child_package in package.children_ids]
self.write(cr, uid, children_package_ids, {'parent_id': package.parent_id.id or False}, context=context)
#delete current package since it contains nothing anymore
self.unlink(cr, uid, ids, context=context)
return self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'action_package_view', context=context)
def get_content(self, cr, uid, ids, context=None):
child_package_ids = self.search(cr, uid, [('id', 'child_of', ids)], context=context)
return self.pool.get('stock.quant').search(cr, uid, [('package_id', 'in', child_package_ids)], context=context)
def get_content_package(self, cr, uid, ids, context=None):
quants_ids = self.get_content(cr, uid, ids, context=context)
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'quantsact', context=context)
res['domain'] = [('id', 'in', quants_ids)]
return res
def _get_product_total_qty(self, cr, uid, package_record, product_id, context=None):
''' find the total of the given product 'product_id' inside the given package 'package_record' '''
quant_obj = self.pool.get('stock.quant')
all_quant_ids = self.get_content(cr, uid, [package_record.id], context=context)
total = 0
for quant in quant_obj.browse(cr, uid, all_quant_ids, context=context):
if quant.product_id.id == product_id:
total += quant.qty
return total
def _get_all_products_quantities(self, cr, uid, package_id, context=None):
'''This function computes the different product quantities for the given package
'''
quant_obj = self.pool.get('stock.quant')
res = {}
for quant in quant_obj.browse(cr, uid, self.get_content(cr, uid, package_id, context=context)):
if quant.product_id.id not in res:
res[quant.product_id.id] = 0
res[quant.product_id.id] += quant.qty
return res
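# Hypothetical sketch of the result, assuming a package tree whose quants
# hold product 42 (qty 5 and 3) and product 7 (qty 1):
#   self._get_all_products_quantities(cr, uid, pack.id) -> {42: 8.0, 7: 1.0}
# Quantities are summed across all child packages via get_content().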
def copy_pack(self, cr, uid, id, default_pack_values=None, default=None, context=None):
stock_pack_operation_obj = self.pool.get('stock.pack.operation')
if default is None:
default = {}
new_package_id = self.copy(cr, uid, id, default_pack_values, context=context)
default['result_package_id'] = new_package_id
op_ids = stock_pack_operation_obj.search(cr, uid, [('result_package_id', '=', id)], context=context)
for op_id in op_ids:
stock_pack_operation_obj.copy(cr, uid, op_id, default, context=context)
return new_package_id
class stock_pack_operation(osv.osv):
_name = "stock.pack.operation"
_description = "Packing Operation"
def _get_remaining_prod_quantities(self, cr, uid, operation, context=None):
'''Get the remaining quantities per product on an operation with a package. This function returns a dictionary'''
#if the operation doesn't concern a package, or targets a single product of it, the per-product breakdown is trivial
if not operation.package_id or operation.product_id:
return {operation.product_id.id: operation.remaining_qty}
#get the total of products the package contains
res = self.pool.get('stock.quant.package')._get_all_products_quantities(cr, uid, operation.package_id.id, context=context)
#reduce by the quantities linked to a move
for record in operation.linked_move_operation_ids:
if record.move_id.product_id.id not in res:
res[record.move_id.product_id.id] = 0
res[record.move_id.product_id.id] -= record.qty
return res
def _get_remaining_qty(self, cr, uid, ids, name, args, context=None):
uom_obj = self.pool.get('product.uom')
res = {}
for ops in self.browse(cr, uid, ids, context=context):
res[ops.id] = 0
if ops.package_id and not ops.product_id:
#don't try to compute the remaining quantity for packages because it's not relevant (a package could include different products).
#should use _get_remaining_prod_quantities instead
continue
else:
qty = ops.product_qty
if ops.product_uom_id:
qty = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context)
for record in ops.linked_move_operation_ids:
qty -= record.qty
res[ops.id] = float_round(qty, precision_rounding=ops.product_id.uom_id.rounding)
return res
def product_id_change(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None):
res = self.on_change_tests(cr, uid, ids, product_id, product_uom_id, product_qty, context=context)
if product_id and not product_uom_id:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
res['value']['product_uom_id'] = product.uom_id.id
return res
def on_change_tests(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None):
res = {'value': {}}
uom_obj = self.pool.get('product.uom')
if product_id:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
product_uom_id = product_uom_id or product.uom_id.id
selected_uom = uom_obj.browse(cr, uid, product_uom_id, context=context)
if selected_uom.category_id.id != product.uom_id.category_id.id:
res['warning'] = {
'title': _('Warning: wrong UoM!'),
'message': _('The selected UoM for product %s is not compatible with the UoM set on the product form. \nPlease choose a UoM within the same UoM category.') % (product.name)
}
if product_qty and 'warning' not in res:
rounded_qty = uom_obj._compute_qty(cr, uid, product_uom_id, product_qty, product_uom_id, round=True)
if rounded_qty != product_qty:
res['warning'] = {
'title': _('Warning: wrong quantity!'),
'message': _('The chosen quantity for product %s is not compatible with the UoM rounding. It will be automatically converted at confirmation') % (product.name)
}
return res
_columns = {
'picking_id': fields.many2one('stock.picking', 'Stock Picking', help='The stock operation where the packing has been made', required=True),
'product_id': fields.many2one('product.product', 'Product', ondelete="CASCADE"), # 1
'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'qty_done': fields.float('Quantity Processed', digits_compute=dp.get_precision('Product Unit of Measure')),
'package_id': fields.many2one('stock.quant.package', 'Source Package'), # 2
'lot_id': fields.many2one('stock.production.lot', 'Lot/Serial Number'),
'result_package_id': fields.many2one('stock.quant.package', 'Destination Package', help="If set, the operations are packed into this package", required=False, ondelete='cascade'),
'date': fields.datetime('Date', required=True),
'owner_id': fields.many2one('res.partner', 'Owner', help="Owner of the quants"),
#'update_cost': fields.boolean('Need cost update'),
'cost': fields.float("Cost", help="Unit Cost for this product line"),
'currency': fields.many2one('res.currency', string="Currency", help="Currency in which Unit cost is expressed", ondelete='CASCADE'),
'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'operation_id', string='Linked Moves', readonly=True, help='Moves impacted by this operation for the computation of the remaining quantities'),
'remaining_qty': fields.function(_get_remaining_qty, type='float', digits=0, string="Remaining Qty", help="Remaining quantity in default UoM according to moves matched with this operation."),
'location_id': fields.many2one('stock.location', 'Source Location', required=True),
'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True),
'processed': fields.selection([('true','Yes'), ('false','No')],'Has been processed?', required=True),
}
_defaults = {
'date': fields.date.context_today,
'qty_done': 0,
'processed': lambda *a: 'false',
}
def write(self, cr, uid, ids, vals, context=None):
context = context or {}
res = super(stock_pack_operation, self).write(cr, uid, ids, vals, context=context)
if isinstance(ids, (int, long)):
ids = [ids]
if not context.get("no_recompute"):
pickings = vals.get('picking_id') and [vals['picking_id']] or list(set([x.picking_id.id for x in self.browse(cr, uid, ids, context=context)]))
self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, pickings, context=context)
return res
def create(self, cr, uid, vals, context=None):
context = context or {}
res_id = super(stock_pack_operation, self).create(cr, uid, vals, context=context)
if vals.get("picking_id") and not context.get("no_recompute"):
self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, [vals['picking_id']], context=context)
return res_id
def action_drop_down(self, cr, uid, ids, context=None):
''' Used by barcode interface to say that pack_operation has been moved from src location
to destination location; if qty_done is less than product_qty then we have to split the
operation in two to process the one with the qty moved
'''
processed_ids = []
move_obj = self.pool.get("stock.move")
for pack_op in self.browse(cr, uid, ids, context=context):
if pack_op.product_id and pack_op.location_id and pack_op.location_dest_id:
move_obj.check_tracking_product(cr, uid, pack_op.product_id, pack_op.lot_id.id, pack_op.location_id, pack_op.location_dest_id, context=context)
op = pack_op.id
if pack_op.qty_done < pack_op.product_qty:
# we split the operation in two
op = self.copy(cr, uid, pack_op.id, {'product_qty': pack_op.qty_done, 'qty_done': pack_op.qty_done}, context=context)
self.write(cr, uid, [pack_op.id], {'product_qty': pack_op.product_qty - pack_op.qty_done, 'qty_done': 0, 'lot_id': False}, context=context)
processed_ids.append(op)
self.write(cr, uid, processed_ids, {'processed': 'true'}, context=context)
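# Worked sketch of the split above (hypothetical quantities): with
# product_qty=10 and qty_done=4, the copy gets product_qty=4/qty_done=4 and
# is marked processed, while the original is rewritten to product_qty=6,
# qty_done=0 (lot cleared) and stays open for the remaining units.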
def create_and_assign_lot(self, cr, uid, id, name, context=None):
''' Used by barcode interface to create a new lot and assign it to the operation
'''
obj = self.browse(cr, uid, id, context=context)
product_id = obj.product_id.id
val = {'product_id': product_id}
new_lot_id = False
if name:
lots = self.pool.get('stock.production.lot').search(cr, uid, ['&', ('name', '=', name), ('product_id', '=', product_id)], context=context)
if lots:
new_lot_id = lots[0]
val.update({'name': name})
if not new_lot_id:
new_lot_id = self.pool.get('stock.production.lot').create(cr, uid, val, context=context)
self.write(cr, uid, id, {'lot_id': new_lot_id}, context=context)
def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=True, context=None):
'''Search for an operation with given 'domain' in a picking, if it exists increment the qty (+1) otherwise create it
:param domain: list of tuple directly reusable as a domain
context can receive a key 'current_package_id' with the package to consider for this operation
returns the id of the newly created operation, False when a zero-quantity line is deleted, None when an existing line is simply incremented
'''
if context is None:
context = {}
#if current_package_id is given in the context, we increase the number of items in this package
package_clause = [('result_package_id', '=', context.get('current_package_id', False))]
existing_operation_ids = self.search(cr, uid, [('picking_id', '=', picking_id)] + domain + package_clause, context=context)
todo_operation_ids = []
if existing_operation_ids:
if filter_visible:
todo_operation_ids = [val for val in existing_operation_ids if val in visible_op_ids]
else:
todo_operation_ids = existing_operation_ids
if todo_operation_ids:
#existing operation found for the given domain and picking => increment its quantity
operation_id = todo_operation_ids[0]
op_obj = self.browse(cr, uid, operation_id, context=context)
qty = op_obj.qty_done
if increment:
qty += 1
else:
qty -= 1 if qty >= 1 else 0
if qty == 0 and op_obj.product_qty == 0:
#we have a line with 0 qty set, so delete it
self.unlink(cr, uid, [operation_id], context=context)
return False
self.write(cr, uid, [operation_id], {'qty_done': qty}, context=context)
else:
#no existing operation found for the given domain and picking => create a new one
picking_obj = self.pool.get("stock.picking")
picking = picking_obj.browse(cr, uid, picking_id, context=context)
values = {
'picking_id': picking_id,
'product_qty': 0,
'location_id': picking.location_id.id,
'location_dest_id': picking.location_dest_id.id,
'qty_done': 1,
}
for key in domain:
var_name, dummy, value = key
uom_id = False
if var_name == 'product_id':
uom_id = self.pool.get('product.product').browse(cr, uid, value, context=context).uom_id.id
update_dict = {var_name: value}
if uom_id:
update_dict['product_uom_id'] = uom_id
values.update(update_dict)
operation_id = self.create(cr, uid, values, context=context)
return operation_id
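# Hypothetical usage sketch: scanning product 42 on picking 7 either bumps
# qty_done by one on a matching (visible) line or creates a fresh line with
# qty_done=1 and the picking's default locations:
#   self._search_and_increment(cr, uid, 7, [('product_id', '=', 42)])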
class stock_move_operation_link(osv.osv):
"""
Table making the link between stock.moves and stock.pack.operations to compute the remaining quantities on each of these objects
"""
_name = "stock.move.operation.link"
_description = "Link between stock moves and pack operations"
_columns = {
'qty': fields.float('Quantity', help="Quantity of products to consider when talking about the contribution of this pack operation towards the remaining quantity of the move (and inverse). Given in the product main uom."),
'operation_id': fields.many2one('stock.pack.operation', 'Operation', required=True, ondelete="cascade"),
'move_id': fields.many2one('stock.move', 'Move', required=True, ondelete="cascade"),
'reserved_quant_id': fields.many2one('stock.quant', 'Reserved Quant', help="Technical field containing the quant that created this link between an operation and a stock move. Used at the stock_move_obj.action_done() time to avoid seeking a matching quant again"),
}
def get_specific_domain(self, cr, uid, record, context=None):
'''Returns the specific domain to consider for quant selection in action_assign() or action_done() of stock.move,
having the record given as parameter making the link between the stock move and a pack operation'''
op = record.operation_id
domain = []
if op.package_id and op.product_id:
#if removing a product from a box, we restrict the choice of quants to this box
domain.append(('package_id', '=', op.package_id.id))
elif op.package_id:
#if moving a box, we allow to take everything from inside boxes as well
domain.append(('package_id', 'child_of', [op.package_id.id]))
else:
#if not given any information about package, we don't open boxes
domain.append(('package_id', '=', False))
#if lot info is given, we restrict choice to this lot otherwise we can take any
if op.lot_id:
domain.append(('lot_id', '=', op.lot_id.id))
#if owner info is given, we restrict to this owner otherwise we restrict to no owner
if op.owner_id:
domain.append(('owner_id', '=', op.owner_id.id))
else:
domain.append(('owner_id', '=', False))
return domain
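# Illustrative domains produced above (ids hypothetical, op without lot/owner):
#   move a whole box 5:           [('package_id', 'child_of', [5]), ('owner_id', '=', False)]
#   take product 42 out of box 5: [('package_id', '=', 5), ('owner_id', '=', False)]
#   plain product operation:      [('package_id', '=', False), ('owner_id', '=', False)]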
class stock_warehouse_orderpoint(osv.osv):
"""
Defines Minimum stock rules.
"""
_name = "stock.warehouse.orderpoint"
_description = "Minimum Inventory Rule"
def subtract_procurements(self, cr, uid, orderpoint, context=None):
'''Return the quantity of product to deduct from the orderpoint's computed quantity, because a procurement has already been created to fulfill it.
'''
qty = 0
uom_obj = self.pool.get("product.uom")
for procurement in orderpoint.procurement_ids:
if procurement.state in ('cancel', 'done'):
continue
procurement_qty = uom_obj._compute_qty_obj(cr, uid, procurement.product_uom, procurement.product_qty, procurement.product_id.uom_id, context=context)
for move in procurement.move_ids:
#need to add the moves in draft as they aren't in the virtual quantity + moves that have not been created yet
if move.state not in ('draft',):
#if move is already confirmed, assigned or done, the virtual stock is already taking this into account so it shouldn't be deducted
procurement_qty -= move.product_qty
qty += procurement_qty
return qty
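# Worked sketch with hypothetical numbers: a running procurement for 10 units
# (in the product's UoM) with one confirmed move of 6 contributes 10 - 6 = 4,
# because confirmed/assigned/done moves are already reflected in the virtual
# stock and must not be deducted twice.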
def _check_product_uom(self, cr, uid, ids, context=None):
'''
Check if the UoM has the same category as the product standard UoM
'''
if not context:
context = {}
for rule in self.browse(cr, uid, ids, context=context):
if rule.product_id.uom_id.category_id.id != rule.product_uom.category_id.id:
return False
return True
def action_view_proc_to_process(self, cr, uid, ids, context=None):
act_obj = self.pool.get('ir.actions.act_window')
mod_obj = self.pool.get('ir.model.data')
proc_ids = self.pool.get('procurement.order').search(cr, uid, [('orderpoint_id', 'in', ids), ('state', 'not in', ('done', 'cancel'))], context=context)
result = mod_obj.get_object_reference(cr, uid, 'procurement', 'do_view_procurements')
if not result:
return False
result = act_obj.read(cr, uid, [result[1]], context=context)[0]
result['domain'] = "[('id', 'in', [" + ','.join(map(str, proc_ids)) + "])]"
return result
_columns = {
'name': fields.char('Name', required=True, copy=False),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the orderpoint without removing it."),
'logic': fields.selection([('max', 'Order to Max'), ('price', 'Best price (not yet active!)')], 'Reordering Mode', required=True),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, ondelete="cascade"),
'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="cascade"),
'product_id': fields.many2one('product.product', 'Product', required=True, ondelete='cascade', domain=[('type', '=', 'product')]),
'product_uom': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Product Unit of Measure', readonly=True, required=True),
'product_min_qty': fields.float('Minimum Quantity', required=True,
digits_compute=dp.get_precision('Product Unit of Measure'),
help="When the virtual stock goes below the Min Quantity specified for this field, Odoo generates "\
"a procurement to bring the forecasted quantity to the Max Quantity."),
'product_max_qty': fields.float('Maximum Quantity', required=True,
digits_compute=dp.get_precision('Product Unit of Measure'),
help="When the virtual stock goes below the Min Quantity, Odoo generates "\
"a procurement to bring the forecasted quantity to the Quantity specified as Max Quantity."),
'qty_multiple': fields.float('Qty Multiple', required=True,
digits_compute=dp.get_precision('Product Unit of Measure'),
help="The procurement quantity will be rounded up to this multiple. If it is 0, the exact quantity will be used. "),
'procurement_ids': fields.one2many('procurement.order', 'orderpoint_id', 'Created Procurements'),
'group_id': fields.many2one('procurement.group', 'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. If none is given, the moves generated by procurement rules will be grouped into one big picking.", copy=False),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
'active': lambda *a: 1,
'logic': lambda *a: 'max',
'qty_multiple': lambda *a: 1,
'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '',
'product_uom': lambda self, cr, uid, context: context.get('product_uom', False),
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.warehouse.orderpoint', context=context)
}
_sql_constraints = [
('qty_multiple_check', 'CHECK( qty_multiple >= 0 )', 'Qty Multiple must be greater than or equal to zero.'),
]
_constraints = [
(_check_product_uom, 'You have to select a product unit of measure in the same category as the default unit of measure of the product', ['product_id', 'product_uom']),
]
def default_get(self, cr, uid, fields, context=None):
warehouse_obj = self.pool.get('stock.warehouse')
res = super(stock_warehouse_orderpoint, self).default_get(cr, uid, fields, context)
# default 'warehouse_id' and 'location_id'
if 'warehouse_id' not in res:
warehouse_ids = res.get('company_id') and warehouse_obj.search(cr, uid, [('company_id', '=', res['company_id'])], limit=1, context=context) or []
res['warehouse_id'] = warehouse_ids and warehouse_ids[0] or False
if 'location_id' not in res:
res['location_id'] = res.get('warehouse_id') and warehouse_obj.browse(cr, uid, res['warehouse_id'], context).lot_stock_id.id or False
return res
def onchange_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
""" Finds location id for changed warehouse.
@param warehouse_id: Changed id of warehouse.
@return: Dictionary of values.
"""
if warehouse_id:
w = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
v = {'location_id': w.lot_stock_id.id}
return {'value': v}
return {}
def onchange_product_id(self, cr, uid, ids, product_id, context=None):
""" Finds UoM for changed product.
@param product_id: Changed id of product.
@return: Dictionary of values.
"""
if product_id:
prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
d = {'product_uom': [('category_id', '=', prod.uom_id.category_id.id)]}
v = {'product_uom': prod.uom_id.id}
return {'value': v, 'domain': d}
return {'domain': {'product_uom': []}}
class stock_picking_type(osv.osv):
_name = "stock.picking.type"
_description = "The picking type determines the picking view"
_order = 'sequence'
def open_barcode_interface(self, cr, uid, ids, context=None):
final_url = "/barcode/web/#action=stock.ui&picking_type_id=" + str(ids[0]) if len(ids) else '0'
return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'}
def _get_tristate_values(self, cr, uid, ids, field_name, arg, context=None):
picking_obj = self.pool.get('stock.picking')
res = {}
for picking_type_id in ids:
#get last 10 pickings of this type
picking_ids = picking_obj.search(cr, uid, [('picking_type_id', '=', picking_type_id), ('state', '=', 'done')], order='date_done desc', limit=10, context=context)
tristates = []
for picking in picking_obj.browse(cr, uid, picking_ids, context=context):
if picking.date_done > picking.date:
tristates.insert(0, {'tooltip': (picking.name or '') + ": " + _('Late'), 'value': -1})
elif picking.backorder_id:
tristates.insert(0, {'tooltip': (picking.name or '') + ": " + _('Backorder exists'), 'value': 0})
else:
tristates.insert(0, {'tooltip': (picking.name or '') + ": " + _('OK'), 'value': 1})
res[picking_type_id] = json.dumps(tristates)
return res
def _get_picking_count(self, cr, uid, ids, field_names, arg, context=None):
obj = self.pool.get('stock.picking')
domains = {
'count_picking_draft': [('state', '=', 'draft')],
'count_picking_waiting': [('state', '=', 'confirmed')],
'count_picking_ready': [('state', 'in', ('assigned', 'partially_available'))],
'count_picking': [('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))],
'count_picking_late': [('min_date', '<', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), ('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))],
'count_picking_backorders': [('backorder_id', '!=', False), ('state', 'in', ('confirmed', 'assigned', 'waiting', 'partially_available'))],
}
result = {}
for field in domains:
data = obj.read_group(cr, uid, domains[field] +
[('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', ids)],
['picking_type_id'], ['picking_type_id'], context=context)
count = dict(map(lambda x: (x['picking_type_id'] and x['picking_type_id'][0], x['picking_type_id_count']), data))
for tid in ids:
result.setdefault(tid, {})[field] = count.get(tid, 0)
for tid in ids:
if result[tid]['count_picking']:
result[tid]['rate_picking_late'] = result[tid]['count_picking_late'] * 100 / result[tid]['count_picking']
result[tid]['rate_picking_backorders'] = result[tid]['count_picking_backorders'] * 100 / result[tid]['count_picking']
else:
result[tid]['rate_picking_late'] = 0
result[tid]['rate_picking_backorders'] = 0
return result
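# Sketch of the derived rates above (hypothetical counts): with
# count_picking = 4 and count_picking_late = 1, rate_picking_late is
# 1 * 100 / 4 = 25 (an integer percent under Python 2 division).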
def onchange_picking_code(self, cr, uid, ids, picking_code=False):
if not picking_code:
return False
obj_data = self.pool.get('ir.model.data')
stock_loc = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_stock')
result = {
'default_location_src_id': stock_loc,
'default_location_dest_id': stock_loc,
}
if picking_code == 'incoming':
result['default_location_src_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_suppliers')
elif picking_code == 'outgoing':
result['default_location_dest_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_customers')
return {'value': result}
def _get_name(self, cr, uid, ids, field_names, arg, context=None):
return dict(self.name_get(cr, uid, ids, context=context))
def name_get(self, cr, uid, ids, context=None):
"""Overides orm name_get method to display 'Warehouse_name: PickingType_name' """
if context is None:
context = {}
if not isinstance(ids, list):
ids = [ids]
res = []
if not ids:
return res
for record in self.browse(cr, uid, ids, context=context):
name = record.name
if record.warehouse_id:
name = record.warehouse_id.name + ': ' + name
if context.get('special_shortened_wh_name'):
if record.warehouse_id:
name = record.warehouse_id.name
else:
name = _('Customer') + ' (' + record.name + ')'
res.append((record.id, name))
return res
def _default_warehouse(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context)
res = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', user.company_id.id)], limit=1, context=context)
return res and res[0] or False
_columns = {
'name': fields.char('Picking Type Name', translate=True, required=True),
'complete_name': fields.function(_get_name, type='char', string='Name'),
'color': fields.integer('Color'),
'sequence': fields.integer('Sequence', help="Used to order the 'All Operations' kanban view"),
'sequence_id': fields.many2one('ir.sequence', 'Reference Sequence', required=True),
'default_location_src_id': fields.many2one('stock.location', 'Default Source Location'),
'default_location_dest_id': fields.many2one('stock.location', 'Default Destination Location'),
'code': fields.selection([('incoming', 'Suppliers'), ('outgoing', 'Customers'), ('internal', 'Internal')], 'Type of Operation', required=True),
'return_picking_type_id': fields.many2one('stock.picking.type', 'Picking Type for Returns'),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', ondelete='cascade'),
'active': fields.boolean('Active'),
# Statistics for the kanban view
'last_done_picking': fields.function(_get_tristate_values,
type='char',
string='Last 10 Done Pickings'),
'count_picking_draft': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'count_picking_ready': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'count_picking': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'count_picking_waiting': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'count_picking_late': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'count_picking_backorders': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'rate_picking_late': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
'rate_picking_backorders': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
}
_defaults = {
'warehouse_id': _default_warehouse,
'active': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
thnee/ansible | refs/heads/devel | test/units/template/test_templar.py | 55 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from jinja2.runtime import Context
from units.compat import unittest
from units.compat.mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
class BaseTemplar(object):
def setUp(self):
self.test_vars = dict(
foo="bar",
bam="{{foo}}",
num=1,
var_true=True,
var_false=False,
var_dict=dict(a="b"),
bad_dict="{a='b'",
var_list=[1],
recursive="{{recursive}}",
some_var="blip",
some_static_var="static_blip",
some_keyword="{{ foo }}",
some_unsafe_var=wrap_var("unsafe_blip"),
some_static_unsafe_var=wrap_var("static_unsafe_blip"),
some_unsafe_keyword=wrap_var("{{ foo }}"),
str_with_error="{{ 'str' | from_json }}",
)
self.fake_loader = DictDataLoader({
"/path/to/my_file.txt": "foo\n",
})
self.templar = Templar(loader=self.fake_loader, variables=self.test_vars)
self._ansible_context = AnsibleContext(self.templar.environment, {}, {}, {})
def is_unsafe(self, obj):
return self._ansible_context._is_unsafe(obj)
# class used for testing arbitrary objects passed to template
class SomeClass(object):
foo = 'bar'
def __init__(self):
self.blip = 'blip'
class SomeUnsafeClass(AnsibleUnsafe):
def __init__(self):
super(SomeUnsafeClass, self).__init__()
self.blip = 'unsafe blip'
class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
def test_lookup_jinja_dict_key_in_static_vars(self):
res = self.templar.template("{'some_static_var': '{{ some_var }}'}",
static_vars=['some_static_var'])
# self.assertEqual(res['{{ a_keyword }}'], "blip")
print(res)
def test_is_possibly_template_true(self):
tests = [
'{{ foo }}',
'{% foo %}',
'{# foo #}',
'{# {{ foo }} #}',
'{# {{ nothing }} {# #}',
'{# {{ nothing }} {# #} #}',
'{% raw %}{{ foo }}{% endraw %}',
'{{',
'{%',
'{#',
'{% raw',
]
for test in tests:
self.assertTrue(self.templar.is_possibly_template(test))
def test_is_possibly_template_false(self):
tests = [
'{',
'%',
'#',
'foo',
'}}',
'%}',
'raw %}',
'#}',
]
for test in tests:
self.assertFalse(self.templar.is_possibly_template(test))
def test_is_possible_template(self):
"""This test ensures that a broken template still gets templated"""
# Purposefully invalid jinja
self.assertRaises(AnsibleError, self.templar.template, '{{ foo|default(False)) }}')
def test_is_template_true(self):
tests = [
'{{ foo }}',
'{% foo %}',
'{# foo #}',
'{# {{ foo }} #}',
'{# {{ nothing }} {# #}',
'{# {{ nothing }} {# #} #}',
'{% raw %}{{ foo }}{% endraw %}',
]
for test in tests:
self.assertTrue(self.templar.is_template(test))
def test_is_template_false(self):
tests = [
'foo',
'{{ foo',
'{% foo',
'{# foo',
'{{ foo %}',
'{{ foo #}',
'{% foo }}',
'{% foo #}',
'{# foo %}',
'{# foo }}',
'{{ foo {{',
'{% raw %}{% foo %}',
]
for test in tests:
self.assertFalse(self.templar.is_template(test))
def test_is_template_raw_string(self):
res = self.templar.is_template('foo')
self.assertFalse(res)
def test_is_template_none(self):
res = self.templar.is_template(None)
self.assertFalse(res)
def test_template_convert_bare_string(self):
res = self.templar.template('foo', convert_bare=True)
self.assertEqual(res, 'bar')
def test_template_convert_bare_nested(self):
res = self.templar.template('bam', convert_bare=True)
self.assertEqual(res, 'bar')
def test_template_convert_bare_unsafe(self):
res = self.templar.template('some_unsafe_var', convert_bare=True)
self.assertEqual(res, 'unsafe_blip')
# self.assertIsInstance(res, AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_convert_bare_filter(self):
res = self.templar.template('bam|capitalize', convert_bare=True)
self.assertEqual(res, 'Bar')
def test_template_convert_bare_filter_unsafe(self):
res = self.templar.template('some_unsafe_var|capitalize', convert_bare=True)
self.assertEqual(res, 'Unsafe_blip')
# self.assertIsInstance(res, AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_convert_data(self):
res = self.templar.template('{{foo}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
@patch('ansible.template.safe_eval', side_effect=AnsibleError)
def test_template_convert_data_template_in_data(self, mock_safe_eval):
res = self.templar.template('{{bam}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
def test_template_convert_data_bare(self):
res = self.templar.template('bam', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bam')
def test_template_convert_data_to_json(self):
res = self.templar.template('{{bam|to_json}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, '"bar"')
def test_template_convert_data_convert_bare_data_bare(self):
res = self.templar.template('bam', convert_data=True, convert_bare=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
def test_template_unsafe_non_string(self):
unsafe_obj = AnsibleUnsafe()
res = self.templar.template(unsafe_obj)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_unsafe_non_string_subclass(self):
unsafe_obj = SomeUnsafeClass()
res = self.templar.template(unsafe_obj)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_weird(self):
data = u'''1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7'''
self.assertRaisesRegexp(AnsibleError,
'template error while templating string',
self.templar.template,
data)
def test_template_with_error(self):
"""Check that AnsibleError is raised, fail if an unhandled exception is raised"""
self.assertRaises(AnsibleError, self.templar.template, "{{ str_with_error }}")
class TestTemplarMisc(BaseTemplar, unittest.TestCase):
def test_templar_simple(self):
templar = self.templar
# test some basic templating
self.assertEqual(templar.template("{{foo}}"), "bar")
self.assertEqual(templar.template("{{foo}}\n"), "bar\n")
self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=False), "bar")
self.assertEqual(templar.template("{{bam}}"), "bar")
self.assertEqual(templar.template("{{num}}"), 1)
self.assertEqual(templar.template("{{var_true}}"), True)
self.assertEqual(templar.template("{{var_false}}"), False)
self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
self.assertEqual(templar.template("{{var_list}}"), [1])
self.assertEqual(templar.template(1, convert_bare=True), 1)
# force errors
self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")
self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{lookup('file', bad_var)}}")
self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")
# test with fail_on_undefined=False
self.assertEqual(templar.template("{{bad_var}}", fail_on_undefined=False), "{{bad_var}}")
# test setting available_variables
templar.available_variables = dict(foo="bam")
self.assertEqual(templar.template("{{foo}}"), "bam")
# variables must be a dict() for available_variables setter
# FIXME Use assertRaises() as a context manager (added in 2.7) once we do not run tests on Python 2.6 anymore.
try:
templar.available_variables = "foo=bam"
except AssertionError:
pass
except Exception as e:
self.fail(e)
def test_templar_escape_backslashes(self):
# Rule of thumb: If escape backslashes is True you should end up with
# the same number of backslashes as when you started.
self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=True), "\tbar")
self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=False), "\tbar")
self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=True), "\\bar")
self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=False), "\\bar")
self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=True), "\\bar\t")
self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=False), "\\bar\t")
self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=True), "\\bar\\t")
self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=False), "\\bar\t")
self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=True), "\\bar\\\\t")
self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=False), "\\bar\\t")
def test_template_jinja2_extensions(self):
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
old_exts = C.DEFAULT_JINJA2_EXTENSIONS
try:
C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
finally:
C.DEFAULT_JINJA2_EXTENSIONS = old_exts
class TestTemplarLookup(BaseTemplar, unittest.TestCase):
def test_lookup_missing_plugin(self):
self.assertRaisesRegexp(AnsibleError,
r'lookup plugin \(not_a_real_lookup_plugin\) not found',
self.templar._lookup,
'not_a_real_lookup_plugin',
'an_arg', a_keyword_arg='a_keyword_arg_value')
def test_lookup_list(self):
res = self.templar._lookup('list', 'an_arg', 'another_arg')
self.assertEqual(res, 'an_arg,another_arg')
def test_lookup_jinja_undefined(self):
self.assertRaisesRegexp(AnsibleUndefinedVariable,
"'an_undefined_jinja_var' is undefined",
self.templar._lookup,
'list', '{{ an_undefined_jinja_var }}')
def test_lookup_jinja_defined(self):
res = self.templar._lookup('list', '{{ some_var }}')
self.assertTrue(self.is_unsafe(res))
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_dict_string_passed(self):
self.assertRaisesRegexp(AnsibleError,
"with_dict expects a dict",
self.templar._lookup,
'dict',
'{{ some_var }}')
def test_lookup_jinja_dict_list_passed(self):
self.assertRaisesRegexp(AnsibleError,
"with_dict expects a dict",
self.templar._lookup,
'dict',
['foo', 'bar'])
def test_lookup_jinja_kwargs(self):
res = self.templar._lookup('list', 'blip', random_keyword='12345')
self.assertTrue(self.is_unsafe(res))
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_list_wantlist(self):
res = self.templar._lookup('list', '{{ some_var }}', wantlist=True)
self.assertEqual(res, ["blip"])
def test_lookup_jinja_list_wantlist_undefined(self):
self.assertRaisesRegexp(AnsibleUndefinedVariable,
"'some_undefined_var' is undefined",
self.templar._lookup,
'list',
'{{ some_undefined_var }}',
wantlist=True)
def test_lookup_jinja_list_wantlist_unsafe(self):
res = self.templar._lookup('list', '{{ some_unsafe_var }}', wantlist=True)
for lookup_result in res:
self.assertTrue(self.is_unsafe(lookup_result))
# self.assertIsInstance(lookup_result, AnsibleUnsafe)
# Should this be an AnsibleUnsafe
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_dict(self):
res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_var }}'})
self.assertEqual(res['{{ a_keyword }}'], "blip")
# TODO: Should this be an AnsibleUnsafe
# self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_dict_unsafe(self):
res = self.templar._lookup('list', {'{{ some_unsafe_key }}': '{{ some_unsafe_var }}'})
self.assertTrue(self.is_unsafe(res['{{ some_unsafe_key }}']))
# self.assertIsInstance(res['{{ some_unsafe_key }}'], AnsibleUnsafe)
# TODO: Should this be an AnsibleUnsafe
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_dict_unsafe_value(self):
res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_unsafe_var }}'})
self.assertTrue(self.is_unsafe(res['{{ a_keyword }}']))
# self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
# TODO: Should this be an AnsibleUnsafe
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_none(self):
res = self.templar._lookup('list', None)
self.assertIsNone(res)
class TestAnsibleContext(BaseTemplar, unittest.TestCase):
def _context(self, variables=None):
variables = variables or {}
env = AnsibleEnvironment()
context = AnsibleContext(env, parent={}, name='some_context',
blocks={})
for key, value in variables.items():
context.vars[key] = value
return context
def test(self):
context = self._context()
self.assertIsInstance(context, AnsibleContext)
self.assertIsInstance(context, Context)
def test_resolve_unsafe(self):
context = self._context(variables={'some_unsafe_key': wrap_var('some_unsafe_string')})
res = context.resolve('some_unsafe_key')
# self.assertIsInstance(res, AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res),
'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)
def test_resolve_unsafe_list(self):
context = self._context(variables={'some_unsafe_key': [wrap_var('some unsafe string 1')]})
res = context.resolve('some_unsafe_key')
# self.assertIsInstance(res[0], AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res),
'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)
def test_resolve_unsafe_dict(self):
context = self._context(variables={'some_unsafe_key':
{'an_unsafe_dict': wrap_var('some unsafe string 1')}
})
res = context.resolve('some_unsafe_key')
self.assertTrue(self.is_unsafe(res['an_unsafe_dict']),
'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res['an_unsafe_dict'])
def test_resolve(self):
context = self._context(variables={'some_key': 'some_string'})
res = context.resolve('some_key')
self.assertEqual(res, 'some_string')
# self.assertNotIsInstance(res, AnsibleUnsafe)
self.assertFalse(self.is_unsafe(res),
'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
def test_resolve_none(self):
context = self._context(variables={'some_key': None})
res = context.resolve('some_key')
self.assertEqual(res, None)
# self.assertNotIsInstance(res, AnsibleUnsafe)
self.assertFalse(self.is_unsafe(res),
'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
def test_is_unsafe(self):
context = self._context()
self.assertFalse(context._is_unsafe(AnsibleUndefined()))
|
glouppe/scikit-learn | refs/heads/master | examples/linear_model/plot_lasso_lars.py | 363 | #!/usr/bin/env python
"""
=====================
Lasso path using LARS
=====================
Computes Lasso Path along the regularization parameter using the LARS
algorithm on the diabetes dataset. Each color represents a different
feature of the coefficient vector, and this is displayed as a function
of the regularization parameter.
"""
print(__doc__)
# Author: Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn import datasets
diabetes = datasets.load_diabetes()
X = diabetes.data
y = diabetes.target
print("Computing regularization path using the LARS ...")
alphas, _, coefs = linear_model.lars_path(X, y, method='lasso', verbose=True)
xx = np.sum(np.abs(coefs.T), axis=1)
xx /= xx[-1]
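# xx is the l1 norm of the coefficient vector at each step of the path,
# rescaled to [0, 1]; the x-axis therefore reads as |coef|_1 relative to the
# final (least-regularized) solution, a common way to display Lasso paths.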
plt.plot(xx, coefs.T)
ymin, ymax = plt.ylim()
plt.vlines(xx, ymin, ymax, linestyle='dashed')
plt.xlabel('|coef| / max|coef|')
plt.ylabel('Coefficients')
plt.title('LASSO Path')
plt.axis('tight')
plt.show()
|
hkariti/ansible | refs/heads/devel | lib/ansible/modules/network/ios/ios_banner.py | 18 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_banner
version_added: "2.3"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Manage multiline banners on Cisco IOS devices
description:
- This will configure both login and motd banners on remote devices
    running Cisco IOS. It allows playbooks to add or remove
banner text from the active running configuration.
extends_documentation_fragment: ios
notes:
- Tested against IOS 15.6
options:
banner:
description:
- Specifies which banner should be configured on the remote device.
In Ansible 2.4 and earlier only I(login) and I(motd) were supported.
required: true
default: null
choices: ['login', 'motd', 'exec', 'incoming', 'slip-ppp']
text:
description:
- The banner text that should be
present in the remote device running configuration. This argument
accepts a multiline string, with no empty lines. Requires I(state=present).
default: null
state:
description:
- Specifies whether or not the configuration is
present in the current devices active running configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure the login banner
ios_banner:
banner: login
text: |
this is my login banner
that contains a multiline
string
state: present
- name: remove the motd banner
ios_banner:
banner: motd
state: absent
- name: Configure banner from file
ios_banner:
banner: motd
text: "{{ lookup('file', './config_partial/raw_banner.cfg') }}"
state: present
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- banner login
- this is my login banner
- that contains a multiline
- string
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import exec_command
from ansible.module_utils.network.ios.ios import load_config, run_commands
from ansible.module_utils.network.ios.ios import ios_argument_spec, check_args
import re
def map_obj_to_commands(updates, module):
commands = list()
want, have = updates
state = module.params['state']
    if state == 'absent' and have.get('text'):
commands.append('no banner %s' % module.params['banner'])
elif state == 'present':
if want['text'] and (want['text'] != have.get('text')):
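            # IOS expects banner text wrapped in a delimiter character that
            # must not occur in the text itself; '@' is used as the delimiter here.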
banner_cmd = 'banner %s' % module.params['banner']
banner_cmd += ' @\n'
banner_cmd += want['text'].strip()
banner_cmd += '\n@'
commands.append(banner_cmd)
return commands
def map_config_to_obj(module):
rc, out, err = exec_command(module, 'show banner %s' % module.params['banner'])
if rc == 0:
output = out
else:
rc, out, err = exec_command(module,
'show running-config | begin banner %s'
% module.params['banner'])
if out:
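            # in 'show running-config' output the banner text is wrapped in ^C markers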
output = re.search(r'\^C(.*)\^C', out, re.S).group(1).strip()
else:
output = None
obj = {'banner': module.params['banner'], 'state': 'absent'}
if output:
obj['text'] = output
obj['state'] = 'present'
return obj
def map_params_to_obj(module):
text = module.params['text']
if text:
text = str(text).strip()
return {
'banner': module.params['banner'],
'text': text,
'state': module.params['state']
}
def main():
""" main entry point for module execution
"""
argument_spec = dict(
banner=dict(required=True, choices=['login', 'motd', 'exec', 'incoming', 'slip-ppp']),
text=dict(),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ios_argument_spec)
required_if = [('state', 'present', ('text',))]
module = AnsibleModule(argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands((want, have), module)
result['commands'] = commands
if commands:
if not module.check_mode:
response = load_config(module, commands)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
ianatpn/nupictest | refs/heads/master | tests/unit/py2/nupic/encoders/sdrcategory_test.py | 7 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unit tests for SDR Category encoder"""
import numpy
from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA
import unittest2 as unittest
from nupic.encoders.sdrcategory import SDRCategoryEncoder
#########################################################################
class SDRCategoryEncoderTest(unittest.TestCase):
'''Unit tests for SDRCategory encoder class'''
def testSDRCategoryEncoder(self):
print "Testing CategoryEncoder...",
# make sure we have > 16 categories so that we have to grow our sdrs
categories = ["ES", "S1", "S2", "S3", "S4", "S5", "S6", "S7", "S8",
"S9","S10", "S11", "S12", "S13", "S14", "S15", "S16",
"S17", "S18", "S19", "GB", "US"]
fieldWidth = 100
bitsOn = 10
s = SDRCategoryEncoder(n=fieldWidth, w=bitsOn, categoryList = categories,
name="foo", verbosity=0, forced=True)
# internal check
self.assertEqual(s.sdrs.shape, (32, fieldWidth))
# ES
es = s.encode("ES")
self.assertEqual(es.sum(), bitsOn)
self.assertEqual(es.shape, (fieldWidth,))
self.assertEqual(es.sum(), bitsOn)
x = s.decode(es)
self.assertIsInstance(x[0], dict)
self.assertTrue("foo" in x[0])
self.assertEqual(x[0]["foo"][1], "ES")
topDown = s.topDownCompute(es)
self.assertEqual(topDown.value, 'ES')
self.assertEqual(topDown.scalar, 1)
self.assertEqual(topDown.encoding.sum(), bitsOn)
# ----------------------------------------------------------------------
# Test topdown compute
for v in categories:
output = s.encode(v)
topDown = s.topDownCompute(output)
self.assertEqual(topDown.value, v)
self.assertEqual(topDown.scalar, s.getScalars(v)[0])
bucketIndices = s.getBucketIndices(v)
print "bucket index =>", bucketIndices[0]
topDown = s.getBucketInfo(bucketIndices)[0]
self.assertEqual(topDown.value, v)
self.assertEqual(topDown.scalar, s.getScalars(v)[0])
self.assertTrue((topDown.encoding == output).all())
self.assertEqual(topDown.value, s.getBucketValues()[bucketIndices[0]])
# Unknown
unknown = s.encode("ASDFLKJLK")
self.assertEqual(unknown.sum(), bitsOn)
self.assertEqual(unknown.shape, (fieldWidth,))
self.assertEqual(unknown.sum(), bitsOn)
x = s.decode(unknown)
self.assertEqual(x[0]["foo"][1], "<UNKNOWN>")
topDown = s.topDownCompute(unknown)
self.assertEqual(topDown.value, "<UNKNOWN>")
self.assertEqual(topDown.scalar, 0)
# US
us = s.encode("US")
self.assertEqual(us.sum(), bitsOn)
self.assertEqual(us.shape, (fieldWidth,))
self.assertEqual(us.sum(), bitsOn)
x = s.decode(us)
self.assertEqual(x[0]["foo"][1], "US")
topDown = s.topDownCompute(us)
self.assertEqual(topDown.value, "US")
self.assertEqual(topDown.scalar, len(categories))
self.assertEqual(topDown.encoding.sum(), bitsOn)
# empty field
empty = s.encode(SENTINEL_VALUE_FOR_MISSING_DATA)
self.assertEqual(empty.sum(), 0)
self.assertEqual(empty.shape, (fieldWidth,))
self.assertEqual(empty.sum(), 0)
# make sure it can still be decoded after a change
bit = s.random.randint(0, s.getWidth()-1)
us[bit] = 1 - us[bit]
x = s.decode(us)
self.assertEqual(x[0]["foo"][1], "US")
# add two reps together
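    # (element-wise OR: a bit is set if it is set in either representation)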
newrep = ((us + unknown) > 0).astype('uint8')
x = s.decode(newrep)
    name = x[0]["foo"][1]
if name != "US <UNKNOWN>" and name != "<UNKNOWN> US":
othercategory = name.replace("US", "")
othercategory = othercategory.replace("<UNKNOWN>", "")
othercategory = othercategory.replace(" ", "")
otherencoded = s.encode(othercategory)
print "Got: %s instead of US/unknown" % name
print "US: %s" % us
print "unknown: %s" % unknown
print "Sum: %s" % newrep
print "%s: %s" % (othercategory, s.encode(othercategory))
print "Matches with US: %d" % (us * newrep).sum()
print "Matches with unknown: %d" % (unknown * newrep).sum()
print "Matches with %s: %d" % (othercategory,
(otherencoded * newrep).sum())
raise RuntimeError("Decoding failure")
# serialization
import cPickle as pickle
t = pickle.loads(pickle.dumps(s))
self.assertTrue((t.encode("ES") == es).all())
self.assertTrue((t.encode("GB") == s.encode("GB")).all())
# Test autogrow
s = SDRCategoryEncoder(n=fieldWidth, w=bitsOn, categoryList = None, name="bar", forced=True)
es = s.encode("ES")
self.assertEqual(es.shape, (fieldWidth,))
self.assertEqual(es.sum(), bitsOn)
x = s.decode(es)
self.assertIsInstance(x[0], dict)
self.assertTrue("bar" in x[0])
self.assertEqual(x[0]["bar"][1], "ES")
us = s.encode("US")
self.assertEqual(us.shape, (fieldWidth,))
self.assertEqual(us.sum(), bitsOn)
x = s.decode(us)
self.assertEqual(x[0]["bar"][1], "US")
es2 = s.encode("ES")
self.assertTrue((es2 == es).all())
us2 = s.encode("US")
self.assertTrue((us2 == us).all())
# make sure it can still be decoded after a change
bit = s.random.randint(0, s.getWidth()-1)
us[bit] = 1 - us[bit]
x = s.decode(us)
self.assertEqual(x[0]["bar"][1], "US")
# add two reps together
newrep = ((us + es) > 0).astype('uint8')
x = s.decode(newrep)
    name = x[0]["bar"][1]
self.assertTrue(name == "US ES" or name == "ES US")
# Catch duplicate categories
caughtException = False
newcategories = categories[:]
self.assertTrue("ES" in newcategories)
newcategories.append("ES")
try:
s = SDRCategoryEncoder(n=fieldWidth, w=bitsOn, categoryList = newcategories, name="foo", forced=True)
except RuntimeError, e:
caughtException = True
    if not caughtException:
      raise RuntimeError("Did not catch duplicate category in constructor")
# serialization for autogrow encoder
gs = s.encode("GS")
t = pickle.loads(pickle.dumps(s))
self.assertTrue((t.encode("ES") == es).all())
self.assertTrue((t.encode("GS") == gs).all())
# -----------------------------------------------------------------------
def testAutogrow(self):
"""testing auto-grow"""
fieldWidth = 100
bitsOn = 10
s = SDRCategoryEncoder(n=fieldWidth, w=bitsOn, name="foo", verbosity=2, forced=True)
encoded = numpy.zeros(fieldWidth)
self.assertEqual(s.topDownCompute(encoded).value, "<UNKNOWN>")
s.encodeIntoArray("catA", encoded)
self.assertEqual(encoded.sum(), bitsOn)
self.assertEqual(s.getScalars('catA'), 1)
catA = encoded.copy()
s.encodeIntoArray("catB", encoded)
self.assertEqual(encoded.sum(), bitsOn)
self.assertEqual(s.getScalars('catB'), 2)
catB = encoded.copy()
self.assertEqual(s.topDownCompute(catA).value, 'catA')
self.assertEqual(s.topDownCompute(catB).value, 'catB')
s.encodeIntoArray(SENTINEL_VALUE_FOR_MISSING_DATA, encoded)
self.assertEqual(sum(encoded), 0)
self.assertEqual(s.topDownCompute(encoded).value, "<UNKNOWN>")
#Test Disabling Learning and autogrow
s.setLearning(False)
s.encodeIntoArray("catC", encoded)
self.assertEqual(encoded.sum(), bitsOn)
self.assertEqual(s.getScalars('catC'), 0)
self.assertEqual(s.topDownCompute(encoded).value, "<UNKNOWN>")
s.setLearning(True)
s.encodeIntoArray("catC", encoded)
self.assertEqual(encoded.sum(), bitsOn)
self.assertEqual(s.getScalars('catC'), 3)
self.assertEqual(s.topDownCompute(encoded).value, "catC")
###########################################
if __name__ == '__main__':
unittest.main()
|
miic-sw/miic | refs/heads/master | miic.core/src/miic/core/wt_fun.py | 1 | """
@author:
Eraldo Pomponi
@copyright:
The MIIC Development Team (eraldo.pomponi@uni-leipzig.de)
@license:
GNU Lesser General Public License, Version 3
(http://www.gnu.org/copyleft/lesser.html)
Created on -- Mar 22, 2011 --
"""
# PyWavelets import
import pywt
from scipy import median, diff, sqrt, log, power
from numpy import sign
try:
from traits.api import HasTraits, Array, Int, Str, Property, Float
except ImportError:
pass
class WT_Denoise(HasTraits):
""" WT Based event removal.
"""
# Signal to "clean"
sig = Array()
# Decomposition level
level = Int
# Wavelet family (['haar', 'db', 'sym', 'coif', 'bior', 'rbio', 'dmey'])
family = Str('sym')
# Order
order = Int(4)
# Wavelet
wt = Property(depends_on=['family', 'order'])
# Extension mode (['zpd', 'cpd', 'sym', 'ppd', 'sp1', 'per'])
mode = Str('per')
# noise level
sigma = Property(depends_on=['sig'])
_sigma = Float
##### Public interface #####
def filter(self, mode='soft'):
if self.level > self.max_dec_level():
clevel = self.max_dec_level()
else:
clevel = self.level
# decompose
coeffs = pywt.wavedec(self.sig, pywt.Wavelet(self.wt), \
mode=self.mode, \
level=clevel)
# threshold evaluation
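        # (Donoho-Johnstone universal threshold: sigma * sqrt(2 * ln(N)))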
th = sqrt(2 * log(len(self.sig)) * power(self.sigma, 2))
# thresholding
for (i, cAD) in enumerate(coeffs):
if mode == 'soft':
coeffs[i] = pywt.thresholding.soft(cAD, th)
elif mode == 'hard':
coeffs[i] = pywt.thresholding.hard(cAD, th)
# reconstruct
rec_sig = pywt.waverec(coeffs, pywt.Wavelet(self.wt), mode=self.mode)
if len(rec_sig) == (len(self.sig) + 1):
self.sig = rec_sig[:-1]
def max_dec_level(self):
return pywt.dwt_max_level(len(self.sig), pywt.Wavelet(self.wt))
# Getter/Setter
def _get_wt(self):
return ('%s%d') % (self.family, self.order)
def _get_sigma(self):
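        # robust noise estimate: median absolute deviation (MAD) of the first
        # differences, rescaled by 0.6745 (MAD-to-std factor for Gaussian noise)
        # and sqrt(2) (differencing doubles the noise variance)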
self._sigma = median(abs(diff(self.sig) - median(diff(self.sig)))) \
/ (sqrt(2) * 0.6745)
return self._sigma
class WT_Event_Filter(HasTraits):
""" WT Based event removal.
"""
# Signal to "clean"
sig = Array()
# Decomposition level
level = Int
# Wavelet family (['haar', 'db', 'sym', 'coif', 'bior', 'rbio', 'dmey'])
family = Str('sym')
# Order
order = Int(4)
# Wavelet
wt = Property(depends_on=['family', 'order'])
# Extension mode (['zpd', 'cpd', 'sym', 'ppd', 'sp1', 'per'])
mode = Str('per')
# noise level
sigma = Property(depends_on=['sig'])
_sigma = Float
##### Public interface #####
def filter(self):
if self.level > self.max_dec_level():
clevel = self.max_dec_level()
else:
clevel = self.level
# decompose
coeffs = pywt.wavedec(self.sig, pywt.Wavelet(self.wt), \
mode=self.mode, \
level=clevel)
# threshold evaluation
th = sqrt(2 * log(len(self.sig)) * power(self.sigma, 2))
# thresholding
for (i, cAD) in enumerate(coeffs):
if i == 0:
continue
coeffs[i] = sign(cAD) * pywt.thresholding.less(abs(cAD), th)
# reconstruct
rec_sig = pywt.waverec(coeffs, pywt.Wavelet(self.wt), mode=self.mode)
if len(rec_sig) == (len(self.sig) + 1):
self.sig = rec_sig[:-1]
def max_dec_level(self):
return pywt.dwt_max_level(len(self.sig), pywt.Wavelet(self.wt))
# Getter/Setter
def _get_wt(self):
return ('%s%d') % (self.family, self.order)
def _get_sigma(self):
self._sigma = median(abs(diff(self.sig) - median(diff(self.sig)))) \
/ (sqrt(2) * 0.6745)
return self._sigma
if __name__ == '__main__':
import numpy as np
sig = np.array([1, 2, 3, 3, 5, 5, 6, 7, 8, 9, 10, 9, -1, -5, 3, 3])
sig = sig.T
family = 'db'
order = 2
level = 2
WT_C = WT_Event_Filter(sig=sig, family=family, order=order, level=level)
print WT_C.wt
print WT_C.sigma
WT_C.filter()
print WT_C.sig
print WT_C.sig.shape
family = 'sym'
order = 8
level = 10
n_points = 645
x = np.linspace(0, 20 * np.pi, n_points)
y_orig = np.sin(x)
    y = y_orig  # optionally add noise: y_orig + np.random.normal(size=y_orig.shape)*0.1
WT_DEN = WT_Denoise(sig=y, family=family, order=order, level=level)
WT_DEN.filter(mode='soft')
s_den = WT_DEN.sig
print s_den.shape
print y_orig
print s_den
print np.sum(abs(y_orig - s_den), 0) / n_points
|
maxis1314/pyutils | refs/heads/master | Remember/app/apis.py | 1 | '''
all the REST api implementations
'''
import os
import uuid
import json
import werkzeug
from flask import abort, make_response, send_from_directory
from flask_restful import Api, Resource, reqparse, fields, marshal
from app import app, models
from app.models import db
api = Api(app) # pylint: disable=C0103
@api.representation("application/json")
def output_json(data, code, headers=None):
'''
convert the response data to json
'''
resp = make_response(json.dumps(data), code)
resp.headers.extend(headers or {})
return resp
@api.representation("application/xml")
def output_xml(data, code, headers=None):
'''
convert the response data to xml
    TODO: placeholder; returns JSON until proper XML serialization is implemented
'''
resp = make_response(json.dumps(data), code)
resp.headers.extend(headers or {})
return resp
NOTE_FIELDS = {
"id": fields.Integer,
"content": fields.String,
"width": fields.Integer,
"color": fields.String
}
TAG_FIELDS = {
"id": fields.Integer,
"name": fields.String,
}
CATEGORY_FIELDS = {
"id": fields.Integer,
"name": fields.String,
"description": fields.String
}
ATTACHMENT_FIELDS = {
"id": fields.Integer,
"name": fields.String,
"identity": fields.String,
"type": fields.String,
"tags": fields.List(fields.Nested(TAG_FIELDS))
}
TASK_FIELDS = {
"id": fields.Integer,
"name": fields.String,
"description": fields.String,
"category": fields.Nested(CATEGORY_FIELDS),
"done": fields.Boolean,
"tags": fields.List(fields.Nested(TAG_FIELDS)),
"dimension": fields.Integer
}
TASKLIST_ITEM_FIELDS = {
"id": fields.Integer,
"name": fields.String,
"category": fields.Nested(CATEGORY_FIELDS),
"done": fields.Boolean,
"tags": fields.List(fields.Nested(TAG_FIELDS)),
"dimension": fields.Integer
}
class TaskList(Resource):
'''
this is Task list resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode,
required=True, help="No task name provided", location="json")
self.reqparse.add_argument("description", type=unicode, default="", location="json")
self.reqparse.add_argument("done", type=bool, required=True, location="json")
self.reqparse.add_argument("category", type=dict, required=True, location="json")
self.reqparse.add_argument("tags", type=list, default=[], location="json")
self.reqparse.add_argument("dimension", type=int, required=True, location="json")
super(TaskList, self).__init__()
self.representations = {
"application/xml": output_xml,
"application/json": output_json,
}
def get(self):
'''
method to get task list
'''
tasks = models.Task.query.all()
return {"tasks": [marshal(task, TASKLIST_ITEM_FIELDS) for task in tasks]}
def post(self):
'''
method to add a task
'''
args = self.reqparse.parse_args()
category = models.Category.query.filter_by(id=args["category"]["id"]).first()
tags = [models.Tag.query.filter_by(id=tag["id"]).first() for tag in args["tags"]]
task = models.Task(args["name"], args["description"], category,
args["done"], tags, args["dimension"])
db.session.add(task)
db.session.commit()
return {"task": marshal(task, TASK_FIELDS)}, 201
class Task(Resource):
'''
this is Task resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, location="json")
self.reqparse.add_argument("description", type=unicode, location="json")
self.reqparse.add_argument("done", type=bool, location="json")
self.reqparse.add_argument("category", type=dict, location="json")
self.reqparse.add_argument("tags", type=list, location="json")
self.reqparse.add_argument("dimension", type=int, required=True, location="json")
super(Task, self).__init__()
def get(self, task_id):
'''
method to get task info by task id
'''
task = models.Task.query.filter_by(id=task_id).first()
if not task:
abort(404)
return {"task": marshal(task, TASK_FIELDS)}
def put(self, task_id):
'''
method to update task by task id
'''
task = models.Task.query.filter_by(id=task_id).first()
if not task:
abort(404)
args = self.reqparse.parse_args()
args.category = models.Category.query.filter_by(id=args["category"]["id"]).first()
args.tags = [models.Tag.query.filter_by(id=tag["id"]).first() for tag in args["tags"]]
for arg_name, arg_value in args.items():
if arg_value is not None:
setattr(task, arg_name, arg_value)
db.session.commit()
return {"task": marshal(task, TASK_FIELDS)}
def delete(self, task_id):
'''
method to delete task by task id
'''
task = models.Task.query.filter_by(id=task_id).first()
if not task:
abort(404)
db.session.delete(task)
db.session.commit()
return {"task": task.id}
class TaskListByStatus(Resource):
'''
this is Task list resource by task status
'''
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("done", type=unicode, default="", location="args")
super(TaskListByStatus, self).__init__()
def get(self):
'''
method to get task list by task status
'''
args = self.reqparse.parse_args()
done = args["done"]
        done = done.lower() == "true"
tasks = models.Task.query.filter_by(done=done).all()
return {"tasks": [marshal(task, TASK_FIELDS) for task in tasks]}
class TaskListByCategory(Resource):
'''
this is Task list resource by task category
'''
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("category_id", type=int, default="", location="args")
super(TaskListByCategory, self).__init__()
def get(self):
'''
method to get task list by task category
'''
args = self.reqparse.parse_args()
print args
category_id = args["category_id"]
tasks = models.Task.query.filter_by(category_id=category_id).all()
return {"tasks": [marshal(task, TASK_FIELDS) for task in tasks]}
class CategoryListAPI(Resource):
'''
this is Category list resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, required=True,
help="No category name provided", location="json")
self.reqparse.add_argument("description", type=unicode, default="", location="json")
super(CategoryListAPI, self).__init__()
def get(self):
'''
method to get category list
'''
categories = models.Category.query.all()
return {"categories": [marshal(category, CATEGORY_FIELDS) for category in categories]}
def post(self):
'''
method to add a category
'''
args = self.reqparse.parse_args()
category = models.Category(args["name"], args["description"])
db.session.add(category)
db.session.commit()
return {"category": marshal(category, CATEGORY_FIELDS)}, 201
class CategoryAPI(Resource):
'''
this is Category resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, location="json")
self.reqparse.add_argument("description", type=unicode, location="json")
super(CategoryAPI, self).__init__()
def get(self, category_id):
'''
method to get category info by category id
'''
category = models.Category.query.filter_by(id=category_id).first()
if not category:
abort(404)
return {"category": marshal(category, CATEGORY_FIELDS)}
def put(self, category_id):
'''
method to update category by category id
'''
category = models.Category.query.filter_by(id=category_id).first()
if not category:
abort(404)
args = self.reqparse.parse_args()
for arg_name, arg_value in args.items():
if arg_value is not None:
setattr(category, arg_name, arg_value)
db.session.commit()
return {"category": marshal(category, CATEGORY_FIELDS)}
def delete(self, category_id):
'''
method to delete category by category id
'''
category = models.Category.query.filter_by(id=category_id).first()
if not category:
abort(404)
db.session.delete(category)
db.session.commit()
return {"category": marshal(category, CATEGORY_FIELDS)}
class TagListAPI(Resource):
'''
this is Tag list resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, required=True,
help="No tag name provided", location="json")
super(TagListAPI, self).__init__()
def get(self):
'''
method to get tag list
'''
tags = models.Tag.query.all()
return {"tags": [marshal(tag, TAG_FIELDS) for tag in tags]}
def post(self):
'''
method to add a tag
'''
args = self.reqparse.parse_args()
tag = models.Tag(args["name"])
db.session.add(tag)
db.session.commit()
return {"tag": marshal(tag, TAG_FIELDS)}, 201
class TagAPI(Resource):
'''
this is Tag resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, location="json")
super(TagAPI, self).__init__()
def get(self, tag_id):
'''
method to get tag info by tag id
'''
tag = models.Tag.query.filter_by(id=tag_id).first()
if not tag:
abort(404)
return {"tag": marshal(tag, TAG_FIELDS)}
def put(self, tag_id):
'''
method to update tag by tag id
'''
tag = models.Tag.query.filter_by(id=tag_id).first()
if not tag:
abort(404)
args = self.reqparse.parse_args()
for arg_name, arg_value in args.items():
if arg_value is not None:
setattr(tag, arg_name, arg_value)
db.session.commit()
return {"tag": marshal(tag, TAG_FIELDS)}
def delete(self, tag_id):
'''
method to delete tag by tag id
'''
tag = models.Tag.query.filter_by(id=tag_id).first()
if not tag:
abort(404)
db.session.delete(tag)
db.session.commit()
return {"tag": marshal(tag, TAG_FIELDS)}
class NoteListAPI(Resource):
'''
this is Note list resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("content", type=unicode, required=True,
help="No note content provided", location="json")
self.reqparse.add_argument("width", type=int, default=1, location="json")
self.reqparse.add_argument("color", type=str, location="json")
super(NoteListAPI, self).__init__()
def get(self):
'''
method to get note list
'''
notes = models.Note.query.all()
return {"notes": [marshal(note, NOTE_FIELDS) for note in notes]}
def post(self):
'''
method to add a note
'''
args = self.reqparse.parse_args()
print args
note = models.Note(args["content"], args["width"], args["color"])
db.session.add(note)
db.session.commit()
return {"note": marshal(note, NOTE_FIELDS)}, 201
class NoteAPI(Resource):
'''
this is Note resource
'''
#decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("content", type=unicode, location="json")
self.reqparse.add_argument("width", type=int, default=1, location="json")
self.reqparse.add_argument("color", type=str, location="json")
super(NoteAPI, self).__init__()
def get(self, note_id):
'''
method to get note info by id
'''
note = models.Note.query.filter_by(id=note_id).first()
if not note:
abort(404)
return {"note": marshal(note, NOTE_FIELDS)}
def put(self, note_id):
'''
method to update note by id
'''
note = models.Note.query.filter_by(id=note_id).first()
if not note:
abort(404)
args = self.reqparse.parse_args()
for arg_name, arg_value in args.items():
if arg_value is not None:
setattr(note, arg_name, arg_value)
db.session.commit()
return {"note": marshal(note, NOTE_FIELDS)}
def delete(self, note_id):
'''
method to delete note by id
'''
note = models.Note.query.filter_by(id=note_id).first()
if not note:
abort(404)
db.session.delete(note)
db.session.commit()
return {"note": marshal(note, NOTE_FIELDS)}
def allowed_file(filename):
'''
function to check allowed attachment extension
'''
return "." in filename and filename.rsplit(".", 1)[1] in app.config["ALLOWED_EXTENSIONS"]
class AttachmentListAPI(Resource):
'''
this is Attachment list resource
'''
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("file",
type=werkzeug.datastructures.FileStorage,
location="files")
self.reqparse.add_argument("tags", type=str, default="", location="form")
super(AttachmentListAPI, self).__init__()
def get(self):
'''
method to get attachment list
'''
attachments = models.Attachment.query.all()
return {"attachments":
[marshal(attachment, ATTACHMENT_FIELDS) for attachment in attachments]}
def post(self):
'''
method to add attachment
'''
args = self.reqparse.parse_args()
file_obj = args["file"]
tag_ids = []
if args["tags"]:
tag_ids = args["tags"].split(",")
#if file and allowed_file(file.filename.lower()):
if file_obj:
raw_name = os.path.splitext(file_obj.filename)[0]
extension = os.path.splitext(file_obj.filename)[1]
identity = str(uuid.uuid4()) + extension
file_obj.save(os.path.join(app.config["UPLOAD_FOLDER"], identity))
tags = [models.Tag.query.filter_by(id=tag_id).first() for tag_id in tag_ids]
attachment = models.Attachment(raw_name, extension, identity, tags)
db.session.add(attachment)
db.session.commit()
return {"attachment": marshal(attachment, ATTACHMENT_FIELDS)}, 201
class AttachmentAPI(Resource):
'''
this is Attachment resource
'''
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument("name", type=unicode, default="", location="json")
self.reqparse.add_argument("tags", type=list, default=[], location="json")
super(AttachmentAPI, self).__init__()
def get(self, attachment_id):
'''
method to get attachment by id
'''
attachment = models.Attachment.query.filter_by(id=attachment_id).first()
if not attachment:
abort(404)
return send_from_directory(
os.path.join(os.getcwd(), app.config["UPLOAD_FOLDER"]),
attachment.identity)
def put(self, attachment_id):
'''
method to update attachment by id
'''
attachment = models.Attachment.query.filter_by(id=attachment_id).first()
if not attachment:
abort(404)
args = self.reqparse.parse_args()
tags = args["tags"]
args.tags = [models.Tag.query.filter_by(id=tag).first() for tag in tags]
for arg_name, arg_value in args.items():
if arg_value is not None:
setattr(attachment, arg_name, arg_value)
print attachment
db.session.commit()
return {"attachment": marshal(attachment, ATTACHMENT_FIELDS)}
def delete(self, attachment_id):
'''
method to delete attachment by id
'''
attachment = models.Attachment.query.filter_by(id=attachment_id).first()
if not attachment:
abort(404)
db.session.delete(attachment)
db.session.commit()
# delete the uploaded file from disk
identity = attachment.identity
os.remove(os.path.join(app.config["UPLOAD_FOLDER"], identity))
return {"attachment": attachment.id}
api.add_resource(TaskList,
"/remember/api/v1.0/tasks",
endpoint="ep_tasks")
api.add_resource(TaskListByStatus,
"/remember/api/v1.0/tasks/find-by-status",
endpoint="ep_taskByStatus")
api.add_resource(TaskListByCategory,
"/remember/api/v1.0/tasks/find-by-category",
endpoint="ep_taskByCategory")
api.add_resource(Task,
"/remember/api/v1.0/tasks/<int:task_id>",
endpoint="ep_task")
api.add_resource(CategoryListAPI,
"/remember/api/v1.0/categories",
endpoint="ep_categories")
api.add_resource(CategoryAPI,
"/remember/api/v1.0/categories/<int:category_id>",
endpoint="ep_category")
api.add_resource(TagListAPI,
"/remember/api/v1.0/tags",
endpoint="ep_tags")
api.add_resource(TagAPI,
"/remember/api/v1.0/tags/<int:tag_id>",
endpoint="ep_tag")
api.add_resource(NoteListAPI,
"/remember/api/v1.0/notes",
endpoint="ep_notes")
api.add_resource(NoteAPI,
"/remember/api/v1.0/notes/<int:note_id>",
endpoint="ep_note")
api.add_resource(AttachmentListAPI,
"/remember/api/v1.0/attachments",
endpoint="ep_attachments")
api.add_resource(AttachmentAPI,
"/remember/api/v1.0/attachments/<int:attachment_id>",
endpoint="ep_attachment")
|
nouiz/pylearn2 | refs/heads/master | pylearn2/utils/utlc.py | 49 | """Several utilities for experimenting upon utlc datasets"""
# Standard library imports
import logging
import os
import inspect
import zipfile
from tempfile import TemporaryFile
# Third-party imports
import numpy
import theano
from pylearn2.datasets.utlc import load_ndarray_dataset, load_sparse_dataset
from pylearn2.utils import subdict, sharedX
logger = logging.getLogger(__name__)
##################################################
# Shortcuts and auxiliary functions
##################################################
def getboth(dict1, dict2, key, default=None):
"""
    Try to retrieve key from dict1 if it exists, otherwise try dict2.
    If the key is not found in either of them, raise an exception.
Parameters
----------
dict1 : dict
WRITEME
dict2 : dict
WRITEME
key : WRITEME
default : WRITEME
Returns
-------
WRITEME
"""
try:
return dict1[key]
except KeyError:
if default is None:
return dict2[key]
else:
return dict2.get(key, default)
##################################################
# Datasets loading and contest facilities
##################################################
def load_data(conf):
"""
Loads a specified dataset according to the parameters in the dictionary
Parameters
----------
conf : WRITEME
Returns
-------
WRITEME
"""
logger.info('... loading dataset')
# Special case for sparse format
if conf.get('sparse', False):
expected = inspect.getargspec(load_sparse_dataset)[0][1:]
data = load_sparse_dataset(conf['dataset'], **subdict(conf, expected))
valid, test = data[1:3]
# Sparse TERRY data on LISA servers contains an extra null first row in
# valid and test subsets.
if conf['dataset'] == 'terry':
valid = valid[1:]
test = test[1:]
assert valid.shape[0] == test.shape[0] == 4096, \
'Sparse TERRY data loaded has wrong number of examples'
if len(data) == 3:
return [data[0], valid, test]
else:
return [data[0], valid, test, data[3]]
# Load as the usual ndarray
expected = inspect.getargspec(load_ndarray_dataset)[0][1:]
data = load_ndarray_dataset(conf['dataset'], **subdict(conf, expected))
# Special case for on-the-fly normalization
if conf.get('normalize_on_the_fly', False):
return data
# Allocate shared variables
def shared_dataset(data_x):
"""Function that loads the dataset into shared variables"""
if conf.get('normalize', True):
return sharedX(data_x, borrow=True)
else:
return theano.shared(theano._asarray(data_x), borrow=True)
return map(shared_dataset, data)
def save_submission(conf, valid_repr, test_repr):
"""
Create a submission file given a configuration dictionary and a
representation for valid and test.
Parameters
----------
conf : WRITEME
valid_repr : WRITEME
test_repr : WRITEME
"""
logger.info('... creating zipfile')
# Ensure the given directory is correct
submit_dir = conf['savedir']
if not os.path.exists(submit_dir):
os.makedirs(submit_dir)
elif not os.path.isdir(submit_dir):
raise IOError('savedir %s is not a directory' % submit_dir)
basename = os.path.join(submit_dir, conf['dataset'] + '_' + conf['expname'])
    # If there are too many features, output kernel matrices instead
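    # (a Gram matrix is n_examples x n_examples, which is smaller than the
    # n_examples x n_features representation when n_features > n_examples)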
if (valid_repr.shape[1] > valid_repr.shape[0]):
valid_repr = numpy.dot(valid_repr, valid_repr.T)
test_repr = numpy.dot(test_repr, test_repr.T)
    # Quantize data to integers in [0, 999]
valid_repr = numpy.floor((valid_repr / valid_repr.max())*999)
test_repr = numpy.floor((test_repr / test_repr.max())*999)
# Store the representations in two temporary files
valid_file = TemporaryFile()
test_file = TemporaryFile()
numpy.savetxt(valid_file, valid_repr, fmt="%.3f")
numpy.savetxt(test_file, test_repr, fmt="%.3f")
# Reread those files and put them together in a .zip
valid_file.seek(0)
test_file.seek(0)
submission = zipfile.ZipFile(basename + ".zip", "w",
compression=zipfile.ZIP_DEFLATED)
submission.writestr(basename + '_valid.prepro', valid_file.read())
submission.writestr(basename + '_final.prepro', test_file.read())
submission.close()
valid_file.close()
test_file.close()
def create_submission(conf, transform_valid, transform_test=None, features=None):
"""
Create a submission file given a configuration dictionary and a
computation function.
    Note that it always reloads the datasets to ensure valid & test
are not permuted.
Parameters
----------
conf : WRITEME
transform_valid : WRITEME
transform_test : WRITEME
features : WRITEME
"""
if transform_test is None:
transform_test = transform_valid
# Load the dataset, without permuting valid and test
kwargs = subdict(conf, ['dataset', 'normalize', 'normalize_on_the_fly', 'sparse'])
kwargs.update(randomize_valid=False, randomize_test=False)
valid_set, test_set = load_data(kwargs)[1:3]
# Sparse datasets are not stored as Theano shared vars.
if not conf.get('sparse', False):
valid_set = valid_set.get_value(borrow=True)
test_set = test_set.get_value(borrow=True)
# Prefilter features, if needed.
if features is not None:
valid_set = valid_set[:, features]
test_set = test_set[:, features]
# Valid and test representations
valid_repr = transform_valid(valid_set)
test_repr = transform_test(test_set)
# Convert into text info
save_submission(conf, valid_repr, test_repr)
##################################################
# Proxies for representation evaluations
##################################################
def compute_alc(valid_repr, test_repr):
"""
Returns the ALC of the valid set VS test set
Note: This proxy won't work in the case of transductive learning
(This is an assumption) but it seems to be a good proxy in the
    normal case (i.e., training only on the training set)
Parameters
----------
valid_repr : WRITEME
test_repr : WRITEME
Returns
-------
WRITEME
"""
# Concatenate the sets, and give different one hot labels for valid and test
n_valid = valid_repr.shape[0]
n_test = test_repr.shape[0]
_labvalid = numpy.hstack((numpy.ones((n_valid, 1)),
numpy.zeros((n_valid, 1))))
_labtest = numpy.hstack((numpy.zeros((n_test, 1)),
numpy.ones((n_test, 1))))
dataset = numpy.vstack((valid_repr, test_repr))
label = numpy.vstack((_labvalid, _labtest))
logger.info('... computing the ALC')
raise NotImplementedError("This got broken by embed no longer being "
"where it used to be (if it even still exists, I haven't "
"looked for it)")
# return embed.score(dataset, label)
def lookup_alc(data, transform):
"""
.. todo::
WRITEME
"""
valid_repr = transform(data[1].get_value(borrow=True))
test_repr = transform(data[2].get_value(borrow=True))
return compute_alc(valid_repr, test_repr)
|
3manuek/Data-Science-45min-Intros | refs/heads/master | python_multiprocessing/run_2.py | 25 | #!/usr/bin/env python
from multiprocessing import Process
from run_1 import bad_ass_function
if __name__ == "__main__":
n = 5
# Process takes keyword args: target=function_name, args=function_args_tuple
p = Process(target=bad_ass_function,args=(n,))
# runs "target" locally
#p.run()
# calls "p.run" in a new thread
#p.start()
# block this thread until p terminates
#p.join()
print("finished first process")
#q = Process(target=bad_ass_function,args=(n,))
#q.start()
"""
But what about the return value of the function?
"""
|
BigJMoney/WarrensMUD | refs/heads/master | server/conf/portal_services_plugins.py | 31 | """
Start plugin services
This plugin module can define user-created services for the Portal to
start.
This module must handle all imports and setups required to start
twisted services (see examples in evennia.server.portal.portal). It
must also contain a function start_plugin_services(application).
Evennia will call this function with the main Portal application (so
your services can be added to it). The function should not return
anything. Plugin services are started last in the Portal startup
process.
"""
def start_plugin_services(portal):
"""
This hook is called by Evennia, last in the Portal startup process.
portal - a reference to the main portal application.
"""
pass
|
birsoyo/conan | refs/heads/develop | conans/test/util/detect_test.py | 2 | import platform
import subprocess
import unittest
from conans import tools
from conans.client.conf.detect import detect_defaults_settings
from conans.test.utils.tools import TestBufferConanOutput
class DetectTest(unittest.TestCase):
def detect_default_compilers_test(self):
platform_default_compilers = {
"Linux": "gcc",
"Darwin": "apple-clang",
"Windows": "Visual Studio"
}
output = TestBufferConanOutput()
result = detect_defaults_settings(output)
# result is a list of tuples (name, value) so converting it to dict
result = dict(result)
platform_compiler = platform_default_compilers.get(platform.system(), None)
if platform_compiler is not None:
self.assertEquals(result.get("compiler", None), platform_compiler)
def detect_default_in_mac_os_using_gcc_as_default_test(self):
"""
Test if gcc in Mac OS X is using apple-clang as frontend
"""
# See: https://github.com/conan-io/conan/issues/2231
if platform.system() != "Darwin":
return
try:
output = subprocess.check_output(["gcc", "--version"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
            # gcc is not installed or failed to run (not a test scenario)
return
if b"clang" not in output:
            # Not a test scenario: gcc should mention clang in its output
# see: https://stackoverflow.com/questions/19535422/os-x-10-9-gcc-links-to-clang
raise Exception("Apple gcc doesn't point to clang with gcc frontend anymore! please check")
output = TestBufferConanOutput()
with tools.environment_append({"CC": "gcc"}):
result = detect_defaults_settings(output)
# result is a list of tuples (name, value) so converting it to dict
result = dict(result)
# No compiler should be detected
self.assertIsNone(result.get("compiler", None))
self.assertIn("gcc detected as a frontend using apple-clang", output)
self.assertIsNotNone(output.error)
|
KonstantinRitt/qmqtt | refs/heads/master | tests/gtest/gtest/googletest/googletest/test/gtest_xml_output_unittest.py | 1815 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"
SUPPORTS_STACK_TRACES = False
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
[GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
      # p.signal is available only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def testFilteredTestXmlOutput(self):
"""Verifies XML output when a filter is applied.
Runs a test program that executes only some tests and verifies that
non-selected tests do not show up in the XML output.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
extra_args)
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
def _TestXmlOutput(self, gtest_prog_name, expected_xml,
expected_exit_code, extra_args=None):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
expected_exit_code)
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == '__main__':
os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
gtest_test_utils.Main()
|
vladmm/intellij-community | refs/heads/master | python/testData/completion/overrideParamNameInRestDocstring.py | 53 | def f(param1, param2):
"""
:param par<caret>am1: description
""" |
s34rching/python_classes | refs/heads/master | tests/test_delete_contact.py | 1 | # -*- coding: utf-8 -*-
from models.contact import Contact
import random
import pytest
def test_delete_contact(app, orm, check_ui):
with pytest.allure.step('Given a non-empty contact list'):
if app.contact.count() == 0:
app.contact.create(Contact(firstname="Test", lastname="Test", address="some address, 1", home_number="+375293003030",
mobile_number="+375294004040"))
old_contact_list = orm.get_contact_list()
with pytest.allure.step('Given a random contact from the list'):
contact = random.choice(old_contact_list)
    with pytest.allure.step('When I delete the chosen contact from the list'):
app.contact.delete_some_contact_by_id(contact.id)
    with pytest.allure.step('Then the new contact list is equal to the old list without the deleted contact'):
new_contact_list = orm.get_contact_list()
old_contact_list.remove(contact)
assert old_contact_list == new_contact_list
if check_ui:
assert sorted(new_contact_list, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(), key=Contact.id_or_max) |
aktech/sympy | refs/heads/master | sympy/physics/units.py | 18 | """
Physical units and dimensions.
The base class is Unit, where all here defined units (~200) inherit from.
The find_unit function can help you find units for a given quantity:
>>> import sympy.physics.units as u
>>> u.find_unit('coul')
['coulomb', 'coulombs']
>>> u.find_unit(u.charge)
['C', 'charge', 'coulomb', 'coulombs']
>>> u.coulomb
A*s
Units are always given in terms of base units that have a name and
an abbreviation:
>>> u.A.name
'ampere'
>>> u.ampere.abbrev
'A'
The generic name for a unit (like 'length', 'mass', etc...)
can help you find units:
>>> u.find_unit('magnet')
['magnetic_flux', 'magnetic_constant', 'magnetic_flux_density']
>>> u.find_unit(u.magnetic_flux)
['Wb', 'wb', 'weber', 'webers', 'magnetic_flux']
If, for a given session, you wish to add a unit you may do so:
>>> u.find_unit('gal')
[]
>>> u.gal = 4*u.quart
>>> u.gal/u.inch**3
231
To see a given quantity in terms of some other unit, divide by the desired
unit:
>>> mph = u.miles/u.hours
>>> (u.m/u.s/mph).n(2)
2.2
The units are defined in terms of base units, so when you divide similar
units you will obtain a pure number. This means, for example, that if you
divide a real-world mass (like grams) by the atomic mass unit (amu) you
will obtain Avogadro's number. To obtain the answer in moles you
should divide by the unit ``avogadro``:
>>> u.grams/u.amu
602214085700000000000000
>>> _/u.avogadro
mol
For chemical calculations the unit ``mmu`` (molar mass unit) has been
defined so this conversion is handled automatically. For example, the
number of moles in 1 kg of water might be calculated as:
>>> u.kg/(18*u.mmu).n(3)
55.5*mol
If you need the number of atoms in a mol as a pure number you can use
``avogadro_number`` but if you need it as a dimensional quantity you should use
``avogadro_constant``. (``avogadro`` is a shorthand for the dimensional
quantity.)
>>> u.avogadro_number
602214085700000000000000
>>> u.avogadro_constant
602214085700000000000000/mol
Values of constants are recommended by Committee on Data for Science and
Technology (CODATA) as of 2014. See more at http://arxiv.org/pdf/1507.07956.pdf
"""
from __future__ import print_function, division
from sympy import Rational, pi
from sympy.core import AtomicExpr
class Unit(AtomicExpr):
"""
Base class for base unit of physical units.
>>> from sympy.physics.units import Unit
>>> Unit("meter", "m")
m
Other units are derived from base units:
>>> import sympy.physics.units as u
>>> cm = u.m/100
>>> 100*u.cm
m
"""
is_positive = True # make sqrt(m**2) --> m
is_commutative = True
__slots__ = ["name", "abbrev"]
def __new__(cls, name, abbrev, **assumptions):
obj = AtomicExpr.__new__(cls, **assumptions)
assert isinstance(name, str), repr(type(name))
assert isinstance(abbrev, str), repr(type(abbrev))
obj.name = name
obj.abbrev = abbrev
return obj
def __getnewargs__(self):
return (self.name, self.abbrev)
def __eq__(self, other):
return isinstance(other, Unit) and self.name == other.name
def __hash__(self):
return super(Unit, self).__hash__()
def _hashable_content(self):
return (self.name, self.abbrev)
@property
def free_symbols(self):
return set()
# Dimensionless
percent = percents = Rational(1, 100)
permille = permilles = Rational(1, 1000)
ten = Rational(10)
yotta = ten**24
zetta = ten**21
exa = ten**18
peta = ten**15
tera = ten**12
giga = ten**9
mega = ten**6
kilo = ten**3
deca = ten**1
deci = ten**-1
centi = ten**-2
milli = ten**-3
micro = ten**-6
nano = ten**-9
pico = ten**-12
femto = ten**-15
atto = ten**-18
zepto = ten**-21
yocto = ten**-24
rad = radian = radians = 1
deg = degree = degrees = pi/180
sr = steradian = steradians = 1
mil = angular_mil = angular_mils = 2*pi/6400
# Base units
length = m = meter = meters = Unit('meter', 'm')
mass = kg = kilogram = kilograms = Unit('kilogram', 'kg')
time = s = second = seconds = Unit('second', 's')
current = A = ampere = amperes = Unit('ampere', 'A')
temperature = K = kelvin = kelvins = Unit('kelvin', 'K')
amount = mol = mole = moles = Unit('mole', 'mol')
luminosity = cd = candela = candelas = Unit('candela', 'cd')
# Derived units
volume = meter**3
frequency = Hz = hz = hertz = 1/s
force = N = newton = newtons = m*kg/s**2
energy = J = joule = joules = N*m
power = W = watt = watts = J/s
pressure = Pa = pa = pascal = pascals = N/m**2
charge = C = coulomb = coulombs = s*A
voltage = v = V = volt = volts = W/A
resistance = ohm = ohms = V/A
conductance = S = siemens = mho = mhos = A/V
capacitance = F = farad = farads = C/V
magnetic_flux = Wb = wb = weber = webers = J/A
magnetic_flux_density = T = tesla = teslas = V*s/m**2
inductance = H = henry = henrys = V*s/A
speed = m/s
acceleration = m/s**2
density = kg/m**3
optical_power = dioptre = D = 1/m
illuminance = lux = lx = sr*cd/m**2
# Common length units
km = kilometer = kilometers = kilo*m
dm = decimeter = decimeters = deci*m
cm = centimeter = centimeters = centi*m
mm = millimeter = millimeters = milli*m
um = micrometer = micrometers = micron = microns = micro*m
nm = nanometer = nanometers = nano*m
pm = picometer = picometers = pico*m
ft = foot = feet = Rational('0.3048')*m
inch = inches = Rational('25.4')*mm
yd = yard = yards = 3*ft
mi = mile = miles = 5280*ft
nmi = nautical_mile = nautical_miles = 6076*ft
# Common volume and area units
l = liter = liters = m**3 / 1000
dl = deciliter = deciliters = deci*l
cl = centiliter = centiliters = centi*l
ml = milliliter = milliliters = milli*l
# Common time units
ms = millisecond = milliseconds = milli*s
us = microsecond = microseconds = micro*s
ns = nanosecond = nanoseconds = nano*s
ps = picosecond = picoseconds = pico*s
minute = minutes = 60*s
h = hour = hours = 60*minute
day = days = 24*hour
anomalistic_year = anomalistic_years = Rational('365.259636')*day
sidereal_year = sidereal_years = Rational('31558149.540')*s
tropical_year = tropical_years = Rational('365.24219')*day
common_year = common_years = Rational('365')*day
julian_year = julian_years = Rational('365.25')*day
draconic_year = draconic_years = Rational('346.62')*day
gaussian_year = gaussian_years = Rational('365.2568983')*day
full_moon_cycle = full_moon_cycles = Rational('411.78443029')*day
year = years = tropical_year
# Common mass units
g = gram = grams = kilogram / kilo
mg = milligram = milligrams = milli * g
ug = microgram = micrograms = micro * g
#----------------------------------------------------------------------------
# Physical constants
#
c = speed_of_light = 299792458 * m/s
G = gravitational_constant = Rational('6.67408') * ten**-11 * m**3 / kg / s**2
u0 = magnetic_constant = 4*pi * ten**-7 * N/A**2
e0 = electric_constant = 1/(u0 * c**2)
Z0 = vacuum_impedance = u0 * c
planck = Rational('6.62607004') * ten**-34 * J*s
hbar = planck / (2*pi)
avogadro_number = Rational('6.022140857') * 10**23
avogadro = avogadro_constant = avogadro_number / mol
boltzmann = Rational('1.38064852') * ten**-23 * J / K
atomic_mass_constant = Rational('1.660539040') * ten**-27 * kg
gee = gees = Rational('9.80665') * m/s**2
atmosphere = atmospheres = atm = 101325 * pascal
kPa = kilo*Pa
bar = bars = 100*kPa
pound = pounds = 0.45359237 * kg * gee # pound-force; the mass factor 0.45359237 kg/lb is exact
psi = pound / inch ** 2
dHg0 = 13.5951 # approx value at 0 C
mmHg = dHg0 * 9.80665 * Pa
amu = amus = gram / avogadro / mol
mmu = mmus = gram / mol
quart = quarts = Rational(231, 4) * inch**3
eV = Rational('1.6021766208') * ten**-19 * J
# Other convenient units and magnitudes
ly = lightyear = lightyears = c*julian_year
au = astronomical_unit = astronomical_units = 149597870691*m
def find_unit(quantity):
"""
Return a list of matching units names.
if quantity is a string -- units containing the string `quantity`
if quantity is a unit -- units having matching base units
Examples
========
>>> from sympy.physics import units as u
>>> u.find_unit('charge')
['charge']
>>> u.find_unit(u.charge)
['C', 'charge', 'coulomb', 'coulombs']
>>> u.find_unit('volt')
['volt', 'volts', 'voltage']
>>> u.find_unit(u.inch**3)[:5]
['l', 'cl', 'dl', 'ml', 'liter']
"""
import sympy.physics.units as u
rv = []
if isinstance(quantity, str):
rv = [i for i in dir(u) if quantity in i]
else:
units = quantity.as_coeff_Mul()[1]
for i in dir(u):
try:
if units == getattr(u, i).as_coeff_Mul()[1]:
rv.append(str(i))
except Exception:
pass
return sorted(rv, key=len)
# Delete this so it doesn't pollute the namespace
del Rational, pi
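# Hedged usage sketch (not part of the original module): dividing two derived
# units defined above yields a dimensionless conversion factor, here
# pounds-force per square inch expressed in kilopascals (about 6.8948).
def _conversion_sketch():
    return psi / kPa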
|
olasitarska/django | refs/heads/master | tests/many_to_many/models.py | 38 | """
5. Many-to-many relationships
To define a many-to-many relationship, use ``ManyToManyField()``.
In this example, an ``Article`` can be published in multiple ``Publication``
objects, and a ``Publication`` has multiple ``Article`` objects.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Publication(models.Model):
title = models.CharField(max_length=30)
def __str__(self):
return self.title
class Meta:
ordering = ('title',)
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100)
# Assign a unicode string as name to make sure the intermediary model is
# correctly created. Refs #20207
publications = models.ManyToManyField(Publication, name='publications')
def __str__(self):
return self.headline
class Meta:
ordering = ('headline',)
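# Hedged usage sketch (not part of the original test models), showing how the
# relation defined above is typically exercised; kept as comments because ORM
# calls cannot run at import time:
#
#   p = Publication.objects.create(title='Science News')
#   a = Article.objects.create(headline='Django lets you build Web apps')
#   a.publications.add(p)
#   p.article_set.all()  # reverse accessor provided by ManyToManyField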
|
nberliner/SRVis | refs/heads/master | SRVis.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
SRVis Copyright (C) 2015 Niklas Berliner
"""
__version__ = 'SRVis version 0.1'
import sys
sys.path.insert(1,'lib')
# Import the core and GUI elements of Qt
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import os.path as osp
import matplotlib
import numpy as np
matplotlib.use('Qt4Agg')
matplotlib.rcParams['backend.qt4']='PyQt4'
# Import program specific classes and functions
from dataHandler import dataHandler
from imageClass import overlayWidget, dataWidget, imageHistogramWidget
from SRVisInterface import openDialog, PyMultiPageWidget
class SRVis(QMainWindow):
def __init__(self):
"""
SRVis is a program to visualise super-resolution microscopy data.
Its main purpose is to verify the correct localisation of single
molecule fitting routines. For this purpose localisations can be
overlayed with the original TIFF image to visually verify their
accuracy.
Currently rapidstorm data as well as xyt data is supported.
"""
# Initialize the object as a QWidget and
# set its title and minimum width
QMainWindow.__init__(self)
self.setWindowTitle('SRVis')
self.setMinimumWidth(400)
# Some intital data initialisation
self.data = None
self.home = osp.expanduser("~")
# Initialise some values
self._initValues()
self.histogramLayout = None
self.localisationPrecision = 20.0 # assume an initial value of 20nm
self.initialised = False
# Create the main layout
#
# ________________________
# | | |
# | left | plot layout |
# | pane | |
# | |_______________|
# | | button layout |
# ------------------------
#
#
# Create the QVBoxLayout that lays out the main part and the buttons
self.outerLayout = QHBoxLayout()
# Maybe not super nice: create a main widget and set the layout on it
self.mainWindow = QWidget(self)
self.mainWindow.setLayout(self.outerLayout)
# Create the layout of the main part and add it to the outer layout
self.layout = QHBoxLayout()
self.leftPane = QVBoxLayout()
# Add the left-right splitter
self.layoutWindow = QWidget()
self.leftPaneWindow = QWidget()
self.leftPaneWindow.setMaximumWidth(300) # restrict it to not grow too wide
self.layoutWindow.setLayout(self.layout)
self.leftPaneWindow.setLayout(self.leftPane)
self.splitterLeftRight = QSplitter(Qt.Horizontal)
self.splitterLeftRight.addWidget(self.leftPaneWindow)
self.splitterLeftRight.addWidget(self.layoutWindow)
self.outerLayout.addWidget(self.splitterLeftRight)
# Create the left pane holding input fields
self.form_layout = QFormLayout()
self.form_layout2 = QFormLayout()
self.frame = QSpinBox(self)
self.markerSize = QLineEdit(self)
self.HistBinSize = QLineEdit(self)
self.QTscaleMin = QLineEdit(self)
self.QTscaleMax = QLineEdit(self)
self.QTHistBlur = QCheckBox(self)
self.QTBlurSigma = QLineEdit(self)
self.scalebar = QLineEdit(self)
self.frame.setSingleStep(1)
self.frame.setValue(0)
self.frame.setRange(0, 100)
self.markerSize.setPlaceholderText("40")
self.HistBinSize.setPlaceholderText("1")
self.QTscaleMin.setPlaceholderText("Auto")
self.QTscaleMax.setPlaceholderText("Auto")
self.QTBlurSigma.setPlaceholderText("20")
self.scalebar.setPlaceholderText("None")
self.frame.valueChanged.connect(self.frameValueChange)
self.markerSize.returnPressed.connect(self.changeMarkerSize)
self.HistBinSize.returnPressed.connect(self.changeBinSize)
self.QTscaleMin.returnPressed.connect(self.changeQTscaleMin)
self.QTscaleMax.returnPressed.connect(self.changeQTscaleMax)
self.QTHistBlur.stateChanged.connect(self.changeQTBlur)
self.QTBlurSigma.returnPressed.connect(self.changedSigma)
self.scalebar.returnPressed.connect(self.setScalebar)
# Add them to the form layout with a label
self.form_layout.addRow('Frame:', self.frame)
self.form_layout.addRow('Marker size:', self.markerSize)
self.form_layout.addRow('Bin size (in px):', self.HistBinSize)
self.form_layout.addRow('2D histogram scale max.:', self.QTscaleMax)
self.form_layout.addRow('2D histogram scale min.:', self.QTscaleMin)
self.form_layout.addRow('Apply gaussian blur:', self.QTHistBlur)
self.form_layout.addRow('Gaussian blur sigma (in nm):', self.QTBlurSigma)
self.form_layout.addRow('Add scalebar (in nm):', self.scalebar)
self.reloadImageButton = QPushButton('&Update Image Histogram', self)
self.reloadImageButton.clicked.connect(self.updateImageHistogramData)
# Add the controls for the PSF limiting
self.pltSelector = QComboBox()
self.filterMin = QLineEdit(self)
self.filterMax = QLineEdit(self)
self.filterMin.returnPressed.connect(self.filterData)
self.filterMax.returnPressed.connect(self.filterData)
self.filterMin.setPlaceholderText("min")
self.filterMax.setPlaceholderText("max")
self.localisationCountTotal = QLabel('', self)
self.localisationCount = QLabel('', self)
self.filterMedian = QLabel('', self)
self.filterMean = QLabel('', self)
self.filterStd = QLabel('', self)
self.form_layout2.addRow('Nr. of initial localisations:', self.localisationCountTotal)
self.form_layout2.addRow('Nr. of selected localisations:', self.localisationCount)
self.form_layout2.addRow('Filter lower bound:', self.filterMin)
self.form_layout2.addRow('Filter upper bound:', self.filterMax)
self.form_layout2.addRow('Filtered data median:', self.filterMedian)
self.form_layout2.addRow('Filtered data mean:', self.filterMean)
self.form_layout2.addRow('Filtered data std:', self.filterStd)
self.leftPane.addLayout(self.form_layout)
self.leftPane.addWidget(self.reloadImageButton)
self.leftPane.addWidget(self.pltSelector)
self.leftPane.addLayout(self.form_layout2)
self.leftPane.addStretch(1)
self.layout.addLayout(self.leftPane)
# Create the plot window and the row of open/close buttons
self.rightPane = QVBoxLayout()
self.mainFrame = QVBoxLayout()
self.buttonLayout = QHBoxLayout()
self.buttonLayout.addStretch(1)
self.mainFrameWindow = QWidget()
self.buttonLayoutWindow = QWidget()
self.rightPane.addLayout(self.mainFrame)
self.rightPane.addLayout(self.buttonLayout)
self.layout.addLayout(self.rightPane)
# Create and add the label to show the close and open buttons
self.openButton = QPushButton('&Open', self)
self.openButtonCall = openDialog(self)
self.openButton.clicked.connect(self.openButtonCall.showWindow)
self.reloadButton = QPushButton('&Reload', self)
self.reloadButton.clicked.connect(self.reloadData)
self.saveButton = QPushButton('&Save', self)
self.saveButton.clicked.connect(self.saveLocalisation)
self.closeButton = QPushButton('&Close', self)
self.closeButton.clicked.connect(QCoreApplication.instance().quit)
self.buttonLayout.addWidget(self.openButton)
self.buttonLayout.addWidget(self.reloadButton)
self.buttonLayout.addWidget(self.saveButton)
self.buttonLayout.addWidget(self.closeButton)
# add the plot with overlay to the window
self.imageLayout = QHBoxLayout()
self.imageLayout.sizeHint()
self.imageOverlay = PyMultiPageWidget(parent=self)
self.imageLayout.addWidget(self.imageOverlay)
self.mainFrame.addLayout(self.imageLayout)
self.plotFrame = overlayWidget(self.data)
self.imageOverlay.addPage(self.plotFrame.canvas, '')
self.openButtonCall.sendHome.connect(self.setHome)
# Add the frame slider
self.frameSlider = QSlider(self)
self.frameSlider.setOrientation(Qt.Horizontal)
self.frameSlider.setMinimum(0)
self.frameSlider.setMaximum(100)
self.frameSlider.valueChanged.connect(self.frameValueChange)
self.mainFrame.addWidget(self.frameSlider)
# Add a statusbar message
self.statusBar()
# Set the outerLayout as the window's main layout
self.setCentralWidget(self.mainWindow)
def _initValues(self):
self.binSize = 1
self.scaleMin = None
self.scaleMax = None
self.blurHistogram = False
self.sigma = 1.0
self.dataTypes = list()
self.filterValues = dict()
def statusReady(self, msg=None):
if msg is None:
self.statusBar().showMessage('Status: Ready')
else:
self.statusBar().showMessage('Status: ' + msg + ' Done')
def statusBusy(self, msg='Busy..'):
self.statusBar().showMessage('Status: ' + msg)
def updateHistogramm(self):
self.statusBusy('Updating histograms..')
self.plotFrame.redraw()
self.plotHistogram.redraw()
self.statusReady('Updating histograms..')
return
def pltChange(self, plot):
self.updateHistogramm()
return
def frameValueChange(self, frame):
assert( isinstance(frame, int) )
# Set both input field to the new value
self.frameSlider.setValue( frame )
self.frame.setValue(frame)
# Update the image
try:
self.plotFrame.redraw(frame)
except AttributeError: # this happens if there is no raw image specified by the user
pass
except ValueError: # this happens because self.loc is not set to None; fixing
# that would create another problem (see imageClass.py)
pass
def changeMarkerSize(self):
try:
size = int(self.markerSize.text())
except ValueError: # nothing entered
return
self.plotFrame.markerSize = size
self.plotFrame.redraw()
return
def changeImageHistogram(self, scaleMin, scaleMax, binSize):
self.statusBusy('Updating image histogram..')
self.QTHistogram.plot(scaleMin, scaleMax, binSize)
self.QTHistogram.redraw()
self.statusReady('Updating image histogram..')
def changeBinSize(self):
try:
self.binSize = float(self.HistBinSize.text())
except ValueError: # nothing entered
return
self.statusBusy('Updating bin size..')
self.updateSigma()
self.changeImageHistogram(self.scaleMin, self.scaleMax, self.binSize)
self.statusReady('Updating bin size..')
def changeQTscaleMin(self):
if str(self.QTscaleMin.text()).lower() == 'auto':
self.scaleMin = None
else:
try:
self.scaleMin = float(self.QTscaleMin.text())
except ValueError: # nothing entered
return
self.statusBusy('Rescaling image histogram..')
self.changeImageHistogram(self.scaleMin, self.scaleMax, self.binSize)
self.statusReady('Rescaling image histogram..')
def changeQTscaleMax(self):
if str(self.QTscaleMax.text()).lower() == 'auto':
self.scaleMax = None
else:
try:
self.scaleMax = float(self.QTscaleMax.text())
except ValueError: # nothing entered
return
self.statusBusy('Rescaling image histogram..')
self.changeImageHistogram(self.scaleMin, self.scaleMax, self.binSize)
self.statusReady('Rescaling image histogram..')
def changeQTBlur(self):
self.blurHistogram = self.QTHistBlur.isChecked()
if self.initialised: # only try to plot once initialized
self.statusBusy('Blurring image histogram..')
# Set the gaussian blur
self.QTHistogram.setGaussianBlur(self.blurHistogram, self.sigma)
# Update the histogram
self.changeImageHistogram(self.scaleMin, self.scaleMax, self.binSize)
self.statusReady('Blurring image histogram..')
def updateImageHistogramData(self):
self.statusBusy('Updating image data..')
d = np.asarray(self.data.data.localisations()[['x','y']])
self.QTHistogram.setData(d)
self.changeImageHistogram(self.scaleMin, self.scaleMax, self.binSize)
self.statusReady('Updating image data..')
@pyqtSlot(str)
def setHome(self, home):
self.home = home
return
def reloadData(self):
if self.data is not None:
self.statusBusy('Reloading localisation data..')
self.data.reloadData('localisations') # Update the localisation data
# Update the localisation count
self.localisationCountTotal.setText( str(len(self.data.data.localisations())) )
# Update the histograms
self.updateHistograms()
self.plotFrame.redraw()
# Update the image histogram
self.updateImageHistogramData()
self.statusReady(None)
return
def filterData(self):
if self.histogramLayout.getCurrentIndex() == 0 and self.fileNameImage is not None: # page 0 is the 2D image histogram when a TIFF is loaded; nothing to filter
return # do nothing
self.statusBusy('Filtering data..')
# get the min reading
if str(self.filterMin.text()).lower() == 'min':
currentMin = -1.0 * np.inf
else:
try:
currentMin = float(self.filterMin.text())
except ValueError: # nothing entered
currentMin = -1.0*np.inf
# get the max reading
if str(self.filterMax.text()).lower() == 'max':
currentMax = +1.0 * np.inf
else:
try:
currentMax = float(self.filterMax.text())
except ValueError: # nothing entered
currentMax = +1.0*np.inf
# get which dataType should be filtered
dataType, histogram = self.getCurrentHistogram()
self.filterValues[dataType] = (currentMin, currentMax)
self.data.filterData(self.filterValues)
self.updateHistograms()
try:
self.plotFrame.redraw()
except: # errors can happen if there is no raw image specified, the image is not yet initialised etc.
pass
self.statusReady('Filtering data..')
def getHistogramIndex(self):
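# When a TIFF image is loaded, page 0 of histogramLayout is the 2D image
# histogram, so the 1D data histograms start at index 1; shift idx so it can
# be used to index self.dataTypes.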
idx = self.histogramLayout.getCurrentIndex()
idxs = range(self.histogramLayout.count()) # the first one is the QT histogram
if self.fileNameImage is not None:
idx -= 1 # Minus one for the image histogram
idxs = range(1,self.histogramLayout.count())
return idx, idxs
def getCurrentHistogram(self):
idx, _ = self.getHistogramIndex()
dataType = self.dataTypes[idx]
histogram = self.histogramLayout.getPage()
return dataType, histogram
def updateHistograms(self):
self.statusBusy('Updating histograms..')
self.localisationCount.setText( str(len(self.data.data.localisations(dataFilter=True))) )
_, idxs = self.getHistogramIndex() # get the correct indexes
# Update the histograms
for idx in idxs:
dataType = self.dataTypes[idx-idxs[0]] # start at zero
dataUnfiltered = self.data.data.localisations(dataFilter=False)[dataType]
dataFiltered = self.data.data.localisations(dataFilter=True )[dataType]
histogram = self.histogramLayout.widget(idx)
histogram.setData(dataUnfiltered, dataFiltered)
histogram.plotHistogram()
histogram.redraw()
self.statusReady('Updating histograms..')
return
def changedHistogram(self, idx):
dataType, histogram = self.getCurrentHistogram()
dataFiltered = self.data.data.localisations(dataFilter=True )[dataType]
self.filterMedian.setText( "%.2f" %np.median(dataFiltered) )
self.filterMean.setText( "%.2f" %np.mean(dataFiltered) )
self.filterStd.setText( "%.2f" %np.std(dataFiltered) )
minValue, maxValue = self.filterValues[dataType]
if minValue == -np.inf:
self.filterMin.setText("")
else:
self.filterMin.setText( str(minValue) )
if maxValue == np.inf:
self.filterMax.setText("")
else:
self.filterMax.setText( str(maxValue) )
def updateSigma(self):
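# Convert the localisation precision from nm into histogram-bin units:
# pxSize is presumably nm per camera pixel and binSize is camera pixels per
# histogram bin, so pxSize * binSize gives nm per bin.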
self.sigma = self.localisationPrecision / (self.pxSize * self.binSize)
self.QTBlurSigma.setText( "%.2f" %self.localisationPrecision ) # update the user interface
def changedSigma(self):
self.sigma = float(self.QTBlurSigma.text()) / (self.pxSize * self.binSize)
def setScalebar(self):
try:
scalebarLength = self.scalebar.text()
if str(scalebarLength) == "": # user reset to None
scalebarLength = None
else:
scalebarLength = float(scalebarLength)
except:
print 'Scalebar input not understood'
self.scalebar.setText("")
scalebarLength = None
if self.initialised: # if not the image doesn't exist yet
self.QTHistogram.setScalebarLength(scalebarLength)
self.QTHistogram.updateScaleBar(None)
def clearAll(self):
# Clear everything to create a fresh view
# Clear the input fields
self.HistBinSize.clear()
self.QTscaleMin.clear()
self.QTscaleMax.clear()
self.QTHistBlur.setCheckState(Qt.Unchecked)
self.QTBlurSigma.clear()
self.scalebar.clear()
# Clear the TIFF image and remove the image histogram
try:
self.plotFrame.reset()
if self.imageOverlay.count() == 2: # only remove image histogram widget
self.imageOverlay.removePage(1)
self.imageOverlay.setCurrentIndex(0)
except:
raise
# Remove the histogram plots
self.initialised = self.initaliseShowData()
self.pltSelector.clear()
# Reset the filter values
self.filterValues = dict()
# Clear the input fields
self.filterMin.clear()
self.filterMax.clear()
# Reset the frame number
self.frame.setValue(0)
self.frameSlider.setValue(0)
# Reset some initial values
self._initValues()
def initaliseShowData(self):
if self.histogramLayout is not None:
self.imageLayout.removeWidget(self.histogramLayout)
self.histogramLayout.setParent(None)
self.histogramLayout = PyMultiPageWidget(self.pltSelector)
self.histogramLayout.currentIndexChanged.connect(self.changedHistogram)
self.imageLayout.addWidget(self.histogramLayout)
return True
def showData(self, fileNameImage, fnameLocalisations, fnameLocalisationsType, pxSize, CpPh):
# Clear the previous data
self.clearAll()
## use for testing
# baseDirectory = './example/'
# fileNameImage = baseDirectory + 'SRVis_imageData.tif'
# fnameLocalisations = baseDirectory + 'SRVis_imageData.txt'
self.fileNameImage = fileNameImage
self.fnameLocalisations = fnameLocalisations
self.fnameLocalisationsType = fnameLocalisationsType
self.pxSize = pxSize
if self.fileNameImage == '':
self.fileNameImage = None
self.data = dataHandler(fileNameImage, fnameLocalisations, fnameLocalisationsType, pxSize, CpPh)
self.frame.setRange(0, self.data.maxImageFrame()-1)
self.frameSlider.setMaximum(self.data.maxImageFrame()-1)
self.localisationCountTotal.setText( str(len(self.data.data.localisations())) )
## Generate the image histogram
# Find the optimal blurring based on the localisation precision (from rapidstorm)
if 'Uncertainty x' in self.data.data.localisations().columns:
mean = self.data.data.localisations()['Uncertainty x'].mean()
std = self.data.data.localisations()['Uncertainty x'].std()
self.localisationPrecision = mean + 2.0*std
self.updateSigma()
# Get the data and plot the image histogram
d = np.asarray(self.data.data.localisations()[['x','y']])
self.QTHistogram = imageHistogramWidget(d, title='2D Histogram', parent=self)
self.QTHistogram.setGaussianBlur(self.blurHistogram, self.sigma) # Update in case the checkbox has been toggled
self.QTHistogram.plot()
if self.fileNameImage is None: # no TIFF image available, show the histogram instead
self.imageOverlay.addPage(self.QTHistogram, '')
self.imageOverlay.setCurrentIndex(1)
else:
# if not self.initialised:
# self.initialised = self.initaliseShowData()
self.plotFrame.data = self.data
self.plotFrame.initialise()
self.histogramLayout.addPage(self.QTHistogram, '2D Histogram Visualisation')
# ## Add the 1D histograms to the data
# # Add the number of localisations per frame
# if not self.initialised:
# self.initialised = self.initaliseShowData()
# Add the optional histograms
locData = self.data.data.localisations()
for dataType in locData.columns:
if dataType in ['x','y']:
continue
self.dataTypes.append(dataType)
# Add the filter information
self.filterValues[dataType] = (-np.inf, np.inf)
# Set the title
if dataType == 'frame':
title = '\n\nAvg. nr. of loc. per frame'
else:
title = '\n\n' + dataType
## Add the histogram to the pltSelector instance
# The nr. of loc. per frame data should be normalised according to the bin size
if dataType == 'frame':
normalise = np.max(locData[dataType]) / 50.0 # Currently the number of bins is fixed to 50.
# This needs to be changed if the number of bins is changed
else:
normalise = 1.0 # Don't normalise
currentPlotHistogram = dataWidget(locData[dataType] , title=title, parent=self.pltSelector, normalise=normalise )
currentPlotHistogram.plotHistogram()
self.histogramLayout.addPage(currentPlotHistogram, title.strip())
self.resize(1500,700)
self.initialised = True # We're done and set up
return
def saveLocalisation(self):
# Ask the user where to save the data
path = str(QFileDialog.getSaveFileName(self, 'Save localisations to', self.home))
self.statusBusy('Saving data to: ' + path + ' ..') # update the status bar
if path[-4:] not in ('.dat', '.txt'): # check the file extension
path += '.dat'
self.data.saveLocalisations(path, self.pxSize)
self.statusReady('Saving data')
def run(self):
# Show the form
self.show()
# Run the qt application
qt_app.exec_()
if __name__ == '__main__':
qt_app = QApplication(sys.argv)
# Create an instance of the application window and run it
app = SRVis()
# app.run()
app.show()
sys.exit(qt_app.exec_())
|
belleandindygames/league | refs/heads/master | config/wsgi.py | 1 | """
WSGI config for cookiecutter project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# cookiecutter directory.
app_path = os.path.dirname(os.path.abspath(__file__)).replace('/config', '')
sys.path.append(os.path.join(app_path, 'league'))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.base")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
frankinit/ThinkStats2 | refs/heads/master | code/chap14soln.py | 68 | """This file contains code for use with "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import numpy as np
import random
import first
import normal
import thinkstats2
import thinkplot
def PlotPregLengths(live, firsts, others):
"""Plots sampling distributions under the null and alternate hypotheses.
live, firsts, others: DataFrames
Results:
null hypothesis N(0, 0.00319708)
0.0837707042554 0.0837707042554 (90% CI)
estimated params N(0.0780373, 0.00321144)
-0.0151758158699 0.171250349425 (90% CI)
Sampling distribution under the null hypothesis is centered
around 0.
Sampling distribution under the alternate hypothesis is centered
around the observed difference, 0.078.
The variance of the two distributions is very similar; in practice,
you could reasonably compute whichever one is easier.
"""
print('prglngth example')
delta = firsts.prglngth.mean() - others.prglngth.mean()
print(delta)
dist1 = normal.SamplingDistMean(live.prglngth, len(firsts))
dist2 = normal.SamplingDistMean(live.prglngth, len(others))
dist = dist1 - dist2
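# Hedged note (not in the original): assuming SamplingDistMean returns a
# Normal with variance sigma**2/n, the difference dist1 - dist2 is Normal
# with mean mu1 - mu2 and variance sigma1**2/n1 + sigma2**2/n2.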
print('null hypothesis', dist)
print(dist.Prob(-delta), 1 - dist.Prob(delta))
thinkplot.PrePlot(2)
thinkplot.Plot(dist, label='null hypothesis')
dist1 = normal.SamplingDistMean(firsts.prglngth, len(firsts))
dist2 = normal.SamplingDistMean(others.prglngth, len(others))
dist = dist1 - dist2
print('estimated params', dist)
print(dist.Percentile(5), dist.Percentile(95))
thinkplot.Plot(dist, label='estimated params')
thinkplot.Show(xlabel='difference in means (weeks)',
ylabel='CDF')
def GenerateAdultWeight(birth_weights, n):
"""Generate a random adult weight by simulating annual gain.
birth_weights: sequence of birth weights in lbs
n: number of years to simulate
returns: adult weight in lbs
"""
bw = random.choice(birth_weights)
factors = np.random.normal(1.09, 0.03, n)
aw = bw * np.prod(factors)
return aw
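def CheckLognormalSketch(n=40, iters=1000):
    """Hedged sketch (not part of the original solution): the product of
    positive i.i.d. factors is approximately lognormal, because the log of
    a product is a sum of logs, which the CLT drives toward normal.

    Returns the mean and std of log10 of the simulated weight multipliers.
    """
    factors = np.random.normal(1.09, 0.03, (iters, n))
    log_products = np.log10(factors.prod(axis=1))
    return log_products.mean(), log_products.std()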
def PlotAdultWeights(live):
"""Makes a normal probability plot of log10 adult weight.
live: DataFrame of live births
results:
With n=40 the distribution is approximately lognormal except for
the lowest weights.
Actual distribution might deviate from lognormal because it is
a mixture of people at different ages, or because annual weight
gains are correlated.
"""
birth_weights = live.totalwgt_lb.dropna().values
aws = [GenerateAdultWeight(birth_weights, 40) for _ in range(1000)]
log_aws = np.log10(aws)
thinkstats2.NormalProbabilityPlot(log_aws)
thinkplot.Show(xlabel='standard normal values',
ylabel='adult weight (log10 lbs)')
def TestIntervention():
"""Tests whether reported changes are statistically significant.
Results:
-1.66 4.73095323208e-05
-0.26 0.125267987207
1.4 0.00182694836898
Conclusions:
1) Gender gap before intervention was 1.66 points (p-value 5e-5)
2) Gender gap after was 0.26 points (p-value 0.13, not significant)
3) Change in gender gap was 1.4 points (p-value 0.002, significant).
"""
male_before = normal.Normal(3.57, 0.28**2)
male_after = normal.Normal(3.44, 0.16**2)
female_before = normal.Normal(1.91, 0.32**2)
female_after = normal.Normal(3.18, 0.16**2)
diff_before = female_before - male_before
print('mean, p-value', diff_before.mu, 1-diff_before.Prob(0))
print('CI', diff_before.Percentile(5), diff_before.Percentile(95))
print('stderr', diff_before.sigma)
diff_after = female_after - male_after
print('mean, p-value', diff_after.mu, 1-diff_after.Prob(0))
print('CI', diff_after.Percentile(5), diff_after.Percentile(95))
print('stderr', diff_after.sigma)
diff = diff_after - diff_before
print('mean, p-value', diff.mu, diff.Prob(0))
print('CI', diff.Percentile(5), diff.Percentile(95))
print('stderr', diff.sigma)
def main():
thinkstats2.RandomSeed(17)
TestIntervention()
return  # early return: the plotting calls below are skipped; remove to run them
live, firsts, others = first.MakeFrames()
PlotAdultWeights(live)
PlotPregLengths(live, firsts, others)
if __name__ == '__main__':
main()
|
okuta/chainer | refs/heads/master | tests/chainerx_tests/math_utils.py | 3 | import unittest
import numpy
import chainerx
from chainerx_tests import array_utils
from chainerx_tests import dtype_utils
class IgnoreNumpyFloatingPointError(object):
def __enter__(self):
self.old_settings = numpy.seterr(all='ignore')
def __exit__(self, *args):
numpy.seterr(**self.old_settings)
class UnaryMathTestBase(object):
input = None
def setup(self):
in_dtype, = self.in_dtypes
in_kind = numpy.dtype(in_dtype).kind
if in_kind != 'f':
self.skip_backward_test = True
self.skip_double_backward_test = True
if in_dtype == 'float16':
self.check_forward_options.update({'rtol': 1e-3, 'atol': 1e-3})
self.check_backward_options.update({'rtol': 3e-3, 'atol': 3e-3})
self.check_double_backward_options.update(
{'rtol': 1e-2, 'atol': 1e-2})
input = self.input
if (in_kind == 'u'
and isinstance(input, (int, float))
and input < 0):
raise unittest.SkipTest(
'Combination of uint dtype and negative input cannot be '
'tested')
def generate_inputs(self):
in_dtype, = self.in_dtypes
if isinstance(self.input, numpy.ndarray):
return self.input.astype(in_dtype),
if self.input == 'random':
return array_utils.uniform(self.shape, in_dtype),
if isinstance(self.input, (bool, int, float)):
return numpy.full(self.shape, self.input, dtype=in_dtype),
assert False
def forward_xp(self, inputs, xp):
a, = inputs
# This cast was introduced in order to avoid decreasing precision.
# ex.) numpy.sqrt(x) becomes a float16 array where x is an int8 array.
a = dtype_utils.cast_if_numpy_array(xp, a, self.out_dtype)
with IgnoreNumpyFloatingPointError():
y = self.func(xp, a)
y = dtype_utils.cast_if_numpy_array(xp, y, self.out_dtype)
return y,
class BinaryMathTestBase(object):
def setup(self):
in_dtype1, in_dtype2 = self.in_dtypes
kind1 = numpy.dtype(in_dtype1).kind
kind2 = numpy.dtype(in_dtype2).kind
if kind1 != 'f' or kind2 != 'f':
self.skip_backward_test = True
self.skip_double_backward_test = True
if in_dtype1 == 'float16' or in_dtype2 == 'float16':
self.check_forward_options.update({'rtol': 1e-3, 'atol': 1e-3})
self.check_backward_options.update({'rtol': 1e-3, 'atol': 1e-3})
self.check_double_backward_options.update(
{'rtol': 1e-3, 'atol': 1e-3})
def generate_inputs(self):
in_dtype1, in_dtype2 = self.in_dtypes
in_shape1, in_shape2 = self.in_shapes
if self.input_lhs == 'random':
a = array_utils.uniform(in_shape1, in_dtype1)
elif isinstance(self.input_lhs, (bool, int, float)):
a = numpy.full(in_shape1, self.input_lhs, dtype=in_dtype1)
else:
assert False
if self.input_rhs == 'random':
b = array_utils.uniform(in_shape2, in_dtype2)
elif isinstance(self.input_rhs, (bool, int, float)):
b = numpy.full(in_shape2, self.input_rhs, dtype=in_dtype2)
else:
assert False
return a, b
def forward_xp(self, inputs, xp):
a, b = inputs
# This cast was introduced in order to avoid decreasing precision.
# ex.) x / y becomes a float16 array where x and y are an int8 arrays.
a = dtype_utils.cast_if_numpy_array(xp, a, self.out_dtype)
b = dtype_utils.cast_if_numpy_array(xp, b, self.out_dtype)
with IgnoreNumpyFloatingPointError():
y = self.func(xp, a, b)
y = dtype_utils.cast_if_numpy_array(xp, y, self.out_dtype)
return y,
class InplaceUnaryMathTestBase(UnaryMathTestBase):
skip_backward_test = True
skip_double_backward_test = True
def forward_xp(self, inputs, xp):
a, = inputs
if xp is chainerx:
a_ = a.as_grad_stopped().copy()
else:
a_ = a.copy()
with IgnoreNumpyFloatingPointError():
ret = self.func(xp, a_)
assert ret is None # func should not return anything
return a_,
class InplaceBinaryMathTestBase(BinaryMathTestBase):
skip_backward_test = True
skip_double_backward_test = True
def forward_xp(self, inputs, xp):
a, b = inputs
b = dtype_utils.cast_if_numpy_array(xp, b, a.dtype)
if xp is chainerx:
a_ = a.as_grad_stopped().copy()
b_ = b.as_grad_stopped()
else:
a_ = a.copy()
b_ = b
with IgnoreNumpyFloatingPointError():
ret = self.func(xp, a_, b_)
assert ret is None # func should not return anything
return a_,
def _convert_numpy_scalar(scalar, dtype):
# Implicit casting in NumPy's multiply depends on the 'casting' argument,
# which is not yet supported (ChainerX always casts).
# Therefore, we explicitly cast the scalar to the dtype of the ndarray
# before the multiplication for NumPy.
return numpy.dtype(dtype).type(scalar)
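def _convert_numpy_scalar_sketch():
    # Hedged sketch (not part of the original tests): casting the scalar to
    # the array's dtype first keeps the result dtype stable under NumPy's
    # promotion rules.
    a = numpy.zeros((2,), dtype='uint8')
    s = _convert_numpy_scalar(3, 'uint8')  # numpy.uint8(3)
    assert (a + s).dtype == numpy.dtype('uint8')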
class MathScalarTestBase(UnaryMathTestBase):
def func(self, xp, a):
scalar = self.scalar_type(self.scalar_value)
return self.func_scalar(xp, a, scalar)
class InplaceMathScalarTestBase(InplaceUnaryMathTestBase):
def func(self, xp, a):
scalar = self.scalar_type(self.scalar_value)
if xp is numpy:
# This cast is to avoid TypeError in the following case
# a: uint8 0-dim numpy.ndarray
# scalar: int
in_dtype, = self.in_dtypes
scalar = _convert_numpy_scalar(scalar, in_dtype)
return self.func_scalar(xp, a, scalar)
def _permutate_shapes(shapes_list):
# Adds the swapped ordering of each input shape pair
permutated_shapes_list = []
for in_shape1, in_shape2 in shapes_list:
permutated_shapes_list.append((in_shape1, in_shape2))
permutated_shapes_list.append((in_shape2, in_shape1))
return list(set(permutated_shapes_list))
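# For example, _permutate_shapes([((3, 1), (1, 4))]) yields both
# ((3, 1), (1, 4)) and ((1, 4), (3, 1)); the set() call deduplicates
# symmetric pairs such as ((1,), (1,)).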
shapes_combination_inplace_binary = [
# Same shapes
((1,), (1,)),
((3, 4), (3, 4)),
# Broadcast
((10,), (1,)),
((3, 4), (3, 1)),
((3, 4), (1, 4)),
((3, 4), (4,)),
((3, 4), (1, 1)),
((3, 4), (1,)),
((2, 3, 4), (1, 1, 1)),
# 0-dim shape
((), ()),
((1,), ()),
((3,), ()),
((2, 3), ()),
# 0-size shape
((0,), (0,)),
((0,), (1,)),
((0,), ()),
((2, 0, 3), (2, 0, 3)),
# TODO(imanishi): Fix strides
# ((2, 0, 3), (0, 1)),
]
shapes_combination_binary = _permutate_shapes([
# Broadcast
((3, 1), (1, 4)),
((2, 1, 4), (3, 1)),
# 0-size shape
# TODO(imanishi): Fix strides
# ((0, 1), (0, 1, 0)),
]) + _permutate_shapes(shapes_combination_inplace_binary)
# An association list that associates a dtype to the type which ChainerX's
# real-valued functions should return.
in_out_float_dtypes_math_functions = [
# Float.
(('float16',), 'float16'),
(('float32',), 'float32'),
(('float64',), 'float64'),
]
in_out_dtypes_math_functions = in_out_float_dtypes_math_functions + [
# Signed int.
(('int8',), 'float32'),
(('int16',), 'float32'),
(('int32',), 'float32'),
(('int64',), 'float32'),
# Unsigned int.
(('uint8',), 'float32'),
# Bool.
(('bool_',), 'float32'),
]
in_out_dtypes_math_binary_functions = dtype_utils._permutate_dtype_mapping([
# integer mixed
(('int8', 'int16'), 'float32'),
(('int8', 'int32'), 'float32'),
(('int8', 'int64'), 'float32'),
(('int8', 'uint8'), 'float32'),
(('int16', 'int32'), 'float32'),
(('int16', 'int64'), 'float32'),
(('int16', 'uint8'), 'float32'),
(('int32', 'int64'), 'float32'),
(('int32', 'uint8'), 'float32'),
(('int64', 'uint8'), 'float32'),
# integer float mixed
(('int8', 'float16'), 'float16'),
(('int8', 'float32'), 'float32'),
(('int8', 'float64'), 'float64'),
(('int16', 'float16'), 'float16'),
(('int16', 'float32'), 'float32'),
(('int16', 'float64'), 'float64'),
(('int32', 'float16'), 'float16'),
(('int32', 'float32'), 'float32'),
(('int32', 'float64'), 'float64'),
(('int64', 'float16'), 'float16'),
(('int64', 'float32'), 'float32'),
(('int64', 'float64'), 'float64'),
(('uint8', 'float16'), 'float16'),
(('uint8', 'float32'), 'float32'),
(('uint8', 'float64'), 'float64'),
# float mixed
(('float16', 'float32'), 'float32'),
(('float16', 'float64'), 'float64'),
(('float32', 'float64'), 'float64'),
])
|
praekelt/molo-ndoh-yep | refs/heads/develop | surveys/models.py | 1 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailsearch import index
class Survey(Page):
survey_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
page = models.ForeignKey(
'wagtailcore.Page',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+',
help_text=_("Choose Page")
)
def __str__(self):
return self.survey_text
search_fields = Page.search_fields + (
index.SearchField('pub_date'),
index.SearchField('survey_text'),
)
content_panels = Page.content_panels + [
FieldPanel('page'),
FieldPanel('pub_date'),
FieldPanel('survey_text'),
]
class Answer(Page):
survey = models.ForeignKey(
Survey,
null=True,
blank=True,
on_delete=models.SET_NULL
)
answer_text = models.CharField(max_length=200)
def __str__(self):
return self.answer_text
search_fields = Page.search_fields + (
index.SearchField('answer_text'),
)
content_panels = Page.content_panels + [
FieldPanel('survey'),
FieldPanel('answer_text'),
]
|
NathanW2/QGIS | refs/heads/master | python/plugins/db_manager/db_plugins/oracle/plugin.py | 5 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS (Oracle)
Date : Aug 27, 2014
copyright : (C) 2014 by Médéric RIBREUX
email : mederic.ribreux@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias <wonder.sk@gmail.com> (GPLv2 license)
- DB Manager by Giuseppe Sucameli <brush.tyler@gmail.com> (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from builtins import range
# this will disable the dbplugin if the connector raise an ImportError
from .connector import OracleDBConnector
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtGui import QIcon, QKeySequence
from qgis.PyQt.QtWidgets import QAction, QApplication, QMessageBox
from qgis.core import QgsVectorLayer, NULL, QgsSettings
from ..plugin import ConnectionError, InvalidDataException, DBPlugin, \
Database, Schema, Table, VectorTable, TableField, TableConstraint, \
TableIndex, TableTrigger
from qgis.core import QgsCredentials
from . import resources_rc # NOQA
def classFactory():
return OracleDBPlugin
class OracleDBPlugin(DBPlugin):
@classmethod
def icon(self):
return QIcon(":/db_manager/oracle/icon")
@classmethod
def typeName(self):
return 'oracle'
@classmethod
def typeNameString(self):
return 'Oracle Spatial'
@classmethod
def providerName(self):
return 'oracle'
@classmethod
def connectionSettingsKey(self):
return '/Oracle/connections'
def connectToUri(self, uri):
self.db = self.databasesFactory(self, uri)
if self.db:
return True
return False
def databasesFactory(self, connection, uri):
return ORDatabase(connection, uri)
def connect(self, parent=None):
conn_name = self.connectionName()
settings = QgsSettings()
settings.beginGroup(u"/{0}/{1}".format(
self.connectionSettingsKey(), conn_name))
if not settings.contains("database"): # non-existent entry?
raise InvalidDataException(
self.tr('There is no defined database connection "{0}".'.format(
conn_name)))
from qgis.core import QgsDataSourceUri
uri = QgsDataSourceUri()
settingsList = ["host", "port", "database", "username", "password"]
host, port, database, username, password = [settings.value(x, "", type=str) for x in settingsList]
# get all of the connection options
useEstimatedMetadata = settings.value(
"estimatedMetadata", False, type=bool)
uri.setParam('userTablesOnly', str(
settings.value("userTablesOnly", False, type=bool)))
uri.setParam('geometryColumnsOnly', str(
settings.value("geometryColumnsOnly", False, type=bool)))
uri.setParam('allowGeometrylessTables', str(
settings.value("allowGeometrylessTables", False, type=bool)))
uri.setParam('onlyExistingTypes', str(
settings.value("onlyExistingTypes", False, type=bool)))
uri.setParam('includeGeoAttributes', str(
settings.value("includeGeoAttributes", False, type=bool)))
settings.endGroup()
uri.setConnection(host, port, database, username, password)
uri.setUseEstimatedMetadata(useEstimatedMetadata)
err = u""
try:
return self.connectToUri(uri)
except ConnectionError as e:
err = str(e)
# ask for valid credentials
max_attempts = 3
for i in range(max_attempts):
(ok, username, password) = QgsCredentials.instance().get(
uri.connectionInfo(False), username, password, err)
if not ok:
return False
uri.setConnection(host, port, database, username, password)
try:
self.connectToUri(uri)
except ConnectionError as e:
if i == max_attempts - 1: # failed the last attempt
raise e
err = str(e)
continue
QgsCredentials.instance().put(
uri.connectionInfo(False), username, password)
return True
return False
class ORDatabase(Database):
def __init__(self, connection, uri):
self.connName = connection.connectionName()
Database.__init__(self, connection, uri)
def connectorsFactory(self, uri):
return OracleDBConnector(uri, self.connName)
def dataTablesFactory(self, row, db, schema=None):
return ORTable(row, db, schema)
def vectorTablesFactory(self, row, db, schema=None):
return ORVectorTable(row, db, schema)
def info(self):
from .info_model import ORDatabaseInfo
return ORDatabaseInfo(self)
def schemasFactory(self, row, db):
return ORSchema(row, db)
def columnUniqueValuesModel(self, col, table, limit=10):
l = u""
if limit:
l = u"WHERE ROWNUM < {:d}".format(limit)
con = self.database().connector
# Prevent geometry column show
tableName = table.replace(u'"', u"").split(u".")
if len(tableName) == 0:
tableName = [None, tableName[0]]
colName = col.replace(u'"', u"").split(u".")[-1]
if con.isGeometryColumn(tableName, colName):
return None
query = u"SELECT DISTINCT {} FROM {} {}".format(col, table, l)
return self.sqlResultModel(query, self)
def sqlResultModel(self, sql, parent):
from .data_model import ORSqlResultModel
return ORSqlResultModel(self, sql, parent)
def toSqlLayer(self, sql, geomCol, uniqueCol,
layerName=u"QueryLayer", layerType=None,
avoidSelectById=False, filter=""):
uri = self.uri()
con = self.database().connector
uri.setDataSource(u"", u"({})".format(sql), geomCol, filter, uniqueCol.strip(u'"'))
if avoidSelectById:
uri.disableSelectAtId(True)
provider = self.dbplugin().providerName()
vlayer = QgsVectorLayer(uri.uri(False), layerName, provider)
# handling undetermined geometry type
if not vlayer.isValid():
wkbType, srid = con.getTableMainGeomType(
u"({})".format(sql), geomCol)
uri.setWkbType(wkbType)
if srid:
uri.setSrid(str(srid))
vlayer = QgsVectorLayer(uri.uri(False), layerName, provider)
return vlayer
def registerDatabaseActions(self, mainWindow):
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Re-connect"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Database"), self.reconnectActionSlot)
if self.schemas():
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Create Schema…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Schema"), self.createSchemaActionSlot)
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Delete (Empty) Schema…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Schema"), self.deleteSchemaActionSlot)
action = QAction(QApplication.translate(
"DBManagerPlugin", "Delete Selected Item"), self)
mainWindow.registerAction(action, None, self.deleteActionSlot)
action.setShortcuts(QKeySequence.Delete)
action = QAction(QIcon(":/db_manager/actions/create_table"),
QApplication.translate(
"DBManagerPlugin", "&Create Table…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Table"), self.createTableActionSlot)
action = QAction(QIcon(":/db_manager/actions/edit_table"),
QApplication.translate(
"DBManagerPlugin", "&Edit Table…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Table"), self.editTableActionSlot)
action = QAction(QIcon(":/db_manager/actions/del_table"),
QApplication.translate(
"DBManagerPlugin", "&Delete Table/View…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Table"), self.deleteTableActionSlot)
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Empty Table…"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Table"), self.emptyTableActionSlot)
class ORSchema(Schema):
def __init__(self, row, db):
Schema.__init__(self, db)
# self.oid, self.name, self.owner, self.perms, self.comment = row
self.name = row[0]
class ORTable(Table):
def __init__(self, row, db, schema=None):
Table.__init__(self, db, schema)
self.name, self.owner, isView = row
self.estimatedRowCount = None
self.objectType = None
self.isView = False
self.isMaterializedView = False
if isView == 1:
self.isView = True
self.creationDate = None
self.modificationDate = None
def getDates(self):
"""Grab the creation/modification dates of the table"""
self.creationDate, self.modificationDate = (
self.database().connector.getTableDates((self.schemaName(),
self.name)))
def refreshRowEstimation(self):
"""Use ALL_ALL_TABLE to get an estimation of rows"""
if self.isView:
self.estimatedRowCount = 0
self.estimatedRowCount = (
self.database().connector.getTableRowEstimation(
(self.schemaName(), self.name)))
def getType(self):
"""Grab the type of object for the table"""
self.objectType = self.database().connector.getTableType(
(self.schemaName(), self.name))
def getComment(self):
"""Grab the general comment of the table/view"""
self.comment = self.database().connector.getTableComment(
(self.schemaName(), self.name), self.objectType)
def getDefinition(self):
return self.database().connector.getDefinition(
(self.schemaName(), self.name), self.objectType)
def getMViewInfo(self):
if self.objectType == u"MATERIALIZED VIEW":
return self.database().connector.getMViewInfo(
(self.schemaName(), self.name))
else:
return None
def runAction(self, action):
action = str(action)
if action.startswith("rows/"):
if action == "rows/recount":
self.refreshRowCount()
return True
elif action.startswith("index/"):
parts = action.split('/')
index_name = parts[1]
index_action = parts[2]
msg = QApplication.translate(
"DBManagerPlugin",
"Do you want to {} index {}?".format(
index_action, index_name))
QApplication.restoreOverrideCursor()
try:
if QMessageBox.question(
None,
QApplication.translate(
"DBManagerPlugin", "Table Index"),
msg,
QMessageBox.Yes | QMessageBox.No) == QMessageBox.No:
return False
finally:
QApplication.setOverrideCursor(Qt.WaitCursor)
if index_action == "rebuild":
self.aboutToChange.emit()
self.database().connector.rebuildTableIndex(
(self.schemaName(), self.name), index_name)
self.refreshIndexes()
return True
elif action.startswith(u"mview/"):
if action == "mview/refresh":
self.aboutToChange.emit()
self.database().connector.refreshMView(
(self.schemaName(), self.name))
return True
return Table.runAction(self, action)
def tableFieldsFactory(self, row, table):
return ORTableField(row, table)
def tableConstraintsFactory(self, row, table):
return ORTableConstraint(row, table)
def tableIndexesFactory(self, row, table):
return ORTableIndex(row, table)
def tableTriggersFactory(self, row, table):
return ORTableTrigger(row, table)
def info(self):
from .info_model import ORTableInfo
return ORTableInfo(self)
def tableDataModel(self, parent):
from .data_model import ORTableDataModel
return ORTableDataModel(self, parent)
def getValidQgisUniqueFields(self, onlyOne=False):
""" list of fields valid to load the table as layer in Qgis canvas.
QGIS automatically searches for a valid unique field, so it's
needed only for queries and views.
"""
ret = []
# add the pk
pkcols = [x for x in self.fields() if x.primaryKey]
if len(pkcols) == 1:
ret.append(pkcols[0])
# then add integer fields with an unique index
indexes = self.indexes()
if indexes is not None:
for idx in indexes:
if idx.isUnique and len(idx.columns) == 1:
fld = idx.fields()[idx.columns[0]]
if (fld.dataType == u"NUMBER" and
not fld.modifier and
fld.notNull and
fld not in ret):
ret.append(fld)
# and finally append the other suitable fields
for fld in self.fields():
if (fld.dataType == u"NUMBER" and
not fld.modifier and
fld.notNull and
fld not in ret):
ret.append(fld)
if onlyOne:
return ret[0] if len(ret) > 0 else None
return ret
def uri(self):
uri = self.database().uri()
schema = self.schemaName() if self.schemaName() else ''
geomCol = self.geomColumn if self.type in [
Table.VectorType, Table.RasterType] else ""
uniqueCol = self.getValidQgisUniqueFields(
True) if self.isView else None
uri.setDataSource(schema, self.name, geomCol if geomCol else None,
None, uniqueCol.name if uniqueCol else "")
# Handle geographic table
if geomCol:
uri.setWkbType(self.wkbType)
uri.setSrid(str(self.srid))
return uri
class ORVectorTable(ORTable, VectorTable):
def __init__(self, row, db, schema=None):
ORTable.__init__(self, row[0:3], db, schema)
VectorTable.__init__(self, db, schema)
self.geomColumn, self.geomType, self.wkbType, self.geomDim, \
self.srid = row[-7:-2]
def info(self):
from .info_model import ORVectorTableInfo
return ORVectorTableInfo(self)
def runAction(self, action):
if action.startswith("extent/"):
if action == "extent/update":
self.aboutToChange.emit()
self.updateExtent()
return True
if ORTable.runAction(self, action):
return True
return VectorTable.runAction(self, action)
def canUpdateMetadata(self):
return self.database().connector.canUpdateMetadata((self.schemaName(),
self.name))
def updateExtent(self):
self.database().connector.updateMetadata(
(self.schemaName(), self.name),
self.geomColumn, extent=self.extent)
self.refreshTableEstimatedExtent()
self.refresh()
def hasSpatialIndex(self, geom_column=None):
geom_column = geom_column if geom_column else self.geomColumn
for idx in self.indexes():
if geom_column == idx.column:
return True
return False
class ORTableField(TableField):
def __init__(self, row, table):
""" build fields information from query and find primary key """
TableField.__init__(self, table)
self.num, self.name, self.dataType, self.charMaxLen, \
self.modifier, self.notNull, self.hasDefault, \
self.default, typeStr, self.comment = row
self.primaryKey = False
self.num = int(self.num)
if self.charMaxLen == NULL:
self.charMaxLen = None
else:
self.charMaxLen = int(self.charMaxLen)
if self.modifier == NULL:
self.modifier = None
else:
self.modifier = int(self.modifier)
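# The query returns Oracle's NULLABLE flag in this slot: 'Y' means the
# column accepts NULLs, so invert it to obtain notNull.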
if self.notNull.upper() == u"Y":
self.notNull = False
else:
self.notNull = True
if self.comment == NULL:
self.comment = u""
# find out whether fields are part of primary key
for con in self.table().constraints():
if (con.type == ORTableConstraint.TypePrimaryKey and
self.name == con.column):
self.primaryKey = True
break
def type2String(self):
if (u"TIMESTAMP" in self.dataType or
self.dataType in [u"DATE", u"SDO_GEOMETRY",
u"BINARY_FLOAT", u"BINARY_DOUBLE"]):
return u"{}".format(self.dataType)
if self.charMaxLen in [None, -1]:
return u"{}".format(self.dataType)
elif self.modifier in [None, -1, 0]:
return u"{}({})".format(self.dataType, self.charMaxLen)
return u"{}({},{})".format(self.dataType, self.charMaxLen,
self.modifier)
def update(self, new_name, new_type_str=None, new_not_null=None,
new_default_str=None):
self.table().aboutToChange.emit()
if self.name == new_name:
new_name = None
if self.type2String() == new_type_str:
new_type_str = None
if self.notNull == new_not_null:
new_not_null = None
if self.default2String() == new_default_str:
new_default_str = None
ret = self.table().database().connector.updateTableColumn(
(self.table().schemaName(), self.table().name),
self.name, new_name, new_type_str,
new_not_null, new_default_str)
# When changing a field, refresh also constraints and
# indexes.
if ret is not False:
self.table().refreshFields()
self.table().refreshConstraints()
self.table().refreshIndexes()
return ret
class ORTableConstraint(TableConstraint):
TypeCheck, TypeForeignKey, TypePrimaryKey, \
TypeUnique, TypeUnknown = list(range(5))
types = {"c": TypeCheck, "r": TypeForeignKey,
"p": TypePrimaryKey, "u": TypeUnique}
def __init__(self, row, table):
""" build constraints info from query """
TableConstraint.__init__(self, table)
self.name, constr_type_str, self.column, self.validated, \
self.generated, self.status = row[0:6]
constr_type_str = constr_type_str.lower()
if constr_type_str in ORTableConstraint.types:
self.type = ORTableConstraint.types[constr_type_str]
else:
self.type = ORTableConstraint.TypeUnknown
if row[6] == NULL:
self.checkSource = u""
else:
self.checkSource = row[6]
if row[8] == NULL:
self.foreignTable = u""
else:
self.foreignTable = row[8]
if row[7] == NULL:
self.foreignOnDelete = u""
else:
self.foreignOnDelete = row[7]
if row[9] == NULL:
self.foreignKey = u""
else:
self.foreignKey = row[9]
def type2String(self):
if self.type == ORTableConstraint.TypeCheck:
return QApplication.translate("DBManagerPlugin", "Check")
if self.type == ORTableConstraint.TypePrimaryKey:
return QApplication.translate("DBManagerPlugin", "Primary key")
if self.type == ORTableConstraint.TypeForeignKey:
return QApplication.translate("DBManagerPlugin", "Foreign key")
if self.type == ORTableConstraint.TypeUnique:
return QApplication.translate("DBManagerPlugin", "Unique")
return QApplication.translate("DBManagerPlugin", 'Unknown')
def fields(self):
""" Hack to make edit dialog box work """
fields = self.table().fields()
field = None
for fld in fields:
if fld.name == self.column:
field = fld
cols = {}
cols[0] = field
return cols
class ORTableIndex(TableIndex):
def __init__(self, row, table):
TableIndex.__init__(self, table)
self.name, self.column, self.indexType, self.status, \
self.analyzed, self.compression, self.isUnique = row
def fields(self):
""" Hack to make edit dialog box work """
self.table().refreshFields()
fields = self.table().fields()
field = None
for fld in fields:
if fld.name == self.column:
field = fld
cols = {}
cols[0] = field
return cols
class ORTableTrigger(TableTrigger):
def __init__(self, row, table):
TableTrigger.__init__(self, table)
self.name, self.event, self.type, self.enabled = row
|
hidekb/espressopp | refs/heads/master | src/tools/povwrite.py | 7 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
*****************************
povwrite - write povray files
*****************************
"""
import espressopp
from espressopp import Real3D
def camera():
camera = "camera {\n\
up <0, 6.0000, 0>\n\
right <5.8417, 0, 0>\n\
location <0.0000, 0.0000, -2.0000>\n\
look_at <0.0000, 0.0000, -0.0000>\n\
direction <-0.0000, -0.0000, 4.0000>\n\
}\n"
return camera
def lightsource1():
lightsource1 = "light_source { \n\
<-0.1000, 0.1000, -1.0000> \n\
color rgb<1.000, 1.000, 1.000> \n\
parallel \n\
point_at <0.0, 0.0, 0.0> \n\
}\n"
return lightsource1
def lightsource2():
lightsource2 = "light_source { \n\
<1.0000, 2.0000, -0.5000> \n\
color rgb<1.000, 1.000, 1.000> \n\
parallel \n\
point_at <0.0, 0.0, 0.0> \n\
}\n"
return lightsource2
def background():
background = "background { \n\
color rgb<0.000, 0.000, 0.000> \n\
}\n\
fog {\n\
distance 3.1250 \n \
fog_type 1 \n\
color rgb<0.000, 0.000, 0.000> \n\
} \n"
return background
def povwrite(system, integrator, filename, append=False, box=True):
red = 1.0
green = 0.5
blue = 0.0
framered = 1.0
framegreen = 1.0
frameblue = 1.0
transparent = 0.0
boxsizex = system.bc.boxL[0]/2
boxsizey = system.bc.boxL[1]/2
boxsizez = system.bc.boxL[2]/2
maxParticleID = int(espressopp.analysis.MaxPID(system).compute())
f = 0.7 #size
pid = 0
r = []
xpos = []
ypos = []
zpos = []
if append:
file = open(filename, 'a')
else:
file = open(filename,'w')
    if boxsizex>=boxsizey and boxsizex>=boxsizez:
g = boxsizex
if boxsizey>=boxsizex and boxsizey>=boxsizez:
g = boxsizey
if boxsizez>=boxsizex and boxsizez>=boxsizey:
g = boxsizez
    while pid <= maxParticleID:
        if system.storage.particleExists(pid):
            particle = system.storage.getParticle(pid)
            xpos.append(particle.pos[0])
            ypos.append(particle.pos[1])
            zpos.append(particle.pos[2])
            r.append(particle.radius)
        pid += 1
head = "// \n\
// Molecular graphics export from VMD 1.9 \n\
// http://www.ks.uiuc.edu/Research/vmd/ \n\
// Requires POV-Ray 3.5 or later \n\
// \n\
// POV 3.x input script : overlay.pov \n\
// try povray +W812 +H834 -Ioverlay.pov -Ooverlay.pov.tga +P +X +A +FT +C \n\
#if (version < 3.5) \n\
#error \"VMD POV3DisplayDevice has been compiled for POV-Ray 3.5 or above. Please upgrade POV-Ray or recompile VMD. \" \n\
#end \n\
#declare VMD_clip_on=array[3] {0, 0, 0};\n\
#declare VMD_clip=array[3];\n\
#declare VMD_scaledclip=array[3];\n\
#declare VMD_line_width=0.0020;\n\
#macro VMDC ( C1 )\n\
texture { pigment { rgbt C1 }}\n\
#end\n\
#macro VMD_point (P1, R1, C1)\n\
#local T = texture { finish { ambient 1.0 diffuse 0.0 phong 0.0 specular 0.0 } pigment { C1 } }\n\
#if(VMD_clip_on[2])\n\
intersection {\n\
sphere {P1, R1 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
VMD_clip[2]\n\
}\n\
#else\n\
sphere {P1, R1 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#end\n\
#macro VMD_line (P1, P2, C1)\n\
#local T = texture { finish { ambient 1.0 diffuse 0.0 phong 0.0 specular 0.0 } pigment { C1 } }\n\
#if(VMD_clip_on[2])\n\
intersection {\n\
cylinder {P1, P2, VMD_line_width texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
VMD_clip[2]\n\
}\n\
#else\n\
cylinder {P1, P2, VMD_line_width texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#end\n\
#macro VMD_sphere (P1, R1, C1)\n\
#local T = texture { pigment { C1 } }\n\
#if(VMD_clip_on[2])\n\
intersection {\n\
sphere {P1, R1 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
VMD_clip[2]\n\
}\n\
#else\n\
sphere {P1, R1 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#end\n\
#macro VMD_cylinder (P1, P2, R1, C1, O1)\n\
#local T = texture { pigment { C1 } }\n\
#if(VMD_clip_on[2])\n\
intersection {\n\
cylinder {P1, P2, R1 #if(O1) open #end texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
VMD_clip[2]\n\
}\n\
#else\n\
cylinder {P1, P2, R1 #if(O1) open #end texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#end\n\
#macro VMD_cone (P1, P2, R1, C1)\n\
#local T = texture { pigment { C1 } }\n\
#if(VMD_clip_on[2])\n\
intersection {\n\
cone {P1, R1, P2, VMD_line_width texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
VMD_clip[2]\n\
}\n\
#else\n\
cone {P1, R1, P2, VMD_line_width texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#end\n\
#macro VMD_triangle (P1, P2, P3, N1, N2, N3, C1)\n\
#local T = texture { pigment { C1 } }\n\
smooth_triangle {P1, N1, P2, N2, P3, N3 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n\
#macro VMD_tricolor (P1, P2, P3, N1, N2, N3, C1, C2, C3)\n\
#local NX = P2-P1;\n\
#local NY = P3-P1;\n\
#local NZ = vcross(NX, NY);\n\
#local T = texture { pigment {\n\
average pigment_map {\n\
[1 gradient x color_map {[0 rgb 0] [1 C2*3]}]\n\
[1 gradient y color_map {[0 rgb 0] [1 C3*3]}]\n\
[1 gradient z color_map {[0 rgb 0] [1 C1*3]}]\n\
}\n\
matrix <1.01,0,1,0,1.01,1,0,0,1,-.002,-.002,-1>\n\
matrix <NX.x,NX.y,NX.z,NY.x,NY.y,NY.z,NZ.x,NZ.y,NZ.z,P1.x,P1.y,P1.z>\n\
} }\n\
smooth_triangle {P1, N1, P2, N2, P3, N3 texture {T} #if(VMD_clip_on[1]) clipped_by {VMD_clip[1]} #end no_shadow}\n\
#end\n "
file.write(head)
file.write(camera())
file.write(lightsource1())
file.write(lightsource2())
file.write(background())
file.write("#default { texture { \n\
finish { ambient 0.000 diffuse 0.650 phong 0.1 phong_size 40.000 specular 0.500 }\n\
} }\n")
file.write("#declare VMD_line_width=0.0020;\n\
#declare VMD_line_width=0.0060; \n")
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*f,boxsizez/g*-f,boxsizex/g*f,boxsizey/g*-f,boxsizez/g*-f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*-f,boxsizez/g*-f,boxsizex/g*-f,boxsizey/g*-f,boxsizez/g*-f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*-f,boxsizez/g*-f,boxsizex/g*-f,boxsizey/g*f,boxsizez/g*-f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*f,boxsizez/g*-f,boxsizex/g*f, boxsizey/g*f,boxsizez/g*-f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*f,boxsizez/g*-f,boxsizex/g*f,boxsizey/g* f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*-f,boxsizez/g*-f,boxsizex/g*f, boxsizey/g*-f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*-f,boxsizez/g*-f,boxsizex/g*-f,boxsizey/g*-f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*f,boxsizez/g*-f,boxsizex/g*-f,boxsizey/g*f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*f,boxsizez/g*f,boxsizex/g*f,boxsizey/g*-f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*f,boxsizey/g*-f,boxsizez/g*f,boxsizex/g*-f,boxsizey/g* -f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*-f,boxsizez/g*f,boxsizex/g*-f,boxsizey/g*f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("VMD_line(<%f,%f,%f>,<%f,%f,%f>,rgbt<%f,%f,%f,%f>)\n"%(boxsizex/g*-f,boxsizey/g*f,boxsizez/g*f,boxsizex/g*f,boxsizey/g* f,boxsizez/g*f, framered, framegreen, frameblue, transparent))
file.write("// MoleculeID: 0 ReprID: 0 Beginning VDW\n")
    # draw one sphere per particle collected above; iterating over the
    # collected coordinate lists keeps indices aligned and includes the
    # last particle
    for pid in range(len(xpos)):
        x = (boxsizex-xpos[pid])/g*f
        y = (boxsizey-ypos[pid])/g*f
        z = (boxsizez-zpos[pid])/g*f
        radius = r[pid]/2/g*f
        file.write("VMD_sphere(<%f,%f,%f>,%f,rgbt<%f,%f,%f>)\n"%(x,y,z,radius, red, green, blue))
    file.close()
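# Minimal usage sketch (assumes an already set-up espressopp system and
# integrator; the file name and render command are hypothetical):
#   povwrite(system, integrator, 'frame0.pov')
#   # render with e.g.: povray +W800 +H600 -Iframe0.pov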
|
dvliman/jaikuengine | refs/heads/master | .google_appengine/lib/django-1.5/tests/modeltests/m2m_through/__init__.py | 45382 | |
jstitch/MambuPy | refs/heads/master | MambuPy/rest/mambuuser.py | 1 | # coding: utf-8
"""Mambu Users objects.
.. autosummary::
:nosignatures:
:toctree: _autosummary
MambuUser
MambuUsers
MambuUser holds a user.
MambuUsers holds a list of users.
Uses mambuutil.getuserurl as default urlfunc
"""
from ..mambuutil import getusercustominformationurl, getuserurl
from .mambustruct import MambuStruct, MambuStructIterator
mod_urlfunc = getuserurl
class MambuUser(MambuStruct):
"""A User from Mambu.
With the default urlfunc, entid argument must be the ID of the
user you wish to retrieve.
"""
def __init__(self, urlfunc=mod_urlfunc, entid="", *args, **kwargs):
"""Tasks done here:
Just initializes the MambuStruct.
"""
MambuStruct.__init__(
self, urlfunc, entid, customFieldName="customFields", *args, **kwargs
)
def preprocess(self):
"""Preprocessing.
        Strips leading and trailing whitespace from the firstName and lastName fields.
Adds a 'name' field joining all names in to one string.
"""
super(MambuUser, self).preprocess()
try:
self["firstName"] = self["firstName"].strip()
except Exception:
self["firstName"] = ""
try:
self["lastName"] = self["lastName"].strip()
except Exception:
self["lastName"] = ""
self["name"] = self["firstName"] + " " + self["lastName"]
def setGroups(self, *args, **kwargs):
"""Adds the groups assigned to this user to a 'groups' field.
Returns the number of requests done to Mambu.
"""
try:
self.mambugroupsclass
except AttributeError:
from .mambugroup import MambuGroups
self.mambugroupsclass = MambuGroups
groups = self.mambugroupsclass(
creditOfficerUsername=self["username"], *args, **kwargs
)
self["groups"] = groups
return 1
def setRoles(self, *args, **kwargs):
"""Adds the role assigned to this user to a 'role' field.
Depends on the 'role' field that comes with a fullDetails=True
build of the MambuUser.
Returns the number of requests done to Mambu.
"""
try:
self.mamburoleclass
except AttributeError:
from .mamburoles import MambuRole
self.mamburoleclass = MambuRole
try:
role = self.mamburoleclass(
entid=self["role"]["encodedKey"], *args, **kwargs
)
except KeyError:
return 0
self["role"]["role"] = role
return 1
def setBranch(self, *args, **kwargs):
"""Adds the branch to which this user belongs"""
try:
self.mambubranchclass
except AttributeError:
from .mambubranch import MambuBranch
self.mambubranchclass = MambuBranch
try:
branch = self.mambubranchclass(
entid=self["assignedBranchKey"], *args, **kwargs
)
self["branch"] = branch
except KeyError:
self["branch"] = ""
return 1
    def create(self, data, *args, **kwargs):
        """Creates a user in Mambu
Parameters
-data dictionary with data to send
"""
super(MambuUser, self).create(data)
self["user"][self.customFieldName] = self["customInformation"]
self.init(attrs=self["user"])
return 1
def update(self, data, *args, **kwargs):
"""Updates a user in Mambu
Updates customFields of a MambuUser
https://support.mambu.com/docs/users-api#post-users
https://support.mambu.com/docs/users-api#parameters-for-patch-custom-fields-method-for-user
Parameters
-data dictionary with data to update
"""
cont_requests = 0
data2update = {}
# UPDATE customFields
if data.get("customInformation"):
data2update = {"user": {}}
data2update["customInformation"] = data.get("customInformation")
self._MambuStruct__urlfunc = getusercustominformationurl
cont_requests += self.updatePatch(data2update, *args, **kwargs)
self._MambuStruct__urlfunc = getuserurl
cont_requests += super(MambuUser, self).update(data, *args, **kwargs)
return cont_requests
def updatePatch(self, data, *args, **kwargs):
"""Updates a Mambu user using method PATCH
Args:
data (dictionary): dictionary with data to update
https://support.mambu.com/docs/users-api#patch-user-custom-field-values
"""
return super(MambuUser, self).updatePatch(data, *args, **kwargs)
class MambuUsers(MambuStruct):
"""A list of Users from Mambu.
With the default urlfunc, entid argument must be empty at
instantiation time to retrieve all the users according to any other
filter you send to the urlfunc.
itemclass argument allows you to pass some other class as the
elements for the list. Why is this useful? You may wish to override
several behaviours by creating your own MambuUser son class. Pass
that to the itemclass argument here and voila, you get a list of
YourMambuUser class using MambuUsers instead of plain old MambuUser
elements.
If you wish to specialize other Mambu objects on MambuPy you may
    do that. Mind that if you want the iterable version of it
to have elements of your specialized class, you need to change
the logic of the constructor and the convertDict2Attrs method in
the iterable class to use some sort of itemclass there too.
Don't forget to submit the change on a pull request when done
;-)
"""
def __init__(
self, urlfunc=mod_urlfunc, entid="", itemclass=MambuUser, *args, **kwargs
):
"""By default, entid argument is empty. That makes perfect
        sense: you want several users, not just one
"""
self.itemclass = itemclass
MambuStruct.__init__(self, urlfunc, entid, *args, **kwargs)
def __iter__(self):
return MambuStructIterator(self.attrs)
def convertDict2Attrs(self, *args, **kwargs):
"""The trick for iterable Mambu Objects comes here:
You iterate over each element of the responded List from Mambu,
and create a Mambu User (or your own itemclass) object for each
one, initializing them one at a time, and changing the attrs
attribute (which just holds a list of plain dictionaries) with a
MambuUser (or your own itemclass) just created.
.. todo:: pass a valid (perhaps default) urlfunc, and its
corresponding id to entid to each itemclass, telling MambuStruct
not to connect() by default. It's desirable to connect at any
other further moment to refresh some element in the list.
"""
for n, u in enumerate(self.attrs):
try:
params = self.params
except AttributeError:
params = {}
kwargs.update(params)
try:
self.mambuuserclass
except AttributeError:
self.mambuuserclass = self.itemclass
user = self.mambuuserclass(urlfunc=None, entid=None, *args, **kwargs)
user.init(u, *args, **kwargs)
user._MambuStruct__urlfunc = getuserurl
self.attrs[n] = user
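# Hedged usage sketch of the itemclass pattern described in the MambuUsers
# docstring (MyMambuUser is a hypothetical subclass):
#   class MyMambuUser(MambuUser):
#       def preprocess(self):
#           super(MyMambuUser, self).preprocess()
#           self["name"] = self["name"].title()
#   users = MambuUsers(itemclass=MyMambuUser)
#   for u in users:  # iterating now yields MyMambuUser instances
#       print(u["name"])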
|
SEJeff/home-assistant | refs/heads/dev | homeassistant/components/light/demo.py | 12 | """
homeassistant.components.light.demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Demo platform that implements lights.
"""
import random
from homeassistant.components.light import (
Light, ATTR_BRIGHTNESS, ATTR_XY_COLOR)
LIGHT_COLORS = [
[0.368, 0.180],
[0.460, 0.470],
]
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Find and return demo lights. """
add_devices_callback([
DemoLight("Bed Light", False),
DemoLight("Ceiling Lights", True, LIGHT_COLORS[0]),
DemoLight("Kitchen Lights", True, LIGHT_COLORS[1])
])
class DemoLight(Light):
    """ Provides a demo light. """
def __init__(self, name, state, xy=None, brightness=180):
self._name = name
self._state = state
self._xy = xy or random.choice(LIGHT_COLORS)
self._brightness = brightness
@property
def should_poll(self):
""" No polling needed for a demo light. """
return False
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def brightness(self):
""" Brightness of this light between 0..255. """
return self._brightness
@property
def color_xy(self):
""" XY color value. """
return self._xy
@property
def is_on(self):
""" True if device is on. """
return self._state
def turn_on(self, **kwargs):
""" Turn the device on. """
self._state = True
if ATTR_XY_COLOR in kwargs:
self._xy = kwargs[ATTR_XY_COLOR]
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
self.update_ha_state()
def turn_off(self, **kwargs):
""" Turn the device off. """
self._state = False
self.update_ha_state()
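# Hedged usage note: the demo platform is typically enabled from
# configuration.yaml, e.g.:
#   light:
#     platform: demo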
|
ljdursi/hadoop-for-hpcers-tutorial | refs/heads/master | examples/matmult/map.py | 2 | #!/usr/bin/env python
import sys
maxrows=100
for line in sys.stdin:
line = line.strip()
matrix, row, col, val = line.split()
row = int(row)
col = int(col)
val = int(val)
if matrix == 'A':
value = matrix+'-'+str(col)+'-'+str(val)
for j in xrange(maxrows):
key = str(row)+'-'+str(j)
print '%s\t%s' % ( key, value )
else:
value = matrix+'-'+str(row)+'-'+str(val)
for i in xrange(maxrows):
key = str(i)+'-'+str(col)
print '%s\t%s' % ( key, value )
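# Worked example of the mapping above (maxrows = 100): the input line
# "A 0 1 5" (matrix A, row 0, column 1, value 5) emits one pair per output
# column, "0-0<TAB>A-1-5", "0-1<TAB>A-1-5", ..., so the reducer for key
# "i-j" receives every A entry of row i and every B entry of column j
# needed to compute C[i][j].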
|
razvanphp/arangodb | refs/heads/devel | 3rdParty/V8-3.31.74.1/build/gyp/test/win/gyptest-lib-ltcg.py | 269 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure LTCG setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'lib-flags'
test.run_gyp('ltcg.gyp', chdir=CHDIR)
test.build('ltcg.gyp', test.ALL, chdir=CHDIR)
test.must_not_contain_any_line(test.stdout(), ['restarting link with /LTCG'])
test.pass_test()
|
allenp/odoo | refs/heads/9.0 | openerp/addons/test_inherit/tests/__init__.py | 121 | # -*- coding: utf-8 -*-
from . import test_inherit
|
nud3l/smart-contract-analysis | refs/heads/master | data/adjustIndex.py | 1 | import csv
start = 690
newcsv = []
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) + start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
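# Note: no header row is written; if one is wanted, call
# writer.writeheader() (available since Python 2.7) before writerows().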
|
shadowmint/nwidget | refs/heads/master | src/nwidget/assets.py | 1 | # Copyright 2013 Douglas Linder
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os.path
class Assets():
""" Helper for resolving paths in a convenient manner """
__base = ""
""" The path which is the universal root for this assets object """
def __init__(self, base=""):
""" Base is the path path for the loader; for getcwd() if not provided """
if base == "":
base = os.getcwd()
self.__base = base
def resolve(self, *args):
""" Appropriately resolves a path in the form blah, blah, blah.
Base is attached as the root to this set of path elements.
Throws an error at this level if the requested file doesn't
exist.
"""
rtn = os.path.join(self.__base, *args)
if not self.__exists(rtn):
raise Exception("Bad path: %s (base: %s)" % (rtn, self.__base))
return rtn
def __exists(self, path):
rtn = os.path.isdir(path) or (os.path.isfile(path) and os.access(path, os.R_OK))
return rtn
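# Minimal usage sketch (the paths are hypothetical):
#   assets = Assets("/home/user/game")
#   png = assets.resolve("images", "player.png")
#   # -> "/home/user/game/images/player.png", or raises if it does not exist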
|
dcroc16/skunk_works | refs/heads/master | google_appengine/lib/django-0.96/django/middleware/doc.py | 73 | from django.conf import settings
from django import http
class XViewMiddleware(object):
"""
Adds an X-View header to internal HEAD requests -- used by the documentation system.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
"""
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, quickly return with an x-header
indicating the view function. This is used by the documentation module
to lookup the view function for an arbitrary page.
"""
if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or (request.user.is_authenticated() and request.user.is_staff)):
response = http.HttpResponse()
response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
return response
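# Hedged usage sketch: list the middleware in settings, e.g.
#   MIDDLEWARE_CLASSES = (..., 'django.middleware.doc.XViewMiddleware', ...)
# then probe a view with a HEAD request and read the X-View header:
#   curl -I http://127.0.0.1:8000/some/view/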
|
xuther/nyc-taxi-data | refs/heads/master | pre-proc/s.py | 1 | # Scratch pre-processing sketch. The pyshp readers and the county->tract
# index below are hypothetical stand-ins for inputs built elsewhere.
import shapefile  # pyshp
blocks = shapefile.Reader('blocks.shp')  # hypothetical path
tracts = shapefile.Reader('tracts.shp')  # hypothetical path
countiesToTracts = {'061': []}  # hypothetical county -> tract-index map
# distinct tract codes seen in the block records (kept in a list separate
# from the tracts shapefile reader queried below)
tract_ids = []
for i in blocks.records():
    if (i[2] not in tract_ids):
        print i[2]
        tract_ids.append(i[2])
print len(tract_ids)
countiesInTract = []
for tractIndex in countiesToTracts['061']:
    print str(tractIndex) + " -> " + tracts.record(tractIndex)[2]
    if(tracts.record(tractIndex)[2] == '003001'):
        print "FOUND!!!!!!!!!!!"
    countiesInTract.append(tracts.record(tractIndex)[2])
for i in range(len(blocks.shapeRecords())):
    if(blocks.record(i)[3] == '1001'):
        print i
|
Omegaphora/external_chromium_org | refs/heads/lp5.1 | components/policy/tools/syntax_check_policy_template_json.py | 34 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Checks a policy_templates.json file for conformity to its syntax specification.
'''
import json
import optparse
import os
import re
import sys
LEADING_WHITESPACE = re.compile('^([ \t]*)')
TRAILING_WHITESPACE = re.compile('.*?([ \t]+)$')
# Matches all non-empty strings that contain no whitespaces.
NO_WHITESPACE = re.compile('[^\s]+$')
# Convert a 'type' to the schema types it may be converted to.
# The 'dict' type represents structured JSON data, and can be converted
# to an 'object' or an 'array'.
TYPE_TO_SCHEMA = {
'int': [ 'integer' ],
'list': [ 'array' ],
'dict': [ 'object', 'array' ],
'main': [ 'boolean' ],
'string': [ 'string' ],
'int-enum': [ 'integer' ],
'string-enum': [ 'string' ],
'string-enum-list': [ 'array' ],
'external': [ 'object' ],
}
# List of boolean policies that have been introduced with negative polarity in
# the past and should not trigger the negative polarity check.
LEGACY_INVERTED_POLARITY_WHITELIST = [
'DeveloperToolsDisabled',
'DeviceAutoUpdateDisabled',
'Disable3DAPIs',
'DisableAuthNegotiateCnameLookup',
'DisablePluginFinder',
'DisablePrintPreview',
'DisableSafeBrowsingProceedAnyway',
'DisableScreenshots',
'DisableSpdy',
'DisableSSLRecordSplitting',
'DriveDisabled',
'DriveDisabledOverCellular',
'ExternalStorageDisabled',
'SavingBrowserHistoryDisabled',
'SyncDisabled',
]
class PolicyTemplateChecker(object):
def __init__(self):
self.error_count = 0
self.warning_count = 0
self.num_policies = 0
self.num_groups = 0
self.num_policies_in_groups = 0
self.options = None
self.features = []
def _Error(self, message, parent_element=None, identifier=None,
offending_snippet=None):
self.error_count += 1
error = ''
if identifier is not None and parent_element is not None:
error += 'In %s %s: ' % (parent_element, identifier)
print error + 'Error: ' + message
if offending_snippet is not None:
print ' Offending:', json.dumps(offending_snippet, indent=2)
def _CheckContains(self, container, key, value_type,
optional=False,
parent_element='policy',
container_name=None,
identifier=None,
offending='__CONTAINER__',
regexp_check=None):
'''
Checks |container| for presence of |key| with value of type |value_type|.
If |value_type| is string and |regexp_check| is specified, then an error is
reported when the value does not match the regular expression object.
|value_type| can also be a list, if more than one type is supported.
The other parameters are needed to generate, if applicable, an appropriate
human-readable error message of the following form:
In |parent_element| |identifier|:
(if the key is not present):
Error: |container_name| must have a |value_type| named |key|.
Offending snippet: |offending| (if specified; defaults to |container|)
(if the value does not have the required type):
Error: Value of |key| must be a |value_type|.
Offending snippet: |container[key]|
Returns: |container[key]| if the key is present, None otherwise.
'''
if identifier is None:
try:
identifier = container.get('name')
except:
self._Error('Cannot access container name of "%s".' % container_name)
return None
if container_name is None:
container_name = parent_element
if offending == '__CONTAINER__':
offending = container
if key not in container:
if optional:
return
else:
self._Error('%s must have a %s "%s".' %
(container_name.title(), value_type.__name__, key),
container_name, identifier, offending)
return None
value = container[key]
value_types = value_type if isinstance(value_type, list) else [ value_type ]
if not any(isinstance(value, type) for type in value_types):
self._Error('Value of "%s" must be one of [ %s ].' %
(key, ', '.join([type.__name__ for type in value_types])),
container_name, identifier, value)
if str in value_types and regexp_check and not regexp_check.match(value):
self._Error('Value of "%s" must match "%s".' %
(key, regexp_check.pattern),
container_name, identifier, value)
return value
def _AddPolicyID(self, id, policy_ids, policy):
'''
Adds |id| to |policy_ids|. Generates an error message if the
|id| exists already; |policy| is needed for this message.
'''
if id in policy_ids:
self._Error('Duplicate id', 'policy', policy.get('name'),
id)
else:
policy_ids.add(id)
def _CheckPolicyIDs(self, policy_ids):
'''
Checks a set of policy_ids to make sure it contains a continuous range
of entries (i.e. no holes).
Holes would not be a technical problem, but we want to ensure that nobody
accidentally omits IDs.
'''
for i in range(len(policy_ids)):
if (i + 1) not in policy_ids:
self._Error('No policy with id: %s' % (i + 1))
def _CheckPolicySchema(self, policy, policy_type):
'''Checks that the 'schema' field matches the 'type' field.'''
self._CheckContains(policy, 'schema', dict)
if isinstance(policy.get('schema'), dict):
self._CheckContains(policy['schema'], 'type', str)
schema_type = policy['schema'].get('type')
if schema_type not in TYPE_TO_SCHEMA[policy_type]:
self._Error('Schema type must match the existing type for policy %s' %
policy.get('name'))
# Checks that boolean policies are not negated (which makes them harder to
# reason about).
if (schema_type == 'boolean' and
'disable' in policy.get('name').lower() and
policy.get('name') not in LEGACY_INVERTED_POLARITY_WHITELIST):
self._Error(('Boolean policy %s uses negative polarity, please make ' +
'new boolean policies follow the XYZEnabled pattern. ' +
'See also http://crbug.com/85687') % policy.get('name'))
def _CheckPolicy(self, policy, is_in_group, policy_ids):
if not isinstance(policy, dict):
self._Error('Each policy must be a dictionary.', 'policy', None, policy)
return
# There should not be any unknown keys in |policy|.
for key in policy:
if key not in ('name', 'type', 'caption', 'desc', 'device_only',
'supported_on', 'label', 'policies', 'items',
'example_value', 'features', 'deprecated', 'future',
'id', 'schema', 'max_size'):
self.warning_count += 1
print ('In policy %s: Warning: Unknown key: %s' %
(policy.get('name'), key))
# Each policy must have a name.
self._CheckContains(policy, 'name', str, regexp_check=NO_WHITESPACE)
# Each policy must have a type.
policy_types = ('group', 'main', 'string', 'int', 'list', 'int-enum',
'string-enum', 'string-enum-list', 'dict', 'external')
policy_type = self._CheckContains(policy, 'type', str)
if policy_type not in policy_types:
self._Error('Policy type must be one of: ' + ', '.join(policy_types),
'policy', policy.get('name'), policy_type)
return # Can't continue for unsupported type.
# Each policy must have a caption message.
self._CheckContains(policy, 'caption', str)
# Each policy must have a description message.
self._CheckContains(policy, 'desc', str)
# If 'label' is present, it must be a string.
self._CheckContains(policy, 'label', str, True)
# If 'deprecated' is present, it must be a bool.
self._CheckContains(policy, 'deprecated', bool, True)
# If 'future' is present, it must be a bool.
self._CheckContains(policy, 'future', bool, True)
if policy_type == 'group':
# Groups must not be nested.
if is_in_group:
self._Error('Policy groups must not be nested.', 'policy', policy)
# Each policy group must have a list of policies.
policies = self._CheckContains(policy, 'policies', list)
# Check sub-policies.
if policies is not None:
for nested_policy in policies:
self._CheckPolicy(nested_policy, True, policy_ids)
# Groups must not have an |id|.
if 'id' in policy:
self._Error('Policies of type "group" must not have an "id" field.',
'policy', policy)
# Statistics.
self.num_groups += 1
else: # policy_type != group
# Each policy must have a protobuf ID.
id = self._CheckContains(policy, 'id', int)
self._AddPolicyID(id, policy_ids, policy)
# 'schema' is the new 'type'.
# TODO(joaodasilva): remove the 'type' checks once 'schema' is used
# everywhere.
self._CheckPolicySchema(policy, policy_type)
# Each policy must have a supported_on list.
supported_on = self._CheckContains(policy, 'supported_on', list)
if supported_on is not None:
for s in supported_on:
if not isinstance(s, str):
self._Error('Entries in "supported_on" must be strings.', 'policy',
policy, supported_on)
# Each policy must have a 'features' dict.
features = self._CheckContains(policy, 'features', dict)
# All the features must have a documenting message.
if features:
for feature in features:
if not feature in self.features:
self._Error('Unknown feature "%s". Known features must have a '
'documentation string in the messages dictionary.' %
feature, 'policy', policy.get('name', policy))
# All user policies must have a per_profile feature flag.
if (not policy.get('device_only', False) and
not policy.get('deprecated', False) and
not filter(re.compile('^chrome_frame:.*').match, supported_on)):
self._CheckContains(features, 'per_profile', bool,
container_name='features',
identifier=policy.get('name'))
# All policies must declare whether they allow changes at runtime.
self._CheckContains(features, 'dynamic_refresh', bool,
container_name='features',
identifier=policy.get('name'))
# Each policy must have an 'example_value' of appropriate type.
if policy_type == 'main':
value_type = item_type = bool
elif policy_type in ('string', 'string-enum'):
value_type = item_type = str
elif policy_type in ('int', 'int-enum'):
value_type = item_type = int
elif policy_type in ('list', 'string-enum-list'):
value_type = list
item_type = str
elif policy_type == 'external':
value_type = item_type = dict
elif policy_type == 'dict':
value_type = item_type = [ dict, list ]
else:
raise NotImplementedError('Unimplemented policy type: %s' % policy_type)
self._CheckContains(policy, 'example_value', value_type)
# Statistics.
self.num_policies += 1
if is_in_group:
self.num_policies_in_groups += 1
if policy_type in ('int-enum', 'string-enum', 'string-enum-list'):
# Enums must contain a list of items.
items = self._CheckContains(policy, 'items', list)
if items is not None:
if len(items) < 1:
self._Error('"items" must not be empty.', 'policy', policy, items)
for item in items:
# Each item must have a name.
# Note: |policy.get('name')| is used instead of |policy['name']|
# because it returns None rather than failing when no key called
# 'name' exists.
self._CheckContains(item, 'name', str, container_name='item',
identifier=policy.get('name'),
regexp_check=NO_WHITESPACE)
# Each item must have a value of the correct type.
self._CheckContains(item, 'value', item_type, container_name='item',
identifier=policy.get('name'))
# Each item must have a caption.
self._CheckContains(item, 'caption', str, container_name='item',
identifier=policy.get('name'))
if policy_type == 'external':
# Each policy referencing external data must specify a maximum data size.
self._CheckContains(policy, 'max_size', int)
def _CheckMessage(self, key, value):
# |key| must be a string, |value| a dict.
if not isinstance(key, str):
self._Error('Each message key must be a string.', 'message', key, key)
return
if not isinstance(value, dict):
self._Error('Each message must be a dictionary.', 'message', key, value)
return
# Each message must have a desc.
self._CheckContains(value, 'desc', str, parent_element='message',
identifier=key)
# Each message must have a text.
self._CheckContains(value, 'text', str, parent_element='message',
identifier=key)
# There should not be any unknown keys in |value|.
for vkey in value:
if vkey not in ('desc', 'text'):
self.warning_count += 1
print 'In message %s: Warning: Unknown key: %s' % (key, vkey)
def _LeadingWhitespace(self, line):
match = LEADING_WHITESPACE.match(line)
if match:
return match.group(1)
return ''
def _TrailingWhitespace(self, line):
match = TRAILING_WHITESPACE.match(line)
if match:
return match.group(1)
return ''
def _LineError(self, message, line_number):
self.error_count += 1
print 'In line %d: Error: %s' % (line_number, message)
def _LineWarning(self, message, line_number):
self.warning_count += 1
print ('In line %d: Warning: Automatically fixing formatting: %s'
% (line_number, message))
def _CheckFormat(self, filename):
if self.options.fix:
fixed_lines = []
with open(filename) as f:
indent = 0
line_number = 0
for line in f:
line_number += 1
line = line.rstrip('\n')
# Check for trailing whitespace.
trailing_whitespace = self._TrailingWhitespace(line)
if len(trailing_whitespace) > 0:
if self.options.fix:
line = line.rstrip()
self._LineWarning('Trailing whitespace.', line_number)
else:
self._LineError('Trailing whitespace.', line_number)
if self.options.fix:
if len(line) == 0:
fixed_lines += ['\n']
continue
else:
if line == trailing_whitespace:
# This also catches the case of an empty line.
continue
# Check for correct amount of leading whitespace.
leading_whitespace = self._LeadingWhitespace(line)
if leading_whitespace.count('\t') > 0:
if self.options.fix:
leading_whitespace = leading_whitespace.replace('\t', ' ')
line = leading_whitespace + line.lstrip()
self._LineWarning('Tab character found.', line_number)
else:
self._LineError('Tab character found.', line_number)
if line[len(leading_whitespace)] in (']', '}'):
indent -= 2
if line[0] != '#': # Ignore 0-indented comments.
if len(leading_whitespace) != indent:
if self.options.fix:
line = ' ' * indent + line.lstrip()
self._LineWarning('Indentation should be ' + str(indent) +
' spaces.', line_number)
else:
self._LineError('Bad indentation. Should be ' + str(indent) +
' spaces.', line_number)
if line[-1] in ('[', '{'):
indent += 2
if self.options.fix:
fixed_lines.append(line + '\n')
# If --fix is specified: backup the file (deleting any existing backup),
# then write the fixed version with the old filename.
if self.options.fix:
if self.options.backup:
backupfilename = filename + '.bak'
if os.path.exists(backupfilename):
os.remove(backupfilename)
os.rename(filename, backupfilename)
with open(filename, 'w') as f:
f.writelines(fixed_lines)
def Main(self, filename, options):
try:
with open(filename) as f:
data = eval(f.read())
except:
import traceback
traceback.print_exc(file=sys.stdout)
self._Error('Invalid Python/JSON syntax.')
return 1
if data == None:
self._Error('Invalid Python/JSON syntax.')
return 1
self.options = options
# First part: check JSON structure.
# Check (non-policy-specific) message definitions.
messages = self._CheckContains(data, 'messages', dict,
parent_element=None,
container_name='The root element',
offending=None)
if messages is not None:
for message in messages:
self._CheckMessage(message, messages[message])
if message.startswith('doc_feature_'):
self.features.append(message[12:])
# Check policy definitions.
policy_definitions = self._CheckContains(data, 'policy_definitions', list,
parent_element=None,
container_name='The root element',
offending=None)
if policy_definitions is not None:
policy_ids = set()
for policy in policy_definitions:
self._CheckPolicy(policy, False, policy_ids)
self._CheckPolicyIDs(policy_ids)
# Second part: check formatting.
self._CheckFormat(filename)
# Third part: summary and exit.
print ('Finished checking %s. %d errors, %d warnings.' %
(filename, self.error_count, self.warning_count))
if self.options.stats:
if self.num_groups > 0:
print ('%d policies, %d of those in %d groups (containing on '
'average %.1f policies).' %
(self.num_policies, self.num_policies_in_groups, self.num_groups,
(1.0 * self.num_policies_in_groups / self.num_groups)))
else:
print self.num_policies, 'policies, 0 policy groups.'
if self.error_count > 0:
return 1
return 0
def Run(self, argv, filename=None):
parser = optparse.OptionParser(
usage='usage: %prog [options] filename',
description='Syntax check a policy_templates.json file.')
parser.add_option('--fix', action='store_true',
help='Automatically fix formatting.')
parser.add_option('--backup', action='store_true',
help='Create backup of original file (before fixing).')
parser.add_option('--stats', action='store_true',
help='Generate statistics.')
(options, args) = parser.parse_args(argv)
if filename is None:
if len(args) != 2:
parser.print_help()
sys.exit(1)
filename = args[1]
return self.Main(filename, options)
if __name__ == '__main__':
sys.exit(PolicyTemplateChecker().Run(sys.argv))
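# Example invocation (the file path is hypothetical):
#   python syntax_check_policy_template_json.py --stats --fix --backup \
#       policy_templates.json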
|
cobalys/django | refs/heads/master | tests/modeltests/m2o_recursive/models.py | 43 | """
11. Relating an object to itself, many-to-one
To define a many-to-one relationship between a model and itself, use
``ForeignKey('self')``.
In this example, a ``Category`` is related to itself. That is, each
``Category`` has a parent ``Category``.
Set ``related_name`` to designate what the reverse relationship is called.
"""
from django.db import models
class Category(models.Model):
name = models.CharField(max_length=20)
parent = models.ForeignKey('self', blank=True, null=True, related_name='child_set')
def __unicode__(self):
return self.name
class Person(models.Model):
full_name = models.CharField(max_length=20)
mother = models.ForeignKey('self', null=True, related_name='mothers_child_set')
father = models.ForeignKey('self', null=True, related_name='fathers_child_set')
def __unicode__(self):
return self.full_name
|
TangHao1987/intellij-community | refs/heads/master | python/testData/refactoring/rename/renameUpdatesImportReferences/after/bar.py | 83 | import baz
from baz import f
|
sullome/rollingdice | refs/heads/master | fudge.py | 1 | #!/usr/bin/python
from random import choice
FUDGE_LEVELS = (
    'None',
    'really, really terrible-',
    'really, really terrible',
    'really terrible',
    'terrible',
    'poor',
    'mediocre',
    'fair',
    'good',
    'great',
    'superb',
    'legendary',
    'legendary+',
    'legendary++',
    'divine'
)
def dF(sides = '-=+'):
return choice(sides)
def fudge(fancy = True):
if fancy:
return ''.join(dF() for i in range(4))
else:
return sum([dF([-1,0,1]) for i in range(4)])
def fudge_with_level(level, fancy_out = True):
if level not in FUDGE_LEVELS:
        return 'No level "{0}".\nAvailable: {1}.'.format(
            level,
            "\n ".join(FUDGE_LEVELS)
        )
newlevel = FUDGE_LEVELS.index(level)
result = fudge(fancy = False)
    # clamp to the ladder so a roll cannot step off either end of the list
    newlevel = max(0, min(newlevel + result, len(FUDGE_LEVELS) - 1))
if fancy_out:
        return '4dF ({}) from {}. Result: {}'.format(
result,
level,
FUDGE_LEVELS[newlevel]
)
else:
return (result, FUDGE_LEVELS[newlevel])
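# Example: fudge_with_level('good') might roll +1 and print
#   "4dF (1) from good. Result: great"
# while fudge_with_level('good', fancy_out=False) would return (1, 'great').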
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('level', nargs='?')
args = parser.parse_args()
if args.level: print(fudge_with_level(args.level))
else: print(fudge())
return
if __name__ == '__main__':
main()
|
orangeduck/PyAutoC | refs/heads/master | Python27/Lib/CGIHTTPServer.py | 108 | """CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed sychronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
import copy
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Complete HTTP server with GET, HEAD and POST commands.
GET and HEAD also support running CGI scripts.
The POST command is *only* implemented for CGI scripts.
"""
# Determine platform specifics
have_fork = hasattr(os, 'fork')
have_popen2 = hasattr(os, 'popen2')
have_popen3 = hasattr(os, 'popen3')
# Make rfile unbuffered -- we need to read one line and then pass
# the rest to a subprocess, so we can't use buffered input.
rbufsize = 0
def do_POST(self):
"""Serve a POST request.
This is only implemented for CGI scripts.
"""
if self.is_cgi():
self.run_cgi()
else:
self.send_error(501, "Can only POST to CGI scripts")
def send_head(self):
"""Version of send_head that support CGI scripts"""
if self.is_cgi():
return self.run_cgi()
else:
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script.
Returns True and updates the cgi_info attribute to the tuple
(dir, rest) if self.path requires running a CGI script.
Returns False otherwise.
If any exception is raised, the caller should assume that
self.path was rejected as invalid and act accordingly.
The default implementation tests whether the normalized url
path begins with one of the strings in self.cgi_directories
(and the next character is a '/' or the end of the string).
"""
splitpath = _url_collapse_path_split(self.path)
if splitpath[0] in self.cgi_directories:
self.cgi_info = splitpath
return True
return False
cgi_directories = ['/cgi-bin', '/htbin']
def is_executable(self, path):
"""Test whether argument path is an executable file."""
return executable(path)
def is_python(self, path):
"""Test whether argument path is a Python script."""
head, tail = os.path.splitext(path)
return tail.lower() in (".py", ".pyw")
def run_cgi(self):
"""Execute a CGI script."""
path = self.path
dir, rest = self.cgi_info
i = path.find('/', len(dir) + 1)
while i >= 0:
nextdir = path[:i]
nextrest = path[i+1:]
scriptdir = self.translate_path(nextdir)
if os.path.isdir(scriptdir):
dir, rest = nextdir, nextrest
i = path.find('/', len(dir) + 1)
else:
break
# find an explicit query string, if present.
i = rest.rfind('?')
if i >= 0:
rest, query = rest[:i], rest[i+1:]
else:
query = ''
# dissect the part after the directory name into a script name &
# a possible additional path, to be stored in PATH_INFO.
i = rest.find('/')
if i >= 0:
script, rest = rest[:i], rest[i:]
else:
script, rest = rest, ''
scriptname = dir + '/' + script
scriptfile = self.translate_path(scriptname)
if not os.path.exists(scriptfile):
self.send_error(404, "No such CGI script (%r)" % scriptname)
return
if not os.path.isfile(scriptfile):
self.send_error(403, "CGI script is not a plain file (%r)" %
scriptname)
return
ispy = self.is_python(scriptname)
if not ispy:
if not (self.have_fork or self.have_popen2 or self.have_popen3):
self.send_error(403, "CGI script is not a Python script (%r)" %
scriptname)
return
if not self.is_executable(scriptfile):
self.send_error(403, "CGI script is not executable (%r)" %
scriptname)
return
# Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
# XXX Much of the following could be prepared ahead of time!
env = copy.deepcopy(os.environ)
env['SERVER_SOFTWARE'] = self.version_string()
env['SERVER_NAME'] = self.server.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PROTOCOL'] = self.protocol_version
env['SERVER_PORT'] = str(self.server.server_port)
env['REQUEST_METHOD'] = self.command
uqrest = urllib.unquote(rest)
env['PATH_INFO'] = uqrest
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
env['SCRIPT_NAME'] = scriptname
if query:
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader("authorization")
if authorization:
authorization = authorization.split()
if len(authorization) == 2:
import base64, binascii
env['AUTH_TYPE'] = authorization[0]
if authorization[0].lower() == "basic":
try:
authorization = base64.decodestring(authorization[1])
except binascii.Error:
pass
else:
authorization = authorization.split(':')
if len(authorization) == 2:
env['REMOTE_USER'] = authorization[0]
# XXX REMOTE_IDENT
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
for line in self.headers.getallmatchingheaders('accept'):
if line[:1] in "\t\n\r ":
accept.append(line.strip())
else:
accept = accept + line[7:].split(',')
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
# XXX Other HTTP_* headers
# Since we're setting the env in the parent, provide empty
# values to override previously set values
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
env.setdefault(k, "")
self.send_response(200, "Script output follows")
decoded_query = query.replace('+', ' ')
if self.have_fork:
# Unix -- fork as we should
args = [script]
if '=' not in decoded_query:
args.append(decoded_query)
nobody = nobody_uid()
self.wfile.flush() # Always flush before forking
pid = os.fork()
if pid != 0:
# Parent
pid, sts = os.waitpid(pid, 0)
# throw away additional data [see bug #427345]
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
if sts:
self.log_error("CGI script exit status %#x", sts)
return
# Child
try:
try:
os.setuid(nobody)
except os.error:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
os.execve(scriptfile, args, env)
except:
self.server.handle_error(self.request, self.client_address)
os._exit(127)
else:
# Non Unix - use subprocess
import subprocess
cmdline = [scriptfile]
if self.is_python(scriptfile):
interp = sys.executable
if interp.lower().endswith("w.exe"):
# On Windows, use python.exe, not pythonw.exe
interp = interp[:-5] + interp[-4:]
cmdline = [interp, '-u'] + cmdline
if '=' not in query:
cmdline.append(query)
self.log_message("command: %s", subprocess.list2cmdline(cmdline))
try:
nbytes = int(length)
except (TypeError, ValueError):
nbytes = 0
p = subprocess.Popen(cmdline,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
env = env
)
if self.command.lower() == "post" and nbytes > 0:
data = self.rfile.read(nbytes)
else:
data = None
# throw away additional data [see bug #427345]
while select.select([self.rfile._sock], [], [], 0)[0]:
if not self.rfile._sock.recv(1):
break
stdout, stderr = p.communicate(data)
self.wfile.write(stdout)
if stderr:
self.log_error('%s', stderr)
p.stderr.close()
p.stdout.close()
status = p.returncode
if status:
self.log_error("CGI script exit status %#x", status)
else:
self.log_message("CGI script exited OK")
# TODO(gregory.p.smith): Move this into an appropriate library.
def _url_collapse_path_split(path):
"""
Given a URL path, remove extra '/'s and '.' path elements and collapse
any '..' references.
Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
Returns: A tuple of (head, tail) where tail is everything after the final /
and head is everything before it. Head will always start with a '/' and,
if it contains anything else, never have a trailing '/'.
Raises: IndexError if too many '..' occur within the path.
"""
# Similar to os.path.split(os.path.normpath(path)) but specific to URL
# path semantics rather than local operating system semantics.
path_parts = []
for part in path.split('/'):
if part == '.':
path_parts.append('')
else:
path_parts.append(part)
# Filter out blank non trailing parts before consuming the '..'.
path_parts = [part for part in path_parts[:-1] if part] + path_parts[-1:]
if path_parts:
tail_part = path_parts.pop()
else:
tail_part = ''
head_parts = []
for part in path_parts:
if part == '..':
head_parts.pop()
else:
head_parts.append(part)
if tail_part and tail_part == '..':
head_parts.pop()
tail_part = ''
return ('/' + '/'.join(head_parts), tail_part)
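# Examples of the collapse rules above:
#   _url_collapse_path_split('/cgi-bin/../foo/bar')  ->  ('/foo', 'bar')
#   _url_collapse_path_split('/cgi-bin//foo.py')     ->  ('/cgi-bin', 'foo.py')
#   _url_collapse_path_split('/')                    ->  ('/', '')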
nobody = None
def nobody_uid():
"""Internal routine to get nobody's uid"""
global nobody
if nobody:
return nobody
try:
import pwd
except ImportError:
return -1
try:
nobody = pwd.getpwnam('nobody')[2]
except KeyError:
nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
return nobody
def executable(path):
"""Test for executable file."""
try:
st = os.stat(path)
except os.error:
return False
return st.st_mode & 0111 != 0
def test(HandlerClass = CGIHTTPRequestHandler,
ServerClass = BaseHTTPServer.HTTPServer):
SimpleHTTPServer.test(HandlerClass, ServerClass)
if __name__ == '__main__':
test()
|
sourcepole/kadas-albireo | refs/heads/master | python/plugins/processing/algs/qgis/SinglePartsToMultiparts.py | 6 | # -*- coding: utf-8 -*-
"""
***************************************************************************
SinglePartsToMultiparts.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QGis, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterTableField
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class SinglePartsToMultiparts(GeoAlgorithm):
INPUT = 'INPUT'
FIELD = 'FIELD'
OUTPUT = 'OUTPUT'
def defineCharacteristics(self):
self.name = 'Singleparts to multipart'
self.group = 'Vector geometry tools'
self.addParameter(ParameterVector(self.INPUT, self.tr('Input layer')))
self.addParameter(ParameterTableField(self.FIELD,
self.tr('Unique ID field'), self.INPUT))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Output layer')))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
fieldName = self.getParameterValue(self.FIELD)
geomType = self.singleToMultiGeom(layer.dataProvider().geometryType())
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
layer.pendingFields().toList(), geomType, layer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
outGeom = QgsGeometry()
index = layer.fieldNameIndex(fieldName)
unique = vector.getUniqueValues(layer, index)
current = 0
features = vector.features(layer)
total = 100.0 / float(len(features) * len(unique))
if not len(unique) == layer.featureCount():
for i in unique:
multi_feature = []
first = True
features = vector.features(layer)
for inFeat in features:
atMap = inFeat.attributes()
idVar = atMap[index]
if unicode(idVar).strip() == unicode(i).strip():
if first:
attrs = atMap
first = False
inGeom = QgsGeometry(inFeat.geometry())
vType = inGeom.type()
feature_list = self.extractAsMulti(inGeom)
multi_feature.extend(feature_list)
current += 1
progress.setPercentage(int(current * total))
outFeat.setAttributes(attrs)
outGeom = QgsGeometry(self.convertGeometry(multi_feature,
vType))
outFeat.setGeometry(outGeom)
writer.addFeature(outFeat)
del writer
else:
raise GeoAlgorithmExecutionException(self.tr('Invalid unique ID field'))
def singleToMultiGeom(self, wkbType):
try:
if wkbType in (QGis.WKBPoint, QGis.WKBMultiPoint,
QGis.WKBPoint25D, QGis.WKBMultiPoint25D):
return QGis.WKBMultiPoint
elif wkbType in (QGis.WKBLineString, QGis.WKBMultiLineString,
QGis.WKBMultiLineString25D,
QGis.WKBLineString25D):
return QGis.WKBMultiLineString
elif wkbType in (QGis.WKBPolygon, QGis.WKBMultiPolygon,
QGis.WKBMultiPolygon25D, QGis.WKBPolygon25D):
return QGis.WKBMultiPolygon
else:
return QGis.WKBUnknown
except Exception, err:
pass
def extractAsMulti(self, geom):
if geom.type() == QGis.Point:
if geom.isMultipart():
return geom.asMultiPoint()
else:
return [geom.asPoint()]
elif geom.type() == QGis.Line:
if geom.isMultipart():
return geom.asMultiPolyline()
else:
return [geom.asPolyline()]
else:
if geom.isMultipart():
return geom.asMultiPolygon()
else:
return [geom.asPolygon()]
def convertGeometry(self, geom_list, vType):
if vType == QGis.Point:
return QgsGeometry().fromMultiPoint(geom_list)
elif vType == QGis.Line:
return QgsGeometry().fromMultiPolyline(geom_list)
else:
return QgsGeometry().fromMultiPolygon(geom_list)
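    # A minimal sketch (plain Python, illustrative only) of the grouping idea
    # used by processAlgorithm() above: single parts that share the same
    # unique ID are collected into one multi-part list before a single
    # feature is written.
    #
    #     parts = [('a', 1), ('a', 2), ('b', 3)]   # (unique id, geometry part)
    #     grouped = {}
    #     for key, part in parts:
    #         grouped.setdefault(key, []).append(part)
    #     # grouped == {'a': [1, 2], 'b': [3]}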
|
phretor/django-academic | refs/heads/master | academic/apps/people/migrations/0004_auto__add_field_person_alumni.py | 1 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.alumni'
db.add_column('people_person', 'alumni', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.alumni'
db.delete_column('people_person', 'alumni')
models = {
'organizations.organization': {
'Meta': {'object_name': 'Organization'},
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'db_index': 'True', 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'web_page': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'people.person': {
'Meta': {'ordering': "['rank', 'last_name', 'first_name']", 'object_name': 'Person'},
'affiliation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'people'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['organizations.Organization']"}),
'alumni': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'current': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'e_mail': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'mid_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'picture': ('filebrowser.fields.FileBrowseField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rank': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'people'", 'null': 'True', 'to': "orm['people.Rank']"}),
'visitor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'web_page': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'people.rank': {
'Meta': {'ordering': "['order']", 'object_name': 'Rank'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'order': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'plural_name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
}
}
complete_apps = ['people']
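# Hypothetical usage (command form assumed from South's conventions): apply
# with ``./manage.py migrate people 0004`` and roll back with
# ``./manage.py migrate people 0003``.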
|
2013Commons/HUE-SHARK | refs/heads/master | desktop/core/ext-py/Django-1.2.3/build/lib.linux-i686-2.7/django/bin/compile-messages.py | 186 | #!/usr/bin/env python
if __name__ == "__main__":
import sys
name = sys.argv[0]
args = ' '.join(sys.argv[1:])
print >> sys.stderr, "%s has been moved into django-admin.py" % name
    print >> sys.stderr, 'Please run "django-admin.py compilemessages %s" instead.' % args
print >> sys.stderr
sys.exit(1)
|
brain-tec/server-tools | refs/heads/11.0 | html_image_url_extractor/tests/__init__.py | 8 | # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from . import test_extractor
|
ifduyue/django | refs/heads/master | django/contrib/auth/views.py | 5 | import warnings
from urllib.parse import urlparse, urlunparse
from django.conf import settings
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import (
REDIRECT_FIELD_NAME, get_user_model, login as auth_login,
logout as auth_logout, update_session_auth_hash,
)
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm,
)
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponseRedirect, QueryDict
from django.shortcuts import resolve_url
from django.template.response import TemplateResponse
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.http import is_safe_url, urlsafe_base64_decode
from django.utils.translation import gettext_lazy as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
UserModel = get_user_model()
class SuccessURLAllowedHostsMixin:
success_url_allowed_hosts = set()
def get_success_url_allowed_hosts(self):
allowed_hosts = {self.request.get_host()}
allowed_hosts.update(self.success_url_allowed_hosts)
return allowed_hosts
class LoginView(SuccessURLAllowedHostsMixin, FormView):
"""
Display the login form and handle the login action.
"""
form_class = AuthenticationForm
authentication_form = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/login.html'
redirect_authenticated_user = False
extra_context = None
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
if self.redirect_authenticated_user and self.request.user.is_authenticated:
redirect_to = self.get_success_url()
if redirect_to == self.request.path:
raise ValueError(
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
return HttpResponseRedirect(redirect_to)
return super().dispatch(request, *args, **kwargs)
def get_success_url(self):
url = self.get_redirect_url()
return url or resolve_url(settings.LOGIN_REDIRECT_URL)
def get_redirect_url(self):
"""Return the user-originating redirect URL if it's safe."""
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, '')
)
url_is_safe = is_safe_url(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
return redirect_to if url_is_safe else ''
def get_form_class(self):
return self.authentication_form or self.form_class
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
"""Security check complete. Log the user in."""
auth_login(self.request, form.get_user())
return HttpResponseRedirect(self.get_success_url())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
self.redirect_field_name: self.get_redirect_url(),
'site': current_site,
'site_name': current_site.name,
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
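# A minimal URLconf sketch (hypothetical project code, not part of this
# module) showing how LoginView is typically wired up:
#
#     from django.urls import path
#     from django.contrib.auth import views as auth_views
#
#     urlpatterns = [
#         path('login/', auth_views.LoginView.as_view(
#             redirect_authenticated_user=True), name='login'),
#     ]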
def login(request, template_name='registration/login.html',
redirect_field_name=REDIRECT_FIELD_NAME,
authentication_form=AuthenticationForm,
extra_context=None, redirect_authenticated_user=False):
warnings.warn(
'The login() view is superseded by the class-based LoginView().',
RemovedInDjango21Warning, stacklevel=2
)
return LoginView.as_view(
template_name=template_name,
redirect_field_name=redirect_field_name,
form_class=authentication_form,
extra_context=extra_context,
redirect_authenticated_user=redirect_authenticated_user,
)(request)
class LogoutView(SuccessURLAllowedHostsMixin, TemplateView):
"""
Log out the user and display the 'You are logged out' message.
"""
next_page = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/logged_out.html'
extra_context = None
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
auth_logout(request)
next_page = self.get_next_page()
if next_page:
# Redirect to this page until the session has been cleared.
return HttpResponseRedirect(next_page)
return super().dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
"""Logout may be done via POST."""
return self.get(request, *args, **kwargs)
def get_next_page(self):
if self.next_page is not None:
next_page = resolve_url(self.next_page)
elif settings.LOGOUT_REDIRECT_URL:
next_page = resolve_url(settings.LOGOUT_REDIRECT_URL)
else:
next_page = self.next_page
if (self.redirect_field_name in self.request.POST or
self.redirect_field_name in self.request.GET):
next_page = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name)
)
url_is_safe = is_safe_url(
url=next_page,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
# Security check -- Ensure the user-originating redirection URL is
# safe.
if not url_is_safe:
next_page = self.request.path
return next_page
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
'site': current_site,
'site_name': current_site.name,
'title': _('Logged out'),
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
def logout(request, next_page=None,
template_name='registration/logged_out.html',
redirect_field_name=REDIRECT_FIELD_NAME,
extra_context=None):
warnings.warn(
'The logout() view is superseded by the class-based LogoutView().',
RemovedInDjango21Warning, stacklevel=2
)
return LogoutView.as_view(
next_page=next_page,
template_name=template_name,
redirect_field_name=redirect_field_name,
extra_context=extra_context,
)(request)
_sentinel = object()
def logout_then_login(request, login_url=None, extra_context=_sentinel):
"""
Log out the user if they are logged in. Then redirect to the login page.
"""
if extra_context is not _sentinel:
warnings.warn(
"The unused `extra_context` parameter to `logout_then_login` "
"is deprecated.", RemovedInDjango21Warning
)
if not login_url:
login_url = settings.LOGIN_URL
login_url = resolve_url(login_url)
return LogoutView.as_view(next_page=login_url)(request)
def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Redirect the user to the login page, passing the given 'next' page.
"""
resolved_url = resolve_url(login_url or settings.LOGIN_URL)
login_url_parts = list(urlparse(resolved_url))
if redirect_field_name:
querystring = QueryDict(login_url_parts[4], mutable=True)
querystring[redirect_field_name] = next
login_url_parts[4] = querystring.urlencode(safe='/')
return HttpResponseRedirect(urlunparse(login_url_parts))
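# For example (paths hypothetical): with LOGIN_URL = '/accounts/login/',
# redirect_to_login('/profile/') returns an HttpResponseRedirect to
# '/accounts/login/?next=/profile/'.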
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
@csrf_protect
def password_reset(request,
template_name='registration/password_reset_form.html',
email_template_name='registration/password_reset_email.html',
subject_template_name='registration/password_reset_subject.txt',
password_reset_form=PasswordResetForm,
token_generator=default_token_generator,
post_reset_redirect=None,
from_email=None,
extra_context=None,
html_email_template_name=None,
extra_email_context=None):
warnings.warn("The password_reset() view is superseded by the "
"class-based PasswordResetView().",
RemovedInDjango21Warning, stacklevel=2)
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_done')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
if request.method == "POST":
form = password_reset_form(request.POST)
if form.is_valid():
opts = {
'use_https': request.is_secure(),
'token_generator': token_generator,
'from_email': from_email,
'email_template_name': email_template_name,
'subject_template_name': subject_template_name,
'request': request,
'html_email_template_name': html_email_template_name,
'extra_email_context': extra_email_context,
}
form.save(**opts)
return HttpResponseRedirect(post_reset_redirect)
else:
form = password_reset_form()
context = {
'form': form,
'title': _('Password reset'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
def password_reset_done(request,
template_name='registration/password_reset_done.html',
extra_context=None):
warnings.warn("The password_reset_done() view is superseded by the "
"class-based PasswordResetDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password reset sent'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Doesn't need csrf_protect since no-one can guess the URL
@sensitive_post_parameters()
@never_cache
def password_reset_confirm(request, uidb64=None, token=None,
template_name='registration/password_reset_confirm.html',
token_generator=default_token_generator,
set_password_form=SetPasswordForm,
post_reset_redirect=None,
extra_context=None):
"""
Check the hash in a password reset link and present a form for entering a
new password.
"""
warnings.warn("The password_reset_confirm() view is superseded by the "
"class-based PasswordResetConfirmView().",
RemovedInDjango21Warning, stacklevel=2)
assert uidb64 is not None and token is not None # checked by URLconf
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_complete')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
try:
# urlsafe_base64_decode() decodes to bytestring
uid = urlsafe_base64_decode(uidb64).decode()
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
if user is not None and token_generator.check_token(user, token):
validlink = True
title = _('Enter new password')
if request.method == 'POST':
form = set_password_form(user, request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect(post_reset_redirect)
else:
form = set_password_form(user)
else:
validlink = False
form = None
title = _('Password reset unsuccessful')
context = {
'form': form,
'title': title,
'validlink': validlink,
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
def password_reset_complete(request,
template_name='registration/password_reset_complete.html',
extra_context=None):
warnings.warn("The password_reset_complete() view is superseded by the "
"class-based PasswordResetCompleteView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'login_url': resolve_url(settings.LOGIN_URL),
'title': _('Password reset complete'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Class-based password reset views
# - PasswordResetView sends the mail
# - PasswordResetDoneView shows a success message for the above
# - PasswordResetConfirmView checks the link the user clicked and
# prompts for a new password
# - PasswordResetCompleteView shows a success message for the above
class PasswordContextMixin:
extra_context = None
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['title'] = self.title
if self.extra_context is not None:
context.update(self.extra_context)
return context
class PasswordResetView(PasswordContextMixin, FormView):
email_template_name = 'registration/password_reset_email.html'
extra_email_context = None
form_class = PasswordResetForm
from_email = None
html_email_template_name = None
subject_template_name = 'registration/password_reset_subject.txt'
success_url = reverse_lazy('password_reset_done')
template_name = 'registration/password_reset_form.html'
title = _('Password reset')
token_generator = default_token_generator
@method_decorator(csrf_protect)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def form_valid(self, form):
opts = {
'use_https': self.request.is_secure(),
'token_generator': self.token_generator,
'from_email': self.from_email,
'email_template_name': self.email_template_name,
'subject_template_name': self.subject_template_name,
'request': self.request,
'html_email_template_name': self.html_email_template_name,
'extra_email_context': self.extra_email_context,
}
form.save(**opts)
return super().form_valid(form)
INTERNAL_RESET_URL_TOKEN = 'set-password'
INTERNAL_RESET_SESSION_TOKEN = '_password_reset_token'
class PasswordResetDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_done.html'
title = _('Password reset sent')
class PasswordResetConfirmView(PasswordContextMixin, FormView):
form_class = SetPasswordForm
post_reset_login = False
post_reset_login_backend = None
success_url = reverse_lazy('password_reset_complete')
template_name = 'registration/password_reset_confirm.html'
title = _('Enter new password')
token_generator = default_token_generator
@method_decorator(sensitive_post_parameters())
@method_decorator(never_cache)
def dispatch(self, *args, **kwargs):
assert 'uidb64' in kwargs and 'token' in kwargs
self.validlink = False
self.user = self.get_user(kwargs['uidb64'])
if self.user is not None:
token = kwargs['token']
if token == INTERNAL_RESET_URL_TOKEN:
session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN)
if self.token_generator.check_token(self.user, session_token):
# If the token is valid, display the password reset form.
self.validlink = True
return super().dispatch(*args, **kwargs)
else:
if self.token_generator.check_token(self.user, token):
# Store the token in the session and redirect to the
# password reset form at a URL without the token. That
# avoids the possibility of leaking the token in the
# HTTP Referer header.
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token
redirect_url = self.request.path.replace(token, INTERNAL_RESET_URL_TOKEN)
return HttpResponseRedirect(redirect_url)
# Display the "Password reset unsuccessful" page.
return self.render_to_response(self.get_context_data())
def get_user(self, uidb64):
try:
# urlsafe_base64_decode() decodes to bytestring
uid = urlsafe_base64_decode(uidb64).decode()
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
return user
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['user'] = self.user
return kwargs
def form_valid(self, form):
user = form.save()
del self.request.session[INTERNAL_RESET_SESSION_TOKEN]
if self.post_reset_login:
auth_login(self.request, user, self.post_reset_login_backend)
return super().form_valid(form)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if self.validlink:
context['validlink'] = True
else:
context.update({
'form': None,
'title': _('Password reset unsuccessful'),
'validlink': False,
})
return context
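# Sketch of the two-step flow implemented in dispatch() above (URL values
# hypothetical):
#
#     GET /reset/MQ/abc123-token/   -> token checked and stashed in the
#                                      session, then a redirect is issued to
#     GET /reset/MQ/set-password/   -> the form is rendered; the real token
#                                      never leaks via the HTTP Referer header.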
class PasswordResetCompleteView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_complete.html'
title = _('Password reset complete')
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['login_url'] = resolve_url(settings.LOGIN_URL)
return context
@sensitive_post_parameters()
@csrf_protect
@login_required
def password_change(request,
template_name='registration/password_change_form.html',
post_change_redirect=None,
password_change_form=PasswordChangeForm,
extra_context=None):
warnings.warn("The password_change() view is superseded by the "
"class-based PasswordChangeView().",
RemovedInDjango21Warning, stacklevel=2)
if post_change_redirect is None:
post_change_redirect = reverse('password_change_done')
else:
post_change_redirect = resolve_url(post_change_redirect)
if request.method == "POST":
form = password_change_form(user=request.user, data=request.POST)
if form.is_valid():
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(request, form.user)
return HttpResponseRedirect(post_change_redirect)
else:
form = password_change_form(user=request.user)
context = {
'form': form,
'title': _('Password change'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
@login_required
def password_change_done(request,
template_name='registration/password_change_done.html',
extra_context=None):
warnings.warn("The password_change_done() view is superseded by the "
"class-based PasswordChangeDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password change successful'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
class PasswordChangeView(PasswordContextMixin, FormView):
form_class = PasswordChangeForm
success_url = reverse_lazy('password_change_done')
template_name = 'registration/password_change_form.html'
title = _('Password change')
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(self.request, form.user)
return super().form_valid(form)
class PasswordChangeDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_change_done.html'
title = _('Password change successful')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
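# A hypothetical URLconf sketch wiring the class-based password reset flow;
# the URL names match the reverse_lazy() targets used above, and the
# <uidb64>/<token> kwargs match what PasswordResetConfirmView.dispatch()
# asserts:
#
#     from django.urls import path
#     from django.contrib.auth import views as auth_views
#
#     urlpatterns = [
#         path('password_reset/', auth_views.PasswordResetView.as_view(),
#              name='password_reset'),
#         path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(),
#              name='password_reset_done'),
#         path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(),
#              name='password_reset_confirm'),
#         path('reset/done/', auth_views.PasswordResetCompleteView.as_view(),
#              name='password_reset_complete'),
#     ]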
|
jsoref/django | refs/heads/master | django/utils/jslex.py | 335 | """JsLex: a lexer for Javascript"""
# Originally from https://bitbucket.org/ned/jslex
from __future__ import unicode_literals
import re
class Tok(object):
"""
A specification for a token class.
"""
num = 0
def __init__(self, name, regex, next=None):
self.id = Tok.num
Tok.num += 1
self.name = name
self.regex = regex
self.next = next
def literals(choices, prefix="", suffix=""):
"""
Create a regex from a space-separated list of literal `choices`.
If provided, `prefix` and `suffix` will be attached to each choice
individually.
"""
return "|".join(prefix + re.escape(c) + suffix for c in choices.split())
class Lexer(object):
"""
A generic multi-state regex-based lexer.
"""
def __init__(self, states, first):
self.regexes = {}
self.toks = {}
for state, rules in states.items():
parts = []
for tok in rules:
groupid = "t%d" % tok.id
self.toks[groupid] = tok
parts.append("(?P<%s>%s)" % (groupid, tok.regex))
self.regexes[state] = re.compile("|".join(parts), re.MULTILINE | re.VERBOSE)
self.state = first
def lex(self, text):
"""
Lexically analyze `text`.
Yields pairs (`name`, `tokentext`).
"""
end = len(text)
state = self.state
regexes = self.regexes
toks = self.toks
start = 0
while start < end:
for match in regexes[state].finditer(text, start):
name = match.lastgroup
tok = toks[name]
toktext = match.group(name)
start += len(toktext)
yield (tok.name, toktext)
if tok.next:
state = tok.next
break
self.state = state
class JsLexer(Lexer):
"""
A Javascript lexer
>>> lexer = JsLexer()
>>> list(lexer.lex("a = 1"))
[('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')]
This doesn't properly handle non-ASCII characters in the Javascript source.
"""
# Because these tokens are matched as alternatives in a regex, longer
# possibilities must appear in the list before shorter ones, for example,
# '>>' before '>'.
#
# Note that we don't have to detect malformed Javascript, only properly
# lex correct Javascript, so much of this is simplified.
# Details of Javascript lexical structure are taken from
# http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf
# A useful explanation of automatic semicolon insertion is at
# http://inimino.org/~inimino/blog/javascript_semicolons
both_before = [
Tok("comment", r"/\*(.|\n)*?\*/"),
Tok("linecomment", r"//.*?$"),
Tok("ws", r"\s+"),
Tok("keyword", literals("""
break case catch class const continue debugger
default delete do else enum export extends
finally for function if import in instanceof
new return super switch this throw try typeof
var void while with
""", suffix=r"\b"), next='reg'),
Tok("reserved", literals("null true false", suffix=r"\b"), next='div'),
Tok("id", r"""
              ([a-zA-Z_$]|\\u[0-9a-fA-F]{4})    # first char
([a-zA-Z_$0-9]|\\u[0-9a-fA-F]{4})* # rest chars
""", next='div'),
Tok("hnum", r"0[xX][0-9a-fA-F]+", next='div'),
Tok("onum", r"0[0-7]+"),
Tok("dnum", r"""
( (0|[1-9][0-9]*) # DecimalIntegerLiteral
\. # dot
[0-9]* # DecimalDigits-opt
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
\. # dot
[0-9]+ # DecimalDigits
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
(0|[1-9][0-9]*) # DecimalIntegerLiteral
([eE][-+]?[0-9]+)? # ExponentPart-opt
)
""", next='div'),
Tok("punct", literals("""
>>>= === !== >>> <<= >>= <= >= == != << >> &&
|| += -= *= %= &= |= ^=
"""), next="reg"),
Tok("punct", literals("++ -- ) ]"), next='div'),
Tok("punct", literals("{ } ( [ . ; , < > + - * % & | ^ ! ~ ? : ="), next='reg'),
Tok("string", r'"([^"\\]|(\\(.|\n)))*?"', next='div'),
Tok("string", r"'([^'\\]|(\\(.|\n)))*?'", next='div'),
]
both_after = [
Tok("other", r"."),
]
states = {
# slash will mean division
'div': both_before + [
Tok("punct", literals("/= /"), next='reg'),
] + both_after,
# slash will mean regex
'reg': both_before + [
Tok("regex",
r"""
/ # opening slash
# First character is..
( [^*\\/[] # anything but * \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)
# Following characters are same, except for excluding a star
( [^\\/[] # anything but \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)* # many times
/ # closing slash
[a-zA-Z0-9]* # trailing flags
""", next='div'),
] + both_after,
}
def __init__(self):
super(JsLexer, self).__init__(self.states, 'reg')
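    # Illustrative: the lexer starts in the 'reg' state (a leading '/' would
    # be read as a regex); after an identifier it switches to 'div', so '/'
    # lexes as division:
    #
    #     >>> lexer = JsLexer()
    #     >>> [t for t in lexer.lex("a / b") if t[0] != 'ws']
    #     [('id', 'a'), ('punct', '/'), ('id', 'b')]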
def prepare_js_for_gettext(js):
"""
Convert the Javascript source `js` into something resembling C for
xgettext.
What actually happens is that all the regex literals are replaced with
"REGEX".
"""
def escape_quotes(m):
"""Used in a regex to properly escape double quotes."""
s = m.group(0)
if s == '"':
return r'\"'
else:
return s
lexer = JsLexer()
c = []
for name, tok in lexer.lex(js):
if name == 'regex':
# C doesn't grok regexes, and they aren't needed for gettext,
# so just output a string instead.
tok = '"REGEX"'
elif name == 'string':
# C doesn't have single-quoted strings, so make all strings
# double-quoted.
if tok.startswith("'"):
guts = re.sub(r"\\.|.", escape_quotes, tok[1:-1])
tok = '"' + guts + '"'
elif name == 'id':
# C can't deal with Unicode escapes in identifiers. We don't
# need them for gettext anyway, so replace them with something
# innocuous
tok = tok.replace("\\", "U")
c.append(tok)
return ''.join(c)
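# For example (illustrative):
#
#     >>> prepare_js_for_gettext("var re = /x/g; var s = 'hi';")
#     'var re = "REGEX"; var s = "hi";'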
|
jhunufernandes/ArduWatchRaspSerial | refs/heads/master | virtualenv/lib/python3.4/site-packages/pip/_vendor/colorama/winterm.py | 578 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
BLACK = 0
BLUE = 1
GREEN = 2
CYAN = 3
RED = 4
MAGENTA = 5
YELLOW = 6
GREY = 7
# from wincon.h
class WinStyle(object):
NORMAL = 0x00 # dim text, dim background
BRIGHT = 0x08 # bright text, dim background
BRIGHT_BACKGROUND = 0x80 # dim text, bright background
class WinTerm(object):
def __init__(self):
self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
self.set_attrs(self._default)
self._default_fore = self._fore
self._default_back = self._back
self._default_style = self._style
# In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
# So that LIGHT_EX colors and BRIGHT style do not clobber each other,
# we track them separately, since LIGHT_EX is overwritten by Fore/Back
# and BRIGHT is overwritten by Style codes.
self._light = 0
def get_attrs(self):
return self._fore + self._back * 16 + (self._style | self._light)
def set_attrs(self, value):
self._fore = value & 7
self._back = (value >> 4) & 7
self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
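    # Illustrative packing (values from WinColor/WinStyle above): RED text on
    # a BLUE background with BRIGHT style encodes as 4 + 1 * 16 + 0x08 == 28
    # (0x1C), and set_attrs(28) recovers fore=4, back=1, style=BRIGHT.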
def reset_all(self, on_stderr=None):
self.set_attrs(self._default)
self.set_console(attrs=self._default)
def fore(self, fore=None, light=False, on_stderr=False):
if fore is None:
fore = self._default_fore
self._fore = fore
# Emulate LIGHT_EX with BRIGHT Style
if light:
self._light |= WinStyle.BRIGHT
else:
self._light &= ~WinStyle.BRIGHT
self.set_console(on_stderr=on_stderr)
def back(self, back=None, light=False, on_stderr=False):
if back is None:
back = self._default_back
self._back = back
# Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
if light:
self._light |= WinStyle.BRIGHT_BACKGROUND
else:
self._light &= ~WinStyle.BRIGHT_BACKGROUND
self.set_console(on_stderr=on_stderr)
def style(self, style=None, on_stderr=False):
if style is None:
style = self._default_style
self._style = style
self.set_console(on_stderr=on_stderr)
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
# Because Windows coordinates are 0-based,
# and win32.SetConsoleCursorPosition expects 1-based.
position.X += 1
position.Y += 1
return position
def set_cursor_position(self, position=None, on_stderr=False):
if position is None:
# I'm not currently tracking the position, so there is no default.
# position = self.get_position()
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleCursorPosition(handle, position)
def cursor_adjust(self, x, y, on_stderr=False):
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
position = self.get_position(handle)
adjusted_position = (position.Y + y, position.X + x)
win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
def erase_screen(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the screen.
# 1 should clear from the cursor to the beginning of the screen.
# 2 should clear the entire screen, and move cursor to (1,1)
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
# get the number of character cells in the current buffer
cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
# get number of character cells before current cursor position
cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = cells_in_screen - cells_before_cursor
        elif mode == 1:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_before_cursor
elif mode == 2:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_in_screen
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
if mode == 2:
# put the cursor where needed
win32.SetConsoleCursorPosition(handle, (1, 1))
def erase_line(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the line.
# 1 should clear from the cursor to the beginning of the line.
# 2 should clear the entire line.
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        elif mode == 1:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwCursorPosition.X
elif mode == 2:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwSize.X
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
def set_title(self, title):
win32.SetConsoleTitle(title)
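# Hypothetical usage sketch (Windows consoles only; the win32 calls above
# fail elsewhere):
#
#     term = WinTerm()
#     term.fore(WinColor.RED, light=True)   # bright red text
#     print('error!')
#     term.reset_all()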
|
shaistaansari/django | refs/heads/master | tests/staticfiles_tests/test_storage.py | 147 | from __future__ import unicode_literals
import os
import sys
import unittest
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.management.commands import collectstatic
from django.contrib.staticfiles.management.commands.collectstatic import \
Command as CollectstaticCommand
from django.core.cache.backends.base import BaseCache
from django.core.management import call_command
from django.test import SimpleTestCase, override_settings
from django.utils import six
from django.utils.encoding import force_text
from .cases import (
BaseCollectionTestCase, BaseStaticFilesTestCase, StaticFilesTestCase,
)
from .settings import TEST_ROOT, TEST_SETTINGS, TESTFILES_PATH
def hashed_file_path(test, path):
fullpath = test.render_template(test.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, '')
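# For example, hashed_file_path(self, "cached/styles.css") returns something
# like "cached/styles.bb84a0240107.css" (hashes per the fixtures used below).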
class TestHashedFiles(object):
hashed_file_path = hashed_file_path
def tearDown(self):
# Clear hashed files to avoid side effects among tests.
storage.staticfiles_storage.hashed_files.clear()
def test_template_tag_return(self):
"""
Test the CachedStaticFilesStorage backend.
"""
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True)
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.bb84a0240107.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.bb84a0240107.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
def test_path_ignored_completely(self):
relpath = self.hashed_file_path("cached/css/ignored.css")
self.assertEqual(relpath, "cached/css/ignored.6c77f2643390.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'#foobar', content)
self.assertIn(b'http:foobar', content)
self.assertIn(b'https:foobar', content)
self.assertIn(b'data:foobar', content)
self.assertIn(b'//foobar', content)
def test_path_with_querystring(self):
relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
self.assertEqual(relpath, "cached/styles.bb84a0240107.css?spam=eggs")
with storage.staticfiles_storage.open(
"cached/styles.bb84a0240107.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
def test_path_with_fragment(self):
relpath = self.hashed_file_path("cached/styles.css#eggs")
self.assertEqual(relpath, "cached/styles.bb84a0240107.css#eggs")
with storage.staticfiles_storage.open(
"cached/styles.bb84a0240107.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
def test_path_with_querystring_and_fragment(self):
relpath = self.hashed_file_path("cached/css/fragments.css")
self.assertEqual(relpath, "cached/css/fragments.75433540b096.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'fonts/font.a4b0478549d0.eot?#iefix', content)
self.assertIn(b'fonts/font.b8d603e42714.svg#webfontIyfZbseF', content)
self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content)
self.assertIn(b'#default#VML', content)
def test_template_tag_absolute(self):
relpath = self.hashed_file_path("cached/absolute.css")
self.assertEqual(relpath, "cached/absolute.ae9ef2716fe3.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/cached/styles.css", content)
self.assertIn(b"/static/cached/styles.bb84a0240107.css", content)
self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content)
def test_template_tag_denorm(self):
relpath = self.hashed_file_path("cached/denorm.css")
self.assertEqual(relpath, "cached/denorm.c5bd139ad821.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"..//cached///styles.css", content)
self.assertIn(b"../cached/styles.bb84a0240107.css", content)
self.assertNotIn(b"url(img/relative.png )", content)
self.assertIn(b'url("img/relative.acae32e4532b.png', content)
def test_template_tag_relative(self):
relpath = self.hashed_file_path("cached/relative.css")
self.assertEqual(relpath, "cached/relative.b0375bd89156.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"../cached/styles.css", content)
self.assertNotIn(b'@import "styles.css"', content)
self.assertNotIn(b'url(img/relative.png)', content)
self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
self.assertIn(b"../cached/styles.bb84a0240107.css", content)
def test_import_replacement(self):
"See #18050"
relpath = self.hashed_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.2b1d40b0bbd4.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"""import url("styles.bb84a0240107.css")""", relfile.read())
def test_template_tag_deep_relative(self):
relpath = self.hashed_file_path("cached/css/window.css")
self.assertEqual(relpath, "cached/css/window.3906afbb5a17.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b'url(img/window.png)', content)
self.assertIn(b'url("img/window.acae32e4532b.png")', content)
def test_template_tag_url(self):
relpath = self.hashed_file_path("cached/url.css")
self.assertEqual(relpath, "cached/url.902310b73412.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"https://", relfile.read())
def test_post_processing(self):
"""
Test that post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this testcase,
therefore we check by verifying behavior on a second run.
"""
collectstatic_args = {
'interactive': False,
'verbosity': 0,
'link': False,
'clear': False,
'dry_run': False,
'post_process': True,
'use_default_ignore_patterns': True,
'ignore_patterns': ['*.ignoreme'],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed'])
self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified'])
self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed'])
def test_css_import_case_insensitive(self):
relpath = self.hashed_file_path("cached/styles_insensitive.css")
self.assertEqual(relpath, "cached/styles_insensitive.c609562b6d3c.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')],
STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
)
def test_post_processing_failure(self):
"""
Test that post_processing indicates the origin of the error when it
fails. Regression test for #18986.
"""
finders.get_finder.cache_clear()
err = six.StringIO()
with self.assertRaises(Exception):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
# we set DEBUG to False here since the template tag wouldn't work otherwise
@override_settings(**dict(
TEST_SETTINGS,
STATICFILES_STORAGE='django.contrib.staticfiles.storage.CachedStaticFilesStorage',
DEBUG=False,
))
class TestCollectionCachedStorage(TestHashedFiles, BaseCollectionTestCase,
BaseStaticFilesTestCase, SimpleTestCase):
"""
Tests for the Cache busting storage
"""
def test_cache_invalidation(self):
name = "cached/styles.css"
hashed_name = "cached/styles.bb84a0240107.css"
# check if the cache is filled correctly as expected
cache_key = storage.staticfiles_storage.hash_key(name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(self.hashed_file_path(name), cached_name)
# clearing the cache to make sure we re-set it correctly in the url method
storage.staticfiles_storage.hashed_files.clear()
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(cached_name, None)
self.assertEqual(self.hashed_file_path(name), hashed_name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(cached_name, hashed_name)
def test_cache_key_memcache_validation(self):
"""
Handle cache key creation correctly, see #17861.
"""
name = (
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/\x16\xb4"
)
cache_key = storage.staticfiles_storage.hash_key(name)
cache_validator = BaseCache({})
cache_validator.validate_key(cache_key)
self.assertEqual(cache_key, 'staticfiles:821ea71ef36f95b3922a77f7364670e7')
# we set DEBUG to False here since the template tag wouldn't work otherwise
@override_settings(**dict(
TEST_SETTINGS,
STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
DEBUG=False,
))
class TestCollectionManifestStorage(TestHashedFiles, BaseCollectionTestCase,
BaseStaticFilesTestCase, SimpleTestCase):
"""
Tests for the Cache busting storage
"""
def setUp(self):
super(TestCollectionManifestStorage, self).setUp()
self._clear_filename = os.path.join(TESTFILES_PATH, 'cleared.txt')
with open(self._clear_filename, 'w') as f:
f.write('to be deleted in one test')
def tearDown(self):
super(TestCollectionManifestStorage, self).tearDown()
if os.path.exists(self._clear_filename):
os.unlink(self._clear_filename)
def test_manifest_exists(self):
filename = storage.staticfiles_storage.manifest_name
path = storage.staticfiles_storage.path(filename)
self.assertTrue(os.path.exists(path))
def test_loaded_cache(self):
self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
manifest_content = storage.staticfiles_storage.read_manifest()
self.assertIn(
'"version": "%s"' % storage.staticfiles_storage.manifest_version,
force_text(manifest_content)
)
def test_parse_cache(self):
hashed_files = storage.staticfiles_storage.hashed_files
manifest = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_clear_empties_manifest(self):
cleared_file_name = os.path.join('test', 'cleared.txt')
# collect the additional file
self.run_collectstatic()
hashed_files = storage.staticfiles_storage.hashed_files
self.assertIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertIn(cleared_file_name, manifest_content)
original_path = storage.staticfiles_storage.path(cleared_file_name)
self.assertTrue(os.path.exists(original_path))
        # delete the original file from the app, collect with clear
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
self.assertFileNotFound(original_path)
hashed_files = storage.staticfiles_storage.hashed_files
self.assertNotIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertNotIn(cleared_file_name, manifest_content)
# we set DEBUG to False here since the template tag wouldn't work otherwise
@override_settings(**dict(
TEST_SETTINGS,
STATICFILES_STORAGE='staticfiles_tests.storage.SimpleCachedStaticFilesStorage',
DEBUG=False,
))
class TestCollectionSimpleCachedStorage(BaseCollectionTestCase,
BaseStaticFilesTestCase, SimpleTestCase):
"""
Tests for the Cache busting storage
"""
hashed_file_path = hashed_file_path
def test_template_tag_return(self):
"""
Test the CachedStaticFilesStorage backend.
"""
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt")
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.deploy12345.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.deploy12345.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.deploy12345.css", content)
class CustomStaticFilesStorage(storage.StaticFilesStorage):
"""
Used in TestStaticFilePermissions
"""
def __init__(self, *args, **kwargs):
kwargs['file_permissions_mode'] = 0o640
kwargs['directory_permissions_mode'] = 0o740
super(CustomStaticFilesStorage, self).__init__(*args, **kwargs)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports chmod.")
class TestStaticFilePermissions(BaseCollectionTestCase, StaticFilesTestCase):
command_params = {
'interactive': False,
'post_process': True,
'verbosity': 0,
'ignore_patterns': ['*.ignoreme'],
'use_default_ignore_patterns': True,
'clear': False,
'link': False,
'dry_run': False,
}
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
super(TestStaticFilePermissions, self).setUp()
def tearDown(self):
os.umask(self.old_umask)
super(TestStaticFilePermissions, self).tearDown()
# Don't run collectstatic command in this test class.
def run_collectstatic(self, **kwargs):
pass
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
)
def test_collect_static_files_permissions(self):
collectstatic.Command().execute(**self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o655)
self.assertEqual(dir_mode, 0o765)
@override_settings(
FILE_UPLOAD_PERMISSIONS=None,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=None,
)
def test_collect_static_files_default_permissions(self):
collectstatic.Command().execute(**self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o666 & ~self.umask)
self.assertEqual(dir_mode, 0o777 & ~self.umask)
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
STATICFILES_STORAGE='staticfiles_tests.test_storage.CustomStaticFilesStorage',
)
def test_collect_static_files_subclass_of_static_storage(self):
collectstatic.Command().execute(**self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o640)
self.assertEqual(dir_mode, 0o740)
|
russel1237/scikit-learn | refs/heads/master | sklearn/utils/validation.py | 2 | """Utilities for input validation"""
# Authors: Olivier Grisel
# Gael Varoquaux
# Andreas Mueller
# Lars Buitinck
# Alexandre Gramfort
# Nicolas Tresegnie
# License: BSD 3 clause
import warnings
import numbers
import numpy as np
import scipy.sparse as sp
from ..externals import six
from ..utils.fixes import signature
FLOAT_DTYPES = (np.float64, np.float32, np.float16)
class DataConversionWarning(UserWarning):
"""A warning on implicit data conversions happening in the code"""
pass
warnings.simplefilter("always", DataConversionWarning)
class NonBLASDotWarning(UserWarning):
"""A warning on implicit dispatch to numpy.dot"""
class NotFittedError(ValueError, AttributeError):
"""Exception class to raise if estimator is used before fitting
This class inherits from both ValueError and AttributeError to help with
exception handling and backward compatibility.
"""
# Silenced by default to reduce verbosity. Turn on at runtime for
# performance profiling.
warnings.simplefilter('ignore', NonBLASDotWarning)
def _assert_all_finite(X):
"""Like assert_all_finite, but only for ndarray."""
X = np.asanyarray(X)
# First try an O(n) time, O(1) space solution for the common case that
# everything is finite; fall back to O(n) space np.isfinite to prevent
# false positives from overflow in sum method.
if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum())
and not np.isfinite(X).all()):
raise ValueError("Input contains NaN, infinity"
" or a value too large for %r." % X.dtype)
def assert_all_finite(X):
"""Throw a ValueError if X contains NaN or infinity.
Input MUST be an np.ndarray instance or a scipy.sparse matrix."""
_assert_all_finite(X.data if sp.issparse(X) else X)
def as_float_array(X, copy=True, force_all_finite=True):
"""Converts an array-like to an array of floats
The new dtype will be np.float32 or np.float64, depending on the original
type. The function can create a copy or modify the argument depending
on the argument copy.
Parameters
----------
X : {array-like, sparse matrix}
copy : bool, optional
If True, a copy of X will be created. If False, a copy may still be
returned if X's dtype is not a floating point type.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
Returns
-------
XT : {array, sparse matrix}
An array of type np.float
"""
if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray)
and not sp.issparse(X)):
return check_array(X, ['csr', 'csc', 'coo'], dtype=np.float64,
copy=copy, force_all_finite=force_all_finite,
ensure_2d=False)
elif sp.issparse(X) and X.dtype in [np.float32, np.float64]:
return X.copy() if copy else X
elif X.dtype in [np.float32, np.float64]: # is numpy array
return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X
else:
return X.astype(np.float32 if X.dtype == np.int32 else np.float64)
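# Illustrative dtype behaviour for plain ndarrays (per the final branch above):
#
#     >>> as_float_array(np.array([1, 2], dtype=np.int32)).dtype
#     dtype('float32')
#     >>> as_float_array(np.array([1, 2], dtype=np.int64)).dtype
#     dtype('float64')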
def _is_arraylike(x):
"""Returns whether the input is array-like"""
return (hasattr(x, '__len__') or
hasattr(x, 'shape') or
hasattr(x, '__array__'))
def _num_samples(x):
"""Return number of samples in array-like x."""
if hasattr(x, 'fit'):
# Don't get num_samples from an ensembles length!
raise TypeError('Expected sequence or array-like, got '
'estimator %s' % x)
if not hasattr(x, '__len__') and not hasattr(x, 'shape'):
if hasattr(x, '__array__'):
x = np.asarray(x)
else:
raise TypeError("Expected sequence or array-like, got %s" %
type(x))
if hasattr(x, 'shape'):
if len(x.shape) == 0:
raise TypeError("Singleton array %r cannot be considered"
" a valid collection." % x)
return x.shape[0]
else:
return len(x)
def _shape_repr(shape):
"""Return a platform independent reprensentation of an array shape
Under Python 2, the `long` type introduces an 'L' suffix when using the
default %r format for tuples of integers (typically used to store the shape
of an array).
Under Windows 64 bit (and Python 2), the `long` type is used by default
in numpy shapes even when the integer dimensions are well below 32 bit.
The platform specific type causes string messages or doctests to change
from one platform to another which is not desirable.
Under Python 3, there is no more `long` type so the `L` suffix is never
introduced in string representation.
>>> _shape_repr((1, 2))
'(1, 2)'
>>> one = 2 ** 64 / 2 ** 64 # force an upcast to `long` under Python 2
>>> _shape_repr((one, 2 * one))
'(1, 2)'
>>> _shape_repr((1,))
'(1,)'
>>> _shape_repr(())
'()'
"""
if len(shape) == 0:
return "()"
joined = ", ".join("%d" % e for e in shape)
if len(shape) == 1:
# special notation for singleton tuples
joined += ','
return "(%s)" % joined
def check_consistent_length(*arrays):
"""Check that all arrays have consistent first dimensions.
Checks whether all objects in arrays have the same shape or length.
Parameters
----------
*arrays : list or tuple of input objects.
Objects that will be checked for consistent length.
"""
uniques = np.unique([_num_samples(X) for X in arrays if X is not None])
if len(uniques) > 1:
raise ValueError("Found arrays with inconsistent numbers of samples: "
"%s" % str(uniques))
def indexable(*iterables):
"""Make arrays indexable for cross-validation.
Checks consistent length, passes through None, and ensures that everything
can be indexed by converting sparse matrices to csr and converting
    non-iterable objects to arrays.
Parameters
----------
*iterables : lists, dataframes, arrays, sparse matrices
List of objects to ensure sliceability.
"""
result = []
for X in iterables:
if sp.issparse(X):
result.append(X.tocsr())
elif hasattr(X, "__getitem__") or hasattr(X, "iloc"):
result.append(X)
elif X is None:
result.append(X)
else:
result.append(np.array(X))
check_consistent_length(*result)
return result
def _ensure_sparse_format(spmatrix, accept_sparse, dtype, copy,
force_all_finite):
"""Convert a sparse matrix to a given format.
Checks the sparse format of spmatrix and converts if necessary.
Parameters
----------
spmatrix : scipy sparse matrix
Input to validate and convert.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats ('csc',
'csr', 'coo', 'dok', 'bsr', 'lil', 'dia'). None means that sparse
matrix input will raise an error. If the input is sparse but not in
the allowed format, it will be converted to the first listed format.
dtype : string, type or None (default=none)
Data type of result. If None, the dtype of the input is preserved.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
Returns
-------
spmatrix_converted : scipy sparse matrix.
Matrix that is ensured to have an allowed type.
"""
if accept_sparse in [None, False]:
raise TypeError('A sparse matrix was passed, but dense '
'data is required. Use X.toarray() to '
'convert to a dense numpy array.')
if dtype is None:
dtype = spmatrix.dtype
changed_format = False
if (isinstance(accept_sparse, (list, tuple))
and spmatrix.format not in accept_sparse):
# create new with correct sparse
spmatrix = spmatrix.asformat(accept_sparse[0])
changed_format = True
if dtype != spmatrix.dtype:
# convert dtype
spmatrix = spmatrix.astype(dtype)
elif copy and not changed_format:
# force copy
spmatrix = spmatrix.copy()
if force_all_finite:
if not hasattr(spmatrix, "data"):
warnings.warn("Can't check %s sparse matrix for nan or inf."
% spmatrix.format)
else:
_assert_all_finite(spmatrix.data)
return spmatrix
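# Illustrative: a COO matrix passed with accept_sparse=['csr', 'csc'] comes
# back converted to CSR, the first listed format:
#
#     m = _ensure_sparse_format(sp.coo_matrix(np.eye(2)), ['csr', 'csc'],
#                               dtype=None, copy=False, force_all_finite=True)
#     m.format   # 'csr'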
def check_array(array, accept_sparse=None, dtype="numeric", order=None,
copy=False, force_all_finite=True, ensure_2d=True,
allow_nd=False, ensure_min_samples=1, ensure_min_features=1,
warn_on_dtype=False, estimator=None):
"""Input validation on an array, list, sparse matrix or similar.
    By default, the input is converted to an at least 2D numpy array.
If the dtype of the array is object, attempt converting to float,
raising on failure.
Parameters
----------
array : object
Input object to check / convert.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. None means that sparse matrix input will raise an error.
If the input is sparse but not in the allowed format, it will be
converted to the first listed format.
dtype : string, type, list of types or None (default="numeric")
Data type of result. If None, the dtype of the input is preserved.
If "numeric", dtype is preserved unless array.dtype is object.
If dtype is a list of types, conversion on the first type is only
performed if the dtype of the input is not in the list.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
ensure_min_samples : int (default=1)
Make sure that the array has a minimum number of samples in its first
axis (rows for a 2D array). Setting to 0 disables this check.
ensure_min_features : int (default=1)
Make sure that the 2D array has some minimum number of features
(columns). The default value of 1 rejects empty datasets.
This check is only enforced when the input data has effectively 2
dimensions or is originally 1D and ``ensure_2d`` is True. Setting to 0
disables this check.
warn_on_dtype : boolean (default=False)
Raise DataConversionWarning if the dtype of the input data structure
does not match the requested dtype, causing a memory copy.
estimator : str or estimator instance (default=None)
If passed, include the name of the estimator in warning messages.
Returns
-------
X_converted : object
The converted and validated X.
"""
if isinstance(accept_sparse, str):
accept_sparse = [accept_sparse]
# store whether originally we wanted numeric dtype
dtype_numeric = dtype == "numeric"
dtype_orig = getattr(array, "dtype", None)
if not hasattr(dtype_orig, 'kind'):
# not a data type (e.g. a column named dtype in a pandas DataFrame)
dtype_orig = None
if dtype_numeric:
if dtype_orig is not None and dtype_orig.kind == "O":
# if input is object, convert to float.
dtype = np.float64
else:
dtype = None
if isinstance(dtype, (list, tuple)):
if dtype_orig is not None and dtype_orig in dtype:
# no dtype conversion required
dtype = None
else:
# dtype conversion required. Let's select the first element of the
# list of accepted types.
dtype = dtype[0]
if estimator is not None:
if isinstance(estimator, six.string_types):
estimator_name = estimator
else:
estimator_name = estimator.__class__.__name__
else:
estimator_name = "Estimator"
context = " by %s" % estimator_name if estimator is not None else ""
if sp.issparse(array):
array = _ensure_sparse_format(array, accept_sparse, dtype, copy,
force_all_finite)
else:
array = np.array(array, dtype=dtype, order=order, copy=copy)
if ensure_2d:
if array.ndim == 1:
warnings.warn(
"Passing 1d arrays as data is deprecated in 0.17 and will"
"raise ValueError in 0.19. Reshape your data either using "
"X.reshape(-1, 1) if your data has a single feature or "
"X.reshape(1, -1) if it contains a single sample.",
DeprecationWarning)
array = np.atleast_2d(array)
# To ensure that array flags are maintained
array = np.array(array, dtype=dtype, order=order, copy=copy)
    # make sure we actually converted to numeric:
if dtype_numeric and array.dtype.kind == "O":
array = array.astype(np.float64)
if not allow_nd and array.ndim >= 3:
raise ValueError("Found array with dim %d. %s expected <= 2."
% (array.ndim, estimator_name))
if force_all_finite:
_assert_all_finite(array)
shape_repr = _shape_repr(array.shape)
if ensure_min_samples > 0:
n_samples = _num_samples(array)
if n_samples < ensure_min_samples:
raise ValueError("Found array with %d sample(s) (shape=%s) while a"
" minimum of %d is required%s."
% (n_samples, shape_repr, ensure_min_samples,
context))
if ensure_min_features > 0 and array.ndim == 2:
n_features = array.shape[1]
if n_features < ensure_min_features:
raise ValueError("Found array with %d feature(s) (shape=%s) while"
" a minimum of %d is required%s."
% (n_features, shape_repr, ensure_min_features,
context))
if warn_on_dtype and dtype_orig is not None and array.dtype != dtype_orig:
msg = ("Data with input dtype %s was converted to %s%s."
% (dtype_orig, array.dtype, context))
warnings.warn(msg, DataConversionWarning)
return array
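# Editor's sketch (not part of the original module): two typical check_array
# calls. With the default dtype="numeric", an integer input keeps its dtype,
# while an object-dtype input is coerced to float64 (raising if that fails).
def _demo_check_array():
    X = check_array([[1, 2], [3, 4]])
    print(X.dtype)  # integer dtype preserved; "numeric" only forces float64 for object input
    X_obj = check_array(np.array([['1.5', 2]], dtype=object))
    print(X_obj.dtype)  # float64 -- the strings were successfully converted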
def check_X_y(X, y, accept_sparse=None, dtype="numeric", order=None, copy=False,
force_all_finite=True, ensure_2d=True, allow_nd=False,
multi_output=False, ensure_min_samples=1,
ensure_min_features=1, y_numeric=False,
warn_on_dtype=False, estimator=None):
"""Input validation for standard estimators.
Checks X and y for consistent length, enforces X 2d and y 1d.
Standard input checks are only applied to y. For multi-label y,
set multi_output=True to allow 2d and sparse y.
If the dtype of X is object, attempt converting to float,
raising on failure.
Parameters
----------
X : nd-array, list or sparse matrix
Input data.
y : nd-array, list or sparse matrix
Labels.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. None means that sparse matrix input will raise an error.
If the input is sparse but not in the allowed format, it will be
converted to the first listed format.
dtype : string, type, list of types or None (default="numeric")
Data type of result. If None, the dtype of the input is preserved.
If "numeric", dtype is preserved unless array.dtype is object.
If dtype is a list of types, conversion on the first type is only
performed if the dtype of the input is not in the list.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
multi_output : boolean (default=False)
Whether to allow 2-d y (array or sparse matrix). If false, y will be
validated as a vector.
ensure_min_samples : int (default=1)
Make sure that X has a minimum number of samples in its first
axis (rows for a 2D array).
ensure_min_features : int (default=1)
Make sure that the 2D array has some minimum number of features
(columns). The default value of 1 rejects empty datasets.
This check is only enforced when X has effectively 2 dimensions or
is originally 1D and ``ensure_2d`` is True. Setting to 0 disables
this check.
y_numeric : boolean (default=False)
Whether to ensure that y has a numeric type. If dtype of y is object,
it is converted to float64. Should only be used for regression
algorithms.
warn_on_dtype : boolean (default=False)
Raise DataConversionWarning if the dtype of the input data structure
does not match the requested dtype, causing a memory copy.
estimator : str or estimator instance (default=None)
If passed, include the name of the estimator in warning messages.
Returns
-------
X_converted : object
The converted and validated X.
y_converted : object
The converted and validated y.
"""
X = check_array(X, accept_sparse, dtype, order, copy, force_all_finite,
ensure_2d, allow_nd, ensure_min_samples,
ensure_min_features, warn_on_dtype, estimator)
if multi_output:
y = check_array(y, 'csr', force_all_finite=True, ensure_2d=False,
dtype=None)
else:
y = column_or_1d(y, warn=True)
_assert_all_finite(y)
if y_numeric and y.dtype.kind == 'O':
y = y.astype(np.float64)
check_consistent_length(X, y)
return X, y
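# Editor's sketch (not part of the original module): check_X_y validates X as a
# 2d array and, without multi_output, ravels a column-vector y to 1d (emitting
# a DataConversionWarning via column_or_1d).
def _demo_check_X_y():
    X = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]
    y = np.array([[0], [1], [0]])          # column vector on purpose
    X_checked, y_checked = check_X_y(X, y)
    print(X_checked.shape)  # (3, 2)
    print(y_checked.shape)  # (3,) -- raveled, with a warning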
def column_or_1d(y, warn=False):
""" Ravel column or 1d numpy array, else raises an error
Parameters
----------
y : array-like
warn : boolean, default False
To control display of warnings.
Returns
-------
y : array
"""
shape = np.shape(y)
if len(shape) == 1:
return np.ravel(y)
if len(shape) == 2 and shape[1] == 1:
if warn:
warnings.warn("A column-vector y was passed when a 1d array was"
" expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning, stacklevel=2)
return np.ravel(y)
raise ValueError("bad input shape {0}".format(shape))
def check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
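# Editor's sketch (not part of the original module): the three accepted inputs
# for check_random_state.
def _demo_check_random_state():
    rng = check_random_state(0)                 # int seed -> new RandomState
    print(check_random_state(rng) is rng)       # True: instances pass through
    print(check_random_state(None) is np.random.mtrand._rand)  # True: the global singleton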
def has_fit_parameter(estimator, parameter):
"""Checks whether the estimator's fit method supports the given parameter.
Examples
--------
>>> from sklearn.svm import SVC
>>> has_fit_parameter(SVC(), "sample_weight")
True
"""
return parameter in signature(estimator.fit).parameters
def check_symmetric(array, tol=1E-10, raise_warning=True,
raise_exception=False):
"""Make sure that array is 2D, square and symmetric.
If the array is not symmetric, then a symmetrized version is returned.
Optionally, a warning or exception is raised if the matrix is not
symmetric.
Parameters
----------
array : nd-array or sparse matrix
Input object to check / convert. Must be two-dimensional and square,
otherwise a ValueError will be raised.
tol : float
Absolute tolerance for equivalence of arrays. Default = 1E-10.
raise_warning : boolean (default=True)
If True then raise a warning if conversion is required.
raise_exception : boolean (default=False)
If True then raise an exception if array is not symmetric.
Returns
-------
array_sym : ndarray or sparse matrix
Symmetrized version of the input array, i.e. the average of array
and array.transpose(). If sparse, then duplicate entries are first
summed and zeros are eliminated.
"""
if (array.ndim != 2) or (array.shape[0] != array.shape[1]):
raise ValueError("array must be 2-dimensional and square. "
"shape = {0}".format(array.shape))
if sp.issparse(array):
diff = array - array.T
# only csr, csc, and coo have `data` attribute
if diff.format not in ['csr', 'csc', 'coo']:
diff = diff.tocsr()
symmetric = np.all(abs(diff.data) < tol)
else:
symmetric = np.allclose(array, array.T, atol=tol)
if not symmetric:
if raise_exception:
raise ValueError("Array must be symmetric")
if raise_warning:
warnings.warn("Array is not symmetric, and will be converted "
"to symmetric by average with its transpose.")
if sp.issparse(array):
conversion = 'to' + array.format
array = getattr(0.5 * (array + array.T), conversion)()
else:
array = 0.5 * (array + array.T)
return array
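# Editor's sketch (not part of the original module): an asymmetric dense input
# is symmetrized by averaging with its transpose; the warning is silenced here
# only to keep the demonstration quiet.
def _demo_check_symmetric():
    A = np.array([[0.0, 1.0], [2.0, 0.0]])
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        A_sym = check_symmetric(A)
    print(A_sym)  # [[0.  1.5] [1.5 0. ]] -- the average of A and A.T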
def check_is_fitted(estimator, attributes, msg=None, all_or_any=all):
"""Perform is_fitted validation for estimator.
Checks if the estimator is fitted by verifying the presence of
"all_or_any" of the passed attributes and raises a NotFittedError with the
given message.
Parameters
----------
estimator : estimator instance.
estimator instance for which the check is performed.
attributes : attribute name(s) given as string or a list/tuple of strings
Eg. : ["coef_", "estimator_", ...], "coef_"
msg : string
The default error message is, "This %(name)s instance is not fitted
yet. Call 'fit' with appropriate arguments before using this method."
For custom messages if "%(name)s" is present in the message string,
it is substituted for the estimator name.
Eg. : "Estimator, %(name)s, must be fitted before sparsifying".
all_or_any : callable, {all, any}, default all
Specify whether all or any of the given attributes must exist.
"""
if msg is None:
msg = ("This %(name)s instance is not fitted yet. Call 'fit' with "
"appropriate arguments before using this method.")
if not hasattr(estimator, 'fit'):
raise TypeError("%s is not an estimator instance." % (estimator))
if not isinstance(attributes, (list, tuple)):
attributes = [attributes]
if not all_or_any([hasattr(estimator, attr) for attr in attributes]):
raise NotFittedError(msg % {'name': type(estimator).__name__})
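# Editor's sketch (not part of the original module): check_is_fitted before and
# after fit, using a hypothetical stub estimator. NotFittedError is assumed to
# be importable here, since this module raises it above.
def _demo_check_is_fitted():
    class _Stub(object):
        def fit(self, X, y=None):
            self.coef_ = 0.0
            return self
    est = _Stub()
    try:
        check_is_fitted(est, "coef_")      # no coef_ yet -> NotFittedError
    except NotFittedError as exc:
        print(exc)
    check_is_fitted(est.fit(None), "coef_")  # passes once coef_ exists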
def check_non_negative(X, whom):
"""
Check if there is any negative value in an array.
Parameters
----------
X : array-like or sparse matrix
Input data.
whom : string
Who passed X to this function.
"""
X = X.data if sp.issparse(X) else X
if (X < 0).any():
raise ValueError("Negative values in data passed to %s" % whom)
|
avanov/django | refs/heads/master | django/contrib/auth/migrations/0002_alter_permission_name_max_length.py | 586 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='permission',
name='name',
field=models.CharField(max_length=255, verbose_name='name'),
),
]
|
adamcandy/QGIS-Meshing | refs/heads/master | extras/old_release_versions/rastercalc/standardModules.py | 12 | import numpy as np
##########################################################################
#
# QGIS-meshing plugins.
#
# Copyright (C) 2012-2013 Imperial College London and others.
#
# Please see the AUTHORS file in the main source directory for a
# full list of copyright holders.
#
# Dr Adam S. Candy, adam.candy@imperial.ac.uk
# Applied Modelling and Computation Group
# Department of Earth Science and Engineering
# Imperial College London
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation,
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
##########################################################################
import copy
import sys  # some import commands may be unnecessary
import os
from Scientific.IO import NetCDF
import DocStrings
|
zhaochl/python-utils | refs/heads/master | verify_code/Imaging-1.1.7/PIL/MpegImagePlugin.py | 40 | #
# The Python Imaging Library.
# $Id$
#
# MPEG file handling
#
# History:
# 95-09-09 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1995.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.1"
import Image, ImageFile
#
# Bitstream parser
class BitStream:
def __init__(self, fp):
self.fp = fp
self.bits = 0
self.bitbuffer = 0
def next(self):
return ord(self.fp.read(1))
def peek(self, bits):
while self.bits < bits:
c = self.next()
if c < 0:
self.bits = 0
continue
self.bitbuffer = (self.bitbuffer << 8) + c
self.bits = self.bits + 8
return self.bitbuffer >> (self.bits - bits) & (1L << bits) - 1
def skip(self, bits):
while self.bits < bits:
self.bitbuffer = (self.bitbuffer << 8) + ord(self.fp.read(1))
self.bits = self.bits + 8
self.bits = self.bits - bits
def read(self, bits):
v = self.peek(bits)
self.bits = self.bits - bits
return v
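# Editor's sketch (not part of the original plugin): BitStream consumes bytes
# most-significant-bit first, so feeding it the four bytes 00 00 01 B3 -- the
# MPEG-1 sequence-header start code -- makes read(32) return 0x1B3, exactly the
# check _open() performs below. StringIO stands in for a real file object.
def _demo_bitstream():
    import StringIO
    s = BitStream(StringIO.StringIO("\x00\x00\x01\xb3"))
    print hex(s.read(32))  # 0x1b3 (a Python 2 long, so possibly printed as 0x1b3L)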
##
# Image plugin for MPEG streams. This plugin can identify a stream,
# but it cannot read it.
class MpegImageFile(ImageFile.ImageFile):
format = "MPEG"
format_description = "MPEG"
def _open(self):
s = BitStream(self.fp)
if s.read(32) != 0x1B3:
raise SyntaxError, "not an MPEG file"
self.mode = "RGB"
self.size = s.read(12), s.read(12)
# --------------------------------------------------------------------
# Registry stuff
Image.register_open("MPEG", MpegImageFile)
Image.register_extension("MPEG", ".mpg")
Image.register_extension("MPEG", ".mpeg")
Image.register_mime("MPEG", "video/mpeg")
|
crakensio/django_training | refs/heads/master | lib/python2.7/site-packages/django/contrib/sitemaps/tests/test_http.py | 109 | from __future__ import unicode_literals
import os
from datetime import date
from django.conf import settings
from django.contrib.sitemaps import Sitemap, GenericSitemap
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from django.utils.unittest import skipUnless
from django.utils.formats import localize
from django.utils._os import upath
from django.utils.translation import activate, deactivate
from .base import TestModel, SitemapTestsBase
class HTTPSitemapTests(SitemapTestsBase):
def test_simple_sitemap_index(self):
"A simple sitemap index can be rendered"
response = self.client.get('/simple/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
@override_settings(
TEMPLATE_DIRS=(os.path.join(os.path.dirname(upath(__file__)), 'templates'),)
)
def test_simple_sitemap_custom_index(self):
"A simple sitemap index can be rendered with a custom template"
response = self.client.get('/simple/custom-index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
def test_simple_sitemap_section(self):
"A simple sitemap section can be rendered"
response = self.client.get('/simple/sitemap-simple.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
def test_simple_sitemap(self):
"A simple sitemap can be rendered"
response = self.client.get('/simple/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
@override_settings(
TEMPLATE_DIRS=(os.path.join(os.path.dirname(upath(__file__)), 'templates'),)
)
def test_simple_custom_sitemap(self):
"A simple sitemap can be rendered with a custom template"
response = self.client.get('/simple/custom-sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
@skipUnless(settings.USE_I18N, "Internationalization is not enabled")
@override_settings(USE_L10N=True)
def test_localized_priority(self):
"The priority value should not be localized (Refs #14164)"
activate('fr')
self.assertEqual('0,3', localize(0.3))
# Retrieve the sitemap. Check that priorities
# haven't been rendered in localized format
response = self.client.get('/simple/sitemap.xml')
self.assertContains(response, '<priority>0.5</priority>')
self.assertContains(response, '<lastmod>%s</lastmod>' % date.today())
deactivate()
def test_requestsite_sitemap(self):
# Make sure hitting the flatpages sitemap without the sites framework
# installed doesn't raise an exception
Site._meta.installed = False
response = self.client.get('/simple/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>http://testserver/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % date.today()
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
@skipUnless("django.contrib.sites" in settings.INSTALLED_APPS,
"django.contrib.sites app not installed.")
def test_sitemap_get_urls_no_site_1(self):
"""
Check we get ImproperlyConfigured if we don't pass a site object to
Sitemap.get_urls and no Site objects exist
"""
Site.objects.all().delete()
self.assertRaises(ImproperlyConfigured, Sitemap().get_urls)
def test_sitemap_get_urls_no_site_2(self):
"""
Check we get ImproperlyConfigured when we don't pass a site object to
Sitemap.get_urls if Site objects exists, but the sites framework is not
actually installed.
"""
Site._meta.installed = False
self.assertRaises(ImproperlyConfigured, Sitemap().get_urls)
def test_sitemap_item(self):
"""
Check to make sure that the raw item is included with each
Sitemap.get_url() url result.
"""
test_sitemap = GenericSitemap({'queryset': TestModel.objects.all()})
def is_testmodel(url):
return isinstance(url['item'], TestModel)
item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls()))
self.assertTrue(item_in_url_info)
def test_cached_sitemap_index(self):
"""
Check that a cached sitemap index can be rendered (#2713).
"""
response = self.client.get('/cached/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/cached/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
def test_x_robots_sitemap(self):
response = self.client.get('/simple/index.xml')
self.assertEqual(response['X-Robots-Tag'], 'noindex, noodp, noarchive')
response = self.client.get('/simple/sitemap.xml')
self.assertEqual(response['X-Robots-Tag'], 'noindex, noodp, noarchive')
|
openilabs/falconlab | refs/heads/master | env/lib/python2.7/locale.py | 4 | /usr/lib/python2.7/locale.py |
TridevGuha/django | refs/heads/master | tests/inspectdb/tests.py | 108 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import re
from unittest import skipUnless
from django.core.management import call_command
from django.db import connection
from django.test import TestCase, skipUnlessDBFeature
from django.utils.six import PY3, StringIO
from .models import ColumnTypes
class InspectDBTestCase(TestCase):
def test_stealth_table_name_filter_option(self):
out = StringIO()
        # Let's limit the introspection to tables created for models of this
# application
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_'),
stdout=out)
error_message = "inspectdb has examined a table that should have been filtered out."
# contrib.contenttypes is one of the apps always installed when running
# the Django test suite, check that one of its tables hasn't been
# inspected
self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message)
def make_field_type_asserter(self):
"""Call inspectdb and return a function to validate a field type in its output"""
out = StringIO()
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_columntypes'),
stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE).groups()[0]
self.assertEqual(definition, out_def)
return assertFieldType
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it gets max_length wrong: it returns a number of bytes.
# - it reports fields as blank=True when they aren't.
if (connection.features.can_introspect_max_length and
not connection.features.interprets_empty_strings_as_nulls):
assertFieldType('char_field', "models.CharField(max_length=10)")
assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)")
assertFieldType('comma_separated_int_field', "models.CharField(max_length=99)")
assertFieldType('date_field', "models.DateField()")
assertFieldType('date_time_field', "models.DateTimeField()")
if (connection.features.can_introspect_max_length and
not connection.features.interprets_empty_strings_as_nulls):
assertFieldType('email_field', "models.CharField(max_length=254)")
assertFieldType('file_field', "models.CharField(max_length=100)")
assertFieldType('file_path_field', "models.CharField(max_length=100)")
if connection.features.can_introspect_ip_address_field:
assertFieldType('gen_ip_adress_field', "models.GenericIPAddressField()")
elif (connection.features.can_introspect_max_length and
not connection.features.interprets_empty_strings_as_nulls):
assertFieldType('gen_ip_adress_field', "models.CharField(max_length=39)")
if (connection.features.can_introspect_max_length and
not connection.features.interprets_empty_strings_as_nulls):
assertFieldType('slug_field', "models.CharField(max_length=50)")
if not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('text_field', "models.TextField()")
if connection.features.can_introspect_time_field:
assertFieldType('time_field', "models.TimeField()")
if (connection.features.can_introspect_max_length and
not connection.features.interprets_empty_strings_as_nulls):
assertFieldType('url_field', "models.CharField(max_length=200)")
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
if not connection.features.can_introspect_autofield:
assertFieldType('id', "models.IntegerField(primary_key=True) # AutoField?")
if connection.features.can_introspect_big_integer_field:
assertFieldType('big_int_field', "models.BigIntegerField()")
else:
assertFieldType('big_int_field', "models.IntegerField()")
bool_field = ColumnTypes._meta.get_field('bool_field')
bool_field_type = connection.features.introspected_boolean_field_type(bool_field)
assertFieldType('bool_field', "models.{}()".format(bool_field_type))
null_bool_field = ColumnTypes._meta.get_field('null_bool_field')
null_bool_field_type = connection.features.introspected_boolean_field_type(null_bool_field)
if 'BooleanField' in null_bool_field_type:
assertFieldType('null_bool_field', "models.{}()".format(null_bool_field_type))
else:
if connection.features.can_introspect_null:
assertFieldType('null_bool_field', "models.{}(blank=True, null=True)".format(null_bool_field_type))
else:
assertFieldType('null_bool_field', "models.{}()".format(null_bool_field_type))
if connection.features.can_introspect_decimal_field:
assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)")
else: # Guessed arguments on SQLite, see #5014
assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float")
assertFieldType('float_field', "models.FloatField()")
assertFieldType('int_field', "models.IntegerField()")
if connection.features.can_introspect_positive_integer_field:
assertFieldType('pos_int_field', "models.PositiveIntegerField()")
else:
assertFieldType('pos_int_field', "models.IntegerField()")
if connection.features.can_introspect_positive_integer_field:
if connection.features.can_introspect_small_integer_field:
assertFieldType('pos_small_int_field', "models.PositiveSmallIntegerField()")
else:
assertFieldType('pos_small_int_field', "models.PositiveIntegerField()")
else:
if connection.features.can_introspect_small_integer_field:
assertFieldType('pos_small_int_field', "models.SmallIntegerField()")
else:
assertFieldType('pos_small_int_field', "models.IntegerField()")
if connection.features.can_introspect_small_integer_field:
assertFieldType('small_int_field', "models.SmallIntegerField()")
else:
assertFieldType('small_int_field', "models.IntegerField()")
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_attribute_name_not_python_keyword(self):
out = StringIO()
        # Let's limit the introspection to tables created for models of this
# application
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_'),
stdout=out)
output = out.getvalue()
error_message = "inspectdb generated an attribute name which is a python keyword"
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
# As InspectdbPeople model is defined after InspectdbMessage, it should be quoted
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.ForeignKey(InspectdbPeople, models.DO_NOTHING, primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.ForeignKey(InspectdbPeople, models.DO_NOTHING, unique=True)",
output,
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
out = StringIO()
        # Let's limit the introspection to tables created for models of this
# application
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_'),
stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(" 123 = models.CharField", output, msg=error_message)
self.assertIn("number_123 = models.CharField", output)
error_message = "inspectdb generated a model field name which starts with a digit"
self.assertNotIn(" 4extra = models.CharField", output, msg=error_message)
self.assertIn("number_4extra = models.CharField", output)
self.assertNotIn(" 45extra = models.CharField", output, msg=error_message)
self.assertIn("number_45extra = models.CharField", output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_'),
stdout=out)
output = out.getvalue()
base_name = 'Field' if not connection.features.uppercases_column_names else 'field'
self.assertIn("field = models.IntegerField()", output)
self.assertIn("field_field = models.IntegerField(db_column='%s_')" % base_name, output)
self.assertIn("field_field_0 = models.IntegerField(db_column='%s__')" % base_name, output)
self.assertIn("field_field_1 = models.IntegerField(db_column='__field')", output)
self.assertIn("prc_x = models.IntegerField(db_column='prc(%) x')", output)
if PY3:
# Python 3 allows non-ASCII identifiers
self.assertIn("tamaño = models.IntegerField()", output)
else:
self.assertIn("tama_o = models.IntegerField(db_column='tama\\xf1o')", output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_'),
stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
def test_managed_models(self):
"""Test that by default the command generates models with `Meta.managed = False` (#14305)"""
out = StringIO()
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_columntypes'),
stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(" managed = False", output, msg='inspectdb should generate unmanaged models.')
def test_unique_together_meta(self):
out = StringIO()
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_uniquetogether'),
stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('field1', 'field2'),)", output, msg='inspectdb should generate unique_together.')
@skipUnless(connection.vendor == 'sqlite',
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test")
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
orig_data_types_reverse = connection.introspection.data_types_reverse
try:
connection.introspection.data_types_reverse = {
'text': 'myfields.TextField',
'bigint': 'BigIntegerField',
}
call_command('inspectdb',
table_name_filter=lambda tn: tn.startswith('inspectdb_columntypes'),
stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
finally:
connection.introspection.data_types_reverse = orig_data_types_reverse
|
uthcode/learntosolveit | refs/heads/version1 | languages/python/design_atexit_1.py | 7 | import atexit
def foo():
print "foo"
class SomeClass(object):
def __init__(self):
self.a = 10
self.b = 20
atexit.register(foo)
obj = SomeClass()
print obj.a
print obj.b
|
ArchipelProject/Archipel | refs/heads/master | ArchipelAgent/archipel-agent-hypervisor-network/archipelagenthypervisornetwork/network.py | 1 | # -*- coding: utf-8 -*-
#
# network.py
#
# Copyright (C) 2010 Antoine Mercadal <antoine.mercadal@inframonde.eu>
# Copyright, 2011 - Franck Villaume <franck.villaume@trivialdev.com>
# This file is part of ArchipelProject
# http://archipelproject.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import commands
import libvirt
import os
from archipelcore.archipelPlugin import TNArchipelPlugin
from archipelcore.utils import build_error_iq, build_error_message
from archipelcore import xmpp
from archipel.archipelLibvirtEntity import ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR
ARCHIPEL_NS_HYPERVISOR_NETWORK = "archipel:hypervisor:network"
ARCHIPEL_ERROR_CODE_NETWORKS_DEFINE = -7001
ARCHIPEL_ERROR_CODE_NETWORKS_UNDEFINE = -7002
ARCHIPEL_ERROR_CODE_NETWORKS_CREATE = -7003
ARCHIPEL_ERROR_CODE_NETWORKS_DESTROY = -7004
ARCHIPEL_ERROR_CODE_NETWORKS_GET = -7005
ARCHIPEL_ERROR_CODE_NETWORKS_BRIDGES = -7006
ARCHIPEL_ERROR_CODE_NETWORKS_GETNAMES = -7007
ARCHIPEL_ERROR_CODE_NETWORKS_GETNICS = -7008
ARCHIPEL_ERROR_CODE_NETWORKS_GETNWFILTERS = -7009
class TNHypervisorNetworks (TNArchipelPlugin):
def __init__(self, configuration, entity, entry_point_group):
"""
Initialize the plugin.
@type configuration: Configuration object
@param configuration: the configuration
@type entity: L{TNArchipelEntity}
@param entity: the entity that owns the plugin
@type entry_point_group: string
@param entry_point_group: the group name of plugin entry_point
"""
TNArchipelPlugin.__init__(self, configuration=configuration, entity=entity, entry_point_group=entry_point_group)
self.folder_nwfilters = self.configuration.get("NETWORKS", "libvirt_nw_filters_path")
if not os.path.exists(self.folder_nwfilters):
            self.entity.log.warning("NETWORK: unable to find network filter folder at path %s. Agent will not offer any network filter." % self.folder_nwfilters)
            self.folder_nwfilters = None
# permissions
if self.entity.__class__.__name__ == "TNArchipelVirtualMachine":
self.entity.permission_center.create_permission("network_getnames", "Authorizes user to get the existing network names", False)
self.entity.permission_center.create_permission("network_bridges", "Authorizes user to get existing bridges", False)
elif self.entity.__class__.__name__ == "TNArchipelHypervisor":
self.entity.permission_center.create_permission("network_define", "Authorizes user to define a network", False)
self.entity.permission_center.create_permission("network_undefine", "Authorizes user to undefine a network", False)
self.entity.permission_center.create_permission("network_create", "Authorizes user to create (start) a network", False)
self.entity.permission_center.create_permission("network_destroy", "Authorizes user to destroy (stop) a network", False)
self.entity.permission_center.create_permission("network_get", "Authorizes user to get all networks informations", False)
self.entity.permission_center.create_permission("network_getnames", "Authorizes user to get the existing network names", False)
self.entity.permission_center.create_permission("network_bridges", "Authorizes user to get existing bridges", False)
self.entity.permission_center.create_permission("network_getnics", "Authorizes user to get existing network interfaces", False)
self.entity.permission_center.create_permission("network_getnwfilters", "Authorizes user to get existing libvirt network filters", False)
registrar_items = [
{ "commands" : ["list networks"],
"parameters": [],
"method": self.message_get,
"permissions": ["network_get"],
"description": "List all networks" },
{ "commands" : ["create network", "start network"],
"parameters": [{"name": "identifier", "description": "The identifer of the network, UUID or name"}],
"method": self.message_create,
"permissions": ["network_create"],
"description": "Start the given network" },
{ "commands" : ["destroy network", "stop network"],
"parameters": [{"name": "identifier", "description": "The identifer of the network, UUID or name"}],
"method": self.message_destroy,
"permissions": ["network_destroy"],
"description": "Stop the given network" },
{ "commands" : ["nics", "network cards"],
"parameters": [],
"method": self.message_getnics,
"permissions": ["network_getnics"],
"description": "Get the list of all my network interfaces" }
]
self.entity.add_message_registrar_items(registrar_items)
### Plugin implementation
def register_handlers(self):
"""
This method will be called by the plugin user when it will be
necessary to register module for listening to stanza.
"""
if self.entity.__class__.__name__ == "TNArchipelVirtualMachine":
self.entity.xmppclient.RegisterHandler('iq', self.process_iq_for_virtualmachine, ns=ARCHIPEL_NS_HYPERVISOR_NETWORK)
elif self.entity.__class__.__name__ == "TNArchipelHypervisor":
self.entity.xmppclient.RegisterHandler('iq', self.process_iq_for_hypervisor, ns=ARCHIPEL_NS_HYPERVISOR_NETWORK)
def unregister_handlers(self):
"""
Unregister the handlers.
"""
if self.entity.__class__.__name__ == "TNArchipelVirtualMachine":
self.entity.xmppclient.UnregisterHandler('iq', self.process_iq_for_virtualmachine, ns=ARCHIPEL_NS_HYPERVISOR_NETWORK)
elif self.entity.__class__.__name__ == "TNArchipelHypervisor":
self.entity.xmppclient.UnregisterHandler('iq', self.process_iq_for_hypervisor, ns=ARCHIPEL_NS_HYPERVISOR_NETWORK)
@staticmethod
def plugin_info():
"""
        Return information about the plugin.
        @rtype: dict
        @return: dictionary containing plugin information
"""
plugin_friendly_name = "Hypervisor Networks"
plugin_identifier = "hypervisor_network"
plugin_configuration_section = "NETWORKS"
plugin_configuration_tokens = ["libvirt_nw_filters_path"]
return { "common-name" : plugin_friendly_name,
"identifier" : plugin_identifier,
"configuration-section" : plugin_configuration_section,
"configuration-tokens" : plugin_configuration_tokens }
### Utilities
def get_network_with_identifier(self, identifier):
"""
Return a libvirtNetwork according to the identifier, which can be the name or the UUID
@type identifier: String
@param identifier: the name or the UUID of the network
@rtype: libvirtNetwork
@return: the network object with given identifier
"""
try:
try:
libvirt_network = self.entity.libvirt_connection.networkLookupByName(identifier)
except:
libvirt_network = self.entity.libvirt_connection.networkLookupByUUIDString(identifier)
return libvirt_network
except Exception as ex:
self.entity.log.error("NETWORK: Unable to find a network with identifier %s: %s" % (identifier, str(ex)))
raise Exception("Unable to find a network with identifier %s" % identifier)
### libvirt controls
def get(self, active=True, inactive=True):
"""
Get the list of networks.
@type active: bool
        @param active: if True, will return active networks
        @type inactive: bool
        @param inactive: if True, will return inactive networks
        @rtype: dict
        @return: a dict containing the active and inactive network names
"""
ret = {}
if self.entity.__class__.__name__ == "TNArchipelVirtualMachine":
libvirt_connection = self.entity.hypervisor.libvirt_connection
else:
libvirt_connection = self.entity.libvirt_connection
if active:
ret["active"] = libvirt_connection.listNetworks()
if inactive:
ret["inactive"] = libvirt_connection.listDefinedNetworks()
return ret
def create(self, identifier):
"""
Create (start) the network with given identifier.
@type identifier: string
        @param identifier: the identifier of the network to create. It can be its name or UUID
"""
libvirt_network = self.get_network_with_identifier(identifier)
libvirt_network.create()
self.entity.log.info("NETWORK: Virtual network %s created." % identifier)
self.entity.push_change("network", "created")
def destroy(self, identifier):
"""
        Destroy (stop) the network with given identifier.
        @type identifier: string
        @param identifier: the identifier of the network to destroy. It can be its name or UUID
"""
libvirt_network = self.get_network_with_identifier(identifier)
libvirt_network.destroy()
self.entity.log.info("NETWORK: virtual network %s destroyed" % identifier)
self.entity.push_change("network", "destroyed")
def define(self, definition):
"""
        Define the network.
@type definition: string
@param definition: the XML definition to use
"""
self.entity.libvirt_connection.networkDefineXML(str(definition))
self.entity.log.info("NETWORK: Virtual network XML is defined.")
self.entity.push_change("network", "defined")
def undefine(self, identifier):
"""
Undefine the network with given identifier.
@type identifier: string
        @param identifier: the identifier of the network to destroy. It can be its name or UUID
"""
libvirt_network = self.get_network_with_identifier(identifier)
libvirt_network.undefine()
self.entity.log.info("NETWORK: Virtual network %s is undefined." % identifier)
self.entity.push_change("network", "undefined")
def getnics(self):
"""
Return the list of all network interfaces.
@rtype: list
        @return: list containing network card names
"""
f = open('/proc/net/dev', 'r')
content = f.read()
f.close()
splitted = content.split('\n')[2:-1]
nics = map(lambda x: x.split(":")[0].replace(" ", ""), splitted)
return sorted(nics)
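    # Editor's note (not part of the original plugin): getnics() skips the two
    # /proc/net/dev header lines and keeps everything left of each ':', so a
    # line such as "  eth0: 1234 ..." yields "eth0". A hypothetical call on a
    # Linux host might therefore return something like:
    #   >>> plugin.getnics()
    #   ['eth0', 'lo', 'virbr0']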
def getnwfilters(self):
"""
Return the list of all libvirt network filters.
@rtype: list
        @return: list containing network filter names
"""
ret = []
if not self.folder_nwfilters:
return ret
for nwfilter in os.listdir(self.folder_nwfilters):
ret.append(os.path.splitext(nwfilter)[0])
return ret
def setAutostart(self, identifier, shouldAutostart):
"""
Set the network to start with the host
        @type identifier: string
        @param identifier: the UUID or the name of the network
        @type shouldAutostart: boolean
        @param shouldAutostart: whether autostart should be enabled
@rtype: integer
@return: the result of the libvirt call
"""
libvirt_network = self.get_network_with_identifier(identifier)
return libvirt_network.setAutostart(shouldAutostart)
def getAutostart(self, identifier):
"""
        Tell whether the network is set to start with the host.
@type identifier: string
@param identifier: the UUID or the name of the network
@rtype: Boolean
        @return: True if the network is in autostart mode
"""
libvirt_network = self.get_network_with_identifier(identifier)
return libvirt_network.autostart()
### XMPP Processing
def process_iq_for_hypervisor(self, conn, iq):
"""
        This method is invoked when an ARCHIPEL_NS_HYPERVISOR_NETWORK IQ is received.
It understands IQ of type:
- define
- undefine
- create
- destroy
- get
- bridges
- getnames
- getnics
@type conn: xmpp.Dispatcher
        @param conn: the instance of the current connection that sent the stanza
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
"""
reply = None
action = self.entity.check_acp(conn, iq)
self.entity.check_perm(conn, iq, action, -1, prefix="network_")
if action == "define":
reply = self.iq_define(iq)
elif action == "undefine":
reply = self.iq_undefine(iq)
elif action == "create":
reply = self.iq_create(iq)
elif action == "destroy":
reply = self.iq_destroy(iq)
elif action == "get":
reply = self.iq_get(iq)
elif action == "bridges":
reply = self.iq_bridges(iq)
elif action == "getnames":
reply = self.iq_get_names(iq)
elif action == "getnics":
reply = self.iq_get_nics(iq)
if reply:
conn.send(reply)
raise xmpp.protocol.NodeProcessed
def process_iq_for_virtualmachine(self, conn, iq):
"""
        This method is invoked when an ARCHIPEL_NS_HYPERVISOR_NETWORK IQ is received.
It understands IQ of type:
- bridges
- getnames
- getnics
- getnwfilters
@type conn: xmpp.Dispatcher
        @param conn: the instance of the current connection that sent the stanza
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
"""
reply = None
action = self.entity.check_acp(conn, iq)
self.entity.check_perm(conn, iq, action, -1, prefix="network_")
if action == "getnames":
reply = self.iq_get_names(iq)
elif action == "bridges":
reply = self.iq_bridges(iq)
elif action == "getnics":
reply = self.iq_get_nics(iq)
elif action == "getnwfilters":
reply = self.iq_get_nwfilters(iq)
if reply:
conn.send(reply)
raise xmpp.protocol.NodeProcessed
def iq_define(self, iq):
"""
        Define a virtual network in libvirt according to the XML network
        definition passed in argument.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
network_node = iq.getTag("query").getTag("archipel").getTag("network")
self.define(network_node)
if iq.getTag("query").getTag("archipel").getAttr("autostart") == "1":
self.setAutostart(network_node.getTag("uuid").getData(), True)
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_DEFINE)
return reply
def iq_undefine(self, iq):
"""
        Undefine a virtual network in libvirt according to the name passed in argument.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
network_uuid = iq.getTag("query").getTag("archipel").getAttr("uuid")
self.undefine(network_uuid)
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_UNDEFINE)
return reply
def iq_create(self, iq):
"""
Create a network using libvirt connection.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
network_uuid = iq.getTag("query").getTag("archipel").getAttr("uuid")
self.create(network_uuid)
reply = iq.buildReply("result")
self.entity.shout("network", "Network %s has been started by %s." % (network_uuid, iq.getFrom()))
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_CREATE)
return reply
def message_create(self, msg):
"""
Handle the creation request message.
@type msg: xmpp.Protocol.Message
@param msg: the received message
@rtype: xmpp.Protocol.Message
@return: a ready to send Message containing the result of the action
"""
try:
tokens = msg.getBody().split()
if not len(tokens) == 3:
return "I'm sorry, you use a wrong format. You can type 'help' to get help."
identifier = tokens[-1:][0]
self.create(identifier)
return "Starting network %s" % identifier
except Exception as ex:
return build_error_message(self, ex, msg)
def iq_destroy(self, iq):
"""
Destroy a network using libvirt connection.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
network_uuid = iq.getTag("query").getTag("archipel").getAttr("uuid")
self.destroy(network_uuid)
reply = iq.buildReply("result")
self.entity.shout("network", "Network %s has been shutdowned by %s." % (network_uuid, iq.getFrom()))
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_DESTROY)
return reply
def message_destroy(self, msg):
"""
        Handle the destroy request message.
@type msg: xmpp.Protocol.Message
@param msg: the message containing the request
@rtype: string
@return: the answer
"""
try:
tokens = msg.getBody().split()
if not len(tokens) == 3:
return "I'm sorry, you use a wrong format. You can type 'help' to get help."
identifier = tokens[-1:][0]
self.destroy(identifier)
return "Destroying network %s" % identifier
except Exception as ex:
return build_error_message(self, ex, msg)
def iq_get(self, iq):
"""
List all virtual networks.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
active_networks_nodes = []
inactive_networks_nodes = []
networks = self.get()
for network_name in networks["active"]:
network = self.entity.libvirt_connection.networkLookupByName(network_name)
desc = network.XMLDesc(0)
n = xmpp.simplexml.NodeBuilder(data=desc).getDom()
n.setAttr("autostart", self.getAutostart(network_name))
active_networks_nodes.append(n)
for network_name in networks["inactive"]:
network = self.entity.libvirt_connection.networkLookupByName(network_name)
desc = network.XMLDesc(0)
n = xmpp.simplexml.NodeBuilder(data=desc).getDom()
n.setAttr("autostart", self.getAutostart(network_name))
inactive_networks_nodes.append(n)
active_networks_root_node = xmpp.Node(tag="activedNetworks", payload=active_networks_nodes)
inactive_networks_root_node = xmpp.Node(tag="unactivedNetworks", payload=inactive_networks_nodes)
reply.setQueryPayload([active_networks_root_node, inactive_networks_root_node])
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_GET)
return reply
def message_get(self, msg):
"""
        Create the message response to list networks.
@type msg: xmpp.Protocol.Message
@param msg: the message containing the request
@rtype: string
@return: the answer
"""
try:
networks = self.get()
ret = "Sure. Here are my current active networks:\n"
for net in networks["active"]:
ret += " - %s\n" % net
ret += "\n and my unactive network:\n"
for net in networks["inactive"]:
ret += " - %s\n" % net
return ret
except Exception as ex:
return build_error_message(self, ex, msg)
def iq_get_names(self, iq):
"""
List all virtual network names.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
active_networks_nodes = []
actives_networks_names = self.get(inactive=False)["active"]
for network_name in actives_networks_names:
network = xmpp.Node(tag="network", attrs={"name": network_name})
active_networks_nodes.append(network)
reply.setQueryPayload(active_networks_nodes)
except libvirt.libvirtError as ex:
reply = build_error_iq(self, ex, iq, ex.get_error_code(), ns=ARCHIPEL_NS_LIBVIRT_GENERIC_ERROR)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_GETNAMES)
return reply
def iq_bridges(self, iq):
"""
List all bridge names.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
bridges = []
for std_bridge in commands.getoutput("brctl show 2>/dev/null | grep -v -E '^[[:space:]]|bridge name' | cut -f 1").split("\n"):
if std_bridge:
bridges.append({"name": std_bridge, "type": "linux-bridge"})
for ovs_bridge in commands.getoutput("ovs-vsctl list-br 2>/dev/null").split("\n"):
if ovs_bridge:
bridges.append({"name": ovs_bridge, "type": "openvswitch"})
bridges = sorted(bridges, key=lambda k: k["name"])
bridges_list = []
for bridge in bridges:
if bridge.get("name", None) and bridge.get("type", None):
bridge_node = xmpp.Node(tag="bridge", attrs={"name": bridge["name"], "type": bridge["type"]})
bridges_list.append(bridge_node)
reply.setQueryPayload(bridges_list)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_BRIDGES)
return reply
def iq_get_nics(self, iq):
"""
List all existing networks cards on the hypervisor.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
nodes = []
for n in self.getnics():
nodes.append(xmpp.Node("nic", attrs={"name": n}))
reply.setQueryPayload(nodes)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_GETNICS)
return reply
def message_getnics(self, msg):
"""
Get all the nics of the hypervisor.
@type msg: xmpp.Protocol.Message
@param msg: the message containing the request
@rtype: string
@return: the answer
"""
try:
nics = self.getnics()
ret = "Sure. Here are my current available network interfaces:\n"
for n in nics:
ret += " - %s\n" % n
return ret
except Exception as ex:
return build_error_message(self, ex, msg)
def iq_get_nwfilters(self, iq):
"""
List all existing libvirt network filters.
@type iq: xmpp.Protocol.Iq
@param iq: the received IQ
@rtype: xmpp.Protocol.Iq
@return: a ready to send IQ containing the result of the action
"""
try:
reply = iq.buildReply("result")
nodes = []
for nwfilter in self.getnwfilters():
nodes.append(xmpp.Node("filter", attrs={"name": nwfilter}))
reply.setQueryPayload(nodes)
except Exception as ex:
reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_NETWORKS_GETNWFILTERS)
return reply
|
mkoistinen/django-filer | refs/heads/develop | docs/conf.py | 27 | # -*- coding: utf-8 -*-
#
# django-filer documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 16 22:05:55 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('../'))
import datetime
import filer
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'django-filer'
copyright = '%s, Stefan Foulis' % (datetime.date.today().year,)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(filer.__version__.split('.')[0:2])
# The full version, including alpha/beta/rc tags.
release = filer.__version__
for c in ('a', 'b', 'dev', 'r'):
if c in release:
tags.add('develop')
break
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-filerdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-filer.tex', 'django-filer Documentation',
'Stefan Foulis', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-filer', u'django-filer Documentation',
[u'Stefan Foulis'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
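# A typical local build of these docs uses the standard Sphinx invocation
# (an illustrative example, not a project-mandated command), run from the
# docs/ directory:
#
#   pip install sphinx
#   sphinx-build -b html . _build/html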
|
gdooper/scipy | refs/heads/master | tools/cythonize.py | 67 | #!/usr/bin/env python
""" cythonize
Cythonize pyx files into C files as needed.
Usage: cythonize [root_dir]
Default [root_dir] is 'scipy'.
Checks pyx files to see if they have been changed relative to their
corresponding C files. If they have, then runs cython on these files to
recreate the C files.
The script decides whether a pyx file has changed relative to its C file
by comparing hashes stored in a database file.
This is a simple script to invoke Cython (and Tempita) on all .pyx
(.pyx.in) files while waiting for a proper build system; it uses file
hashes to figure out whether a rebuild is needed.
For now, this script should be run by developers when changing Cython files
only, and the resulting C files checked in, so that end-users (and Python-only
developers) do not get the Cython/Tempita dependencies.
Originally written by Dag Sverre Seljebotn, and copied here from:
https://raw.github.com/dagss/private-scipy-refactor/cythonize/cythonize.py
Note: this script does not check any of the dependent C libraries; it only
operates on the Cython .pyx files.
"""
from __future__ import division, print_function, absolute_import
import os
import re
import sys
import hashlib
import subprocess
HASH_FILE = 'cythonize.dat'
DEFAULT_ROOT = 'scipy'
# WindowsError is not defined on unix systems
try:
WindowsError
except NameError:
WindowsError = None
#
# Rules
#
def process_pyx(fromfile, tofile):
try:
from Cython.Compiler.Version import version as cython_version
from distutils.version import LooseVersion
if LooseVersion(cython_version) < LooseVersion('0.22'):
raise Exception('Building SciPy requires Cython >= 0.22')
except ImportError:
pass
flags = ['--fast-fail']
if tofile.endswith('.cxx'):
flags += ['--cplus']
try:
try:
r = subprocess.call(['cython'] + flags + ["-o", tofile, fromfile])
if r != 0:
raise Exception('Cython failed')
except OSError:
# There are ways of installing Cython that don't result in a cython
# executable on the path, see gh-2397.
r = subprocess.call([sys.executable, '-c',
'import sys; from Cython.Compiler.Main import '
'setuptools_main as main; sys.exit(main())'] + flags +
["-o", tofile, fromfile])
if r != 0:
raise Exception("Cython either isn't installed or it failed.")
except OSError:
raise OSError('Cython needs to be installed')
def process_tempita_pyx(fromfile, tofile):
try:
try:
from Cython import Tempita as tempita
except ImportError:
import tempita
except ImportError:
raise Exception('Building SciPy requires Tempita: '
'pip install --user Tempita')
from_filename = tempita.Template.from_filename
template = from_filename(fromfile, encoding=sys.getdefaultencoding())
pyxcontent = template.substitute()
assert fromfile.endswith('.pyx.in')
pyxfile = fromfile[:-len('.pyx.in')] + '.pyx'
with open(pyxfile, "w") as f:
f.write(pyxcontent)
process_pyx(pyxfile, tofile)
rules = {
# fromext : function
'.pyx': process_pyx,
'.pyx.in': process_tempita_pyx
}
#
# Hash db
#
def load_hashes(filename):
# Return { filename : (sha1 of input, sha1 of output) }
if os.path.isfile(filename):
hashes = {}
with open(filename, 'r') as f:
for line in f:
filename, inhash, outhash = line.split()
hashes[filename] = (inhash, outhash)
else:
hashes = {}
return hashes
def save_hashes(hash_db, filename):
with open(filename, 'w') as f:
for key, value in sorted(hash_db.items()):
f.write("%s %s %s\n" % (key, value[0], value[1]))
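# Each record in the hash database is therefore a single line of the form
# "<path> <sha1 of input> <sha1 of output>"; an illustrative entry:
#
#   scipy/spatial/ckdtree.pyx 1f2e3d... 4c5b6a...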
def sha1_of_file(filename):
h = hashlib.sha1()
with open(filename, "rb") as f:
h.update(f.read())
return h.hexdigest()
#
# Main program
#
def normpath(path):
path = path.replace(os.sep, '/')
if path.startswith('./'):
path = path[2:]
return path
def get_hash(frompath, topath):
from_hash = sha1_of_file(frompath)
to_hash = sha1_of_file(topath) if os.path.exists(topath) else None
return (from_hash, to_hash)
def process(path, fromfile, tofile, processor_function, hash_db):
fullfrompath = os.path.join(path, fromfile)
fulltopath = os.path.join(path, tofile)
current_hash = get_hash(fullfrompath, fulltopath)
if current_hash == hash_db.get(normpath(fullfrompath), None):
print('%s has not changed' % fullfrompath)
return
orig_cwd = os.getcwd()
try:
os.chdir(path)
print('Processing %s' % fullfrompath)
processor_function(fromfile, tofile)
finally:
os.chdir(orig_cwd)
# changed target file, recompute hash
current_hash = get_hash(fullfrompath, fulltopath)
# store hash in db
hash_db[normpath(fullfrompath)] = current_hash
def find_process_files(root_dir):
hash_db = load_hashes(HASH_FILE)
for cur_dir, dirs, files in os.walk(root_dir):
for filename in files:
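            # Skip a generated foo.pyx when a foo.pyx.in template exists;
            # the '.pyx.in' rule regenerates and processes it instead.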
in_file = os.path.join(cur_dir, filename + ".in")
if filename.endswith('.pyx') and os.path.isfile(in_file):
continue
for fromext, function in rules.items():
if filename.endswith(fromext):
toext = ".c"
with open(os.path.join(cur_dir, filename), 'rb') as f:
data = f.read()
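                    # A Cython source opts into C++ with a directive line such
                    # as "# distutils: language = c++"; the search below
                    # detects it (case-insensitive) and switches the target
                    # extension to .cxx.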
m = re.search(br"^\s*#\s*distutils:\s*language\s*=\s*c\+\+\s*$", data, re.I|re.M)
if m:
toext = ".cxx"
fromfile = filename
tofile = filename[:-len(fromext)] + toext
process(cur_dir, fromfile, tofile, function, hash_db)
save_hashes(hash_db, HASH_FILE)
def main():
try:
root_dir = sys.argv[1]
except IndexError:
root_dir = DEFAULT_ROOT
find_process_files(root_dir)
if __name__ == '__main__':
main()
|