idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
58,800
def run(self):
    """Delete build artifacts: compiled files (*.pyc/*.pyd/*.pyo/*.so),
    __pycache__ and docs/_build here, plus build products under the
    SOURCE, test and examples trees.
    """
    import fnmatch
    import shutil
    import glob
    matches = []
    dirs = []
    # top-level compiled artifacts
    for pattern in ('./*.pyc', './*.pyd', './*.pyo', './*.so'):
        matches.extend(glob.glob(pattern))
    dirs.extend(glob.glob('./__pycache__'))
    dirs.extend(glob.glob('docs/_build'))
    # recurse into the source trees; *.dll and *.c are also build products here
    file_patterns = ('*.pyc', '*.pyd', '*.pyo', '*.so', '*.dll', '*.c')
    for cleandir in [SOURCE, 'test', 'examples']:
        for root, dirnames, filenames in os.walk(cleandir):
            for pattern in file_patterns:
                for filename in fnmatch.filter(filenames, pattern):
                    matches.append(os.path.join(root, filename))
            for dirname in fnmatch.filter(dirnames, '__pycache__'):
                dirs.append(os.path.join(root, dirname))
    for match in matches:
        os.remove(match)
    for dirpath in dirs:
        shutil.rmtree(dirpath)
Run CleanUp .
58,801
def run(self):
    """Stage all changes and commit, using self.message when provided.

    Exits the process with status 1 if ``git add`` fails.
    """
    if os.system('git add .'):
        sys.exit(1)
    if self.message is not None:
        # quote the message so shell metacharacters in it cannot be
        # executed (the original interpolated it inside double quotes)
        try:
            from shlex import quote
        except ImportError:  # Python 2 fallback
            from pipes import quote
        os.system('git commit -a -m ' + quote(self.message))
    else:
        os.system('git commit -a')
Run git add and commit with message if provided .
58,802
def uri(self, value):
    """Store *value* as the new uri in the record at this instance's pointer."""
    jsonpointer.set_pointer(self.record, self.pointer, value)
Set new uri value in record .
58,803
def open(self, mode='r', **kwargs):
    """Open the file referenced by ``self.uri`` and return the file object."""
    filesystem, path = opener.parse(self.uri)
    return filesystem.open(path, mode=mode, **kwargs)
Open file uri under the pointer .
58,804
def move(self, dst, **kwargs):
    """Move the underlying file to *dst* and point ``self.uri`` at it."""
    src_fs, src_path = opener.parse(self.uri)
    dst_fs, dst_path = opener.parse(dst)
    movefile(src_fs, src_path, dst_fs, dst_path, **kwargs)
    self.uri = dst
Move file to a new destination and update uri .
58,805
def setcontents(self, source, **kwargs):
    """Create a new file from a string uri or a file-like object.

    When *source* is a string it is treated as a uri and opened here;
    otherwise it must be a readable file-like object.  The data is
    written to the location referenced by ``self.uri``.
    """
    opened_here = isinstance(source, six.string_types)
    _file = opener.open(source, 'rb') if opened_here else source
    try:
        data = _file.read()
    finally:
        # close the file we opened even if read() fails
        if opened_here and hasattr(_file, 'close'):
            _file.close()
    _fs, filename = opener.parse(self.uri)
    _fs.setcontents(filename, data, **kwargs)
    _fs.close()
Create a new file from a string or file - like object .
58,806
def remove(self, force=False):
    """Drop the file reference; with force=True also delete the file itself."""
    if force:
        _fs, filename = opener.parse(self.uri)
        _fs.remove(filename)
    self.uri = None
Remove file reference from record .
58,807
def tobytes(self, root=None, encoding='UTF-8', doctype=None, canonicalized=True,
            xml_declaration=True, pretty_print=True, with_comments=True,):
    """Serialize the XML document (or *root*) to a byte string suitable
    for writing.

    With canonicalized=True (the default) the C14N form is returned;
    otherwise etree.tostring() output using the given serialization
    options, falling back to the parsed document's encoding/doctype.
    """
    target = self.root if root is None else root
    if canonicalized == True:
        return self.canonicalized_bytes(target)
    return etree.tostring(
        target,
        encoding=encoding or self.info.encoding,
        doctype=doctype or self.info.doctype,
        xml_declaration=xml_declaration,
        pretty_print=pretty_print,
        with_comments=with_comments,
    )
return the content of the XML document as a byte string suitable for writing
58,808
def tostring(self, root=None, doctype=None, pretty_print=True):
    """Serialize the XML document (or *root*) to a unicode string."""
    target = self.root if root is None else root
    return etree.tounicode(target, doctype=doctype or self.info.doctype,
                           pretty_print=pretty_print)
return the content of the XML document as a unicode string
58,809
def digest(self, **args):
    """Digest of the canonicalized XML content; **args pass through to
    String.digest()."""
    canonical = XML.canonicalized_string(self.root)
    return String(canonical).digest(**args)
calculate a digest based on the hash of the XML content
58,810
def element(self, tag_path, test=None, **attributes):
    """Find the element matching *tag_path* (xpath form) plus an optional
    predicate *test* and attribute equality tests; when absent, build a
    new detached element with those attributes, resolving a namespace
    prefix through self.NS.
    """
    xpath = tag_path
    tests = ["@%s='%s'" % (k, attributes[k]) for k in attributes]
    if test is not None:
        tests.insert(0, test)
    if len(tests) > 0:
        xpath += "[%s]" % ' and '.join(tests)
    e = self.find(self.root, xpath)
    if e is None:
        # not found: create an element named after the last path step,
        # stripping any predicate and expanding a 'prefix:' to Clark notation
        tag = tag_path.split('/')[-1].split('[')[0]
        tagname = tag.split(':')[-1]
        if ':' in tag:
            nstag = tag.split(':')[0]
            tag = "{%s}%s" % (self.NS[nstag], tagname)
        e = etree.Element(tag, **attributes)
    return e
given a tag in xpath form and optional attributes find the element in self . root or return a new one .
58,811
def namespace(self, elem=None):
    """Namespace URL of *elem* (default: the document root), or None."""
    target = self.root if elem is None else elem
    return XML.tag_namespace(target.tag)
return the URL if any for the doc root or elem if given .
58,812
def tag_namespace(cls, tag):
    """Return the namespace URI embedded in a Clark-notation tag
    ('{uri}name'), or None when the tag carries no namespace."""
    found = re.match("^(?:\{([^\}]*)\})", tag)
    if found is not None:
        return found.group(1)
return the namespace for a given tag , or None if the tag has no namespace
58,813
def tag_name(cls, tag):
    """Name of *tag* with any '{namespace}' prefix stripped; accepts an
    element, in which case its .tag is unwrapped first."""
    while isinstance(tag, etree._Element):
        tag = tag.tag
    return tag.rpartition('}')[2]
return the name of the tag with the namespace removed
58,814
def element_map(self, tags=None, xpath="//*", exclude_attribs=[], include_attribs=[],
                attrib_vals=False, hierarchy=False, minimize=False,):
    """Map each element tag matched by *xpath* to its attribute names and,
    optionally, attribute values (attrib_vals=True) and parent/child tags
    (hierarchy=True); minimize=True prunes empty entries.

    NOTE(review): the mutable list defaults are shared across calls —
    harmless here only because they are never mutated.  The minimize loop
    pops from ``tags`` while iterating ``tags.keys()``, which raises
    RuntimeError on Python 3 when an entry is actually removed — confirm
    intended Python version.
    """
    if tags is None:
        tags = Dict()
    for elem in self.root.xpath(xpath):
        if elem.tag not in tags.keys():
            tags[elem.tag] = Dict(**{'parents': [], 'children': [], 'attributes': Dict()})
        # keep an attribute when no include list is given and it is not
        # excluded, or when it is explicitly included
        for a in [a for a in elem.attrib.keys()
                  if (include_attribs == [] and a not in exclude_attribs)
                  or (a in include_attribs)]:
            if a not in tags[elem.tag].attributes.keys():
                tags[elem.tag].attributes[a] = []
            if attrib_vals == True and elem.get(a) not in tags[elem.tag].attributes[a]:
                tags[elem.tag].attributes[a].append(elem.get(a))
        if hierarchy == True:
            parent = elem.getparent()
            if parent is not None and parent.tag not in tags[elem.tag].parents:
                tags[elem.tag].parents.append(parent.tag)
            for child in elem.xpath("*"):
                if child.tag not in tags[elem.tag].children:
                    tags[elem.tag].children.append(child.tag)
    if minimize == True:
        for tag in tags.keys():
            if tags[tag].get('parents') == []:
                tags[tag].pop('parents')
            if tags[tag].get('children') == []:
                tags[tag].pop('children')
            if tags[tag].get('attributes') == {}:
                tags[tag].pop('attributes')
            if tags[tag] == {}:
                tags.pop(tag)
    return tags
return a dict of element tags their attribute names and optionally attribute values in the XML document
58,815
def dict_key_tag(Class, key, namespaces=None):
    """Translate dict key *key* into an element/attribute tag, resolving a
    'prefix:name' key through *namespaces* (default Class.NS) into
    Clark notation '{uri}name'."""
    ns_map = namespaces or Class.NS
    ns = Class.tag_namespace(key)
    tag = Class.tag_name(key)
    if ns is None and ':' in key:
        prefix, tag = key.split(':')
        if prefix in ns_map.keys():
            ns = ns_map[prefix]
    if ns is None:
        return tag
    return "{%s}%s" % (ns, tag)
convert a dict key into an element or attribute name
58,816
def replace_with_contents(c, elem):
    "removes an element and leaves its contents in its place. Namespaces supported."
    parent = elem.getparent()
    index = parent.index(elem)
    children = elem.getchildren()
    previous = elem.getprevious()
    # splice elem.text into the preceding text node (parent.text when elem
    # is the first child, otherwise the previous sibling's tail)
    if index == 0:
        parent.text = (parent.text or '') + (elem.text or '')
    else:
        previous.tail = (previous.tail or '') + (elem.text or '')
    # move the children up into the parent at elem's position
    for child in children:
        parent.insert(index + children.index(child), child)
    # elem.tail attaches after the last moved child, or to the preceding
    # text node when there were no children
    if len(children) > 0:
        last_child = children[-1]
        last_child.tail = (last_child.tail or '') + (elem.tail or '')
    else:
        if index == 0:
            parent.text = (parent.text or '') + (elem.tail or '')
        else:
            previous.tail = (previous.tail or '') + (elem.tail or '')
    parent.remove(elem)
removes an element and leaves its contents in its place . Namespaces supported .
58,817
def remove_range(cls, elem, end_elem, delete_end=True):
    """Delete everything from *elem* up to *end_elem*, including *elem*;
    with delete_end=True also remove *end_elem* itself.  Recurses up a
    level when the siblings run out and down into a subtree that
    contains *end_elem*."""
    # consume siblings until we hit end_elem or an ancestor of it
    while (elem is not None and elem != end_elem
           and end_elem not in elem.xpath("descendant::*")):
        parent = elem.getparent()
        nxt = elem.getnext()
        parent.remove(elem)
        if DEBUG == True:
            print(etree.tounicode(elem))
        elem = nxt
    if elem == end_elem:
        if delete_end == True:
            cls.remove(end_elem, leave_tail=True)
    elif elem is None:
        # ran off the end of this sibling level: climb to the parent
        # and continue from its next sibling
        if parent.tail not in [None, '']:
            parent.tail = ''
        cls.remove_range(parent.getnext(), end_elem)
        XML.remove_if_empty(parent)
    elif end_elem in elem.xpath("descendant::*"):
        # end_elem is inside elem: clear leading text and descend
        if DEBUG == True:
            print(elem.text)
        elem.text = ''
        cls.remove_range(elem.getchildren()[0], end_elem)
        XML.remove_if_empty(elem)
    else:
        print("LOGIC ERROR", file=sys.stderr)
delete everything from elem to end_elem including elem . if delete_end == True also including end_elem ; otherwise leave it .
58,818
def wrap_content(cls, container, wrapper):
    """wrap the content of container element with wrapper element"""
    wrapper.text = (container.text or '') + (wrapper.text or '')
    container.text = ''
    # iterate over a snapshot: under lxml, appending a child to wrapper
    # reparents it out of container, so iterating container directly
    # would skip every other element
    for ch in list(container):
        wrapper.append(ch)
    container.insert(0, wrapper)
    return container
wrap the content of container element with wrapper element
58,819
def merge_contiguous(C, node, xpath, namespaces=None):
    """Return a deep copy of *node* in which adjacent sibling elements
    matched by *xpath* that share tag and attributes are merged into one;
    attribute-less matches are dissolved into their parent."""
    new_node = deepcopy(node)
    elems = XML.xpath(new_node, xpath, namespaces=namespaces)
    # process right-to-left so merging never invalidates pending elements
    elems.reverse()
    for elem in elems:
        nxt = elem.getnext()
        if elem.attrib == {}:
            # no attributes: the wrapper adds nothing, unwrap it
            XML.replace_with_contents(elem)
        elif (elem.tail in [None, ''] and nxt is not None
              and elem.tag == nxt.tag and elem.attrib == nxt.attrib):
            # same tag + attributes with nothing between them: absorb nxt
            if len(elem.getchildren()) > 0:
                lastch = elem.getchildren()[-1]
                lastch.tail = (lastch.tail or '') + (nxt.text or '')
            else:
                elem.text = (elem.text or '') + (nxt.text or '')
            for ch in nxt.getchildren():
                elem.append(ch)
            XML.remove(nxt, leave_tail=True)
    return new_node
Within a given node merge elements that are next to each other if they have the same tag and attributes .
58,820
def unnest(c, elem, ignore_whitespace=False):
    """Lift *elem* out of its parent: the parent is split around elem so
    that elem becomes a sibling of the parent.  MUTABLE CHANGES."""
    parent = elem.getparent()
    gparent = parent.getparent()
    index = parent.index(elem)
    # build a clone of parent to hold the content that precedes elem
    preparent = etree.Element(parent.tag)
    preparent.text, parent.text = (parent.text or ''), ''
    for k in parent.attrib.keys():
        preparent.set(k, parent.get(k))
    if index > 0:
        for ch in parent.getchildren()[:index]:
            preparent.append(ch)
    gparent.insert(gparent.index(parent), preparent)
    XML.remove_if_empty(preparent, leave_tail=True, ignore_whitespace=ignore_whitespace)
    # move elem itself up between preparent and the (now trailing) parent
    XML.remove(elem, leave_tail=True)
    gparent.insert(gparent.index(parent), elem)
    elem.tail = ''
    XML.remove_if_empty(parent, leave_tail=True, ignore_whitespace=ignore_whitespace)
unnest the element from its parent within doc . MUTABLE CHANGES
58,821
def interior_nesting(cls, elem1, xpath, namespaces=None):
    """For *elem1* containing elements at *xpath*: push a copy of elem1
    inside each matched element, then dissolve the original elem1 in
    place (inverting the nesting order)."""
    for elem2 in elem1.xpath(xpath, namespaces=namespaces):
        # clone elem1 (tag + attributes) to wrap elem2's own content
        child_elem1 = etree.Element(elem1.tag)
        for k in elem1.attrib:
            child_elem1.set(k, elem1.get(k))
        child_elem1.text, elem2.text = elem2.text, ''
        for ch in elem2.getchildren():
            child_elem1.append(ch)
        elem2.insert(0, child_elem1)
    XML.replace_with_contents(elem1)
for elem1 containing elements at xpath embed elem1 inside each of those elements and then remove the original elem1
58,822
def fragment_nesting(cls, elem1, tag2, namespaces=None):
    """For *elem1* containing child elements with *tag2*: split elem1 into
    fragments adjacent to each tag2 element, nesting an elem1 copy inside
    each tag2 (so tag2 ends up outside, elem1 inside)."""
    elems2 = elem1.xpath("child::%s" % tag2, namespaces=namespaces)
    while len(elems2) > 0:
        elem2 = elems2[0]
        parent2 = elem2.getparent()
        index2 = parent2.index(elem2)
        # nested elem1 clone that wraps elem2's own content
        child_elem1 = etree.Element(elem1.tag)
        for k in elem1.attrib:
            child_elem1.set(k, elem1.get(k))
        elem2.text, child_elem1.text = '', elem2.text
        for ch in elem2.getchildren():
            child_elem1.append(ch)
        elem2.insert(0, child_elem1)
        # new elem1 fragment for the content that followed elem2
        new_elem1 = etree.Element(elem1.tag)
        for k in elem1.attrib:
            new_elem1.set(k, elem1.get(k))
        new_elem1.text, elem2.tail = elem2.tail, ''
        for ch in parent2.getchildren()[index2 + 1:]:
            new_elem1.append(ch)
        # hoist elem2 and the trailing fragment up beside the original elem1
        parent = parent2.getparent()
        parent.insert(parent.index(parent2) + 1, elem2)
        last_child = elem2
        parent.insert(parent.index(elem2) + 1, new_elem1)
        new_elem1.tail, elem1.tail = elem1.tail, ''
        XML.remove_if_empty(elem1)
        XML.remove_if_empty(new_elem1)
        # continue fragmenting the trailing piece
        elem1 = new_elem1
        elems2 = elem1.xpath("child::%s" % tag2, namespaces=namespaces)
for elem1 containing elements with tag2 fragment elem1 into elems that are adjacent to and nested within tag2
58,823
def communityvisibilitystate(self):
    """Human-readable Visibility State of the user's profile, looked up in
    self.VisibilityState; None when unset or unknown."""
    # 'is None' instead of '== None' (identity test for the singleton)
    if self._communityvisibilitystate is None:
        return None
    elif self._communityvisibilitystate in self.VisibilityState:
        return self.VisibilityState[self._communityvisibilitystate]
    else:
        return None
Return the Visibility State of the Users Profile
58,824
def personastate(self):
    """Human-readable Persona State of the user's profile, looked up in
    self.PersonaState; None when unset or unknown."""
    # 'is None' instead of '== None' (identity test for the singleton)
    if self._personastate is None:
        return None
    elif self._personastate in self.PersonaState:
        return self.PersonaState[self._personastate]
    else:
        return None
Return the Persona State of the Users Profile
58,825
def mcus():
    """Sorted list of distinct MCUs across all hardware packs and boards."""
    found = set()
    for pack in hwpack_names():
        for board in board_names(pack):
            found.add(mcu(board, pack))
    return sorted(found)
MCU list .
58,826
def logpath2dt(filepath):
    """Parse the download timestamp out of a Mission Planner dataflash-log
    path, whose basename starts with 'YYYY-MM-DD HH-MM'."""
    stamp = re.match(r'.*/(.*) .*$', filepath).groups()[0]
    return datetime.datetime.strptime(stamp, '%Y-%m-%d %H-%M')
given a dataflashlog in the format produced by Mission Planner return a datetime which says when the file was downloaded from the APM
58,827
def url(self):
    """Item-details page url for this item (built without an api key)."""
    return self.connector.get_url(
        '/web/itemdetails.html?id={}'.format(self.id),
        attach_api_key=False,
    )
url of the item
58,828
async def update(self, fields=''):
    """Reload this object's info from emby, clear cached extras, and
    return self."""
    path = 'Users/{{UserId}}/Items/{}'.format(self.id)
    fresh = await self.connector.getJson(path, remote=False,
                                         Fields='Path,Overview,' + fields)
    self.object_dict.update(fresh)
    self.extras = {}
    return self
reload object info from emby
58,829
async def send(self):
    """Push locally-changed data to emby; on a 400, resync this object
    from the server and retry the post once."""
    path = 'Items/{}'.format(self.id)
    resp = await self.connector.post(path, data=self.object_dict, remote=False)
    if resp.status != 400:
        return resp
    await EmbyObject(self.object_dict, self.connector).update()
    return await self.connector.post(path, data=self.object_dict, remote=False)
send data that was changed to emby
58,830
def remove_lib(lib_name):
    """Delete the installed library directory named *lib_name*."""
    target = libraries_dir() / lib_name
    log.debug('remove %s', target)
    target.rmtree()
remove library .
58,831
def _read_holidays(self, filename):
    """Yield (start, end) timezone-aware datetime pairs for each VEVENT in
    an iCalendar file.  Date-only events become midnight-to-midnight
    ranges; naive datetimes are localized to self.tz.
    (Python 2 code: note the old-style raise below.)
    """
    cal = Calendar.from_ical(open(filename, 'rb').read())
    holidays = []  # NOTE(review): unused — the function yields instead
    for component in cal.walk('VEVENT'):
        start = component.decoded('DTSTART')
        try:
            end = component.decoded('DTEND')
        except KeyError:
            # DTEND is optional: a DATETIME event defaults to zero length,
            # a DATE event to a single day
            if isinstance(start, datetime):
                end = start
            elif isinstance(start, date):
                end = start + timedelta(days=1)
            else:
                raise KeyError, 'DTEND is missing and DTSTART is not of DATE or DATETIME type'
        if isinstance(start, date) and not isinstance(start, datetime):
            # all-day event: promote both bounds to midnight datetimes
            assert (isinstance(end, date) and not isinstance(end, datetime)), 'DTSTART is of DATE type but DTEND is not of DATE type (got %r instead)' % type(end)
            start = datetime.combine(start, time.min)
            end = datetime.combine(end, time.min)
        if start.tzinfo is None or end.tzinfo is None:
            start = self.tz.localize(start, is_dst=False)
            end = self.tz.localize(end, is_dst=False)
        yield (start, end)
Read holidays from an iCalendar - format file .
58,832
def in_hours(self, office=None, when=None):
    """True when *when* (default: now, UTC) falls within business hours of
    *office*; with no office given, within the hours of any office."""
    if when is None:
        when = datetime.now(tz=utc)
    if office is None:
        # .values() instead of the Python-2-only .itervalues()
        return any(o.in_hours(when) for o in self.offices.values())
    return self.offices[office].in_hours(when)
Finds if it is business hours in the given office .
58,833
def setup_logging(namespace):
    """Configure global logging from namespace.verbosity (0=ERROR up to
    3+=DEBUG) and namespace.log (output stream)."""
    levels = {0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO, 3: logging.DEBUG}
    loglevel = levels.get(namespace.verbosity, logging.DEBUG)
    if namespace.verbosity > 1:
        logformat = '%(levelname)s csvpandas %(lineno)s %(message)s'
    else:
        logformat = 'csvpandas %(message)s'
    logging.basicConfig(stream=namespace.log, format=logformat, level=loglevel)
setup global logging
58,834
def parse_subcommands(parser, subcommands, argv):
    """Register one argparse sub-parser per module in the *subcommands*
    package (or only the ones actually named in *argv*), plus a 'help'
    action; return (parser, {name: action_callable})."""
    subparsers = parser.add_subparsers(dest='subparser_name')
    parser_help = subparsers.add_parser(
        'help', help='Detailed help for actions using `help <action>`')
    parser_help.add_argument('action', nargs=1)
    modules = [name for _, name, _ in pkgutil.iter_modules(subcommands.__path__)]
    # importing every module is wasteful; restrict to those requested
    # on the command line when any are present
    commands = [m for m in modules if m in argv]
    actions = {}
    for name in commands or modules:
        try:
            imp = '{}.{}'.format(subcommands.__name__, name)
            mod = importlib.import_module(imp)
        except Exception as e:
            # a broken subcommand module should not kill the whole CLI
            log.error(e)
            continue
        subparser = subparsers.add_parser(
            name,
            # first line of the module docstring doubles as the short help
            help=mod.__doc__.lstrip().split('\n', 1)[0],
            description=mod.__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)
        mod.build_parser(subparser)
        subcommands.build_parser(subparser)
        actions[name] = mod.action
    return parser, actions
Setup all sub - commands
58,835
def opener(mode='r'):
    """Return a function mapping a filename to an open file object.

    '-' maps to stdin/stdout depending on *mode*; .bz2 and .gz names are
    opened through their compression modules; stdin/stdout objects pass
    through unchanged.
    """
    def open_file(f):
        if f is sys.stdout or f is sys.stdin:
            return f
        if f == '-':
            return sys.stdin if 'r' in mode else sys.stdout
        if f.endswith('.bz2'):
            return bz2.BZ2File(f, mode)
        if f.endswith('.gz'):
            return gzip.open(f, mode)
        return open(f, mode)
    return open_file
Factory for creating file objects
58,836
def get(self, id, no_summary=False):
    """Show details for tenant *id*; by default also summarize its
    non-deleted volumes, fetched through an admin LunrClient."""
    resp = self.client.accounts.get(id)
    if no_summary:
        return self.display(resp)
    results = []
    client = LunrClient(self.get_admin(), debug=self.debug)
    volumes = client.volumes.list(account_id=resp['id'])
    for volume in volumes:
        if volume['status'] == 'DELETED':
            continue
        results.append(volume)
    self.display(resp, ['name', 'status', 'last_modified', 'created_at'])
    if results:
        return self.display(response(results, 200), ['id', 'status', 'size'])
    else:
        print("-- This account has no active volumes --")
    print("\nThis is a summary, use --no-summary "
          "to see the entire response")
List details for a specific tenant id
58,837
def create(self, id):
    """Create a new tenant with the given id and display the response."""
    result = self.client.accounts.create(id=id)
    self.display(result)
Create a new tenant id
58,838
def delete(self, id):
    """Delete the tenant with the given id and display the response."""
    result = self.client.accounts.delete(id)
    self.display(result)
Delete a tenant id
58,839
def main():
    """Debug entry point: parse the connection mode (tcp/serial/file) from
    the command line, run the KWBEasyfire reader for 5 seconds, and dump
    the resulting sensor state."""
    parser = argparse.ArgumentParser()
    group_tcp = parser.add_argument_group('TCP')
    group_tcp.add_argument('--tcp', dest='mode', action='store_const',
                           const=PROP_MODE_TCP, help="Set tcp mode")
    group_tcp.add_argument('--host', dest='hostname', help="Specify hostname", default='')
    group_tcp.add_argument('--port', dest='port', help="Specify port", default=23, type=int)
    group_serial = parser.add_argument_group('Serial')
    group_serial.add_argument('--serial', dest='mode', action='store_const',
                              const=PROP_MODE_SERIAL, help="Set serial mode")
    group_serial.add_argument('--interface', dest='interface', help="Specify interface", default='')
    group_file = parser.add_argument_group('File')
    group_file.add_argument('--file', dest='mode', action='store_const',
                            const=PROP_MODE_FILE, help="Set file mode")
    group_file.add_argument('--name', dest='file', help="Specify file name", default='')
    args = parser.parse_args()
    kwb = KWBEasyfire(args.mode, args.hostname, args.port, args.interface, 0, args.file)
    kwb.run_thread()
    # let the reader collect a few packets before dumping state
    time.sleep(5)
    kwb.stop_thread()
    print(kwb)
Main method for debug purposes .
58,840
def _open_connection(self):
    """Open the configured input channel: a serial device, a TCP socket,
    or a replay file, depending on self._mode."""
    if (self._mode == PROP_MODE_SERIAL):
        self._serial = serial.Serial(self._serial_device, self._serial_speed)
    elif (self._mode == PROP_MODE_TCP):
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.connect((self._ip, self._port))
    elif (self._mode == PROP_MODE_FILE):
        self._file = open(self._file_path, "r")
Open a connection to the easyfire unit .
58,841
def _close_connection(self):
    """Close whichever channel _open_connection established."""
    if self._mode == PROP_MODE_SERIAL:
        self._serial.close()
    elif self._mode == PROP_MODE_TCP:
        self._socket.close()
    elif self._mode == PROP_MODE_FILE:
        self._file.close()
Close the connection to the easyfire unit .
58,842
def _add_to_checksum(self, checksum, value):
    """Fold one byte *value* into the rolling 8-bit checksum: rotate the
    checksum left one bit, add the byte, wrap.

    NOTE(review): the wraparound subtracts 255, not 256 — presumably a
    ones'-complement-style sum required by the device protocol; confirm
    against the easyfire protocol documentation.
    """
    checksum = self._byte_rot_left(checksum, 1)
    checksum = checksum + value
    if (checksum > 255):
        checksum = checksum - 255
    self._debug(PROP_LOGLEVEL_TRACE, "C: " + str(checksum) + " V: " + str(value))
    return checksum
Add a byte to the checksum .
58,843
def _read_byte(self):
    """Read one byte from the active input channel, append its ordinal to
    the bounded rolling log buffer, and return the raw byte."""
    to_return = ""
    if (self._mode == PROP_MODE_SERIAL):
        to_return = self._serial.read(1)
    elif (self._mode == PROP_MODE_TCP):
        to_return = self._socket.recv(1)
    elif (self._mode == PROP_MODE_FILE):
        # replay files store one decimal byte value per line
        to_return = struct.pack("B", int(self._file.readline()))
        _LOGGER.debug("READ: " + str(ord(to_return)))
    self._logdata.append(ord(to_return))
    # keep only the newest _logdatalen entries
    if (len(self._logdata) > self._logdatalen):
        self._logdata = self._logdata[len(self._logdata) - self._logdatalen:]
    self._debug(PROP_LOGLEVEL_TRACE, "READ: " + str(ord(to_return)))
    return to_return
Read a byte from input .
58,844
def _decode_temp ( byte_1 , byte_2 ) : temp = ( byte_1 << 8 ) + byte_2 if ( temp > 32767 ) : temp = temp - 65536 temp = temp / 10 return temp
Decode a signed short temperature as two bytes to a single number .
58,845
def _read_packet(self):
    """Read one framed packet from the input stream.

    Implements the easyfire wire protocol as a byte-driven state machine:
    a 0x02 preamble, then either a sense frame (second 0x02, explicit
    length byte) or a control frame (fixed 16-byte payload), each closed
    by a checksum byte.  Returns (mode, version, payload).

    NOTE(review): checksum and checksum_calculated are logged but never
    compared — corrupt packets are not rejected here; confirm intended.
    """
    status = STATUS_WAITING
    mode = 0
    checksum = 0
    checksum_calculated = 0
    length = 0
    version = 0
    i = 0
    cnt = 0
    packet = bytearray(0)
    while (status != STATUS_PACKET_DONE):
        read = self._read_ord_byte()
        # the trailing checksum byte itself is not folded into the sum
        if (status != STATUS_CTRL_CHECKSUM and status != STATUS_SENSE_CHECKSUM):
            checksum_calculated = self._add_to_checksum(checksum_calculated, read)
        self._debug(PROP_LOGLEVEL_TRACE, "R: " + str(read))
        self._debug(PROP_LOGLEVEL_TRACE, "S: " + str(status))
        if (status == STATUS_WAITING):
            # hunt for the 0x02 start-of-frame byte
            if (read == 2):
                status = STATUS_PRE_1
                checksum_calculated = read
            else:
                status = STATUS_WAITING
        elif (status == STATUS_PRE_1):
            checksum = 0
            if (read == 2):
                # second 0x02: this is a sense packet
                status = STATUS_SENSE_PRE_2
                checksum_calculated = read
            elif (read == 0):
                status = STATUS_WAITING
            else:
                status = STATUS_CTRL_PRE_2
        elif (status == STATUS_SENSE_PRE_2):
            length = read
            status = STATUS_SENSE_PRE_LENGTH
        elif (status == STATUS_SENSE_PRE_LENGTH):
            version = read
            status = STATUS_SENSE_PRE_3
        elif (status == STATUS_SENSE_PRE_3):
            cnt = read
            i = 0
            status = STATUS_SENSE_DATA
        elif (status == STATUS_SENSE_DATA):
            packet.append(read)
            i = i + 1
            if (i == length):
                status = STATUS_SENSE_CHECKSUM
        elif (status == STATUS_SENSE_CHECKSUM):
            checksum = read
            mode = PROP_PACKET_SENSE
            status = STATUS_PACKET_DONE
        elif (status == STATUS_CTRL_PRE_2):
            version = read
            status = STATUS_CTRL_PRE_3
        elif (status == STATUS_CTRL_PRE_3):
            cnt = read
            i = 0
            # control packets have a fixed 16-byte payload
            length = 16
            status = STATUS_CTRL_DATA
        elif (status == STATUS_CTRL_DATA):
            packet.append(read)
            i = i + 1
            if (i == length):
                status = STATUS_CTRL_CHECKSUM
        elif (status == STATUS_CTRL_CHECKSUM):
            checksum = read
            mode = PROP_PACKET_CTRL
            status = STATUS_PACKET_DONE
        else:
            status = STATUS_WAITING
    self._debug(PROP_LOGLEVEL_DEBUG, "MODE: " + str(mode) + " Version: " + str(version) + " Checksum: " + str(checksum) + " / " + str(checksum_calculated) + " Count: " + str(cnt) + " Length: " + str(len(packet)))
    self._debug(PROP_LOGLEVEL_TRACE, "Packet: " + str(packet))
    return (mode, version, packet)
Read a packet from the input .
58,846
def _decode_sense_packet(self, version, packet):
    """Decode a sense packet: unescape it to raw data, slice out 16-bit
    temperature words, and push the values into the sense sensor list."""
    data = self._sense_packet_to_data(packet)
    # presumably 4 header bytes and 6 trailer bytes around the
    # temperature block — confirm against the protocol documentation
    offset = 4
    i = 0
    datalen = len(data) - offset - 6
    temp_count = int(datalen / 2)
    temp = []
    for i in range(temp_count):
        temp_index = i * 2 + offset
        temp.append(self._decode_temp(data[temp_index], data[temp_index + 1]))
    self._debug(PROP_LOGLEVEL_DEBUG, "T: " + str(temp))
    for sensor in self._sense_sensor:
        if (sensor.sensor_type == PROP_SENSOR_TEMPERATURE):
            sensor.value = temp[sensor.index]
        elif (sensor.sensor_type == PROP_SENSOR_RAW):
            # raw sensors receive the whole undecoded packet
            sensor.value = packet
    self._debug(PROP_LOGLEVEL_DEBUG, str(self))
Decode a sense packet into the list of sensors .
58,847
def _decode_ctrl_packet(self, version, packet):
    """Decode a control packet: the first five bytes are flag bitfields;
    distribute individual bits to the flag sensors by their index."""
    for i in range(5):
        input_bit = packet[i]
        # log each byte as its 8 bits, MSB first
        self._debug(PROP_LOGLEVEL_DEBUG, "Byte " + str(i) + ": " + str((input_bit >> 7) & 1) + str((input_bit >> 6) & 1) + str((input_bit >> 5) & 1) + str((input_bit >> 4) & 1) + str((input_bit >> 3) & 1) + str((input_bit >> 2) & 1) + str((input_bit >> 1) & 1) + str(input_bit & 1))
    for sensor in self._ctrl_sensor:
        if (sensor.sensor_type == PROP_SENSOR_FLAG):
            # flag index selects the byte (index // 8) and bit (index % 8)
            sensor.value = (packet[sensor.index // 8] >> (sensor.index % 8)) & 1
        elif (sensor.sensor_type == PROP_SENSOR_RAW):
            sensor.value = packet
Decode a control packet into the list of sensors .
58,848
def run(self):
    """Reader loop: pull packets from the input and dispatch each to the
    matching decoder until the run flag is cleared."""
    while self._run_thread:
        mode, version, packet = self._read_packet()
        if mode == PROP_PACKET_SENSE:
            self._decode_sense_packet(version, packet)
        elif mode == PROP_PACKET_CTRL:
            self._decode_ctrl_packet(version, packet)
Main thread that reads from input and populates the sensors .
58,849
def run_thread(self):
    """Mark the reader as running and start its daemon thread."""
    self._run_thread = True
    # the daemon attribute replaces the deprecated setDaemon() call
    self._thread.daemon = True
    self._thread.start()
Run the main thread .
58,850
def unused(self, _dict):
    """Strip keys whose value is None from *_dict* (in place) and return it."""
    # iterate over a snapshot: deleting from the live dict while
    # iterating .items() raises RuntimeError on Python 3
    for key, value in list(_dict.items()):
        if value is None:
            del _dict[key]
    return _dict
Remove empty parameters from the dict
58,851
def required(self, method, _dict, require):
    """Raise LunrError unless every key in *require* is present in *_dict*."""
    missing = [key for key in require if key not in _dict]
    if missing:
        raise LunrError("'%s' is required argument for method '%s'" % (missing[0], method))
Ensure the required items are in the dictionary
58,852
def allowed(self, method, _dict, allow):
    """Raise LunrError if *_dict* contains any key outside *allow*."""
    for key in _dict:
        if key not in allow:
            raise LunrError("'%s' is not an argument for method '%s'" % (key, method))
Only these items are allowed in the dictionary
58,853
def parse_event_name(name):
    """Split an 'app.KlassName' event name into the python events-module
    path and the class name; raise InvalidEventNameError otherwise."""
    try:
        app, event = name.split('.')
    except ValueError:
        raise InvalidEventNameError(
            (u'The name "{}" is invalid. '
             u'Make sure you are using the "app.KlassName" format').format(name))
    return '{}.{}'.format(app, EVENTS_MODULE_NAME), event
Returns the python module and obj given an event name
58,854
def find_event(name):
    """Import and return the event class referenced by *name*; raise
    EventNotFoundError when the module or class is missing."""
    module, klass = parse_event_name(name)
    try:
        return getattr(import_module(module), klass)
    except (ImportError, AttributeError):
        raise EventNotFoundError(
            ('Event "{}" not found. '
             'Make sure you have a class called "{}" inside the "{}" '
             'module.'.format(name, klass, module)))
Actually import the event represented by name
58,855
def cleanup_handlers(event=None):
    """Drop handlers registered for *event*; with no event, wipe out every
    registered handler."""
    if not event:
        HANDLER_REGISTRY.clear()
        EXTERNAL_HANDLER_REGISTRY.clear()
        return
    for registry in (HANDLER_REGISTRY, EXTERNAL_HANDLER_REGISTRY):
        if event in registry:
            del registry[event]
Remove handlers of a given event . If no event is given , wipe out all registered events .
58,856
def find_handlers(event_name, registry=HANDLER_REGISTRY):
    """All handlers registered for *event_name*: string names are treated
    as fnmatch patterns against registry keys, class references are
    looked up directly."""
    if not isinstance(event_name, basestring):
        return registry.get(find_event(event_name), [])
    handlers = []
    for event in registry.keys():
        if fnmatch.fnmatchcase(event_name, event):
            handlers.extend(registry.get(event))
    return handlers
Small helper to find all handlers associated to a given event
58,857
def get_default_values(data):
    """Default event fields: current timestamp, and the requester ip
    ('0.0.0.0' when there is no request or no detectable ip)."""
    request = data.get('request')
    defaults = {
        '__datetime__': datetime.now(),
        '__ip_address__': request and get_ip(request) or '0.0.0.0',
    }
    return defaults
Return all default values that an event should have
58,858
def filter_data_values(data):
    """Copy *data* minus the special keys the log function accepts."""
    banned = ('request',)
    return {key: val for key, val in data.items() if key not in banned}
Remove special values that log function can take
58,859
def import_event_modules():
    """Import the events module of every installed app, skipping apps
    that have none."""
    for installed_app in getsetting('INSTALLED_APPS'):
        module_name = u'{}.{}'.format(installed_app, EVENTS_MODULE_NAME)
        try:
            import_module(module_name)
        except ImportError:
            # the app simply declares no events
            pass
Import all events declared for all currently installed apps
58,860
def handle_expired_accounts():
    """Deactivate users whose activation window has lapsed and email them
    a notice; purge registration profiles that were already activated.
    (Python 2 code: print statements.)"""
    ACTIVATED = RegistrationProfile.ACTIVATED
    expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
    to_delete = []
    print "Processing %s registration profiles..." % str(RegistrationProfile.objects.all().count())
    for profile in RegistrationProfile.objects.all():
        print "Processing %s" % profile.user
        # already activated: the profile is no longer needed
        if profile.activation_key == ACTIVATED:
            print "Found Active"
            to_delete.append(profile.pk)
            continue
        # still-active user past the activation deadline: deactivate and notify
        if profile.user.is_active and profile.user.date_joined + expiration_date <= datetime.datetime.now():
            print "Found Expired"
            user = profile.user
            user.is_active = False
            site = Site.objects.get_current()
            ctx_dict = {'site': site, 'activation_key': profile.activation_key}
            subject = render_to_string('registration/email/emails/account_expired_subject.txt', ctx_dict)
            # email subject must be a single line
            subject = ''.join(subject.splitlines())
            message = render_to_string('registration/email/emails/account_expired.txt', ctx_dict)
            user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
            user.save()
    print "Deleting %s registration profiles." % str(len(to_delete))
    RegistrationProfile.objects.filter(pk__in=to_delete).delete()
Check of expired accounts .
58,861
def activate(self, request, activation_key):
    """Activate the user for *activation_key*, even past the expiration
    date; return the user, or False for an unknown/invalid key."""
    if not SHA1_RE.search(activation_key):
        return False
    try:
        profile = RegistrationProfile.objects.get(activation_key=activation_key)
    except RegistrationProfile.DoesNotExist:
        return False
    user = profile.user
    user.is_active = True
    user.save()
    profile.activation_key = RegistrationProfile.ACTIVATED
    profile.save()
    return user
Override default activation process . This will activate the user even if its passed its expiration date .
58,862
def register ( self , request , ** kwargs ) : if Site . _meta . installed : site = Site . objects . get_current ( ) else : site = RequestSite ( request ) email = kwargs [ 'email' ] password = User . objects . make_random_password ( ) username = sha_constructor ( str ( email ) ) . hexdigest ( ) [ : 30 ] incr = 0 while User . objects . filter ( username = username ) . count ( ) > 0 : incr += 1 username = sha_constructor ( str ( email + str ( incr ) ) ) . hexdigest ( ) [ : 30 ] new_user = User . objects . create_user ( username , email , password ) new_user . save ( ) registration_profile = RegistrationProfile . objects . create_profile ( new_user ) auth_user = authenticate ( username = username , password = password ) login ( request , auth_user ) request . session . set_expiry ( 0 ) if hasattr ( settings , 'AUTH_PROFILE_MODULE' ) and getattr ( settings , 'AUTH_PROFILE_MODULE' ) : app_label , model_name = settings . AUTH_PROFILE_MODULE . split ( '.' ) model = models . get_model ( app_label , model_name ) try : profile = new_user . get_profile ( ) except model . DoesNotExist : profile = model ( user = new_user ) profile . save ( ) self . send_activation_email ( new_user , registration_profile , password , site ) signals . user_registered . send ( sender = self . __class__ , user = new_user , request = request ) return new_user
Create and immediately log in a new user . Only require a email to register username is generated automatically and a password is random generated and emailed to the user . Activation is still required for account uses after specified number of days .
58,863
def send_activation_email ( self , user , profile , password , site ) : ctx_dict = { 'password' : password , 'site' : site , 'activation_key' : profile . activation_key , 'expiration_days' : settings . ACCOUNT_ACTIVATION_DAYS } subject = render_to_string ( 'registration/email/emails/password_subject.txt' , ctx_dict ) subject = '' . join ( subject . splitlines ( ) ) message = render_to_string ( 'registration/email/emails/password.txt' , ctx_dict ) try : user . email_user ( subject , message , settings . DEFAULT_FROM_EMAIL ) except : pass
Custom send email method to supplied the activation link and new generated password .
58,864
def post_registration_redirect ( self , request , user ) : next_url = "/registration/register/complete/" if "next" in request . GET or "next" in request . POST : next_url = request . GET . get ( "next" , None ) or request . POST . get ( "next" , None ) or "/" return ( next_url , ( ) , { } )
After registration redirect to the home page or supplied next query string or hidden field value .
58,865
def next ( self ) : if self . start + self . size > self . total_size : result = None else : result = Batch ( self . start + self . size , self . size , self . total_size ) return result
Returns the next batch for the batched sequence or None if this batch is already the last batch .
58,866
def previous ( self ) : if self . start - self . size < 0 : result = None else : result = Batch ( self . start - self . size , self . size , self . total_size ) return result
Returns the previous batch for the batched sequence or None if this batch is already the first batch .
58,867
def last ( self ) : start = max ( self . number - 1 , 0 ) * self . size return Batch ( start , self . size , self . total_size )
Returns the last batch for the batched sequence .
58,868
def number ( self ) : return int ( math . ceil ( self . total_size / float ( self . size ) ) )
Returns the number of batches the batched sequence contains .
58,869
def watermark ( url , args = '' ) : args = args . split ( ',' ) params = dict ( name = args . pop ( 0 ) , opacity = 0.5 , tile = False , scale = 1.0 , greyscale = False , rotation = 0 , position = None , quality = QUALITY , obscure = OBSCURE_ORIGINAL , random_position_once = RANDOM_POSITION_ONCE , ) params [ 'url' ] = unquote ( url ) for arg in args : key , value = arg . split ( '=' ) key , value = key . strip ( ) , value . strip ( ) if key == 'position' : params [ 'position' ] = value elif key == 'opacity' : params [ 'opacity' ] = utils . _percent ( value ) elif key == 'tile' : params [ 'tile' ] = bool ( int ( value ) ) elif key == 'scale' : params [ 'scale' ] = value elif key == 'greyscale' : params [ 'greyscale' ] = bool ( int ( value ) ) elif key == 'rotation' : params [ 'rotation' ] = value elif key == 'quality' : params [ 'quality' ] = int ( value ) elif key == 'obscure' : params [ 'obscure' ] = bool ( int ( value ) ) elif key == 'random_position_once' : params [ 'random_position_once' ] = bool ( int ( value ) ) return Watermarker ( ) ( ** params )
Returns the URL to a watermarked copy of the image specified .
58,870
def _get_filesystem_path ( self , url_path , basedir = settings . MEDIA_ROOT ) : if url_path . startswith ( settings . MEDIA_URL ) : url_path = url_path [ len ( settings . MEDIA_URL ) : ] return os . path . normpath ( os . path . join ( basedir , url2pathname ( url_path ) ) )
Makes a filesystem path from the specified URL path
58,871
def generate_filename ( self , mark , ** kwargs ) : kwargs = kwargs . copy ( ) kwargs [ 'opacity' ] = int ( kwargs [ 'opacity' ] * 100 ) kwargs [ 'st_mtime' ] = kwargs [ 'fstat' ] . st_mtime kwargs [ 'st_size' ] = kwargs [ 'fstat' ] . st_size params = [ '%(original_basename)s' , 'wm' , 'w%(watermark)i' , 'o%(opacity)i' , 'gs%(greyscale)i' , 'r%(rotation)i' , 'fm%(st_mtime)i' , 'fz%(st_size)i' , 'p%(position)s' , ] scale = kwargs . get ( 'scale' , None ) if scale and scale != mark . size : params . append ( '_s%i' % ( float ( kwargs [ 'scale' ] [ 0 ] ) / mark . size [ 0 ] * 100 ) ) if kwargs . get ( 'tile' , None ) : params . append ( '_tiled' ) filename = '%s%s' % ( '_' . join ( params ) , kwargs [ 'ext' ] ) return filename % kwargs
Comes up with a good filename for the watermarked image
58,872
def get_url_path ( self , basedir , original_basename , ext , name , obscure = True ) : try : hash = hashlib . sha1 ( smart_str ( name ) ) . hexdigest ( ) except TypeError : hash = hashlib . sha1 ( smart_str ( name ) . encode ( 'utf-8' ) ) . hexdigest ( ) if obscure is True : logger . debug ( 'Obscuring original image name: %s => %s' % ( name , hash ) ) url_path = os . path . join ( basedir , hash + ext ) else : logger . debug ( 'Not obscuring original image name.' ) url_path = os . path . join ( basedir , hash , original_basename + ext ) try : fpath = self . _get_filesystem_path ( url_path ) os . makedirs ( os . path . dirname ( fpath ) ) except OSError as e : if e . errno == errno . EEXIST : pass else : logger . error ( 'Error creating path: %s' % traceback . format_exc ( ) ) raise else : logger . debug ( 'Created directory: %s' % os . path . dirname ( fpath ) ) return url_path
Determines an appropriate watermark path
58,873
def create_watermark ( self , target , mark , fpath , quality = QUALITY , ** kwargs ) : im = utils . watermark ( target , mark , ** kwargs ) im . save ( fpath , quality = quality ) return im
Create the watermarked image on the filesystem
58,874
def _val ( var , is_percent = False ) : try : if is_percent : var = float ( int ( var . strip ( '%' ) ) / 100.0 ) else : var = int ( var ) except ValueError : raise ValueError ( 'invalid watermark parameter: ' + var ) return var
Tries to determine the appropriate value of a particular variable that is passed in . If the value is supposed to be a percentage a whole integer will be sought after and then turned into a floating point number between 0 and 1 . If the value is supposed to be an integer the variable is cast into an integer .
58,875
def reduce_opacity ( img , opacity ) : assert opacity >= 0 and opacity <= 1 if img . mode != 'RGBA' : img = img . convert ( 'RGBA' ) else : img = img . copy ( ) alpha = img . split ( ) [ 3 ] alpha = ImageEnhance . Brightness ( alpha ) . enhance ( opacity ) img . putalpha ( alpha ) return img
Returns an image with reduced opacity .
58,876
def determine_scale ( scale , img , mark ) : if scale : try : scale = float ( scale ) except ( ValueError , TypeError ) : pass if isinstance ( scale , six . string_types ) and scale . upper ( ) == 'F' : scale = min ( float ( img . size [ 0 ] ) / mark . size [ 0 ] , float ( img . size [ 1 ] ) / mark . size [ 1 ] ) elif isinstance ( scale , six . string_types ) and scale . upper ( ) == 'R' : scale = min ( float ( img . size [ 0 ] ) / mark . size [ 0 ] , float ( img . size [ 1 ] ) / mark . size [ 1 ] ) / 100 * settings . WATERMARK_PERCENTAGE elif type ( scale ) not in ( float , int ) : raise ValueError ( 'Invalid scale value "%s"! Valid values are "F" ' 'for ratio-preserving scaling, "R%%" for percantage aspect ' 'ratio of source image and floating-point numbers and ' 'integers greater than 0.' % scale ) w = int ( mark . size [ 0 ] * float ( scale ) ) h = int ( mark . size [ 1 ] * float ( scale ) ) return ( w , h ) else : return mark . size
Scales an image using a specified ratio F or R . If scale is F the image is scaled to be as big as possible to fit in img without falling off the edges . If scale is R the watermark resizes to a percentage of minimum size of source image . Returns the scaled mark .
58,877
def determine_rotation ( rotation , mark ) : if isinstance ( rotation , six . string_types ) and rotation . lower ( ) == 'r' : rotation = random . randint ( 0 , 359 ) else : rotation = _int ( rotation ) return rotation
Determines the number of degrees to rotate the watermark image .
58,878
def watermark ( img , mark , position = ( 0 , 0 ) , opacity = 1 , scale = 1.0 , tile = False , greyscale = False , rotation = 0 , return_name = False , ** kwargs ) : if opacity < 1 : mark = reduce_opacity ( mark , opacity ) if not isinstance ( scale , tuple ) : scale = determine_scale ( scale , img , mark ) mark = mark . resize ( scale , resample = Image . ANTIALIAS ) if greyscale and mark . mode != 'LA' : mark = mark . convert ( 'LA' ) rotation = determine_rotation ( rotation , mark ) if rotation != 0 : new_w = int ( mark . size [ 0 ] * 1.5 ) new_h = int ( mark . size [ 1 ] * 1.5 ) new_mark = Image . new ( 'RGBA' , ( new_w , new_h ) , ( 0 , 0 , 0 , 0 ) ) new_l = int ( ( new_w - mark . size [ 0 ] ) / 2 ) new_t = int ( ( new_h - mark . size [ 1 ] ) / 2 ) new_mark . paste ( mark , ( new_l , new_t ) ) mark = new_mark . rotate ( rotation ) position = determine_position ( position , img , mark ) if img . mode != 'RGBA' : img = img . convert ( 'RGBA' ) assert isinstance ( position , tuple ) , 'Invalid position "%s"!' % position layer = Image . new ( 'RGBA' , img . size , ( 0 , 0 , 0 , 0 ) ) if tile : first_y = int ( position [ 1 ] % mark . size [ 1 ] - mark . size [ 1 ] ) first_x = int ( position [ 0 ] % mark . size [ 0 ] - mark . size [ 0 ] ) for y in range ( first_y , img . size [ 1 ] , mark . size [ 1 ] ) : for x in range ( first_x , img . size [ 0 ] , mark . size [ 0 ] ) : layer . paste ( mark , ( x , y ) ) else : layer . paste ( mark , position ) return Image . composite ( layer , img , layer )
Adds a watermark to an image
58,879
def parsed_file ( config_file ) : parser = ConfigParser ( allow_no_value = True ) parser . readfp ( config_file ) return parser
Parse an ini - style config file .
58,880
def commands ( config , names ) : commands = { cmd : Command ( ** dict ( ( minus_to_underscore ( k ) , v ) for k , v in config . items ( cmd ) ) ) for cmd in config . sections ( ) if cmd != 'packages' } try : return tuple ( commands [ x ] for x in names ) except KeyError as e : raise RuntimeError ( 'Section [commands] in the config file does not contain the ' 'key {.args[0]!r} you requested to execute.' . format ( e ) )
Return the list of commands to run .
58,881
def project_path ( * names ) : return os . path . join ( os . path . dirname ( __file__ ) , * names )
Path to a file in the project .
58,882
def get_osa_commit ( repo , ref , rpc_product = None ) : osa_differ . checkout ( repo , ref ) functions_path = os . path . join ( repo . working_tree_dir , 'scripts/functions.sh' ) release_path = os . path . join ( repo . working_tree_dir , 'playbooks/vars/rpc-release.yml' ) if os . path . exists ( release_path ) : with open ( release_path ) as f : rpc_release_data = yaml . safe_load ( f . read ( ) ) rpc_product_releases = rpc_release_data [ 'rpc_product_releases' ] release_data = rpc_product_releases [ rpc_product ] return release_data [ 'osa_release' ] elif repo . submodules [ 'openstack-ansible' ] : return repo . submodules [ 'openstack-ansible' ] . hexsha elif os . path . exists ( functions_path ) : quoted_re = re . compile ( 'OSA_RELEASE:-?"?([^"}]+)["}]' ) with open ( functions_path , "r" ) as funcs : for line in funcs . readlines ( ) : match = quoted_re . search ( line ) if match : return match . groups ( ) [ 0 ] else : raise SHANotFound ( ( "Cannot find OSA SHA in submodule or " "script: {}" . format ( functions_path ) ) ) else : raise SHANotFound ( 'No OSA SHA was able to be derived.' )
Get the OSA sha referenced by an RPCO Repo .
58,883
def publish_report ( report , args , old_commit , new_commit ) : output = "" if not args . quiet and not args . gist and not args . file : return report if args . gist : gist_url = post_gist ( report , old_commit , new_commit ) output += "\nReport posted to GitHub Gist: {0}" . format ( gist_url ) if args . file is not None : with open ( args . file , 'w' ) as f : f . write ( report . encode ( 'utf-8' ) ) output += "\nReport written to file: {0}" . format ( args . file ) return output
Publish the RST report based on the user request .
58,884
def run_rpc_differ ( ) : args = parse_arguments ( ) if args . debug : log . setLevel ( logging . DEBUG ) elif args . verbose : log . setLevel ( logging . INFO ) try : storage_directory = osa_differ . prepare_storage_dir ( args . directory ) except OSError : print ( "ERROR: Couldn't create the storage directory {0}. " "Please create it manually." . format ( args . directory ) ) sys . exit ( 1 ) rpc_repo_url = args . rpc_repo_url rpc_repo_dir = "{0}/rpc-openstack" . format ( storage_directory ) osa_differ . update_repo ( rpc_repo_dir , rpc_repo_url , args . update ) rpc_old_commit = validate_rpc_sha ( rpc_repo_dir , args . old_commit [ 0 ] ) rpc_new_commit = validate_rpc_sha ( rpc_repo_dir , args . new_commit [ 0 ] ) report_rst = make_rpc_report ( rpc_repo_dir , rpc_old_commit , rpc_new_commit , args ) try : role_yaml = osa_differ . get_roles ( rpc_repo_dir , rpc_old_commit , args . role_requirements_old_commit ) except IOError : role_yaml = osa_differ . get_roles ( rpc_repo_dir , rpc_old_commit , ROLE_REQ_FILE ) try : role_yaml_latest = osa_differ . get_roles ( rpc_repo_dir , rpc_new_commit , args . role_requirements ) except IOError : role_yaml_latest = osa_differ . get_roles ( rpc_repo_dir , rpc_new_commit , ROLE_REQ_FILE ) report_rst += ( "\nRPC-OpenStack Roles\n" "-------------------" ) report_rst += osa_differ . make_report ( storage_directory , role_yaml , role_yaml_latest , args . update , args . version_mappings ) report_rst += "\n" repo = Repo ( rpc_repo_dir ) osa_old_commit = get_osa_commit ( repo , rpc_old_commit , rpc_product = args . rpc_product_old_commit ) osa_new_commit = get_osa_commit ( repo , rpc_new_commit , rpc_product = args . rpc_product ) log . debug ( "OSA Commits old:{old} new:{new}" . format ( old = osa_old_commit , new = osa_new_commit ) ) osa_repo_dir = "{0}/openstack-ansible" . format ( storage_directory ) try : report_rst += osa_differ . make_osa_report ( osa_repo_dir , osa_old_commit , osa_new_commit , args ) except exceptions . 
InvalidCommitRangeException : pass try : role_yaml = osa_differ . get_roles ( osa_repo_dir , osa_old_commit , args . role_requirements_old_commit ) except IOError : role_yaml = osa_differ . get_roles ( osa_repo_dir , osa_old_commit , ROLE_REQ_FILE ) try : role_yaml_latest = osa_differ . get_roles ( osa_repo_dir , osa_new_commit , args . role_requirements ) except IOError : role_yaml_latest = osa_differ . get_roles ( osa_repo_dir , osa_new_commit , ROLE_REQ_FILE ) report_rst += ( "\nOpenStack-Ansible Roles\n" "-----------------------" ) report_rst += osa_differ . make_report ( storage_directory , role_yaml , role_yaml_latest , args . update , args . version_mappings ) project_yaml = osa_differ . get_projects ( osa_repo_dir , osa_old_commit ) project_yaml_latest = osa_differ . get_projects ( osa_repo_dir , osa_new_commit ) report_rst += ( "OpenStack-Ansible Projects\n" "--------------------------" ) report_rst += osa_differ . make_report ( storage_directory , project_yaml , project_yaml_latest , args . update , args . version_mappings ) output = publish_report ( report_rst , args , rpc_old_commit , rpc_new_commit ) print ( output )
The script starts here .
58,885
def main ( raw_args = None ) : parser = argparse . ArgumentParser ( description = "poor man's integration testing" ) parser . add_argument ( 'cmds' , metavar = 'cmd' , default = [ 'test' ] , nargs = '*' , help = 'Run command(s) defined in the configuration file. Each command ' 'is run on each package before proceeding with the next command. ' '(default: "test")' ) parser . add_argument ( '-c' , '--config' , dest = 'file' , type = argparse . FileType ( 'r' ) , default = 'toll.ini' , help = 'ini-style file to read the configuration from' ) parser . add_argument ( '--start-at' , dest = 'start_at' , type = str , default = '' , help = 'Skip over the packages in the config file listed before this' ' one. (It does a substring match to find the first package.)' ) args = parser . parse_args ( raw_args ) config_file = config . parsed_file ( args . file ) commands = config . commands ( config_file , args . cmds ) packages = config . packages ( config_file ) runner = Runner ( commands , packages , start_at = args . start_at ) return runner ( )
Console script entry point .
58,886
def remove ( self , list ) : xml = SP . DeleteList ( SP . listName ( list . id ) ) self . opener . post_soap ( LIST_WEBSERVICE , xml , soapaction = 'http://schemas.microsoft.com/sharepoint/soap/DeleteList' ) self . all_lists . remove ( list )
Removes a list from the site .
58,887
def create ( self , name , description = '' , template = 100 ) : try : template = int ( template ) except ValueError : template = LIST_TEMPLATES [ template ] if name in self : raise ValueError ( "List already exists: '{0}" . format ( name ) ) if uuid_re . match ( name ) : raise ValueError ( "Cannot create a list with a UUID as a name" ) xml = SP . AddList ( SP . listName ( name ) , SP . description ( description ) , SP . templateID ( text_type ( template ) ) ) result = self . opener . post_soap ( LIST_WEBSERVICE , xml , soapaction = 'http://schemas.microsoft.com/sharepoint/soap/AddList' ) list_element = result . xpath ( 'sp:AddListResult/sp:List' , namespaces = namespaces ) [ 0 ] self . _all_lists . append ( SharePointList ( self . opener , self , list_element ) )
Creates a new list in the site .
58,888
def Row ( self ) : if not hasattr ( self , '_row_class' ) : attrs = { 'fields' : self . fields , 'list' : self , 'opener' : self . opener } for field in self . fields . values ( ) : attrs [ field . name ] = field . descriptor self . _row_class = type ( 'SharePointListRow' , ( SharePointListRow , ) , attrs ) return self . _row_class
The class for a row in this list .
58,889
def append ( self , row ) : if isinstance ( row , dict ) : row = self . Row ( row ) elif isinstance ( row , self . Row ) : pass elif isinstance ( row , SharePointListRow ) : raise TypeError ( "row must be a dict or an instance of SharePointList.Row, not SharePointListRow" ) else : raise TypeError ( "row must be a dict or an instance of SharePointList.Row" ) self . rows self . _rows . append ( row ) return row
Appends a row to the list . Takes a dictionary returns a row .
58,890
def remove ( self , row ) : self . _rows . remove ( row ) self . _deleted_rows . add ( row )
Removes the row from the list .
58,891
def save ( self ) : batches = E . Batch ( ListVersion = '1' , OnError = 'Return' ) xml = SP . UpdateListItems ( SP . listName ( self . id ) , SP . updates ( batches ) ) rows_by_batch_id , batch_id = { } , 1 for row in self . _rows : batch = row . get_batch_method ( ) if batch is None : continue batch . attrib [ 'ID' ] = text_type ( batch_id ) rows_by_batch_id [ batch_id ] = row batches . append ( batch ) batch_id += 1 for row in self . _deleted_rows : batch = E . Method ( E . Field ( text_type ( row . id ) , Name = 'ID' ) , ID = text_type ( batch_id ) , Cmd = 'Delete' ) rows_by_batch_id [ batch_id ] = row batches . append ( batch ) batch_id += 1 if len ( batches ) == 0 : return response = self . opener . post_soap ( LIST_WEBSERVICE , xml , soapaction = 'http://schemas.microsoft.com/sharepoint/soap/UpdateListItems' ) for result in response . xpath ( './/sp:Result' , namespaces = namespaces ) : batch_id , batch_result = result . attrib [ 'ID' ] . split ( ',' ) row = rows_by_batch_id [ int ( batch_id ) ] error_code = result . find ( 'sp:ErrorCode' , namespaces = namespaces ) error_text = result . find ( 'sp:ErrorText' , namespaces = namespaces ) if error_code is not None and error_code . text != '0x00000000' : raise UpdateFailedError ( row , batch_result , error_code . text , error_text . text ) if batch_result in ( 'Update' , 'New' ) : row . _update ( result . xpath ( 'z:row' , namespaces = namespaces ) [ 0 ] , clear = True ) else : self . _deleted_rows . remove ( row ) assert not self . _deleted_rows assert not any ( row . _changed for row in self . rows )
Updates the list with changes .
58,892
def get_batch_method ( self ) : if not self . _changed : return None batch_method = E . Method ( Cmd = 'Update' if self . id else 'New' ) batch_method . append ( E . Field ( text_type ( self . id ) if self . id else 'New' , Name = 'ID' ) ) for field in self . fields . values ( ) : if field . name in self . _changed : value = field . unparse ( self . _data [ field . name ] or '' ) batch_method . append ( E . Field ( value , Name = field . name ) ) return batch_method
Returns a change batch for SharePoint s UpdateListItems operation .
58,893
def convert_to_python ( self , xmlrpc = None ) : if xmlrpc : return xmlrpc . get ( self . name , self . default ) elif self . default : return self . default else : return None
Extracts a value for the field from an XML - RPC response .
58,894
def get_outputs ( self , input_value ) : output_value = self . convert_to_xmlrpc ( input_value ) output = { } for name in self . output_names : output [ name ] = output_value return output
Generate a set of output values for a given input .
58,895
def struct ( self ) : data = { } for var , fmap in self . _def . items ( ) : if hasattr ( self , var ) : data . update ( fmap . get_outputs ( getattr ( self , var ) ) ) return data
XML - RPC - friendly representation of the current object state
58,896
def get_args ( self , client ) : default_args = self . default_args ( client ) if self . method_args or self . optional_args : optional_args = getattr ( self , 'optional_args' , tuple ( ) ) args = [ ] for arg in ( self . method_args + optional_args ) : if hasattr ( self , arg ) : obj = getattr ( self , arg ) if hasattr ( obj , 'struct' ) : args . append ( obj . struct ) else : args . append ( obj ) args = list ( default_args ) + args else : args = default_args return args
Builds final set of XML - RPC method arguments based on the method s arguments any default arguments and their defined respective ordering .
58,897
def process_result ( self , raw_result ) : if self . results_class and raw_result : if isinstance ( raw_result , dict_type ) : return self . results_class ( raw_result ) elif isinstance ( raw_result , collections . Iterable ) : return [ self . results_class ( result ) for result in raw_result ] return raw_result
Performs actions on the raw result from the XML - RPC response . If a results_class is defined the response will be converted into one or more object instances of that class .
58,898
def parse ( self , text ) : results = [ ] if isinstance ( text , str ) : if six . PY2 : text = unicode ( text , 'utf-8' ) self . clean_text = self . _normalize_string ( text ) addresses = set ( self . _get_addresses ( self . clean_text ) ) if addresses : results = list ( map ( self . _parse_address , addresses ) ) return results
Returns a list of addresses found in text together with parsed address parts
58,899
def _parse_address ( self , address_string ) : match = utils . match ( self . rules , address_string , flags = re . VERBOSE | re . U ) if match : match_as_dict = match . groupdict ( ) match_as_dict . update ( { 'country_id' : self . country } ) cleaned_dict = self . _combine_results ( match_as_dict ) return address . Address ( ** cleaned_dict ) return False
Parses address into parts