idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,800
def stamp_excerpt(kb_app: kb, sphinx_app: Sphinx, doctree: doctree):
    """Walk the tree and extract excerpt into ``resource.excerpt``.

    Looks up the resource for the document being processed; an explicit
    ``excerpt`` prop wins, otherwise ``auto_excerpt`` triggers extraction
    from the doctree, otherwise the excerpt is cleared.
    """
    src = PurePath(doctree.attributes['source'])
    # Document name is the confdir-relative path without the .rst suffix.
    docname = str(src.relative_to(sphinx_app.confdir)).split('.rst')[0]
    resource = sphinx_app.env.resources.get(docname)
    if not resource:
        return
    explicit = getattr(resource.props, 'excerpt', False)
    auto = getattr(resource.props, 'auto_excerpt', False)
    if explicit:
        resource.excerpt = explicit
    elif auto:
        resource.excerpt = get_rst_excerpt(doctree, auto)
    else:
        resource.excerpt = None
Walk the tree and extract excerpt into resource . excerpt
55,801
def bitfieldify(buff, count):
    """Extract a bitarray out of a bytes array.

    Converts *buff* to bits and returns only the trailing *count* bits,
    dropping any leading padding introduced by byte alignment.
    """
    all_bits = bitarray()
    all_bits.frombytes(buff)
    # Keep the explicit length arithmetic: with count == 0 this yields an
    # empty slice, which a plain [-count:] would not.
    return all_bits[len(all_bits) - count:]
Extract a bitarray out of a bytes array .
55,802
def build_byte_align_buff(bits):
    """Pad the left side of a bitarray with 0s to align its length with byte boundaries.

    Returns a new bitarray whose length is a multiple of 8; the original
    bits occupy the low (rightmost) positions.
    """
    remainder = len(bits) % 8
    if remainder:
        padding = bitarray(8 - remainder)
        padding.setall(False)
    else:
        padding = bitarray()
    return padding + bits
Pad the left side of a bitarray with 0s to align its length with byte boundaries .
55,803
def create ( self , name , cidr , ** kwargs ) : return self . driver . create ( name , cidr , ** kwargs )
This function will create a user network . Within OpenStack it will create a network and a subnet Within AWS it will create a VPC and a subnet
55,804
def find_whole_word(w):
    """Return a callable that scans a string for *w* as a whole word.

    The returned callable is the compiled pattern's ``search`` method: it
    yields a MatchObject for the first whole-word, case-insensitive
    occurrence, or None when the word does not appear.
    """
    pattern = re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE)
    return pattern.search
Scan through string looking for a location where this word produces a match and return a corresponding MatchObject instance . Return None if no position in the string matches the pattern ; note that this is different from finding a zero - length match at some point in the string .
55,805
def GetCompressedFilesInDir(fileDir, fileList, ignoreDirList, supportedFormatList=None):
    """Get all supported files from given directory folder. Appends to given file list.

    Parameters
    ----------
    fileDir : str
        Directory to scan (non-recursive).
    fileList : list
        List to which matching paths are appended (mutated in place).
    ignoreDirList : list
        Unused here; kept for interface compatibility with callers.
    supportedFormatList : list, optional
        File extensions to accept; defaults to ['.rar'].
    """
    # BUG FIX: the original used a mutable default argument
    # (supportedFormatList=['.rar', ]), shared across all calls.
    if supportedFormatList is None:
        supportedFormatList = ['.rar', ]
    goodlogging.Log.Info("EXTRACT", "Parsing file directory: {0}".format(fileDir))
    if os.path.isdir(fileDir) is True:
        for globPath in glob.glob(os.path.join(fileDir, '*')):
            if os.path.splitext(globPath)[1] in supportedFormatList:
                fileList.append(globPath)
Get all supported files from given directory folder . Appends to given file list .
55,806
def MultipartArchiving ( firstPartExtractList , otherPartSkippedList , archiveDir , otherPartFilePath = None ) : if otherPartFilePath is None : for filePath in list ( otherPartSkippedList ) : MultipartArchiving ( firstPartExtractList , otherPartSkippedList , archiveDir , filePath ) else : baseFileName = re . findall ( "(.+?)[.]part.+?rar" , otherPartFilePath ) [ 0 ] if baseFileName in firstPartExtractList : util . ArchiveProcessedFile ( otherPartFilePath , archiveDir ) if otherPartFilePath in otherPartSkippedList : otherPartSkippedList . remove ( otherPartFilePath ) elif otherPartFilePath not in otherPartSkippedList : otherPartSkippedList . append ( otherPartFilePath )
Archive all parts of multi - part compressed file .
55,807
def DoRarExtraction ( rarArchive , targetFile , dstDir ) : try : rarArchive . extract ( targetFile , dstDir ) except BaseException as ex : goodlogging . Log . Info ( "EXTRACT" , "Extract failed - Exception: {0}" . format ( ex ) ) return False else : return True
RAR extraction with exception catching
55,808
def GetRarPassword ( skipUserInput ) : goodlogging . Log . Info ( "EXTRACT" , "RAR file needs password to extract" ) if skipUserInput is False : prompt = "Enter password, 'x' to skip this file or 'exit' to quit this program: " response = goodlogging . Log . Input ( "EXTRACT" , prompt ) response = util . CheckEmptyResponse ( response ) else : response = 'x' if response . lower ( ) == 'x' : goodlogging . Log . Info ( "EXTRACT" , "File extraction skipped without password" ) return False elif response . lower ( ) == 'exit' : goodlogging . Log . Fatal ( "EXTRACT" , "Program terminated by user 'exit'" ) else : return response
Get password for rar archive from user input .
55,809
def CheckPasswordReuse ( skipUserInput ) : goodlogging . Log . Info ( "EXTRACT" , "RAR files needs password to extract" ) if skipUserInput is False : prompt = "Enter 't' to reuse the last password for just this file, " "'a' to reuse for all subsequent files, " "'n' to enter a new password for this file " "or 's' to enter a new password for all files: " response = goodlogging . Log . Input ( "EXTRACT" , prompt ) response = util . ValidUserResponse ( response , ( 't' , 'a' , 'n' , 's' ) ) else : response = 'a' if response . lower ( ) == 's' : return - 1 if response . lower ( ) == 'n' : return 0 elif response . lower ( ) == 't' : return 1 elif response . lower ( ) == 'a' : return 2
Check with user for password reuse .
55,810
def register ( self , func , singleton = False , threadlocal = False , name = None ) : func . _giveme_singleton = singleton func . _giveme_threadlocal = threadlocal if name is None : name = func . __name__ self . _registered [ name ] = func return func
Register a dependency function
55,811
def get_value ( self , name ) : factory = self . _registered . get ( name ) if not factory : raise KeyError ( 'Name not registered' ) if factory . _giveme_singleton : if name in self . _singletons : return self . _singletons [ name ] self . _singletons [ name ] = factory ( ) return self . _singletons [ name ] elif factory . _giveme_threadlocal : if hasattr ( self . _threadlocals , name ) : return getattr ( self . _threadlocals , name ) setattr ( self . _threadlocals , name , factory ( ) ) return getattr ( self . _threadlocals , name ) return factory ( )
Get return value of a dependency factory or a live singleton instance .
55,812
def trace ( fun , * a , ** k ) : @ wraps ( fun ) def tracer ( * a , ** k ) : ret = fun ( * a , ** k ) print ( 'trace:fun: %s\n ret=%s\n a=%s\nk%s\n' % ( str ( fun ) , str ( ret ) , str ( a ) , str ( k ) ) ) return ret return tracer
define a tracer for a rule function for log and statistic purposes
55,813
def timer ( fun , * a , ** k ) : @ wraps ( fun ) def timer ( * a , ** k ) : start = arrow . now ( ) ret = fun ( * a , ** k ) end = arrow . now ( ) print ( 'timer:fun: %s\n start:%s,end:%s, took [%s]' % ( str ( fun ) , str ( start ) , str ( end ) , str ( end - start ) ) ) return ret return timer
define a timer for a rule function for log and statistic purposes
55,814
def get_function(self, fun=None):
    """Get function as RuleFunction or return a NoRuleFunction function.

    Unknown functions are registered as "not found" dummies so later
    lookups succeed.
    """
    sfun = str(fun)
    self.say('get_function:' + sfun, verbosity=100)
    if not fun:
        return NoRuleFunction()
    if sfun in self._rule_functions:
        return self._rule_functions[sfun]
    self.add_function(name=sfun, fun=self.rule_function_not_found(fun))
    self.cry('fun(%s) not found, returning dummy' % (sfun), verbosity=10)
    if sfun in self._rule_functions:
        return self._rule_functions[sfun]
    # BUG FIX: the original called rule_function_not_found(fun) here without
    # `return`, so this branch silently returned None.
    return self.rule_function_not_found(fun)
get function as RuleFunction or return a NoRuleFunction function
55,815
def add_function ( self , fun = None , name = None , fun_type = FUN_TYPE ) : if not name : if six . PY2 : name = fun . func_name else : name = fun . __name__ self . say ( 'adding fun(%s)' % name , verbosity = 50 ) self . say ( 'adding fun_type:%s' % fun_type , verbosity = 50 ) if self . function_exists ( name ) : self . cry ( 'overwriting :fun(%s)' % name , verbosity = 10 ) self . say ( 'added :' + name , verbosity = 10 ) self . _rule_functions [ name ] = RuleFunction ( name , fun , fun_type ) return True
actually replace function
55,816
def function_exists ( self , fun ) : res = fun in self . _rule_functions self . say ( 'function exists:' + str ( fun ) + ':' + str ( res ) , verbosity = 10 ) return res
get the function's existence
55,817
def rule_function_not_found ( self , fun = None ) : sfun = str ( fun ) self . cry ( 'rule_function_not_found:' + sfun ) def not_found ( * a , ** k ) : return ( sfun + ':rule_function_not_found' , k . keys ( ) ) return not_found
any function that does not exist will be added as a dummy function that will gather inputs for easing into the possible future implementation
55,818
def parse_value(val):
    """Parse values from html.

    Strips percent signs, whitespace and "st" suffixes, normalises decimal
    commas to dots, then returns a float. Known missing-value markers are
    returned unchanged; an empty result becomes None.
    """
    cleaned = (val.replace("%", " ")
                  .replace(" ", "")
                  .replace(",", ".")
                  .replace("st", "")
                  .strip())
    if cleaned in ("Ejdeltagit", "N/A"):
        return cleaned
    if cleaned == "":
        return None
    return float(cleaned)
Parse values from html
55,819
def _get_html ( self , url ) : self . log . info ( u"/GET {}" . format ( url ) ) r = requests . get ( url ) if hasattr ( r , 'from_cache' ) : if r . from_cache : self . log . info ( "(from cache)" ) if r . status_code != 200 : throw_request_err ( r ) return r . content
Get html from url
55,820
def _get_json ( self , url ) : self . log . info ( u"/GET " + url ) r = requests . get ( url ) if hasattr ( r , 'from_cache' ) : if r . from_cache : self . log . info ( "(from cache)" ) if r . status_code != 200 : throw_request_err ( r ) return r . json ( )
Get json from url
55,821
def regions ( self ) : regions = [ ] elem = self . dimensions [ "region" ] . elem for option_elem in elem . find_all ( "option" ) : region = option_elem . text . strip ( ) regions . append ( region ) return regions
Get a list of all regions
55,822
def _get_region_slug ( self , id_or_label ) : region = id_or_label slug = region . replace ( u" " , "-" ) . replace ( u"ö", " o") \ . replace ( u"Ö", " O") \ . replace ( u"ä", " a") \ . replace ( u"å", " a") s" EXCEPTIONS = { "Jamtland-Harjedalens" : "Jamtlands" , "Rikets" : "Sveriges" , } if slug in EXCEPTIONS : slug = EXCEPTIONS [ slug ] return slug
Get the regional slug to be used in url Norrbotten = > Norrbottens
55,823
def default_value ( self ) : if not hasattr ( self , "_default_value" ) : if self . elem_type == "select" : try : def_value = get_option_value ( self . elem . select_one ( "[selected]" ) ) except AttributeError : def_value = get_option_value ( self . elem . select_one ( "option" ) ) elif self . elem_type == "checkbox" : def_value = self . elem . get ( "value" ) elif self . elem_type == "radio" : def_value = [ x for x in self . elem if x . has_attr ( "checked" ) ] [ 0 ] . get ( "value" ) self . _default_value = def_value assert def_value is not None return self . _default_value
The default category when making a query
55,824
def _parse_horizontal_scroll_table ( self , table_html ) : row_labels = [ parse_text ( x . text ) for x in table_html . select ( ".DTFC_LeftBodyWrapper tbody tr" ) ] row_label_ids = [ None ] * len ( row_labels ) cols = [ parse_text ( x . text ) for x in table_html . select ( ".dataTables_scrollHead th" ) ] value_rows = table_html . select ( ".dataTables_scrollBody tbody tr" ) values = [ ] for row_i , value_row in enumerate ( value_rows ) : row_values = [ parse_value ( x . text ) for x in value_row . select ( "td" ) ] values . append ( row_values ) sheet = Sheet ( zip ( row_label_ids , row_labels ) , cols , values ) return sheet . long_format
Get list of dicts from horizontally scrollable table
55,825
def is_json_file(filename, show_warnings=False):
    """Check configuration file type is JSON
    Return a boolean indicating whether the file is JSON format or not
    """
    try:
        load_config(filename, file_type="json")
    # BUG FIX: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; any parse failure simply means "not JSON".
    except Exception:
        return False
    return True
Check configuration file type is JSON Return a boolean indicating whether the file is JSON format or not
55,826
def is_yaml_file(filename, show_warnings=False):
    """Check configuration file type is yaml
    Return a boolean indicating whether the file is yaml format or not
    """
    # JSON is a subset of YAML, so rule it out first.
    if is_json_file(filename):
        return False
    try:
        config_dict = load_config(filename, file_type="yaml")
    # BUG FIX: narrowed the original bare `except:`.
    except Exception:
        return False
    # A plain string means the file parsed as a single scalar, not real YAML.
    return not isinstance(config_dict, str)
Check configuration file type is yaml Return a boolean indicating whether the file is yaml format or not
55,827
def is_ini_file(filename, show_warnings=False):
    """Check configuration file type is INI
    Return a boolean indicating whether the file is INI format or not
    """
    try:
        config_dict = load_config(filename, file_type="ini")
    # BUG FIX: narrowed the original bare `except:`.
    except Exception:
        return False
    # An empty parse result means no INI sections were found.
    return config_dict != {}
Check configuration file type is INI Return a boolean indicating whether the file is INI format or not
55,828
def is_toml_file(filename, show_warnings=False):
    """Check configuration file type is TOML
    Return a boolean indicating whether the file is TOML format or not
    """
    # Many YAML documents also parse as TOML, so rule YAML out first.
    if is_yaml_file(filename):
        return False
    try:
        load_config(filename, file_type="toml")
    # BUG FIX: narrowed the original bare `except:`.
    except Exception:
        return False
    return True
Check configuration file type is TOML Return a boolean indicating whether the file is TOML format or not
55,829
def _collect_settings ( self , apps ) : contents = { } if apps : for app in apps : if app not in settings . INSTALLED_APPS : raise CommandError ( "Application '{0}' not in settings.INSTALLED_APPS" . format ( app ) ) else : apps = settings . INSTALLED_APPS for app in apps : module = import_module ( app ) for module_dir in module . __path__ : json_file = os . path . abspath ( os . path . join ( module_dir , self . json_file ) ) if os . path . isfile ( json_file ) : with open ( json_file , 'r' ) as fp : contents [ app ] = json . load ( fp ) return contents
Iterate over given apps or INSTALLED_APPS and collect the content of each s settings file which is expected to be in JSON format .
55,830
def required_unique(objects, key):
    """A pyrsistent invariant which requires all objects in the given
    iterable to have a unique key.

    Returns (True, u"") when all keys are unique, otherwise
    (False, message) listing the duplicated keys.
    """
    seen = set()
    duplicate = set()
    for obj_key in (key(obj) for obj in objects):
        if obj_key in seen:
            duplicate.add(obj_key)
        else:
            seen.add(obj_key)
    if duplicate:
        return (False, u"Duplicate object keys: {}".format(duplicate))
    return (True, u"")
A pyrsistent invariant which requires all objects in the given iterable to have a unique key .
55,831
def item_by_name ( self , name ) : for obj in self . items : if obj . metadata . name == name : return obj raise KeyError ( name )
Find an item in this collection by its name metadata .
55,832
def _init_name_core ( self , name : str ) : self . __regex = re . compile ( rf'^{self._pattern}$' ) self . name = name
Runs whenever a new instance is initialized or sep is set .
55,833
def get_name ( self , ** values ) -> str : if not values and self . name : return self . name if values : for ck , cvs in _sorted_items ( self . compounds ) : if ck in cvs and ck in values : continue comp_values = [ values . pop ( cv , getattr ( self , cv ) ) for cv in cvs ] if None not in comp_values : values [ ck ] = '' . join ( rf'{v}' for v in comp_values ) return self . _get_nice_name ( ** values )
Get a new name string from this object s name values .
55,834
def cast_config ( cls , config : typing . Mapping [ str , str ] ) -> typing . Dict [ str , str ] : return { k : cls . cast ( v , k ) for k , v in config . items ( ) }
Cast config to grouped regular expressions .
55,835
def _execute_primitives ( self , commands ) : for p in commands : if self . _scanchain and self . _scanchain . _debug : print ( " Executing" , p ) p . execute ( self )
Run a list of executable primitives on this controller and distribute the returned data to the associated TDOPromises .
55,836
def pretty_version_text ( ) : version_lines = [ "dtool, version {}" . format ( dtool_version ) ] version_lines . append ( "\nBase:" ) version_lines . append ( "dtoolcore, version {}" . format ( dtoolcore . __version__ ) ) version_lines . append ( "dtool-cli, version {}" . format ( __version__ ) ) version_lines . append ( "\nStorage brokers:" ) for ep in iter_entry_points ( "dtool.storage_brokers" ) : package = ep . module_name . split ( "." ) [ 0 ] dyn_load_p = __import__ ( package ) version = dyn_load_p . __version__ storage_broker = ep . load ( ) version_lines . append ( "{}, {}, version {}" . format ( storage_broker . key , package . replace ( "_" , "-" ) , version ) ) modules = [ ep . module_name for ep in iter_entry_points ( "dtool.cli" ) ] packages = set ( [ m . split ( "." ) [ 0 ] for m in modules ] ) version_lines . append ( "\nPlugins:" ) for p in packages : dyn_load_p = __import__ ( p ) version_lines . append ( "{}, version {}" . format ( p . replace ( "_" , "-" ) , dyn_load_p . __version__ ) ) return "\n" . join ( version_lines )
Return pretty version text listing all plugins .
55,837
def dtool ( debug ) : level = logging . WARNING if debug : level = logging . DEBUG logging . basicConfig ( format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' , level = level )
Tool to work with datasets .
55,838
def add_nic ( self , instance_id , net_id ) : self . client . servers . interface_attach ( instance_id , None , net_id , None ) return True
Add a Network Interface Controller
55,839
def delete_nic ( self , instance_id , port_id ) : self . client . servers . interface_detach ( instance_id , port_id ) return True
Delete a Network Interface Controller
55,840
def disassociate_public_ip ( self , public_ip_id ) : floating_ip = self . client . floating_ips . get ( public_ip_id ) floating_ip = floating_ip . to_dict ( ) instance_id = floating_ip . get ( 'instance_id' ) address = floating_ip . get ( 'ip' ) self . client . servers . remove_floating_ip ( instance_id , address ) return True
Disassociate a external IP
55,841
def split ( self , bitindex ) : if bitindex < 0 : raise ValueError ( "bitindex must be larger or equal to 0." ) if bitindex > len ( self ) : raise ValueError ( "bitindex larger than the array's size. " "Len: %s; bitindex: %s" % ( len ( self ) , bitindex ) ) if bitindex == 0 : return None , self if bitindex == len ( self ) : return self , None left = TDOPromise ( self . _chain , self . _bitstart , bitindex , _parent = self ) right = TDOPromise ( self . _chain , 0 , len ( self ) - bitindex , _parent = self ) self . _components = [ ] self . _addsub ( left , 0 ) self . _addsub ( right , bitindex ) return left , right
Split a promise into two promises at the provided index .
55,842
def _fulfill ( self , bits , ignore_nonpromised_bits = False ) : if self . _allsubsfulfilled ( ) : if not self . _components : if ignore_nonpromised_bits : self . _value = bits [ self . _bitstartselective : self . _bitstartselective + self . _bitlength ] else : self . _value = bits [ self . _bitstart : self . _bitend ] else : self . _value = self . _components [ 0 ] [ 0 ] . _value for sub , offset in self . _components [ 1 : ] : self . _value += sub . _value if self . _parent is not None : self . _parent . _fulfill ( None )
Supply the promise with the bits from its associated primitive s execution .
55,843
def makesubatoffset(self, bitoffset, *, _offsetideal=None):
    """Create a copy of this promise with an offset and use it as this promise's child.

    Args:
        bitoffset: integer offset applied to the child's bit positions.
        _offsetideal: keyword-only offset for the selective bit start;
            defaults to ``bitoffset``.

    Returns:
        The new child TDOPromise, or ``self`` when the offset is zero.
    """
    if _offsetideal is None:
        _offsetideal = bitoffset
    # BUG FIX: compare integers with ==, not `is` (identity on int
    # literals is implementation-dependent and a SyntaxWarning in 3.8+).
    if bitoffset == 0:
        return self
    newpromise = TDOPromise(
        self._chain,
        self._bitstart + bitoffset,
        self._bitlength,
        _parent=self,
        bitstartselective=self._bitstartselective + _offsetideal,
    )
    self._addsub(newpromise, 0)
    return newpromise
Create a copy of this promise with an offset and use it as this promise s child .
55,844
def add ( self , promise , bitoffset , * , _offsetideal = None ) : if _offsetideal is None : _offsetideal = bitoffset if isinstance ( promise , TDOPromise ) : newpromise = promise . makesubatoffset ( bitoffset , _offsetideal = _offsetideal ) self . _promises . append ( newpromise ) elif isinstance ( promise , TDOPromiseCollection ) : for p in promise . _promises : self . add ( p , bitoffset , _offsetideal = _offsetideal )
Add a promise to the promise collection at an optional offset .
55,845
def split ( self , bitindex ) : if bitindex < 0 : raise ValueError ( "bitindex must be larger or equal to 0." ) if bitindex == 0 : return None , self lastend = 0 split_promise = False for splitindex , p in enumerate ( self . _promises ) : if bitindex in range ( lastend , p . _bitstart ) : split_promise = False break if bitindex in range ( p . _bitstart , p . _bitend ) : if bitindex - p . _bitstart == 0 : split_promise = False else : split_promise = True break lastend = p . _bitend else : raise Exception ( "Should be impossible" ) processed_left = TDOPromiseCollection ( self . _chain ) processed_right = TDOPromiseCollection ( self . _chain ) if split_promise : left , right = p . split ( bitindex - p . _bitstart ) for i in range ( splitindex ) : processed_left . add ( self . _promises [ i ] , 0 ) processed_left . add ( left , 0 ) processed_right . add ( right , 0 ) for tmpprim in self . _promises [ splitindex + 1 : ] : processed_right . add ( tmpprim , - bitindex ) return processed_left , processed_right else : for i in range ( splitindex ) : processed_left . add ( self . _promises [ i ] , 0 ) for i in range ( splitindex , len ( self . _promises ) ) : processed_right . add ( self . _promises [ i ] , - bitindex ) return processed_left , processed_right
Split a promise into two promises . A tail bit and the rest .
55,846
def makesubatoffset(self, bitoffset, *, _offsetideal=None):
    """Create a copy of this PromiseCollection with an offset applied to each
    contained promise and register each with their parent.

    Args:
        bitoffset: integer offset applied to each contained promise.
        _offsetideal: keyword-only selective offset; defaults to ``bitoffset``.

    Returns:
        A new TDOPromiseCollection, or ``self`` when the offset is zero.
    """
    if _offsetideal is None:
        _offsetideal = bitoffset
    # BUG FIX: compare integers with ==, not `is` (identity on int
    # literals is implementation-dependent and a SyntaxWarning in 3.8+).
    if bitoffset == 0:
        return self
    newpromise = TDOPromiseCollection(self._chain)
    for promise in self._promises:
        newpromise.add(promise, bitoffset, _offsetideal=_offsetideal)
    return newpromise
Create a copy of this PromiseCollection with an offset applied to each contained promise and register each with their parent .
55,847
def cli ( ctx , stage ) : if not ctx . bubble : ctx . say_yellow ( 'There is no bubble present, ' + 'will not show any transformer rules' ) raise click . Abort ( ) path = ctx . home + '/' RULES = None ctx . say ( 'Stage:' + stage , verbosity = 10 ) if stage in STAGES : if stage in ctx . cfg . CFG : STAGE = ctx . cfg . CFG [ stage ] ctx . say ( 'Stage found:' , stuff = STAGE , verbosity = 100 ) if 'TRANSFORM' in STAGE : TRANSFORM = STAGE . TRANSFORM ctx . say ( 'Transform found:' , stuff = TRANSFORM , verbosity = 100 ) if 'RULES' in TRANSFORM : RULES = TRANSFORM . RULES ctx . say ( 'Rules found:' , stuff = RULES , verbosity = 100 ) if not RULES : ctx . say_red ( 'There is no TRANSFORM.RULES in stage:' + stage ) ctx . say_yellow ( 'please check configuration in ' + ctx . home + '/config/config.yaml' ) raise click . Abort ( ) if type ( RULES ) == str and RULES . endswith ( '.bubble' ) : ctx . say ( 'loading rules' , verbosity = 10 ) rules = get_bubble ( ctx , path + RULES ) rule_type = 'bubble' transformer = Transformer ( rules = rules , rule_type = rule_type , bubble_path = path , verbose = ctx . get_verbose ( ) ) rules = transformer . _rules . get_rules ( ) ctx . say ( 'current number of rules:' + str ( len ( rules ) ) , verbosity = 1 ) for r in rules : ctx . say ( 'rule: ' + str ( r ) , verbosity = 1 ) ctx . gbc . say ( 'rules: ' , stuff = rules , verbosity = 100 ) else : ctx . say ( 'no rules!' ) return True
Show transformer rules
55,848
def connectExec ( connection , protocol , commandLine ) : deferred = connectSession ( connection , protocol ) @ deferred . addCallback def requestSubsystem ( session ) : return session . requestExec ( commandLine ) return deferred
Connect a Protocol to a ssh exec session
55,849
def connectShell ( connection , protocol ) : deferred = connectSession ( connection , protocol ) @ deferred . addCallback def requestSubsystem ( session ) : return session . requestShell ( ) return deferred
Connect a Protocol to a ssh shell session
55,850
def connectSubsystem ( connection , protocol , subsystem ) : deferred = connectSession ( connection , protocol ) @ deferred . addCallback def requestSubsystem ( session ) : return session . requestSubsystem ( subsystem ) return deferred
Connect a Protocol to a ssh subsystem channel
55,851
def connectSession ( connection , protocol , sessionFactory = None , * args , ** kwargs ) : factory = sessionFactory or defaultSessionFactory session = factory ( * args , ** kwargs ) session . dataReceived = protocol . dataReceived session . closed = lambda : protocol . connectionLost ( connectionDone ) deferred = defer . Deferred ( ) @ deferred . addCallback def connectProtocolAndReturnSession ( specificData ) : protocol . makeConnection ( session ) return session session . sessionOpen = deferred . callback session . openFailed = deferred . errback connection . openChannel ( session ) return deferred
Open a SSHSession channel and connect a Protocol to it
55,852
def requestSubsystem ( self , subsystem ) : data = common . NS ( subsystem ) return self . sendRequest ( 'subsystem' , data , wantReply = True )
Request a subsystem and return a deferred reply .
55,853
def requestPty ( self , term = None , rows = 0 , cols = 0 , xpixel = 0 , ypixel = 0 , modes = '' ) : term = term or os . environ . get ( 'TERM' , '' ) data = packRequest_pty_req ( term , ( rows , cols , xpixel , ypixel ) , modes ) return self . sendRequest ( 'pty-req' , data )
Request allocation of a pseudo - terminal for a channel
55,854
def requestEnv(self, env=None):
    """Send requests to set the environment variables for the channel.

    Args:
        env: mapping of variable name -> value; defaults to empty.
    """
    # BUG FIX: the original used a mutable default argument (env={}),
    # which is shared across all calls.
    if env is None:
        env = {}
    for variable, value in env.items():
        data = common.NS(variable) + common.NS(value)
        self.sendRequest('env', data)
Send requests to set the environment variables for the channel
55,855
def commandstr(command):
    """Convert command into string."""
    # Lookup table replaces the original if/elif chain; unknown commands
    # fall through to the sentinel name.
    names = {
        CMD_MESSAGE_ERROR: "CMD_MESSAGE_ERROR",
        CMD_MESSAGE_LIST: "CMD_MESSAGE_LIST",
        CMD_MESSAGE_PASSWORD: "CMD_MESSAGE_PASSWORD",
        CMD_MESSAGE_MP3: "CMD_MESSAGE_MP3",
        CMD_MESSAGE_DELETE: "CMD_MESSAGE_DELETE",
        CMD_MESSAGE_VERSION: "CMD_MESSAGE_VERSION",
        CMD_MESSAGE_CDR_AVAILABLE: "CMD_MESSAGE_CDR_AVAILABLE",
        CMD_MESSAGE_CDR: "CMD_MESSAGE_CDR",
    }
    return names.get(command, "CMD_MESSAGE_UNKNOWN")
Convert command into string .
55,856
def run ( ) : parser = OptionParser ( version = __version__ , description = __doc__ , ) parser . add_option ( '-u' , '--url' , dest = 'url' , help = 'Database URL (connection string)' , ) parser . add_option ( '-r' , '--render' , dest = 'render' , default = 'dot' , choices = [ 'plantuml' , 'dot' ] , help = 'Output format - plantuml or dot' , ) parser . add_option ( '-l' , '--list' , dest = 'list' , action = 'store_true' , help = 'Output database list of tables and exit' , ) parser . add_option ( '-i' , '--include' , dest = 'include' , help = 'List of tables to include through ","' , ) parser . add_option ( '-e' , '--exclude' , dest = 'exclude' , help = 'List of tables to exlude through ","' , ) ( options , args ) = parser . parse_args ( ) if not options . url : print ( '-u/--url option required' ) exit ( 1 ) engine = create_engine ( options . url ) meta = MetaData ( ) meta . reflect ( bind = engine ) if options . list : print ( 'Database tables:' ) tables = sorted ( meta . tables . keys ( ) ) def _g ( l , i ) : try : return tables [ i ] except IndexError : return '' for i in range ( 0 , len ( tables ) , 2 ) : print ( ' {0}{1}{2}' . format ( _g ( tables , i ) , ' ' * ( 38 - len ( _g ( tables , i ) ) ) , _g ( tables , i + 1 ) , ) ) exit ( 0 ) tables = set ( meta . tables . keys ( ) ) if options . include : tables &= set ( map ( string . strip , options . include . split ( ',' ) ) ) if options . exclude : tables -= set ( map ( string . strip , options . exclude . split ( ',' ) ) ) desc = describe ( map ( lambda x : operator . getitem ( meta . tables , x ) , tables ) ) print ( getattr ( render , options . render ) ( desc ) )
Command for reflection database objects
55,857
def refresh ( self ) : strawpoll_response = requests . get ( '{api_url}/{poll_id}' . format ( api_url = api_url , poll_id = self . id ) ) raise_status ( strawpoll_response ) self . status_code = strawpoll_response . status_code self . response_json = strawpoll_response . json ( ) self . id = self . response_json [ 'id' ] self . title = self . response_json [ 'title' ] self . options = self . response_json [ 'options' ] self . votes = self . response_json [ 'votes' ] self . captcha = self . response_json [ 'captcha' ] self . dupcheck = self . response_json [ 'dupcheck' ] self . url = 'https://www.strawpoll.me/{id}' . format ( id = self . id ) self . results_url = 'https://www.strawpoll.me/{id}/r' . format ( id = self . id )
Refresh all class attributes .
55,858
def write_json_file ( self , path ) : with open ( path , "w" ) as f : f . write ( self . to_json ( ) )
Serialize this VariantCollection to a JSON representation and write it out to a text file .
55,859
def read_json_file ( cls , path ) : with open ( path , 'r' ) as f : json_string = f . read ( ) return cls . from_json ( json_string )
Construct a VariantCollection from a JSON file .
55,860
def dumps ( data , escape = False , ** kwargs ) : if 'sort_keys' not in kwargs : kwargs [ 'sort_keys' ] = True converted = json . dumps ( data , default = _converter , ** kwargs ) if escape : return cgi . escape ( converted ) return converted
A wrapper around json . dumps that can handle objects that json module is not aware .
55,861
def deserialize ( klass , data ) : handler = DESERIALIZE_REGISTRY . get ( klass ) if handler : return handler ( data ) raise TypeError ( "There is no deserializer registered to handle " "instances of '{}'" . format ( klass . __name__ ) )
Helper function to access a method that creates objects of a given klass with the received data .
55,862
def _convert_from ( data ) : try : module , klass_name = data [ '__class__' ] . rsplit ( '.' , 1 ) klass = getattr ( import_module ( module ) , klass_name ) except ( ImportError , AttributeError , KeyError ) : return data return deserialize ( klass , data [ '__value__' ] )
Internal function that will be hooked to the native json . loads
55,863
def _converter ( data ) : handler = REGISTRY . get ( data . __class__ ) if handler : full_name = '{}.{}' . format ( data . __class__ . __module__ , data . __class__ . __name__ ) return { '__class__' : full_name , '__value__' : handler ( data ) , } raise TypeError ( repr ( data ) + " is not JSON serializable" )
Internal function that will be passed to the native json . dumps .
55,864
def handle_error ( self , error ) : logging . exception ( "try to sleep if there are repeating errors." ) error_desc = str ( error ) now = datetime . datetime . now ( ) if error_desc not in self . error_time_log : self . error_time_log [ error_desc ] = now return time_of_last_encounter = self . error_time_log [ str ( error ) ] time_since_last_encounter = now - time_of_last_encounter if time_since_last_encounter . total_seconds ( ) > self . config . get ( 'min_seconds_between_errors' ) : self . error_time_log [ error_desc ] = now return if error_desc not in self . error_sleep_log : time . sleep ( self . config . get ( 'sleep_seconds_on_consecutive_errors' ) ) self . error_sleep_log [ error_desc ] = 1 else : sys . exit ( )
Try to detect repetitive errors and sleep for a while to avoid being marked as spam
55,865
def parse_isodate ( datestr ) : m = isodate_rx . search ( datestr ) assert m , 'unrecognized date format: ' + datestr year , month , day = m . group ( 'year' , 'month' , 'day' ) hour , minute , second , fraction = m . group ( 'hour' , 'minute' , 'second' , 'fraction' ) tz , tzhh , tzmm = m . group ( 'tz' , 'tzhh' , 'tzmm' ) dt = datetime . datetime ( int ( year ) , int ( month ) , int ( day ) , int ( hour ) ) if fraction is None : fraction = 0 else : fraction = float ( '0.' + fraction ) if minute is None : dt = dt . replace ( minute = int ( 60 * fraction ) ) else : dt = dt . replace ( minute = int ( minute ) ) if second is None : dt = dt . replace ( second = int ( 60 * fraction ) ) else : dt = dt . replace ( second = int ( second ) , microsecond = int ( 1000000 * fraction ) ) if tz is not None : if tz [ 0 ] == 'Z' : offset = 0 else : offset = datetime . timedelta ( minutes = int ( tzmm or 0 ) , hours = int ( tzhh ) ) if tz [ 0 ] == '-' : offset = - offset dt = dt . replace ( tzinfo = UTCOffset ( offset ) ) return dt
Parse a string that loosely fits ISO 8601 formatted date - time string
55,866
def ls ( self , rev , path , recursive = False , recursive_dirs = False , directory = False , report = ( ) ) : raise NotImplementedError
List directory or file
55,867
def log ( self , revrange = None , limit = None , firstparent = False , merges = None , path = None , follow = False ) : raise NotImplementedError
Get commit logs
55,868
def user_create(self, cloudflare_email, cloudflare_pass, unique_id=None):
    """Create a new CloudFlare user with the given email and password.

    A `unique_id` may optionally be attached so the user can later be
    looked up by it.
    """
    params = dict(
        act='user_create',
        cloudflare_email=cloudflare_email,
        cloudflare_pass=cloudflare_pass,
    )
    if unique_id:
        params['unique_id'] = unique_id
    return self._request(params)
Create new cloudflare user with selected email and id . Optionally also select unique_id which can be then used to get user information .
55,869
def zone_set(self, user_key, zone_name, resolve_to, subdomains):
    """Create a new zone for the user associated with this user_key."""
    return self._request(dict(
        act='zone_set',
        user_key=user_key,
        zone_name=zone_name,
        resolve_to=resolve_to,
        subdomains=subdomains,
    ))
Create new zone for user associated with this user_key .
55,870
def full_zone_set(self, user_key, zone_name):
    """Create a new zone and all subdomains for the user with this user_key."""
    return self._request(dict(act='full_zone_set', user_key=user_key, zone_name=zone_name))
Create new zone and all subdomains for user associated with this user_key .
55,871
def user_lookup(self, cloudflare_email=None, unique_id=None):
    """Look up user data by either cloudflare_email or unique_id.

    Raises:
        KeyError: if neither identifier is supplied.
    """
    if not cloudflare_email and not unique_id:
        raise KeyError('Either cloudflare_email or unique_id must be present')
    # Email takes precedence when both are supplied.
    key, value = (('cloudflare_email', cloudflare_email) if cloudflare_email
                  else ('unique_id', unique_id))
    return self._request({'act': 'user_lookup', key: value})
Lookup user data based on either his cloudflare_email or his unique_id .
55,872
def user_auth(self, cloudflare_email=None, cloudflare_pass=None, unique_id=None):
    """Fetch a user_key using either email+password or a unique_id.

    Raises:
        KeyError: if neither a full credential pair nor a unique_id is given.
    """
    have_credentials = bool(cloudflare_email and cloudflare_pass)
    if not have_credentials and not unique_id:
        raise KeyError('Either cloudflare_email and cloudflare_pass or unique_id must be present')
    params = {'act': 'user_auth'}
    if have_credentials:
        params['cloudflare_email'] = cloudflare_email
        params['cloudflare_pass'] = cloudflare_pass
    if unique_id:
        params['unique_id'] = unique_id
    return self._request(params)
Get user_key based on either his email and password or unique_id .
55,873
def zone_list(self, user_key, limit=100, offset=0, zone_name=None, sub_id=None,
              zone_status='ALL', sub_status='ALL', ):
    """List zones for a user, optionally filtered by name, id or status.

    Raises:
        ValueError: when a status filter has an unsupported value.
    """
    if zone_status not in ('V', 'D', 'ALL'):
        raise ValueError('zone_status has to be V, D or ALL')
    if sub_status not in ('V', 'CNL', 'ALL'):
        raise ValueError('sub_status has to be V, CNL or ALL')
    params = {
        'act': 'zone_list',
        'user_key': user_key,
        'limit': limit,
        'offset': offset,
        'zone_status': zone_status,
        'sub_status': sub_status,
    }
    # Optional filters are only sent when set.
    if zone_name:
        params['zone_name'] = zone_name
    if sub_id:
        params['sub_id'] = sub_id
    return self._request(params)
List zones for a user .
55,874
def attr_exists(self, attr):
    """Return True if at least one instance of the attribute is found.

    Short-circuits on the first hit instead of materialising the whole
    generator (the original built a full list just to take its length).
    """
    return any(True for _ in self.attr_gen(attr))
Returns True if at least one instance of the attribute is found.
55,875
def datasets(self):
    """Return a list of all dataset paths in the file.

    Clears the class-level cache, then walks the file with `visititems`,
    which fills CACHE['dataset_paths'] via the `_is_dataset` callback.
    """
    HiisiHDF._clear_cache()
    self.visititems(HiisiHDF._is_dataset)
    return HiisiHDF.CACHE['dataset_paths']
Method returns a list of dataset paths .
55,876
def create_from_filedict(self, filedict):
    """Create (or append to) an h5 file from a dict describing its structure.

    `filedict` maps hdf5 paths to dicts; a 'DATASET' key in the inner dict
    holds the dataset payload (a numpy array), every other key/value pair
    becomes an hdf5 attribute.  Does nothing when the file is opened
    read-only.

    Fix: the original used the Python-2-only `dict.iteritems()` and
    `dict.has_key()`, which raise AttributeError on Python 3.
    """
    if self.mode not in ['r+', 'w', 'w-', 'x', 'a']:
        return  # read-only: nothing to do
    for h5path, path_content in filedict.items():
        if 'DATASET' in path_content:
            if h5path in self:
                # Dataset already exists: only refresh its attributes.
                for key, value in path_content.items():
                    if key != 'DATASET':
                        self[h5path].attrs[key] = value
            else:
                try:
                    group = self.create_group(os.path.dirname(h5path))
                except ValueError:
                    # Parent group already exists.
                    group = self[os.path.dirname(h5path)]
                new_dataset = group.create_dataset(os.path.basename(h5path),
                                                   data=path_content['DATASET'])
                for key, value in path_content.items():
                    if key != 'DATASET':
                        new_dataset.attrs[key] = value
        else:
            try:
                group = self.create_group(h5path)
            except ValueError:
                group = self[h5path]
            for key, value in path_content.items():
                group.attrs[key] = value
Creates an h5 file from a dictionary describing the file structure. Filedict is a regular dictionary whose keys are hdf5 paths and whose values are dictionaries containing the metadata and datasets. Metadata is given as normal key-value pairs and dataset arrays are given using the DATASET key. Datasets must be numpy arrays. The method can also be used to append an existing hdf5 file. If the file is opened in read-only mode the method does nothing.
55,877
def search(self, attr, value, tolerance=0):
    """Find paths whose attribute `attr` matches `value`.

    Numeric attributes match within `tolerance`; everything else must
    compare exactly equal.
    """
    matches = []
    for pair in self.attr_gen(attr):
        if isinstance(pair.value, str):
            is_numeric = False
        else:
            dtype_name = pair.value.dtype.name
            is_numeric = 'int' in dtype_name or 'float' in dtype_name
        if is_numeric:
            if abs(pair.value - value) <= tolerance:
                matches.append(pair.path)
        elif pair.value == value:
            matches.append(pair.path)
    return matches
Find paths with a key value match
55,878
def _extractReporterIons ( ionArrays , reporterMz , mzTolerance ) : reporterIons = { 'mz' : [ ] , 'i' : [ ] } for reporterMzValue in reporterMz : limHi = reporterMzValue * ( 1 + mzTolerance ) limLo = reporterMzValue * ( 1 - mzTolerance ) loPos = bisect . bisect_left ( ionArrays [ 'mz' ] , limLo ) upPos = bisect . bisect_right ( ionArrays [ 'mz' ] , limHi ) matchingValues = ionArrays [ 'mz' ] [ loPos : upPos ] if matchingValues . size == 0 : reporterIons [ 'i' ] . append ( 0 ) reporterIons [ 'mz' ] . append ( numpy . nan ) elif matchingValues . size == 1 : reporterIons [ 'i' ] . append ( ionArrays [ 'i' ] [ loPos ] ) reporterIons [ 'mz' ] . append ( ionArrays [ 'mz' ] [ loPos ] ) else : mzDeviations = numpy . abs ( matchingValues - reporterMzValue ) minDeviationPos = numpy . argmin ( mzDeviations ) bestMatchArrayPos = range ( loPos , upPos ) [ minDeviationPos ] reporterIons [ 'i' ] . append ( ionArrays [ 'i' ] [ bestMatchArrayPos ] ) reporterIons [ 'mz' ] . append ( ionArrays [ 'mz' ] [ bestMatchArrayPos ] ) reporterIons [ 'mz' ] = numpy . array ( reporterIons [ 'mz' ] , dtype = ionArrays [ 'mz' ] . dtype ) reporterIons [ 'i' ] = numpy . array ( reporterIons [ 'i' ] , dtype = ionArrays [ 'i' ] . dtype ) return reporterIons
Find a list of reporter ions and return their mz and intensity values.
55,879
def _correctIsotopeImpurities ( matrix , intensities ) : correctedIntensities , _ = scipy . optimize . nnls ( matrix , intensities ) return correctedIntensities
Corrects observed reporter ion intensities for isotope impurities .
55,880
def _normalizeImpurityMatrix ( matrix ) : newMatrix = list ( ) for line in matrix : total = sum ( line ) if total != 0 : newMatrix . append ( [ i / total for i in line ] ) else : newMatrix . append ( line ) return newMatrix
Normalize each row of the matrix so that the sum of the row equals 1.
55,881
def _padImpurityMatrix ( matrix , preChannels , postChannels ) : extendedMatrix = list ( ) lastMatrixI = len ( matrix ) - 1 for i , line in enumerate ( matrix ) : prePadding = itertools . repeat ( 0. , i ) postPadding = itertools . repeat ( 0. , lastMatrixI - i ) newLine = list ( itertools . chain ( prePadding , line , postPadding ) ) extendedMatrix . append ( newLine [ preChannels : - postChannels ] ) return extendedMatrix
Align the values of an isotope impurity matrix and fill up with 0 .
55,882
def _processImpurityMatrix(self):
    """Prepare the impurity matrix for correcting observed intensities.

    Normalizes the rows, pads/aligns the channels, and transposes the
    result.
    """
    normalized = _normalizeImpurityMatrix(self.impurityMatrix)
    padded = _padImpurityMatrix(normalized, self.matrixPreChannels, self.matrixPostChannels)
    return _transposeMatrix(padded)
Process the impurity matrix so that it can be used to correct observed reporter intensities .
55,883
def exception(message):
    """Decorator factory for exception handler methods.

    Injects `message`, `prefix` and `statsd` keyword arguments into the
    wrapped method.  When the instance's `messages` switch is off, the
    message is forced to None.
    """
    def decorator(method):
        @wraps(method)
        def wrapper(self, *args, **kwargs):
            if not self.messages:
                resolved = None
            elif args:
                # An explicit positional message wins.
                resolved = args[0]
            else:
                resolved = kwargs.get('message', message)
            kwargs['message'] = resolved
            kwargs['prefix'] = self.prefix
            kwargs['statsd'] = self.statsd
            return method(self, **kwargs)
        return wrapper
    return decorator
Exception method convenience wrapper .
55,884
def to_dict(self):
    """Return the exception's payload (plus message, if any) as a dict."""
    result = dict(self.payload) if self.payload else {}
    if self.message:
        result['message'] = self.message
    return result
Convert Exception class to a Python dictionary .
55,885
def init_app(self, app, config=None, statsd=None):
    """Initialise the Flask extension.

    An explicit `config` wins; otherwise the app's config is adopted when
    none was set yet.
    """
    if config is not None:
        self.config = config
    elif self.config is None:
        self.config = app.config
    cfg = self.config
    self.messages = cfg.get('EXCEPTION_MESSAGE', True)
    self.prefix = cfg.get('EXCEPTION_PREFIX', DEFAULT_PREFIX)
    self.statsd = statsd
Init Flask Extension .
55,886
def wrap_node(self, node, options):
    """Wrap `node` in a Celery task.

    Celery registers tasks by decorating them, and so do we: the caller may
    supply their own task decorator via options['celery_task'], otherwise
    our default one is applied.
    """
    task_factory = options.get('celery_task', self.celery_task)
    return task_factory(node)
Celery registers tasks by decorating them, and so do we, so the user can pass a Celery task and we'll wrap our code with theirs in a package Celery can execute.
55,887
def checkpoint(key=0, unpickler=pickle.load, pickler=pickle.dump,
               work_dir=gettempdir(), refresh=False):
    """Cache a function's return value on disk and reuse it on later calls.

    `key` selects the cache file name: a literal file name (str), a
    `string.Template` substituted with the kwargs and formatted with the
    positional args, a function of (args, kwargs), or an integer index into
    the positional args (default: argument 0).  `refresh` (bool or callable)
    forces recomputation.  It is the caller's responsibility to pick a key
    scheme that gives distinct files for distinct argument sets.

    Fixes vs. the original: removed a no-op ``try/except: raise`` around the
    function call, replaced deprecated ``logging.warn`` with
    ``logging.warning``, and switched to lazy ``%s`` logging arguments.
    """
    def decorator(func):
        def wrapped(*args, **kwargs):
            # Work out where this call's checkpoint file lives.
            if isinstance(key, str):
                save_file = os.path.join(work_dir, key)
            elif isinstance(key, Template):
                save_file = os.path.join(work_dir, key.substitute(kwargs))
                save_file = save_file.format(*args)
            elif isinstance(key, types.FunctionType):
                save_file = os.path.join(work_dir, key(args, kwargs))
            else:
                logging.warning('Using 0-th argument as default.')
                save_file = os.path.join(work_dir, '{0}').format(args[key])
            logging.info('checkpoint@ %s', save_file)
            do_refresh = refresh() if isinstance(refresh, types.FunctionType) else refresh
            if do_refresh or not os.path.exists(save_file):
                out = func(*args, **kwargs)
                # Only persist after a successful call.
                with open(save_file, 'wb') as f:
                    pickler(out, f)
                return out
            logging.info('Checkpoint exists. Loading from: %s', save_file)
            with open(save_file, 'rb') as f:
                return unpickler(f)
        return wrapped
    return decorator
A utility decorator to save intermediate results of a function . It is the caller s responsibility to specify a key naming scheme such that the output of each function call with different arguments is stored in a separate file .
55,888
def run():
    """Parse command-line arguments and print a braille bar graph."""
    parser = argparse.ArgumentParser(
        prog='python -m braillegraph',
        description='Print a braille bar graph of the given integers.')
    # Allow suppressing the trailing newline.
    parser.add_argument('-n', '--no-newline', action='store_const', dest='end',
                        const='', default=None,
                        help='do not print the trailing newline character')
    subparsers = parser.add_subparsers(title='directions')
    horiz = subparsers.add_parser('horizontal', help='a horizontal graph')
    horiz.set_defaults(func=lambda args: horizontal_graph(args.integers))
    horiz.add_argument('integers', metavar='N', type=int, nargs='+',
                       help='an integer')
    vert = subparsers.add_parser('vertical', help='a vertical graph')
    vert.set_defaults(func=lambda args: vertical_graph(args.integers, sep=args.sep))
    vert.add_argument('integers', metavar='N', type=int, nargs='+',
                      help='an integer')
    vert.add_argument('-s', '--sep', action='store', default=None,
                      help='separator for groups of bars')
    parsed = parser.parse_args()
    print(parsed.func(parsed), end=parsed.end)
Display the arguments as a braille graph on standard output .
55,889
def _rnd_date ( start , end ) : return date . fromordinal ( random . randint ( start . toordinal ( ) , end . toordinal ( ) ) )
Internal random date generator .
55,890
def rnd_date_list_high_performance(size, start=date(1970, 1, 1), end=None, **kwargs):
    """Generate a large batch of random dates between `start` and `end`.

    Uses numpy's vectorised randint when available, falling back to the
    stdlib `random` module otherwise.  `end` defaults to today.
    """
    if end is None:
        end = date.today()
    start_days = to_ordinal(parser.parse_datetime(start))
    end_days = to_ordinal(parser.parse_datetime(end))
    _assert_correct_start_end(start_days, end_days)
    if has_np:
        ordinals = np.random.randint(start_days, end_days, size)
    else:
        ordinals = (random.randint(start_days, end_days) for _ in range(size))
    return [from_ordinal(days) for days in ordinals]
Generate a large list of random dates efficiently.
55,891
def day_interval(year, month, day, milliseconds=False, return_string=False):
    """Return the first and last instant of the given day.

    With milliseconds=True the end has millisecond resolution, otherwise
    second resolution.  With return_string=True both endpoints are returned
    as strings.
    """
    precision = timedelta(milliseconds=1) if milliseconds else timedelta(seconds=1)
    start = datetime(year, month, day)
    end = start + timedelta(days=1) - precision
    if return_string:
        return str(start), str(end)
    return start, end
Return a start datetime and end datetime of a day .
55,892
def month_interval(year, month, milliseconds=False, return_string=False):
    """Return the first and last instant of the given month.

    With milliseconds=True the end has millisecond resolution, otherwise
    second resolution.  With return_string=True both endpoints are returned
    as strings.
    """
    precision = timedelta(milliseconds=1) if milliseconds else timedelta(seconds=1)
    start = datetime(year, month, 1)
    # Roll over to January of the next year after December.
    next_year, next_month = (year + 1, 1) if month == 12 else (year, month + 1)
    end = datetime(next_year, next_month, 1) - precision
    if return_string:
        return str(start), str(end)
    return start, end
Return a start datetime and end datetime of a month .
55,893
def year_interval(year, milliseconds=False, return_string=False):
    """Return the first and last instant of the given year.

    With milliseconds=True the end has millisecond resolution, otherwise
    second resolution.  With return_string=True both endpoints are returned
    as strings.
    """
    precision = timedelta(milliseconds=1) if milliseconds else timedelta(seconds=1)
    start = datetime(year, 1, 1)
    end = datetime(year + 1, 1, 1) - precision
    if return_string:
        return str(start), str(end)
    return start, end
Return a start datetime and end datetime of a year .
55,894
def get_milestone(self, title):
    """Return the repo milestone with the given title, creating it if needed.

    A falsy title yields GithubObject.NotSet.  The repo's milestones are
    fetched once and cached on the instance.
    """
    if not title:
        return GithubObject.NotSet
    if not hasattr(self, '_milestones'):
        # Lazily build the title -> milestone cache.
        self._milestones = {m.title: m for m in self.repo.get_milestones()}
    existing = self._milestones.get(title)
    if existing:
        return existing
    return self.repo.create_milestone(title=title)
Given the title as a str, look for an existing milestone or create a new one, and return the object.
55,895
def get_assignee(self, login):
    """Return the repo assignee matching `login`.

    A falsy login yields GithubObject.NotSet.  An unknown login prints a
    warning and returns None.  The assignee list is fetched once and cached
    on the instance.
    """
    if not login:
        return GithubObject.NotSet
    try:
        cache = self._assignees
    except AttributeError:
        cache = self._assignees = {c.login: c for c in self.repo.get_assignees()}
    assignee = cache.get(login)
    if assignee is None:
        print("{} doesn't belong to this repo. This issue won't be assigned.".format(login))
    return assignee
Given the user login, look for a user in the assignee list of the repo and return it if found.
55,896
def sender(self, issues):
    """Push a list of issues to GitHub, editing existing ones and creating new ones."""
    for issue in issues:
        state = self.get_state(issue.state)
        if issue.number:
            try:
                gh_issue = self.repo.get_issue(issue.number)
                original_state = gh_issue.state
                # Pick an action label for the console report based on the
                # state transition; the edit() below applies the new state.
                if original_state == state:
                    action = 'Updated'
                elif original_state == 'closed':
                    action = 'Reopened'
                else:
                    action = 'Closed'
                gh_issue.edit(title=issue.title, body=issue.body, labels=issue.labels,
                              milestone=self.get_milestone(issue.milestone),
                              assignee=self.get_assignee(issue.assignee),
                              state=self.get_state(issue.state))
                print('{} #{}: {}'.format(action, gh_issue.number, gh_issue.title))
            except GithubException:
                # Issue number not found on the remote: skip it.
                print('Not found #{}: {} (ignored)'.format(issue.number, issue.title))
                continue
        else:
            # No number yet: create a brand-new issue.
            gh_issue = self.repo.create_issue(title=issue.title, body=issue.body,
                                              labels=issue.labels,
                                              milestone=self.get_milestone(issue.milestone),
                                              assignee=self.get_assignee(issue.assignee))
            print('Created #{}: {}'.format(gh_issue.number, gh_issue.title))
push a list of issues to github
55,897
def wrap_node(self, node, options):
    """Wrap `node` as an RQ job.

    Per-node options may override the queue, connection, timeout and result
    TTL, so different nodes can run on different queues without needing
    separate queue objects.
    """
    queue_name = options.get('queue', 'default')
    connection = options.get('connection', self.redis_connection)
    timeout = options.get('timeout', None)
    result_ttl = options.get('result_ttl', 500)
    return job(queue=queue_name, connection=connection,
               timeout=timeout, result_ttl=result_ttl)(node)
we have the option to construct nodes here so we can use different queues for nodes without having to have different queue objects .
55,898
def create_albaran_automatic(pk, list_lines):
    """Automatically create the delivery note (albaran) for the given order lines."""
    # Order-line pks that are already attached to a delivery-note line.
    line_bd = SalesLineAlbaran.objects.filter(line_order__pk__in=list_lines).values_list('line_order__pk')
    # NOTE(review): `len(line_bd[0])` is the length of a single 1-tuple (always 1),
    # so this comparison looks like it was meant to be `line_bd.count()` — confirm.
    if line_bd.count() == 0 or len(list_lines) != len(line_bd[0]):
        if line_bd.count() != 0:
            # Drop lines that are already included in a delivery note.
            for x in line_bd[0]:
                list_lines.pop(list_lines.index(x))
        GenLineProduct.create_albaran_from_order(pk, list_lines)
Automatically create the delivery note (albaran).
55,899
def create_invoice_from_albaran(pk, list_lines):
    """Create an invoice from delivery-note (albaran) lines.

    `pk` and `list_lines` identify delivery-note lines; the related
    order-line information is looked up to build the invoice.  Returns a
    context dict, with an 'error' entry on failure.
    """
    context = {}
    if list_lines:
        # Order-line pks for the selected delivery-note lines that are not
        # invoiced yet.
        new_list_lines = [x[0] for x in SalesLineAlbaran.objects.values_list('line_order__pk').filter(
            pk__in=[int(x) for x in list_lines]
        ).exclude(invoiced=True)]
        if new_list_lines:
            lo = SalesLineOrder.objects.values_list('order__pk').filter(pk__in=new_list_lines)[:1]
            if lo and lo[0] and lo[0][0]:
                new_pk = lo[0][0]
                context = GenLineProduct.create_invoice_from_order(new_pk, new_list_lines)
                if 'error' not in context or not context['error']:
                    # Mark the source delivery-note lines as invoiced.
                    SalesLineAlbaran.objects.filter(
                        pk__in=[int(x) for x in list_lines]
                    ).exclude(invoiced=True).update(invoiced=True)
                return context
            else:
                error = _('Pedido no encontrado')
        else:
            error = _('Lineas no relacionadas con pedido')
    else:
        error = _('Lineas no seleccionadas')
    context['error'] = error
    return context
The pk and list_lines belong to delivery notes (albaranes); we need the information from the order lines.