Columns: idx — int64 (0 to 63k); question — string (length 61 to 4.03k); target — string (length 6 to 1.23k)
8,200
def store ( self , secrets , job ) : for j in job : if j in secrets : job [ j ] = self . add ( job [ j ] )
Sanitize the job object of any of the given secrets .
8,201
def has_secret ( self , value ) : if isinstance ( value , string_types ) : for k in self . secrets : if k in value : return True elif isinstance ( value , MutableMapping ) : for this_value in value . values ( ) : if self . has_secret ( this_value ) : return True elif isinstance ( value , MutableSequence ) : for this_value in value : if self . has_secret ( this_value ) : return True return False
Test if the provided document has any of our secrets .
8,202
def retrieve ( self , value ) : if isinstance ( value , string_types ) : for key , this_value in self . secrets . items ( ) : value = value . replace ( key , this_value ) elif isinstance ( value , MutableMapping ) : return { k : self . retrieve ( v ) for k , v in value . items ( ) } elif isinstance ( value , MutableSequence ) : return [ self . retrieve ( v ) for k , v in enumerate ( value ) ] return value
Replace placeholders with their corresponding secrets .
8,203
def _check_mod_11_2(numeric_string):
    nums = numeric_string.replace("-", "")
    total = 0
    for num in nums[:-1]:
        digit = int(num)
        total = (total + digit) * 2
    remainder = total % 11
    result = (12 - remainder) % 11
    if result == 10:
        checkdigit = "X"
    else:
        checkdigit = str(result)
    return nums[-1].upper() == checkdigit
Validate numeric_string for its MOD-11-2 checksum.
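A self-contained sketch of the MOD-11-2 arithmetic used above (a hypothetical standalone helper, not part of the dataset's code), checked against the well-known sample ORCID 0000-0002-1825-0097:

    def mod_11_2_check_digit(base_digits):
        # ISO 7064 MOD 11-2: fold each digit into a running total, doubling as we go.
        total = 0
        for ch in base_digits:
            total = (total + int(ch)) * 2
        result = (12 - total % 11) % 11
        return "X" if result == 10 else str(result)

    # Base digits of 0000-0002-1825-0097 are 000000021825009; the check digit should be 7.
    assert mod_11_2_check_digit("000000021825009") == "7"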
8,204
def _valid_orcid ( orcid ) : if orcid is None or not orcid : raise ValueError ( u'ORCID cannot be unspecified' ) orcid = orcid . lower ( ) match = re . match ( r"(http://orcid\.org/|https://orcid\.org/|orcid\.org/)?" r"(?P<orcid>(\d{4}-\d{4}-\d{4}-\d{3}[0-9x]))$" , orcid ) help_url = u"https://support.orcid.org/knowledgebase/articles/" "116780-structure-of-the-orcid-identifier" if not match : raise ValueError ( u"Invalid ORCID: %s\n%s" % ( orcid , help_url ) ) orcid_num = match . group ( "orcid" ) . upper ( ) if not _check_mod_11_2 ( orcid_num ) : raise ValueError ( u"Invalid ORCID checksum: %s\n%s" % ( orcid_num , help_url ) ) return u"https://orcid.org/%s" % orcid_num
Ensure orcid is a valid ORCID identifier .
8,205
def checksum_copy ( src_file , dst_file = None , hasher = Hasher , buffersize = 1024 * 1024 ) : checksum = hasher ( ) contents = src_file . read ( buffersize ) if dst_file and hasattr ( dst_file , "name" ) and hasattr ( src_file , "name" ) : temp_location = os . path . join ( os . path . dirname ( dst_file . name ) , str ( uuid . uuid4 ( ) ) ) try : os . rename ( dst_file . name , temp_location ) os . link ( src_file . name , dst_file . name ) dst_file = None os . unlink ( temp_location ) except OSError : pass if os . path . exists ( temp_location ) : os . rename ( temp_location , dst_file . name ) while contents != b"" : if dst_file is not None : dst_file . write ( contents ) checksum . update ( contents ) contents = src_file . read ( buffersize ) if dst_file is not None : dst_file . flush ( ) return checksum . hexdigest ( ) . lower ( )
Compute checksums while copying a file .
8,206
def copy_job_order ( job , job_order_object ) : if not hasattr ( job , "tool" ) : return job_order_object customised_job = { } for each , i in enumerate ( job . tool [ "inputs" ] ) : with SourceLine ( job . tool [ "inputs" ] , each , WorkflowException , _logger . isEnabledFor ( logging . DEBUG ) ) : iid = shortname ( i [ "id" ] ) if iid in job_order_object : customised_job [ iid ] = copy . deepcopy ( job_order_object [ iid ] ) elif "default" in i : customised_job [ iid ] = copy . deepcopy ( i [ "default" ] ) else : pass return customised_job
Create copy of job object for provenance .
8,207
def evaluate ( self , process , job , job_order_object , research_obj ) : if not hasattr ( process , "steps" ) : self . prospective_prov ( job ) customised_job = copy_job_order ( job , job_order_object ) self . used_artefacts ( customised_job , self . workflow_run_uri ) research_obj . create_job ( customised_job , job ) elif hasattr ( job , "workflow" ) : self . prospective_prov ( job ) customised_job = copy_job_order ( job , job_order_object ) self . used_artefacts ( customised_job , self . workflow_run_uri )
Evaluate the nature of the job.
8,208
def start_process ( self , process_name , when , process_run_id = None ) : if process_run_id is None : process_run_id = uuid . uuid4 ( ) . urn prov_label = "Run of workflow/packed.cwl#main/" + process_name self . document . activity ( process_run_id , None , None , { provM . PROV_TYPE : WFPROV [ "ProcessRun" ] , provM . PROV_LABEL : prov_label } ) self . document . wasAssociatedWith ( process_run_id , self . engine_uuid , str ( "wf:main/" + process_name ) ) self . document . wasStartedBy ( process_run_id , None , self . workflow_run_uri , when , None , None ) return process_run_id
Record the start of each Process .
8,209
def declare_string ( self , value ) : byte_s = BytesIO ( str ( value ) . encode ( ENCODING ) ) data_file = self . research_object . add_data_file ( byte_s , content_type = TEXT_PLAIN ) checksum = posixpath . basename ( data_file ) data_id = "data:%s" % posixpath . split ( data_file ) [ 1 ] entity = self . document . entity ( data_id , { provM . PROV_TYPE : WFPROV [ "Artifact" ] , provM . PROV_VALUE : str ( value ) } ) return entity , checksum
Save as string in UTF - 8 .
8,210
def finalize_prov_profile ( self , name ) : if name is None : filename = "primary.cwlprov" else : wf_name = urllib . parse . quote ( str ( name ) , safe = "" ) . replace ( "%" , "_" ) filename = "%s.%s.cwlprov" % ( wf_name , self . workflow_run_uuid ) basename = posixpath . join ( _posix_path ( PROVENANCE ) , filename ) prov_ids = [ ] with self . research_object . write_bag_file ( basename + ".xml" ) as provenance_file : self . document . serialize ( provenance_file , format = "xml" , indent = 4 ) prov_ids . append ( self . provenance_ns [ filename + ".xml" ] ) with self . research_object . write_bag_file ( basename + ".provn" ) as provenance_file : self . document . serialize ( provenance_file , format = "provn" , indent = 2 ) prov_ids . append ( self . provenance_ns [ filename + ".provn" ] ) with self . research_object . write_bag_file ( basename + ".json" ) as provenance_file : self . document . serialize ( provenance_file , format = "json" , indent = 2 ) prov_ids . append ( self . provenance_ns [ filename + ".json" ] ) with self . research_object . write_bag_file ( basename + ".ttl" ) as provenance_file : self . document . serialize ( provenance_file , format = "rdf" , rdf_format = "turtle" ) prov_ids . append ( self . provenance_ns [ filename + ".ttl" ] ) with self . research_object . write_bag_file ( basename + ".nt" ) as provenance_file : self . document . serialize ( provenance_file , format = "rdf" , rdf_format = "ntriples" ) prov_ids . append ( self . provenance_ns [ filename + ".nt" ] ) with self . research_object . write_bag_file ( basename + ".jsonld" ) as provenance_file : self . document . serialize ( provenance_file , format = "rdf" , rdf_format = "json-ld" ) prov_ids . append ( self . provenance_ns [ filename + ".jsonld" ] ) _logger . debug ( u"[provenance] added provenance: %s" , prov_ids ) return prov_ids
Transfer the provenance related files to the RO .
8,211
def _initialize_bagit ( self ) : self . self_check ( ) bagit = os . path . join ( self . folder , "bagit.txt" ) with open ( bagit , "w" , encoding = ENCODING , newline = '\n' ) as bag_it_file : bag_it_file . write ( u"BagIt-Version: 0.97\n" ) bag_it_file . write ( u"Tag-File-Character-Encoding: %s\n" % ENCODING )
Write fixed bagit header .
8,212
def user_provenance ( self , document ) : self . self_check ( ) ( username , fullname ) = _whoami ( ) if not self . full_name : self . full_name = fullname document . add_namespace ( UUID ) document . add_namespace ( ORCID ) document . add_namespace ( FOAF ) account = document . agent ( ACCOUNT_UUID , { provM . PROV_TYPE : FOAF [ "OnlineAccount" ] , "prov:label" : username , FOAF [ "accountName" ] : username } ) user = document . agent ( self . orcid or USER_UUID , { provM . PROV_TYPE : PROV [ "Person" ] , "prov:label" : self . full_name , FOAF [ "name" ] : self . full_name , FOAF [ "account" ] : account } ) document . actedOnBehalfOf ( account , user )
Add the user provenance .
8,213
def write_bag_file ( self , path , encoding = ENCODING ) : self . self_check ( ) bag_file = cast ( IO , WritableBagFile ( self , path ) ) if encoding is not None : return cast ( IO , TextIOWrapper ( bag_file , encoding = encoding , newline = "\n" ) ) return bag_file
Write the bag file into our research object .
8,214
def add_tagfile ( self , path , timestamp = None ) : self . self_check ( ) checksums = { } if os . path . isdir ( path ) : return with open ( path , "rb" ) as tag_file : checksums [ SHA1 ] = checksum_copy ( tag_file , hasher = hashlib . sha1 ) tag_file . seek ( 0 ) checksums [ SHA256 ] = checksum_copy ( tag_file , hasher = hashlib . sha256 ) tag_file . seek ( 0 ) checksums [ SHA512 ] = checksum_copy ( tag_file , hasher = hashlib . sha512 ) rel_path = _posix_path ( os . path . relpath ( path , self . folder ) ) self . tagfiles . add ( rel_path ) self . add_to_manifest ( rel_path , checksums ) if timestamp is not None : self . _file_provenance [ rel_path ] = { "createdOn" : timestamp . isoformat ( ) }
Add tag files to our research object .
8,215
def _ro_aggregates ( self ) : def guess_mediatype ( rel_path ) : media_types = { "txt" : TEXT_PLAIN , "ttl" : 'text/turtle; charset="UTF-8"' , "rdf" : 'application/rdf+xml' , "json" : 'application/json' , "jsonld" : 'application/ld+json' , "xml" : 'application/xml' , "cwl" : 'text/x+yaml; charset="UTF-8"' , "provn" : 'text/provenance-notation; charset="UTF-8"' , "nt" : 'application/n-triples' , } conforms_to = { "provn" : 'http://www.w3.org/TR/2013/REC-prov-n-20130430/' , "cwl" : 'https://w3id.org/cwl/' , } prov_conforms_to = { "provn" : 'http://www.w3.org/TR/2013/REC-prov-n-20130430/' , "rdf" : 'http://www.w3.org/TR/2013/REC-prov-o-20130430/' , "ttl" : 'http://www.w3.org/TR/2013/REC-prov-o-20130430/' , "nt" : 'http://www.w3.org/TR/2013/REC-prov-o-20130430/' , "jsonld" : 'http://www.w3.org/TR/2013/REC-prov-o-20130430/' , "xml" : 'http://www.w3.org/TR/2013/NOTE-prov-xml-20130430/' , "json" : 'http://www.w3.org/Submission/2013/SUBM-prov-json-20130424/' , } extension = rel_path . rsplit ( "." , 1 ) [ - 1 ] . lower ( ) if extension == rel_path : extension = None local_aggregate = { } if extension in media_types : local_aggregate [ "mediatype" ] = media_types [ extension ] if extension in conforms_to : local_aggregate [ "conformsTo" ] = conforms_to [ extension ] if ( rel_path . startswith ( _posix_path ( PROVENANCE ) ) and extension in prov_conforms_to ) : if ".cwlprov" in rel_path : local_aggregate [ "conformsTo" ] = [ prov_conforms_to [ extension ] , CWLPROV_VERSION ] else : local_aggregate [ "conformsTo" ] = prov_conforms_to [ extension ] return local_aggregate aggregates = [ ] for path in self . bagged_size . keys ( ) : aggregate_dict = { } ( folder , filename ) = posixpath . split ( path ) aggregate_dict [ "uri" ] = 'urn:hash::sha1:' + filename aggregate_dict [ "bundledAs" ] = { "uri" : self . base_uri + path , "folder" : "/%s/" % folder , "filename" : filename , } if path in self . _file_provenance : aggregate_dict [ "bundledAs" ] . update ( self . _file_provenance [ path ] ) else : pass if path in self . _content_types : aggregate_dict [ "mediatype" ] = self . _content_types [ path ] aggregates . append ( aggregate_dict ) for path in self . tagfiles : if ( not ( path . startswith ( METADATA ) or path . startswith ( WORKFLOW ) or path . startswith ( SNAPSHOT ) ) ) : continue if path == posixpath . join ( METADATA , "manifest.json" ) : continue rel_aggregates = { } uri = posixpath . relpath ( path , METADATA ) rel_aggregates [ "uri" ] = uri rel_aggregates . update ( guess_mediatype ( path ) ) if path in self . _file_provenance : rel_aggregates . update ( self . _file_provenance [ path ] ) elif not path . startswith ( SNAPSHOT ) : rel_aggregates . update ( self . _self_made ( ) ) aggregates . append ( rel_aggregates ) aggregates . extend ( self . _external_aggregates ) return aggregates
Gather dictionary of files to be added to the manifest .
8,216
def packed_workflow ( self , packed ) : self . self_check ( ) rel_path = posixpath . join ( _posix_path ( WORKFLOW ) , "packed.cwl" ) with self . write_bag_file ( rel_path , encoding = None ) as write_pack : write_pack . write ( packed . encode ( ENCODING ) ) _logger . debug ( u"[provenance] Added packed workflow: %s" , rel_path )
Pack the CWL description to generate a re-runnable CWL object in the RO.
8,217
def has_data_file ( self , sha1hash ) : folder = os . path . join ( self . folder , DATA , sha1hash [ 0 : 2 ] ) hash_path = os . path . join ( folder , sha1hash ) return os . path . isfile ( hash_path )
Confirms the presence of the given file in the RO .
8,218
def add_to_manifest ( self , rel_path , checksums ) : self . self_check ( ) if posixpath . isabs ( rel_path ) : raise ValueError ( "rel_path must be relative: %s" % rel_path ) if posixpath . commonprefix ( [ "data/" , rel_path ] ) == "data/" : manifest = "manifest" else : manifest = "tagmanifest" for ( method , hash_value ) in checksums . items ( ) : manifestpath = os . path . join ( self . folder , "%s-%s.txt" % ( manifest , method . lower ( ) ) ) with open ( manifestpath , "a" , encoding = ENCODING , newline = '\n' ) as checksum_file : line = u"%s %s\n" % ( hash_value , rel_path ) _logger . debug ( u"[provenance] Added to %s: %s" , manifestpath , line ) checksum_file . write ( line )
Add files to the research object manifest .
8,219
def create_job ( self , builder_job , wf_job = None , is_output = False ) : copied = copy . deepcopy ( builder_job ) relativised_input_objecttemp = { } self . _relativise_files ( copied ) def jdefault ( o ) : return dict ( o ) if is_output : rel_path = posixpath . join ( _posix_path ( WORKFLOW ) , "primary-output.json" ) else : rel_path = posixpath . join ( _posix_path ( WORKFLOW ) , "primary-job.json" ) j = json_dumps ( copied , indent = 4 , ensure_ascii = False , default = jdefault ) with self . write_bag_file ( rel_path ) as file_path : file_path . write ( j + u"\n" ) _logger . debug ( u"[provenance] Generated customised job file: %s" , rel_path ) relativised_input_objecttemp = { } for key , value in copied . items ( ) : if isinstance ( value , MutableMapping ) : if value . get ( "class" ) in ( "File" , "Directory" ) : relativised_input_objecttemp [ key ] = value else : relativised_input_objecttemp [ key ] = value self . relativised_input_object . update ( { k : v for k , v in relativised_input_objecttemp . items ( ) if v } ) return self . relativised_input_object
Generate the new job object with RO specific relative paths .
8,220
def _relativise_files ( self , structure ) : _logger . debug ( u"[provenance] Relativising: %s" , structure ) if isinstance ( structure , MutableMapping ) : if structure . get ( "class" ) == "File" : relative_path = None if "checksum" in structure : alg , checksum = structure [ "checksum" ] . split ( "$" ) if alg != SHA1 : raise TypeError ( "Only SHA1 CWL checksums are currently supported: " "{}" . format ( structure ) ) if self . has_data_file ( checksum ) : prefix = checksum [ 0 : 2 ] relative_path = posixpath . join ( "data" , prefix , checksum ) if not relative_path is not None and "location" in structure : _logger . info ( "[provenance] Adding to RO %s" , structure [ "location" ] ) fsaccess = StdFsAccess ( "" ) with fsaccess . open ( structure [ "location" ] , "rb" ) as fp : relative_path = self . add_data_file ( fp ) checksum = posixpath . basename ( relative_path ) structure [ "checksum" ] = "%s$%s" % ( SHA1 , checksum ) if relative_path is not None : structure [ "location" ] = posixpath . join ( ".." , relative_path ) else : _logger . warning ( "Could not determine RO path for file %s" , structure ) if "path" in structure : del structure [ "path" ] if structure . get ( "class" ) == "Directory" : del structure [ "location" ] for val in structure . values ( ) : self . _relativise_files ( val ) return if isinstance ( structure , ( str , Text ) ) : return try : for obj in iter ( structure ) : self . _relativise_files ( obj ) except TypeError : pass
Save any file objects into the RO and update the local paths .
8,221
def close ( self , save_to = None ) : if save_to is None : if not self . closed : _logger . debug ( u"[provenance] Deleting temporary %s" , self . folder ) shutil . rmtree ( self . folder , ignore_errors = True ) else : save_to = os . path . abspath ( save_to ) _logger . info ( u"[provenance] Finalizing Research Object" ) self . _finalize ( ) if os . path . isdir ( save_to ) : _logger . info ( u"[provenance] Deleting existing %s" , save_to ) shutil . rmtree ( save_to ) shutil . move ( self . folder , save_to ) _logger . info ( u"[provenance] Research Object saved to %s" , save_to ) self . folder = save_to self . closed = True
Close the Research Object optionally saving to specified folder .
8,222
def printrdf ( wflow , ctx , style ) : rdf = gather ( wflow , ctx ) . serialize ( format = style , encoding = 'utf-8' ) if not rdf : return u"" return rdf . decode ( 'utf-8' )
Serialize the CWL document into a string ready for printing .
8,223
def create_runtime ( self , env , runtime_context ) : any_path_okay = self . builder . get_requirement ( "DockerRequirement" ) [ 1 ] or False runtime = [ u"singularity" , u"--quiet" , u"exec" , u"--contain" , u"--pid" , u"--ipc" ] if _singularity_supports_userns ( ) : runtime . append ( u"--userns" ) runtime . append ( u"--bind" ) runtime . append ( u"{}:{}:rw" . format ( docker_windows_path_adjust ( os . path . realpath ( self . outdir ) ) , self . builder . outdir ) ) runtime . append ( u"--bind" ) tmpdir = "/tmp" runtime . append ( u"{}:{}:rw" . format ( docker_windows_path_adjust ( os . path . realpath ( self . tmpdir ) ) , tmpdir ) ) self . add_volumes ( self . pathmapper , runtime , any_path_okay = True , secret_store = runtime_context . secret_store , tmpdir_prefix = runtime_context . tmpdir_prefix ) if self . generatemapper is not None : self . add_volumes ( self . generatemapper , runtime , any_path_okay = any_path_okay , secret_store = runtime_context . secret_store , tmpdir_prefix = runtime_context . tmpdir_prefix ) runtime . append ( u"--pwd" ) runtime . append ( u"%s" % ( docker_windows_path_adjust ( self . builder . outdir ) ) ) if runtime_context . custom_net : raise UnsupportedRequirement ( "Singularity implementation does not support custom networking" ) elif runtime_context . disable_net : runtime . append ( u"--net" ) env [ "SINGULARITYENV_TMPDIR" ] = tmpdir env [ "SINGULARITYENV_HOME" ] = self . builder . outdir for name , value in self . environment . items ( ) : env [ "SINGULARITYENV_{}" . format ( name ) ] = str ( value ) return ( runtime , None )
Returns the Singularity runtime list of commands and options .
8,224
def fetch_document ( argsworkflow , loadingContext = None ) : if loadingContext is None : loadingContext = LoadingContext ( ) loadingContext . loader = default_loader ( ) else : loadingContext = loadingContext . copy ( ) if loadingContext . loader is None : loadingContext . loader = default_loader ( loadingContext . fetcher_constructor ) if isinstance ( argsworkflow , string_types ) : uri , fileuri = resolve_tool_uri ( argsworkflow , resolver = loadingContext . resolver , document_loader = loadingContext . loader ) workflowobj = loadingContext . loader . fetch ( fileuri ) return loadingContext , workflowobj , uri if isinstance ( argsworkflow , dict ) : uri = argsworkflow [ "id" ] if argsworkflow . get ( "id" ) else "_:" + Text ( uuid . uuid4 ( ) ) workflowobj = cast ( CommentedMap , cmap ( argsworkflow , fn = uri ) ) loadingContext . loader . idx [ uri ] = workflowobj return loadingContext , workflowobj , uri raise ValidationException ( "Must be URI or object: '%s'" % argsworkflow )
Retrieve a CWL document .
8,225
def make_tool ( uri , loadingContext ) : if loadingContext . loader is None : raise ValueError ( "loadingContext must have a loader" ) resolveduri , metadata = loadingContext . loader . resolve_ref ( uri ) processobj = None if isinstance ( resolveduri , MutableSequence ) : for obj in resolveduri : if obj [ 'id' ] . endswith ( '#main' ) : processobj = obj break if not processobj : raise WorkflowException ( u"Tool file contains graph of multiple objects, must specify " "one of #%s" % ", #" . join ( urllib . parse . urldefrag ( i [ "id" ] ) [ 1 ] for i in resolveduri if "id" in i ) ) elif isinstance ( resolveduri , MutableMapping ) : processobj = resolveduri else : raise Exception ( "Must resolve to list or dict" ) tool = loadingContext . construct_tool_object ( processobj , loadingContext ) if loadingContext . jobdefaults : jobobj = loadingContext . jobdefaults for inp in tool . tool [ "inputs" ] : if shortname ( inp [ "id" ] ) in jobobj : inp [ "default" ] = jobobj [ shortname ( inp [ "id" ] ) ] return tool
Make a Python CWL object .
8,226
def revmap_file ( builder , outdir , f ) : split = urllib . parse . urlsplit ( outdir ) if not split . scheme : outdir = file_uri ( str ( outdir ) ) if "location" in f and "path" not in f : if f [ "location" ] . startswith ( "file://" ) : f [ "path" ] = convert_pathsep_to_unix ( uri_file_path ( f [ "location" ] ) ) else : return f if "path" in f : path = f [ "path" ] uripath = file_uri ( path ) del f [ "path" ] if "basename" not in f : f [ "basename" ] = os . path . basename ( path ) if not builder . pathmapper : raise ValueError ( "Do not call revmap_file using a builder that doesn't have a pathmapper." ) revmap_f = builder . pathmapper . reversemap ( path ) if revmap_f and not builder . pathmapper . mapper ( revmap_f [ 0 ] ) . type . startswith ( "Writable" ) : f [ "location" ] = revmap_f [ 1 ] elif uripath == outdir or uripath . startswith ( outdir + os . sep ) : f [ "location" ] = file_uri ( path ) elif path == builder . outdir or path . startswith ( builder . outdir + os . sep ) : f [ "location" ] = builder . fs_access . join ( outdir , path [ len ( builder . outdir ) + 1 : ] ) elif not os . path . isabs ( path ) : f [ "location" ] = builder . fs_access . join ( outdir , path ) else : raise WorkflowException ( u"Output file path %s must be within designated output directory (%s) or an input " u"file pass through." % ( path , builder . outdir ) ) return f raise WorkflowException ( u"Output File object is missing both 'location' " "and 'path' fields: %s" % f )
Remap a file from internal path to external path .
8,227
def check_adjust ( builder , file_o ) : if not builder . pathmapper : raise ValueError ( "Do not call check_adjust using a builder that doesn't have a pathmapper." ) file_o [ "path" ] = docker_windows_path_adjust ( builder . pathmapper . mapper ( file_o [ "location" ] ) [ 1 ] ) dn , bn = os . path . split ( file_o [ "path" ] ) if file_o . get ( "dirname" ) != dn : file_o [ "dirname" ] = Text ( dn ) if file_o . get ( "basename" ) != bn : file_o [ "basename" ] = Text ( bn ) if file_o [ "class" ] == "File" : nr , ne = os . path . splitext ( file_o [ "basename" ] ) if file_o . get ( "nameroot" ) != nr : file_o [ "nameroot" ] = Text ( nr ) if file_o . get ( "nameext" ) != ne : file_o [ "nameext" ] = Text ( ne ) if not ACCEPTLIST_RE . match ( file_o [ "basename" ] ) : raise WorkflowException ( "Invalid filename: '{}' contains illegal characters" . format ( file_o [ "basename" ] ) ) return file_o
Map files to assigned path inside a container .
8,228
def _flat_crossproduct_scatter ( process , joborder , scatter_keys , callback , startindex , runtimeContext ) : scatter_key = scatter_keys [ 0 ] jobl = len ( joborder [ scatter_key ] ) steps = [ ] put = startindex for index in range ( 0 , jobl ) : sjob = copy . copy ( joborder ) sjob [ scatter_key ] = joborder [ scatter_key ] [ index ] if len ( scatter_keys ) == 1 : if runtimeContext . postScatterEval is not None : sjob = runtimeContext . postScatterEval ( sjob ) steps . append ( process . job ( sjob , functools . partial ( callback . receive_scatter_output , put ) , runtimeContext ) ) put += 1 else : ( add , _ ) = _flat_crossproduct_scatter ( process , sjob , scatter_keys [ 1 : ] , callback , put , runtimeContext ) put += len ( add ) steps . extend ( add ) return ( steps , put )
Inner loop .
8,229
def formatSubclassOf ( fmt , cls , ontology , visited ) : if URIRef ( fmt ) == URIRef ( cls ) : return True if ontology is None : return False if fmt in visited : return False visited . add ( fmt ) uriRefFmt = URIRef ( fmt ) for s , p , o in ontology . triples ( ( uriRefFmt , RDFS . subClassOf , None ) ) : if formatSubclassOf ( o , cls , ontology , visited ) : return True for s , p , o in ontology . triples ( ( uriRefFmt , OWL . equivalentClass , None ) ) : if formatSubclassOf ( o , cls , ontology , visited ) : return True for s , p , o in ontology . triples ( ( None , OWL . equivalentClass , uriRefFmt ) ) : if formatSubclassOf ( s , cls , ontology , visited ) : return True return False
Determine if fmt is a subclass of cls .
8,230
def check_format ( actual_file , input_formats , ontology ) : for afile in aslist ( actual_file ) : if not afile : continue if "format" not in afile : raise validate . ValidationException ( u"File has no 'format' defined: {}" . format ( json_dumps ( afile , indent = 4 ) ) ) for inpf in aslist ( input_formats ) : if afile [ "format" ] == inpf or formatSubclassOf ( afile [ "format" ] , inpf , ontology , set ( ) ) : return raise validate . ValidationException ( u"File has an incompatible format: {}" . format ( json_dumps ( afile , indent = 4 ) ) )
Confirms that the format present is valid for the allowed formats .
8,231
def v1_0to1_1_0dev1 ( doc , loader , baseuri ) : doc = copy . deepcopy ( doc ) rewrite = { "http://commonwl.org/cwltool#WorkReuse" : "WorkReuse" , "http://commonwl.org/cwltool#TimeLimit" : "ToolTimeLimit" , "http://commonwl.org/cwltool#NetworkAccess" : "NetworkAccess" , "http://commonwl.org/cwltool#InplaceUpdateRequirement" : "InplaceUpdateRequirement" , "http://commonwl.org/cwltool#LoadListingRequirement" : "LoadListingRequirement" , "http://commonwl.org/cwltool#WorkReuse" : "WorkReuse" , } def rewrite_requirements ( t ) : if "requirements" in t : for r in t [ "requirements" ] : if r [ "class" ] in rewrite : r [ "class" ] = rewrite [ r [ "class" ] ] if "hints" in t : for r in t [ "hints" ] : if r [ "class" ] in rewrite : r [ "class" ] = rewrite [ r [ "class" ] ] if "steps" in t : for s in t [ "steps" ] : rewrite_requirements ( s ) def update_secondaryFiles ( t ) : if isinstance ( t , MutableSequence ) : return [ { "pattern" : p } for p in t ] else : return t def fix_inputBinding ( t ) : for i in t [ "inputs" ] : if "inputBinding" in i : ib = i [ "inputBinding" ] for k in list ( ib . keys ( ) ) : if k != "loadContents" : _logger . warning ( SourceLine ( ib , k ) . makeError ( "Will ignore field '%s' which is not valid in %s inputBinding" % ( k , t [ "class" ] ) ) ) del ib [ k ] visit_class ( doc , ( "CommandLineTool" , "Workflow" ) , rewrite_requirements ) visit_class ( doc , ( "ExpressionTool" , "Workflow" ) , fix_inputBinding ) visit_field ( doc , "secondaryFiles" , update_secondaryFiles ) upd = doc if isinstance ( upd , MutableMapping ) and "$graph" in upd : upd = upd [ "$graph" ] for proc in aslist ( upd ) : proc . setdefault ( "hints" , [ ] ) proc [ "hints" ] . insert ( 0 , { "class" : "NetworkAccess" , "networkAccess" : True } ) proc [ "hints" ] . insert ( 0 , { "class" : "LoadListingRequirement" , "loadListing" : "deep_listing" } ) return ( doc , "v1.1.0-dev1" )
Public updater for v1.0 to v1.1.0-dev1.
8,232
def checkversion ( doc , metadata , enable_dev ) : cdoc = None if isinstance ( doc , CommentedSeq ) : if not isinstance ( metadata , CommentedMap ) : raise Exception ( "Expected metadata to be CommentedMap" ) lc = metadata . lc metadata = copy . deepcopy ( metadata ) metadata . lc . data = copy . copy ( lc . data ) metadata . lc . filename = lc . filename metadata [ u"$graph" ] = doc cdoc = metadata elif isinstance ( doc , CommentedMap ) : cdoc = doc else : raise Exception ( "Expected CommentedMap or CommentedSeq" ) version = metadata [ u"cwlVersion" ] cdoc [ "cwlVersion" ] = version if version not in UPDATES : if version in DEVUPDATES : if enable_dev : pass else : raise validate . ValidationException ( u"Version '%s' is a development or deprecated version.\n " "Update your document to a stable version (%s) or use " "--enable-dev to enable support for development and " "deprecated versions." % ( version , ", " . join ( list ( UPDATES . keys ( ) ) ) ) ) else : raise validate . ValidationException ( u"Unrecognized version %s" % version ) return ( cdoc , version )
Checks the validity of the version of the given CWL document.
8,233
def stage_files ( pathmapper , stage_func = None , ignore_writable = False , symlink = True , secret_store = None ) : for key , entry in pathmapper . items ( ) : if not entry . staged : continue if not os . path . exists ( os . path . dirname ( entry . target ) ) : os . makedirs ( os . path . dirname ( entry . target ) ) if entry . type in ( "File" , "Directory" ) and os . path . exists ( entry . resolved ) : if symlink : if onWindows ( ) : if entry . type == "File" : shutil . copy ( entry . resolved , entry . target ) elif entry . type == "Directory" : if os . path . exists ( entry . target ) and os . path . isdir ( entry . target ) : shutil . rmtree ( entry . target ) copytree_with_merge ( entry . resolved , entry . target ) else : os . symlink ( entry . resolved , entry . target ) elif stage_func is not None : stage_func ( entry . resolved , entry . target ) elif entry . type == "Directory" and not os . path . exists ( entry . target ) and entry . resolved . startswith ( "_:" ) : os . makedirs ( entry . target ) elif entry . type == "WritableFile" and not ignore_writable : shutil . copy ( entry . resolved , entry . target ) ensure_writable ( entry . target ) elif entry . type == "WritableDirectory" and not ignore_writable : if entry . resolved . startswith ( "_:" ) : os . makedirs ( entry . target ) else : shutil . copytree ( entry . resolved , entry . target ) ensure_writable ( entry . target ) elif entry . type == "CreateFile" or entry . type == "CreateWritableFile" : with open ( entry . target , "wb" ) as new : if secret_store is not None : new . write ( secret_store . retrieve ( entry . resolved ) . encode ( "utf-8" ) ) else : new . write ( entry . resolved . encode ( "utf-8" ) ) if entry . type == "CreateFile" : os . chmod ( entry . target , stat . S_IRUSR ) else : ensure_writable ( entry . target ) pathmapper . update ( key , entry . target , entry . target , entry . type , entry . staged )
Link or copy files to their targets . Create them as needed .
8,234
def avroize_type ( field_type , name_prefix = "" ) : if isinstance ( field_type , MutableSequence ) : for field in field_type : avroize_type ( field , name_prefix ) elif isinstance ( field_type , MutableMapping ) : if field_type [ "type" ] in ( "enum" , "record" ) : if "name" not in field_type : field_type [ "name" ] = name_prefix + Text ( uuid . uuid4 ( ) ) if field_type [ "type" ] == "record" : avroize_type ( field_type [ "fields" ] , name_prefix ) if field_type [ "type" ] == "array" : avroize_type ( field_type [ "items" ] , name_prefix ) if isinstance ( field_type [ "type" ] , MutableSequence ) : for ctype in field_type [ "type" ] : avroize_type ( ctype , name_prefix ) return field_type
Add missing information to a type so that CWL types are valid in schema_salad.
8,235
def versionstring ( ) : pkg = pkg_resources . require ( "cwltool" ) if pkg : return u"%s %s" % ( sys . argv [ 0 ] , pkg [ 0 ] . version ) return u"%s %s" % ( sys . argv [ 0 ] , "unknown version" )
Version of cwltool used to execute the workflow.
8,236
def docker_windows_path_adjust(path):
    if onWindows():
        split = path.split(':')
        if len(split) == 2:
            if platform.win32_ver()[0] in ('7', '8'):
                split[0] = split[0].lower()
            else:
                split[0] = split[0].capitalize()
            path = ':'.join(split)
        path = path.replace(':', '').replace('\\', '/')
        return path if path[0] == '/' else '/' + path
    return path
Changes only Windows paths so that they can be appropriately passed to the docker run command, as Docker treats them as Unix paths.
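For illustration, a standalone sketch of the string manipulation this adjustment performs on a hypothetical Windows path (the Windows 7/8 branch, which lowercases the drive letter):

    path = "C:\\Users\\me\\out"            # hypothetical Windows-style path
    drive, rest = path.split(":")
    unix_path = "/" + drive.lower() + rest.replace("\\", "/")
    print(unix_path)                       # /c/Users/me/out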
8,237
def bytes2str_in_dicts(inp):
    if isinstance(inp, MutableMapping):
        for k in inp:
            inp[k] = bytes2str_in_dicts(inp[k])
        return inp
    if isinstance(inp, MutableSequence):
        for idx, value in enumerate(inp):
            inp[idx] = bytes2str_in_dicts(value)
        return inp
    elif isinstance(inp, bytes):
        return inp.decode('utf-8')
    return inp
Convert any byte strings to unicode strings in place. The input is a dict of nested dicts and lists.
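A minimal usage sketch of the byte-to-text conversion described above, written as a standalone recursive helper for illustration (unlike the original, it returns converted containers rather than mutating in place):

    def bytes_to_str(obj):
        # Recursively decode bytes inside nested dicts and lists; everything else passes through.
        if isinstance(obj, dict):
            return {k: bytes_to_str(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [bytes_to_str(v) for v in obj]
        if isinstance(obj, bytes):
            return obj.decode("utf-8")
        return obj

    print(bytes_to_str({"PATH": b"/usr/bin", "args": [b"--verbose", 2]}))
    # {'PATH': '/usr/bin', 'args': ['--verbose', 2]}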
8,238
def visit_class ( rec , cls , op ) : if isinstance ( rec , MutableMapping ) : if "class" in rec and rec . get ( "class" ) in cls : op ( rec ) for d in rec : visit_class ( rec [ d ] , cls , op ) if isinstance ( rec , MutableSequence ) : for d in rec : visit_class ( d , cls , op )
Apply a function to any mapping with a "class" field whose value is in cls.
8,239
def visit_field ( rec , field , op ) : if isinstance ( rec , MutableMapping ) : if field in rec : rec [ field ] = op ( rec [ field ] ) for d in rec : visit_field ( rec [ d ] , field , op ) if isinstance ( rec , MutableSequence ) : for d in rec : visit_field ( d , field , op )
Apply a function to any mapping that contains the given field.
8,240
def print_num(num):
    out('hex: 0x{0:08x}'.format(num))
    out('dec: {0:d}'.format(num))
    out('oct: 0o{0:011o}'.format(num))
    out('bin: 0b{0:032b}'.format(num))
Write a numeric result in various forms
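For reference, the same format specifications applied to one sample value (expected output shown in comments):

    num = 0xDEADBEEF
    print('hex: 0x{0:08x}'.format(num))   # hex: 0xdeadbeef
    print('dec: {0:d}'.format(num))       # dec: 3735928559
    print('oct: 0o{0:011o}'.format(num))  # oct: 0o33653337357
    print('bin: 0b{0:032b}'.format(num))  # bin: 0b11011110101011011011111011101111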
8,241
def main ( argv = None ) : parser = get_parser ( ) global args args = parser . parse_args ( argv ) args . func ( )
Runs the program and handles command line options
8,242
def __decode_ext_desc ( self , value_type , value ) : if value_type == 0 : return self . __decode_string ( value ) elif value_type == 1 : return value elif 1 < value_type < 6 : return _bytes_to_int_le ( value )
Decode ASF_EXTENDED_CONTENT_DESCRIPTION_OBJECT values.
8,243
def fetch ( self , card_id , data = { } , ** kwargs ) : return super ( Card , self ) . fetch ( card_id , data , ** kwargs )
Fetch Card for given Id
8,244
def all ( self , data = { } , ** kwargs ) : return super ( VirtualAccount , self ) . all ( data , ** kwargs )
Fetch all Virtual Account entities
8,245
def fetch ( self , virtual_account_id , data = { } , ** kwargs ) : return super ( VirtualAccount , self ) . fetch ( virtual_account_id , data , ** kwargs )
Fetch Virtual Account for given Id
8,246
def create ( self , data = { } , ** kwargs ) : url = self . base_url return self . post_url ( url , data , ** kwargs )
Create Virtual Account from given dict
8,247
def close ( self , virtual_account_id , data = { } , ** kwargs ) : url = "{}/{}" . format ( self . base_url , virtual_account_id ) data [ 'status' ] = 'closed' return self . patch_url ( url , data , ** kwargs )
Close Virtual Account from given Id
8,248
def payments ( self , virtual_account_id , data = { } , ** kwargs ) : url = "{}/{}/payments" . format ( self . base_url , virtual_account_id ) return self . get_url ( url , data , ** kwargs )
Fetch Payment for Virtual Account Id
8,249
def all ( self , data = { } , ** kwargs ) : return super ( Subscription , self ) . all ( data , ** kwargs )
Fetch all Subscription entities
8,250
def fetch ( self , subscription_id , data = { } , ** kwargs ) : return super ( Subscription , self ) . fetch ( subscription_id , data , ** kwargs )
Fetch Subscription for given Id
8,251
def cancel ( self , subscription_id , data = { } , ** kwargs ) : url = "{}/{}/cancel" . format ( self . base_url , subscription_id ) return self . post_url ( url , data , ** kwargs )
Cancel subscription given by subscription_id
8,252
def all ( self , data = { } , ** kwargs ) : return super ( Order , self ) . all ( data , ** kwargs )
Fetch all Order entities
8,253
def fetch ( self , order_id , data = { } , ** kwargs ) : return super ( Order , self ) . fetch ( order_id , data , ** kwargs )
Fetch Order for given Id
8,254
def fetch ( self , customer_id , data = { } , ** kwargs ) : return super ( Customer , self ) . fetch ( customer_id , data , ** kwargs )
Fetch Customer for given Id
8,255
def edit ( self , customer_id , data = { } , ** kwargs ) : url = '{}/{}' . format ( self . base_url , customer_id ) return self . put_url ( url , data , ** kwargs )
Edit Customer information from given dict
8,256
def fetch ( self , addon_id , data = { } , ** kwargs ) : return super ( Addon , self ) . fetch ( addon_id , data , ** kwargs )
Fetch addon for given Id
8,257
def delete ( self , addon_id , data = { } , ** kwargs ) : return super ( Addon , self ) . delete ( addon_id , data , ** kwargs )
Delete addon for given id
8,258
def all ( self , data = { } , ** kwargs ) : return super ( Refund , self ) . all ( data , ** kwargs )
Fetch All Refund
8,259
def fetch ( self , refund_id , data = { } , ** kwargs ) : return super ( Refund , self ) . fetch ( refund_id , data , ** kwargs )
Refund object for given payment Id
8,260
def all ( self , data = { } , ** kwargs ) : return super ( Payment , self ) . all ( data , ** kwargs )
Fetch all Payment entities
8,261
def fetch ( self , payment_id , data = { } , ** kwargs ) : return super ( Payment , self ) . fetch ( payment_id , data , ** kwargs )
Fetch Payment for given Id
8,262
def capture ( self , payment_id , amount , data = { } , ** kwargs ) : url = "{}/{}/capture" . format ( self . base_url , payment_id ) data [ 'amount' ] = amount return self . post_url ( url , data , ** kwargs )
Capture Payment for given Id
8,263
def transfer ( self , payment_id , data = { } , ** kwargs ) : url = "{}/{}/transfers" . format ( self . base_url , payment_id ) return self . post_url ( url , data , ** kwargs )
Create Transfer for given Payment Id
8,264
def transfers ( self , payment_id , data = { } , ** kwargs ) : url = "{}/{}/transfers" . format ( self . base_url , payment_id ) return self . get_url ( url , data , ** kwargs )
Fetches all transfer for given Payment Id
8,265
def fetch ( self , plan_id , data = { } , ** kwargs ) : return super ( Plan , self ) . fetch ( plan_id , data , ** kwargs )
Fetch Plan for given Id
8,266
def all ( self , data = { } , ** kwargs ) : return super ( Plan , self ) . all ( data , ** kwargs )
Fetch all plan entities
8,267
def fetch ( self , customer_id , token_id , data = { } , ** kwargs ) : url = "{}/{}/tokens/{}" . format ( self . base_url , customer_id , token_id ) return self . get_url ( url , data , ** kwargs )
Fetch Token for given Id and given customer Id
8,268
def all ( self , customer_id , data = { } , ** kwargs ) : url = "{}/{}/tokens" . format ( self . base_url , customer_id ) return self . get_url ( url , data , ** kwargs )
Get all tokens for given customer Id
8,269
def delete ( self , customer_id , token_id , data = { } , ** kwargs ) : url = "{}/{}/tokens/{}" . format ( self . base_url , customer_id , token_id ) return self . delete_url ( url , data , ** kwargs )
Delete Given Token For a Customer
8,270
def all ( self , data = { } , ** kwargs ) : return super ( Settlement , self ) . all ( data , ** kwargs )
Fetch all Settlement entities
8,271
def fetch ( self , settlement_id , data = { } , ** kwargs ) : return super ( Settlement , self ) . fetch ( settlement_id , data , ** kwargs )
Fetch Settlement data for given Id
8,272
def compare_string(self, expected_str, actual_str):
    if len(expected_str) != len(actual_str):
        return False
    result = 0
    for x, y in zip(expected_str, actual_str):
        result |= ord(x) ^ ord(y)
    return result == 0
Returns True if the two strings are equal, False otherwise. The time taken is independent of the number of characters that match. For the sake of simplicity, this function executes in constant time only when the two strings have the same length; it short-circuits when they have different lengths.
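A small standalone sketch of the constant-time comparison described above; the standard library's hmac.compare_digest provides the same guarantee and is the usual choice:

    import hmac

    def constant_time_equal(expected, actual):
        # Short-circuit only on length; otherwise XOR every character pair.
        if len(expected) != len(actual):
            return False
        result = 0
        for x, y in zip(expected, actual):
            result |= ord(x) ^ ord(y)
        return result == 0

    print(constant_time_equal("secret-token", "secret-token"))   # True
    print(constant_time_equal("secret-token", "secret-tokeN"))   # False
    print(hmac.compare_digest("secret-token", "secret-token"))   # True (stdlib equivalent)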
8,273
def all ( self , data = { } , ** kwargs ) : if 'payment_id' in data : url = "/payments/{}/transfers" . format ( data [ 'payment_id' ] ) del data [ 'payment_id' ] return self . get_url ( url , data , ** kwargs ) return super ( Transfer , self ) . all ( data , ** kwargs )
Fetch all Transfer entities
8,274
def fetch ( self , transfer_id , data = { } , ** kwargs ) : return super ( Transfer , self ) . fetch ( transfer_id , data , ** kwargs )
Fetch Transfer for given Id
8,275
def reverse ( self , transfer_id , data = { } , ** kwargs ) : url = "{}/{}/reversals" . format ( self . base_url , transfer_id ) return self . post_url ( url , data , ** kwargs )
Reverse Transfer from given id
8,276
def reversals ( self , transfer_id , data = { } , ** kwargs ) : url = "{}/{}/reversals" . format ( self . base_url , transfer_id ) return self . get_url ( url , data , ** kwargs )
Get all Reversal Transfer from given id
8,277
def request ( self , method , path , ** options ) : options = self . _update_user_agent_header ( options ) url = "{}{}" . format ( self . base_url , path ) response = getattr ( self . session , method ) ( url , auth = self . auth , verify = self . cert_path , ** options ) if ( ( response . status_code >= HTTP_STATUS_CODE . OK ) and ( response . status_code < HTTP_STATUS_CODE . REDIRECT ) ) : return response . json ( ) else : msg = "" code = "" json_response = response . json ( ) if 'error' in json_response : if 'description' in json_response [ 'error' ] : msg = json_response [ 'error' ] [ 'description' ] if 'code' in json_response [ 'error' ] : code = str ( json_response [ 'error' ] [ 'code' ] ) if str . upper ( code ) == ERROR_CODE . BAD_REQUEST_ERROR : raise BadRequestError ( msg ) elif str . upper ( code ) == ERROR_CODE . GATEWAY_ERROR : raise GatewayError ( msg ) elif str . upper ( code ) == ERROR_CODE . SERVER_ERROR : raise ServerError ( msg ) else : raise ServerError ( msg )
Dispatches a request to the Razorpay HTTP API
8,278
def get ( self , path , params , ** options ) : return self . request ( 'get' , path , params = params , ** options )
Parses GET request options and dispatches a request
8,279
def post ( self , path , data , ** options ) : data , options = self . _update_request ( data , options ) return self . request ( 'post' , path , data = data , ** options )
Parses POST request options and dispatches a request
8,280
def patch ( self , path , data , ** options ) : data , options = self . _update_request ( data , options ) return self . request ( 'patch' , path , data = data , ** options )
Parses PATCH request options and dispatches a request
8,281
def delete ( self , path , data , ** options ) : data , options = self . _update_request ( data , options ) return self . request ( 'delete' , path , data = data , ** options )
Parses DELETE request options and dispatches a request
8,282
def put ( self , path , data , ** options ) : data , options = self . _update_request ( data , options ) return self . request ( 'put' , path , data = data , ** options )
Parses PUT request options and dispatches a request
8,283
def _update_request ( self , data , options ) : data = json . dumps ( data ) if 'headers' not in options : options [ 'headers' ] = { } options [ 'headers' ] . update ( { 'Content-type' : 'application/json' } ) return data , options
Updates The resource data and header options
8,284
def all ( self , data = { } , ** kwargs ) : return super ( Invoice , self ) . all ( data , ** kwargs )
Fetch all Invoice entities
8,285
def fetch ( self , invoice_id , data = { } , ** kwargs ) : return super ( Invoice , self ) . fetch ( invoice_id , data , ** kwargs )
Fetch Invoice for given Id
8,286
def cancel ( self , invoice_id , ** kwargs ) : url = "{}/{}/cancel" . format ( self . base_url , invoice_id ) return self . post_url ( url , { } , ** kwargs )
Cancel an unpaid Invoice with given ID via API It can only be called on an invoice that is not in the paid state .
8,287
def delete ( self , invoice_id , ** kwargs ) : url = "{}/{}" . format ( self . base_url , invoice_id ) return self . delete_url ( url , { } , ** kwargs )
Delete an invoice. You can delete an invoice which is in the draft state.
8,288
def issue ( self , invoice_id , ** kwargs ) : url = "{}/{}/issue" . format ( self . base_url , invoice_id ) return self . post_url ( url , { } , ** kwargs )
Issues an invoice in draft state
8,289
def edit ( self , invoice_id , data = { } , ** kwargs ) : url = "{}/{}" . format ( self . base_url , invoice_id ) return self . patch_url ( url , data , ** kwargs )
Update an invoice. In draft state all the attributes are allowed.
8,290
def _pad_added ( self , element , pad ) : name = pad . query_caps ( None ) . to_string ( ) if name . startswith ( 'audio/x-raw' ) : nextpad = self . conv . get_static_pad ( 'sink' ) if not nextpad . is_linked ( ) : self . _got_a_pad = True pad . link ( nextpad )
The callback for GstElement's pad-added signal.
8,291
def _no_more_pads ( self , element ) : if not self . _got_a_pad : self . read_exc = NoStreamError ( ) self . ready_sem . release ( )
The callback for GstElement's no-more-pads signal.
8,292
def _new_sample ( self , sink ) : if self . running : buf = sink . emit ( 'pull-sample' ) . get_buffer ( ) mem = buf . get_all_memory ( ) success , info = mem . map ( Gst . MapFlags . READ ) if success : data = info . data mem . unmap ( info ) self . queue . put ( data ) else : raise GStreamerError ( "Unable to map buffer memory while reading the file." ) return Gst . FlowReturn . OK
The callback for appsink's new-sample signal.
8,293
def _unkown_type ( self , uridecodebin , decodebin , caps ) : streaminfo = caps . to_string ( ) if not streaminfo . startswith ( 'audio/' ) : return self . read_exc = UnknownTypeError ( streaminfo ) self . ready_sem . release ( )
The callback for decodebin's unknown-type signal.
8,294
def close ( self , force = False ) : if self . running or force : self . running = False self . finished = True self . pipeline . get_bus ( ) . remove_signal_watch ( ) self . dec . set_property ( "uri" , None ) self . sink . get_static_pad ( "sink" ) . disconnect ( self . caps_handler ) try : self . queue . get_nowait ( ) except queue . Empty : pass self . pipeline . set_state ( Gst . State . NULL ) del self . pipeline
Close the file and clean up associated resources .
8,295
def read_data ( self , block_samples = 1024 ) : old_width = self . _file . getsampwidth ( ) while True : data = self . _file . readframes ( block_samples ) if not data : break data = audioop . lin2lin ( data , old_width , TARGET_WIDTH ) if self . _needs_byteswap and self . _file . getcomptype ( ) != 'sowt' : data = byteswap ( data ) yield data
Generates blocks of PCM data found in the file .
8,296
def _gst_available ( ) : try : import gi except ImportError : return False try : gi . require_version ( 'Gst' , '1.0' ) except ( ValueError , AttributeError ) : return False try : from gi . repository import Gst except ImportError : return False return True
Determine whether Gstreamer and the Python GObject bindings are installed .
8,297
def available_backends ( ) : from . import rawread result = [ rawread . RawAudioFile ] if _ca_available ( ) : from . import macca result . append ( macca . ExtAudioFile ) if _gst_available ( ) : from . import gstdec result . append ( gstdec . GstAudioFile ) if _mad_available ( ) : from . import maddec result . append ( maddec . MadAudioFile ) if ffdec . available ( ) : result . append ( ffdec . FFmpegAudioFile ) return result
Returns a list of backends that are available on this system .
8,298
def audio_open(path, backends=None):
    if backends is None:
        backends = available_backends()
    for BackendClass in backends:
        try:
            return BackendClass(path)
        except DecodeError:
            pass
    raise NoBackendError()
Open an audio file using a library that is available on this system .
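A usage sketch for the entry point above, assuming the surrounding package is audioread with its usual file-object attributes (channels, samplerate, duration, and iteration over PCM buffers); the file path is a placeholder:

    import audioread

    with audioread.audio_open("example.wav") as f:
        print(f.channels, f.samplerate, f.duration)
        for block in f:
            pass  # each block is a bytes buffer of 16-bit little-endian PCM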
8,299
def popen_multiple(commands, command_args, *args, **kwargs):
    for i, command in enumerate(commands):
        cmd = [command] + command_args
        try:
            return subprocess.Popen(cmd, *args, **kwargs)
        except OSError:
            if i == len(commands) - 1:
                raise
Like subprocess.Popen, but can try multiple commands in case some are not available.