idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
4,900
def gene_list(list_id=None):
    """Display or add a gene list.

    GET with ``?download`` streams the gene ids as a text attachment.
    POST either links selected cases to an existing list (when
    ``list_id`` is given) or creates a new list from an uploaded file.
    """
    all_case_ids = [case.case_id for case in app.db.cases()]
    if list_id:
        genelist_obj = app.db.gene_list(list_id)
        # BUGFIX: abort *before* dereferencing the object; the original
        # accessed ``genelist_obj.cases`` first and raised AttributeError
        # instead of returning 404 for an unknown list id.
        if genelist_obj is None:
            return abort(404, "gene list not found: {}".format(list_id))
        case_ids = [case.case_id for case in app.db.cases()
                    if case not in genelist_obj.cases]
        if 'download' in request.args:
            response = make_response('\n'.join(genelist_obj.gene_ids))
            filename = secure_filename("{}.txt".format(genelist_obj.list_id))
            header = "attachment; filename={}".format(filename)
            response.headers['Content-Disposition'] = header
            return response
    if request.method == 'POST':
        if list_id:
            # link the submitted cases to the existing gene list
            case_ids = request.form.getlist('case_id')
            for case_id in case_ids:
                case_obj = app.db.case(case_id)
                if case_obj not in genelist_obj.cases:
                    genelist_obj.cases.append(case_obj)
            app.db.save()
        else:
            # create a brand new gene list from the uploaded file
            req_file = request.files['file']
            new_listid = (request.form['list_id'] or
                          secure_filename(req_file.filename))
            if app.db.gene_list(new_listid):
                return abort(500, 'Please provide a unique list name')
            if not req_file:
                return abort(500, 'Please provide a file for upload')
            gene_ids = [line for line in req_file.stream
                        if not line.startswith('#')]
            genelist_obj = app.db.add_genelist(new_listid, gene_ids)
            case_ids = all_case_ids
    # NOTE(review): a plain GET without list_id leaves genelist_obj
    # undefined, as in the original -- presumably the route always
    # provides list_id for GET; confirm against the URL rules.
    return render_template('gene_list.html', gene_list=genelist_obj,
                           case_ids=case_ids)
Display or add a gene list .
4,901
def delete_genelist(list_id, case_id=None):
    """Delete a gene list, or only unlink it from a single case.

    With ``case_id`` given, just the link between that case and the
    list is removed; otherwise the whole list is deleted.
    """
    if case_id is None:
        app.db.remove_genelist(list_id)
        return redirect(url_for('.index'))
    case_obj = app.db.case(case_id)
    app.db.remove_genelist(list_id, case_obj=case_obj)
    return redirect(request.referrer)
Delete a whole gene list with links to cases or a link .
4,902
def resources():
    """Upload a new resource file and attach it to an individual."""
    ind_id = request.form['ind_id']
    req_file = request.files['file']
    filename = secure_filename(req_file.filename)
    upload_dir = os.path.abspath(app.config['UPLOAD_DIR'])
    file_path = os.path.join(upload_dir, filename)
    # fall back to the sanitized file name when no display name is given
    name = request.form['name'] or filename
    req_file.save(file_path)
    ind_obj = app.db.individual(ind_id)
    app.db.add_resource(name, file_path, ind_obj)
    return redirect(request.referrer)
Upload a new resource for an individual .
4,903
def resource(resource_id):
    """Show a resource; ``?raw`` serves the underlying file directly."""
    resource_obj = app.db.resource(resource_id)
    if 'raw' not in request.args:
        return render_template('resource.html', resource=resource_obj)
    directory = os.path.dirname(resource_obj.path)
    basename = os.path.basename(resource_obj.path)
    return send_from_directory(directory, basename)
Show a resource .
4,904
def comments(case_id):
    """Store a new comment for a case (optionally tied to a variant)."""
    form = request.form
    case_obj = app.db.case(case_id)
    app.db.add_comment(case_obj, form['text'],
                       variant_id=form.get('variant_id'),
                       username=form.get('username'))
    return redirect(request.referrer)
Upload a new comment .
4,905
def individual(ind_id):
    """Render the detail page for one individual."""
    return render_template('individual.html',
                           individual=app.db.individual(ind_id))
Show details for a specific individual .
4,906
def synopsis(case_id):
    """Update the free-text synopsis of a case from the posted form."""
    case_obj = app.db.case(case_id)
    app.db.update_synopsis(case_obj, request.form['text'])
    return redirect(request.referrer)
Update the case synopsis .
4,907
def add_case():
    """Create a new case from a posted list of individual ids."""
    ind_ids = request.form.getlist('ind_id')
    if not ind_ids:
        return abort(400, "must add at least one member of case")
    case_id = request.form['case_id']
    new_case = Case(case_id=case_id,
                    name=case_id,
                    variant_source=request.form['source'],
                    variant_type=request.form['type'],
                    variant_mode='gemini')
    for ind_id in ind_ids:
        new_case.individuals.append(app.db.individual(ind_id))
    app.db.session.add(new_case)
    app.db.save()
    return redirect(url_for('.case', case_id=new_case.name))
Make a new case out of a list of individuals .
4,908
def print_sub(tables):
    """Dummy subscriber that logs every write/update/delete event.

    :param tables: list or set of table names to subscribe to
    :raises ValueError: if ``tables`` is not a list or set
    """
    logger = logging.getLogger("meepo.sub.print_sub")
    logger.info("print_sub tables: %s" % ", ".join(tables))
    if not isinstance(tables, (list, set)):
        raise ValueError("tables should be list or set")
    events = ("%s_%s" % (tb, action) for tb, action in
              itertools.product(*[tables, ["write", "update", "delete"]]))
    for event in events:
        # BUGFIX: bind ``event`` per-iteration (the original lambda
        # late-bound the loop variable, so every handler saw the last
        # event) and pass *both* values to the format string -- the
        # original ``"%s -> %s" % event, pk`` raised TypeError on fire.
        signal(event).connect(
            lambda pk, event=event: logger.info("%s -> %s" % (event, pk)),
            weak=False)
Dummy print sub .
4,909
def binary_to_term(data):
    """Decode an Erlang external-term-format binary into Python types.

    :raises ParseException: for non-bytes, empty, wrong-version,
        truncated or trailing-garbage input
    """
    if not isinstance(data, bytes):
        raise ParseException('not bytes input')
    size = len(data)
    if size <= 1:
        raise ParseException('null input')
    if b_ord(data[0]) != _TAG_VERSION:
        raise ParseException('invalid version')
    try:
        offset, term = _binary_to_term(1, data)
    except (struct.error, IndexError):
        raise ParseException('missing data')
    if offset != size:
        raise ParseException('unparsed data')
    return term
Decode Erlang terms within binary data into Python types
4,910
def term_to_binary(term, compressed=False):
    """Encode Python types into Erlang external term format bytes.

    :param compressed: False (no compression), True (zlib level 6),
        or an explicit level 0..9
    :raises InputException: for a level outside 0..9
    :raises OutputException: if the payload exceeds a uint32 length
    """
    payload = _term_to_binary(term)
    if compressed is False:
        return b_chr(_TAG_VERSION) + payload
    if compressed is True:
        compressed = 6  # zlib's default trade-off
    if not (0 <= compressed <= 9):
        raise InputException('compressed in [0..9]')
    size_uncompressed = len(payload)
    if size_uncompressed > 4294967295:
        raise OutputException('uint32 overflow')
    return (b_chr(_TAG_VERSION) + b_chr(_TAG_COMPRESSED_ZLIB) +
            struct.pack(b'>I', size_uncompressed) +
            zlib.compress(payload, compressed))
Encode Python types into Erlang terms in binary data
4,911
def _parseLine(cls, line):
    """Parse one ``<begin> <end> [<text>]`` line into an AudioClipSpec.

    :raises ValueError: when the line does not match the expected form
    """
    err_msg = ("Error: parsing '%s'. Correct: "
               "\"<number> <number> [<text>]\"" % line)
    match = cls._PROG.match(line)
    if not match:
        raise ValueError(err_msg)
    groups = match.groupdict()
    if len(groups['begin']) == 0 or len(groups['end']) == 0:
        raise ValueError(err_msg)
    return AudioClipSpec(groups['begin'], groups['end'],
                         groups['text'].strip())
Parses a single line of text and returns an AudioClipSpec
4,912
def mode_name(self):
    """Return the symbolic name of the current tunnel mode.

    Looks ``self.mode`` up in the ``MODES`` mapping; returns ``None``
    implicitly when the id is unknown.
    """
    # ``.items()`` replaces the Python-2-only ``.iteritems()``; the
    # loop variable also no longer shadows the builtin ``id``.
    for name, mode_id in self.MODES.items():
        if mode_id == self.mode:
            return name
Returns the tunnel mode's name for printing purposes.
4,913
def open(self):
    """Create the tunnel device.

    :raises AlreadyOpened: if the tunnel is already open
    :raises NotPermitted: if the TUNSETIFF ioctl fails with EPERM
    """
    if self.fd is not None:
        raise self.AlreadyOpened()
    logger.debug("Opening %s..." % (TUN_KO_PATH,))
    self.fd = os.open(TUN_KO_PATH, os.O_RDWR)
    logger.debug("Opening %s tunnel '%s'..."
                 % (self.mode_name.upper(), self.pattern,))
    try:
        ret = fcntl.ioctl(self.fd, self.TUNSETIFF,
                          struct.pack("16sH", self.pattern,
                                      self.mode | self.no_pi))
    except IOError as e:  # ``as`` replaces the Py2-only ``except X, e``
        if e.errno == 1:  # EPERM
            logger.error("Cannot open a %s tunnel because the operation "
                         "is not permitted." % (self.mode_name.upper(),))
            raise self.NotPermitted()
        raise
    # the kernel echoes back the (pattern-expanded) interface name
    self.name = ret[:16].strip("\x00")
    logger.info("Tunnel '%s' opened." % (self.name,))
Create the tunnel. If the tunnel is already opened, the function will raise an AlreadyOpened exception.
4,914
def close(self):
    """Close the tunnel; a no-op if it is already closed or never opened."""
    if self.fd is None:
        return
    label = self.name or ""
    logger.debug("Closing tunnel '%s'..." % (label,))
    os.close(self.fd)
    self.fd = None
    logger.info("Tunnel '%s' closed." % (label,))
Close the tunnel . If the tunnel is already closed or never opened do nothing .
4,915
def recv(self, size=None):
    """Read one buffer from the tunnel.

    :param size: read size; defaults to 1500, the classical MTU
    """
    if size is None:
        size = 1500
    return os.read(self.fd, size)
Receive a buffer . The default size is 1500 the classical MTU .
4,916
def download(self):
    """Download the MLBAM dataset, fanning out one worker per day."""
    workers = Pool()
    workers.map(self._download, self.days)
MLBAM dataset download
4,917
def count_by_type(self):
    """Count how often each individual salt bridge occurred over the run.

    Returns a ``np.recarray`` with one row per distinct
    (ligand atom, residue) contact and its frequency
    (occurrences / number of timesteps).
    """
    saltbridges = defaultdict(int)
    for contact in self.timeseries:
        pkey = (contact.ligandatomid, contact.ligandatomname,
                contact.resid, contact.resname, contact.segid)
        saltbridges[pkey] += 1
    dtype = [("ligand_atom_id", int), ("ligand_atom_name", "|U4"),
             ("resid", int), ("resname", "|U4"), ("segid", "|U8"),
             ("frequency", float)]
    out = np.empty((len(saltbridges),), dtype=dtype)
    tsteps = float(len(self.timesteps))
    # ``.items()`` replaces the Python-2-only ``.iteritems()``
    for cursor, (key, count) in enumerate(saltbridges.items()):
        out[cursor] = key + (count / tsteps,)
    return out.view(np.recarray)
Count how many times each individual salt bridge occurred throughout the simulation. Returns numpy array.
4,918
def count_by_time(self):
    """Count the salt bridges present in each frame.

    Returns a ``np.recarray`` with ``time`` and ``count`` columns.
    """
    dtype = [('time', float), ('count', int)]
    out = np.empty((len(self.timesteps),), dtype=dtype)
    for cursor, timestep in enumerate(self.timesteps):
        n_contacts = len([x for x in self.timeseries if x.time == timestep])
        out[cursor] = (timestep, n_contacts)
    return out.view(np.recarray)
Count how many salt bridges occurred in each frame. Returns numpy array.
4,919
def keep_longest(head, update, down_path):
    """Pick the longer value: 'f' keeps head, 's' takes the update.

    ``down_path`` is accepted for interface compatibility but unused.
    A missing update (or a length tie) keeps the head.
    """
    if update is None:
        return 'f'
    if head is None:
        return 's'
    return 's' if len(update) > len(head) else 'f'
Keep longest field among head and update .
4,920
def comments(self, case_id=None, variant_id=None, username=None):
    """Return a query of comments for a case or a variant.

    With only ``case_id`` set, variant-specific comments are excluded
    (case-level comments carry a NULL ``variant_id``).
    """
    logger.debug("Looking for comments")
    query = self.query(Comment)
    if case_id:
        query = query.filter_by(case_id=case_id)
    if variant_id:
        query = query.filter_by(variant_id=variant_id)
    elif case_id:
        query = query.filter_by(variant_id=None)
    return query
Return comments for a case or variant .
4,921
def add_comment(self, case_obj, text, variant_id=None, username=None):
    """Persist a comment on a case (optionally tied to a variant).

    Falls back to the username 'Anonymous' when none is given.
    """
    new_comment = Comment(text=text,
                          username=username or 'Anonymous',
                          case=case_obj,
                          variant_id=variant_id)
    self.session.add(new_comment)
    self.save()
    return new_comment
Add a comment to a variant or a case
4,922
def _add_consequences(self, variant_obj, raw_variant_line):
    """Attach every SO consequence term found in the raw variant line."""
    variant_obj.consequences = [term for term in SO_TERMS
                                if term in raw_variant_line]
Add the consequences found for a variant
4,923
def collect_appendvars(ap_, cls):
    """Collect ``appendvars_<name>`` class attributes into the
    ``ap_.appendvars`` lists, de-duplicating values."""
    prefix = 'appendvars_'
    for key, value in cls.__dict__.items():
        if not key.startswith(prefix):
            continue
        varname = key[len(prefix):]
        bucket = ap_.appendvars.setdefault(varname, [])
        if value in bucket:
            continue
        if not isinstance(value, list):
            value = [value]
        bucket += value
colleziona elementi per le liste .
4,924
def has_shared(arg, shared):
    """Return the index of ``arg`` among the shared arguments, or False.

    ``shared`` may be a spec list or an object exposing
    ``__shared_arguments__``.
    """
    try:
        if isinstance(shared, list):
            entries = shared
        else:
            entries = shared.__shared_arguments__
        for idx, (args, kwargs) in enumerate(entries):
            fallback = args[-1].lstrip('-').replace('-', '_')
            if kwargs.get('dest', fallback) == arg:
                return idx
        idx = False
    except (ValueError, AttributeError):
        idx = False
    return idx
Verifica se ci sono shared .
4,925
def has_argument(arg, arguments):
    """Return the index of ``arg`` among the argument specs, or False.

    ``arguments`` may be a spec list or an object exposing
    ``__arguments__``.
    """
    try:
        if not isinstance(arguments, list):
            arguments = arguments.__arguments__
        for idx, (args, kwargs) in enumerate(arguments):
            fallback = args[-1].lstrip('-').replace('-', '_')
            if kwargs.get('dest', fallback) == arg:
                return idx
        idx = False
    except (ValueError, AttributeError):
        idx = False
    return idx
Verifica se ci sono argument con la classe .
4,926
def get_functarguments(func):
    """Build argparse argument specs from a function's own signature.

    Positional parameters become positional arguments, keyword
    parameters become ``--flags``; parameters already covered by shared
    or class-level arguments are skipped.  Also populates
    ``func.__named__`` with the destination names.
    """
    # getfullargspec replaces getargspec, which was removed in Py 3.11
    spec = inspect.getfullargspec(func)
    if spec.defaults is not None:
        args = spec.args[:-len(spec.defaults)]
        kwargs = dict(zip(spec.args[-len(spec.defaults):], spec.defaults))
    else:
        args = spec.args
        kwargs = {}
    if args and args[0] == 'self':
        args.pop(0)
    func.__named__ = []
    arguments = []
    shared = get_shared(func)
    for arg in args:
        if has_shared(arg, shared) is not False:
            continue
        if has_argument(arg, func.__cls__) is not False:
            continue
        arguments.append(([arg], {},))
        func.__named__.append(arg)
    for key, val in kwargs.items():
        if has_shared(key, shared) is not False:
            continue
        if has_argument(key, func.__cls__) is not False:
            continue
        if isinstance(val, dict):
            # a dict default carries explicit flag/dest configuration
            flags = [val.pop('lflag', '--%s' % key)]
            short = val.pop('flag', None)
            dest = val.get('dest', key).replace('-', '_')
            if short:
                flags.insert(0, short)
        else:
            # a plain default just becomes the flag's default value
            flags = ['--%s' % key]
            val = dict(default=val)
            dest = key.replace('-', '_')
        func.__named__.append(dest)
        arguments.append((flags, val,))
    return arguments
Recupera gli argomenti dalla funzione stessa .
4,927
def get_parser(func, parent):
    """Create a sub-parser for ``func`` and register its arguments."""
    parser = parent.add_parser(func.__cmd_name__, help=func.__doc__)
    for flags, options in func.__arguments__:
        parser.add_argument(*flags, **options)
    return parser
Imposta il parser .
4,928
def get_shared(func):
    """Return the shared argument specs that apply to ``func``.

    ``func.__no_share__`` may be True (share nothing) or a collection
    of destination names to exclude.
    """
    shared = []
    if not hasattr(func, '__cls__'):
        return shared
    if not hasattr(func.__cls__, '__shared_arguments__'):
        return shared
    if not hasattr(func, '__no_share__'):
        return func.__cls__.__shared_arguments__
    if func.__no_share__ is True:
        return shared
    for spec in func.__cls__.__shared_arguments__:
        dest = spec[0][-1].replace('--', '').replace('-', '_')
        if dest not in func.__no_share__:
            shared.append(spec)
    return shared
return shared .
4,929
def set_subcommands(func, parser):
    """Register ``func``'s subcommands (plus their shared arguments) on
    ``parser``; with no subcommands, add the shared arguments directly."""
    if hasattr(func, '__subcommands__') and func.__subcommands__:
        sub_parser = parser.add_subparsers(
            title=SUBCOMMANDS_LIST_TITLE,
            dest='subcommand',
            description=SUBCOMMANDS_LIST_DESCRIPTION.format(func.__cmd_name__),
            help=func.__doc__)
        for sub_func in func.__subcommands__.values():
            child = get_parser(sub_func, sub_parser)
            for flags, options in get_shared(sub_func):
                child.add_argument(*flags, **options)
    else:
        for flags, options in get_shared(func):
            parser.add_argument(*flags, **options)
Set subcommands .
4,930
def check_help():
    """Return True when argv contains a help or version flag."""
    known = {'-h', '--help', '-v', '--version'}
    return bool(known & set(sys.argv[1:]))
check know args in argv .
4,931
def analysis_of_prot_lig_interactions(self):
    """Run the protein-ligand interaction analyses and store the results.

    Populates ``self.hbonds``, ``self.pistacking``, ``self.sasa``,
    ``self.lig_descr``, ``self.salt_bridges`` and -- only when a
    trajectory is present -- ``self.rmsf``.
    """
    # hydrogen bonds with a fixed distance cutoff of 3
    self.hbonds = HBonds(self.topol_data, self.trajectory, self.start,
                         self.end, self.skip, self.analysis_cutoff,
                         distance=3)
    self.pistacking = PiStacking(self.topol_data, self.trajectory,
                                 self.start, self.end, self.skip,
                                 self.analysis_cutoff)
    self.sasa = SASA(self.topol_data, self.trajectory)
    # ligand descriptors are computed first; salt bridges below use them
    self.lig_descr = LigDescr(self.topol_data)
    # RMSF only makes sense when an actual trajectory was supplied
    if self.trajectory != []:
        self.rmsf = RMSF_measurements(self.topol_data, self.topology,
                                      self.trajectory, self.ligand,
                                      self.start, self.end, self.skip)
    self.salt_bridges = SaltBridges(self.topol_data, self.trajectory,
                                    self.lig_descr, self.start, self.end,
                                    self.skip, self.analysis_cutoff)
The classes and function that deal with protein - ligand interaction analysis .
4,932
def save_files(self):
    """Create the output directory (prompting for a new name when the
    chosen one exists) and change into it."""
    while True:
        try:
            os.mkdir(self.output_name)
        except OSError:
            # narrowed from a blanket ``except Exception`` with an
            # unused binding; mkdir failures surface as OSError
            self.output_name = raw_input(
                "This directory already exists - please enter a new name:")
        else:
            break
    self.workdir = os.getcwd()
    # os.path.join replaces manual "/" concatenation
    os.chdir(os.path.join(self.workdir, self.output_name))
Saves all output from LINTools run in a single directory named after the output name .
4,933
def remove_files(self):
    """Remove intermediate files produced during a run."""
    file_list = ["molecule.svg", "lig.pdb", "HIS.pdb", "PHE.pdb",
                 "TRP.pdb", "TYR.pdb", "lig.mol", "test.xtc"]
    # per-residue svg files -- presumably named <resid><chain>.svg from
    # the plotted-residue keys; confirm against the plotting code
    for residue in self.topol_data.dict_of_plotted_res.keys():
        file_list.append(residue[1] + residue[2] + ".svg")
    for fname in file_list:
        # plain truth test replaces the ``== True`` comparison
        if os.path.isfile(fname):
            os.remove(fname)
Removes intermediate files .
4,934
def setup(self):
    """Initialize the driver: reset counters, configure GPIO interrupts
    and start the periodic statistics timer.

    Returns ``self`` so the call can be chained.
    """
    self.radiation_count = 0
    self.noise_count = 0
    self.count = 0
    # fixed-size ring buffer of past counts, plus its write cursor
    self.count_history = [0] * HISTORY_LENGTH
    self.history_index = 0
    self.previous_time = millis()
    self.previous_history_time = millis()
    self.duration = 0
    # both inputs idle high (pull-ups); events arrive as falling edges
    GPIO.setup(self.radiation_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    GPIO.setup(self.noise_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    GPIO.add_event_detect(self.radiation_pin, GPIO.FALLING,
                          callback=self._on_radiation)
    GPIO.add_event_detect(self.noise_pin, GPIO.FALLING,
                          callback=self._on_noise)
    self._enable_timer()
    return self
Initialize the driver by setting up GPIO interrupts and periodic statistics processing .
4,935
def load(ctx, variant_source, family_file, family_type, root):
    """Load a variant source into the puzzle database.

    Validates the root directory, the database file and the variant
    source, derives the backend mode and variant type, builds case
    objects and stores any that are not already present.
    """
    # resolution order: CLI option, context config, then ~/.puzzle
    root = root or ctx.obj.get('root') or os.path.expanduser("~/.puzzle")
    if os.path.isfile(root):
        logger.error("'root' can't be a file")
        ctx.abort()
    logger.info("Root directory is: {}".format(root))
    db_path = os.path.join(root, 'puzzle_db.sqlite3')
    logger.info("db path is: {}".format(db_path))
    if not os.path.exists(db_path):
        logger.warn("database not initialized, run 'puzzle init'")
        ctx.abort()
    if not os.path.isfile(variant_source):
        logger.error("Variant source has to be a file")
        ctx.abort()
    mode = get_file_type(variant_source)
    if mode == 'unknown':
        logger.error("Unknown file type")
        ctx.abort()
    elif mode == 'gemini':
        logger.debug("Initialzing GEMINI plugin")
        # GEMINI is an import-time feature flag for the optional backend
        if not GEMINI:
            logger.error("Need to have gemini installed to use gemini plugin")
            ctx.abort()
    logger.debug('Set puzzle backend to {0}'.format(mode))
    variant_type = get_variant_type(variant_source)
    logger.debug('Set variant type to {0}'.format(variant_type))
    cases = get_cases(variant_source=variant_source,
                      case_lines=family_file,
                      case_type=family_type,
                      variant_type=variant_type,
                      variant_mode=mode)
    if len(cases) == 0:
        logger.warning("No cases found")
        ctx.abort()
    logger.info("Initializing sqlite plugin")
    store = SqlStore(db_path)
    for case_obj in cases:
        # skip cases already present rather than overwriting them
        if store.case(case_obj.case_id) is not None:
            logger.warn("{} already exists in the database"
                        .format(case_obj.case_id))
            continue
        logger.debug("adding case: {} to puzzle db".format(case_obj.case_id))
        store.add_case(case_obj, vtype=variant_type, mode=mode)
Load a variant source into the database .
4,936
def list(self, deal_id, **params):
    """Retrieve the contacts associated with a deal."""
    url = "/deals/{deal_id}/associated_contacts".format(deal_id=deal_id)
    _status, _headers, associated_contacts = self.http_client.get(
        url, params=params)
    return associated_contacts
Retrieve a deal's associated contacts
4,937
def create(self, deal_id, *args, **kwargs):
    """Create an associated contact for a deal.

    Attributes come either as a single positional dict or as keyword
    arguments; only keys listed in ``OPTS_KEYS_TO_PERSIST`` are sent.

    :raises Exception: when no attributes are supplied
    """
    if not args and not kwargs:
        raise Exception('attributes for AssociatedContact are missing')
    attributes = args[0] if args else kwargs
    # ``.items()`` replaces the Python-2-only ``.iteritems()``
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    _, _, associated_contact = self.http_client.post(
        "/deals/{deal_id}/associated_contacts".format(deal_id=deal_id),
        body=attributes)
    return associated_contact
Create an associated contact
4,938
def destroy(self, deal_id, contact_id):
    """Remove an associated contact; True when the API answers 204."""
    url = "/deals/{deal_id}/associated_contacts/{contact_id}".format(
        deal_id=deal_id, contact_id=contact_id)
    status_code, _headers, _body = self.http_client.delete(url)
    return status_code == 204
Remove an associated contact
4,939
def list(self, **params):
    """Retrieve all contacts matching ``params``."""
    _status, _headers, contacts = self.http_client.get("/contacts",
                                                       params=params)
    return contacts
Retrieve all contacts
4,940
def retrieve(self, id):
    """Retrieve a single contact by id."""
    url = "/contacts/{id}".format(id=id)
    _status, _headers, contact = self.http_client.get(url)
    return contact
Retrieve a single contact
4,941
def list(self, **params):
    """Retrieve all deals, coercing each ``value`` field to Decimal."""
    _status, _headers, deals = self.http_client.get("/deals", params=params)
    for record in deals:
        record['value'] = Coercion.to_decimal(record['value'])
    return deals
Retrieve all deals
4,942
def create(self, *args, **kwargs):
    """Create a deal.

    Attributes come either as a single positional dict or as keyword
    arguments; only ``OPTS_KEYS_TO_PERSIST`` keys are sent, and the
    monetary ``value`` is round-tripped through string/Decimal.

    :raises Exception: when no attributes are supplied
    """
    if not args and not kwargs:
        raise Exception('attributes for Deal are missing')
    attributes = args[0] if args else kwargs
    # ``.items()`` replaces the Python-2-only ``.iteritems()``
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    if "value" in attributes:
        attributes["value"] = Coercion.to_string(attributes["value"])
    _, _, deal = self.http_client.post("/deals", body=attributes)
    deal["value"] = Coercion.to_decimal(deal["value"])
    return deal
Create a deal
4,943
def retrieve(self, id):
    """Retrieve a single deal, coercing ``value`` to Decimal."""
    url = "/deals/{id}".format(id=id)
    _status, _headers, deal = self.http_client.get(url)
    deal["value"] = Coercion.to_decimal(deal["value"])
    return deal
Retrieve a single deal
4,944
def update(self, id, *args, **kwargs):
    """Update a deal.

    Attributes come either as a single positional dict or as keyword
    arguments; only ``OPTS_KEYS_TO_PERSIST`` keys are sent, and the
    monetary ``value`` is round-tripped through string/Decimal.

    :raises Exception: when no attributes are supplied
    """
    if not args and not kwargs:
        raise Exception('attributes for Deal are missing')
    attributes = args[0] if args else kwargs
    # ``.items()`` replaces the Python-2-only ``.iteritems()``
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    if "value" in attributes:
        attributes["value"] = Coercion.to_string(attributes["value"])
    _, _, deal = self.http_client.put("/deals/{id}".format(id=id),
                                      body=attributes)
    deal["value"] = Coercion.to_decimal(deal["value"])
    return deal
Update a deal
4,945
def update(self, id, *args, **kwargs):
    """Update a deal source.

    Attributes come either as a single positional dict or as keyword
    arguments; only ``OPTS_KEYS_TO_PERSIST`` keys are sent.

    :raises Exception: when no attributes are supplied
    """
    if not args and not kwargs:
        raise Exception('attributes for DealSource are missing')
    attributes = args[0] if args else kwargs
    # ``.items()`` replaces the Python-2-only ``.iteritems()``
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    _, _, deal_source = self.http_client.put(
        "/deal_sources/{id}".format(id=id), body=attributes)
    return deal_source
Update a source
4,946
def list(self, **params):
    """Retrieve all deal unqualified reasons."""
    _status, _headers, deal_unqualified_reasons = self.http_client.get(
        "/deal_unqualified_reasons", params=params)
    return deal_unqualified_reasons
Retrieve all deal unqualified reasons
4,947
def retrieve(self, id):
    """Retrieve a single deal unqualified reason by id."""
    url = "/deal_unqualified_reasons/{id}".format(id=id)
    _status, _headers, deal_unqualified_reason = self.http_client.get(url)
    return deal_unqualified_reason
Retrieve a single deal unqualified reason
4,948
def list(self, **params):
    """Retrieve all leads matching ``params``."""
    _status, _headers, leads = self.http_client.get("/leads", params=params)
    return leads
Retrieve all leads
4,949
def retrieve(self, id):
    """Retrieve a single lead by id."""
    url = "/leads/{id}".format(id=id)
    _status, _headers, lead = self.http_client.get(url)
    return lead
Retrieve a single lead
4,950
def list(self, **params):
    """Retrieve all lead unqualified reasons."""
    _status, _headers, lead_unqualified_reasons = self.http_client.get(
        "/lead_unqualified_reasons", params=params)
    return lead_unqualified_reasons
Retrieve all lead unqualified reasons
4,951
def list(self, order_id, **params):
    """Retrieve the line items belonging to an order."""
    url = "/orders/{order_id}/line_items".format(order_id=order_id)
    _status, _headers, line_items = self.http_client.get(url, params=params)
    return line_items
Retrieve an order's line items
4,952
def retrieve(self, order_id, id):
    """Retrieve a single line item of an order."""
    url = "/orders/{order_id}/line_items/{id}".format(order_id=order_id,
                                                      id=id)
    _status, _headers, line_item = self.http_client.get(url)
    return line_item
Retrieve a single line item
4,953
def list(self, **params):
    """Retrieve all loss reasons."""
    _status, _headers, loss_reasons = self.http_client.get("/loss_reasons",
                                                           params=params)
    return loss_reasons
Retrieve all reasons
4,954
def retrieve(self, id):
    """Retrieve a single loss reason by id."""
    url = "/loss_reasons/{id}".format(id=id)
    _status, _headers, loss_reason = self.http_client.get(url)
    return loss_reason
Retrieve a single reason
4,955
def list(self, **params):
    """Retrieve all notes matching ``params``."""
    _status, _headers, notes = self.http_client.get("/notes", params=params)
    return notes
Retrieve all notes
4,956
def retrieve(self, id):
    """Retrieve a single note by id."""
    url = "/notes/{id}".format(id=id)
    _status, _headers, note = self.http_client.get(url)
    return note
Retrieve a single note
4,957
def list(self, **params):
    """Retrieve all orders matching ``params``."""
    _status, _headers, orders = self.http_client.get("/orders",
                                                     params=params)
    return orders
Retrieve all orders
4,958
def retrieve(self, id):
    """Retrieve a single order by id."""
    url = "/orders/{id}".format(id=id)
    _status, _headers, order = self.http_client.get(url)
    return order
Retrieve a single order
4,959
def list(self, **params):
    """Retrieve all pipelines."""
    _status, _headers, pipelines = self.http_client.get("/pipelines",
                                                        params=params)
    return pipelines
Retrieve all pipelines
4,960
def list(self, **params):
    """Retrieve all products."""
    _status, _headers, products = self.http_client.get("/products",
                                                       params=params)
    return products
Retrieve all products
4,961
def retrieve(self, id):
    """Retrieve a single product by id."""
    url = "/products/{id}".format(id=id)
    _status, _headers, product = self.http_client.get(url)
    return product
Retrieve a single product
4,962
def list(self, **params):
    """Retrieve all pipeline stages."""
    _status, _headers, stages = self.http_client.get("/stages",
                                                     params=params)
    return stages
Retrieve all stages
4,963
def list(self, **params):
    """Retrieve all tags."""
    _status, _headers, tags = self.http_client.get("/tags", params=params)
    return tags
Retrieve all tags
4,964
def retrieve(self, id):
    """Retrieve a single tag by id."""
    url = "/tags/{id}".format(id=id)
    _status, _headers, tag = self.http_client.get(url)
    return tag
Retrieve a single tag
4,965
def list(self, **params):
    """Retrieve all tasks matching ``params``."""
    _status, _headers, tasks = self.http_client.get("/tasks", params=params)
    return tasks
Retrieve all tasks
4,966
def retrieve(self, id):
    """Retrieve a single task by id."""
    url = "/tasks/{id}".format(id=id)
    _status, _headers, task = self.http_client.get(url)
    return task
Retrieve a single task
4,967
def list(self, **params):
    """Retrieve text messages matching ``params``."""
    _status, _headers, text_messages = self.http_client.get(
        "/text_messages", params=params)
    return text_messages
Retrieve text messages
4,968
def retrieve(self, id):
    """Retrieve a single text message by id."""
    url = "/text_messages/{id}".format(id=id)
    _status, _headers, text_message = self.http_client.get(url)
    return text_message
Retrieve a single text message
4,969
def list(self, **params):
    """Retrieve all users."""
    _status, _headers, users = self.http_client.get("/users", params=params)
    return users
Retrieve all users
4,970
def retrieve(self, id):
    """Retrieve a single user by id."""
    url = "/users/{id}".format(id=id)
    _status, _headers, user = self.http_client.get(url)
    return user
Retrieve a single user
4,971
def list(self, **params):
    """Retrieve visit outcomes matching ``params``."""
    _status, _headers, visit_outcomes = self.http_client.get(
        "/visit_outcomes", params=params)
    return visit_outcomes
Retrieve visit outcomes
4,972
def request(url, *args, **kwargs):
    """Perform an HTTP request and return the decoded JSON body.

    ``method`` defaults to GET; ``timeout`` defaults to 10 seconds.
    """
    # BUGFIX: pop (not get) so ``method`` is not also forwarded through
    # **kwargs -- the original raised TypeError (duplicate argument)
    # whenever a caller passed method=...
    method = kwargs.pop('method', 'GET')
    timeout = kwargs.pop('timeout', 10)
    req = requests.request(method, url, *args, timeout=timeout, **kwargs)
    data = req.json()
    _LOGGER.debug(json.dumps(data))
    return data
Do the HTTP Request and return data
4,973
def message_worker(device):
    """Consume the device's message queue forever, dispatching each
    decoded event to the device it belongs to."""
    _LOGGER.debug("Starting Worker Thread.")
    msg_q = device.messages
    while True:
        if not msg_q.empty():
            message = msg_q.get()
            data = {}
            try:
                data = json.loads(message.decode("utf-8"))
            except ValueError:
                _LOGGER.error("Received invalid message: %s", message)
            if 'device_id' in data:
                device_id = data.get('device_id')
                if device_id == device.device_id:
                    device.handle_event(data)
                else:
                    _LOGGER.warning("Received message for unknown device.")
            msg_q.task_done()
        time.sleep(0.2)
Loop through messages and pass them on to the right device
4,974
def socket_worker(sock, msg_q):
    """Receive datagrams forever and push each onto the message queue."""
    _LOGGER.debug("Starting Socket Thread.")
    while True:
        try:
            data, addr = sock.recvfrom(1024)
        except OSError as err:
            _LOGGER.error(err)
        else:
            _LOGGER.debug("received message: %s from %s", data, addr)
            msg_q.put(data)
        time.sleep(0.2)
Socket Loop that fills message queue
4,975
def toposort(graph, pick_first='head'):
    """Topologically sort a list-match graph.

    :param graph: mapping node -> BeforeNodes(head_node, update_node)
    :param pick_first: traversal preference, 'head' or 'update'
    :raises ValueError: if the graph contains a cycle
    """
    in_degree = {}
    for node, next_nodes in six.iteritems(graph):
        for successor in (next_nodes.head_node, next_nodes.update_node):
            if successor is None:
                continue
            in_degree[successor] = in_degree.get(successor, 0) + 1
    stack = [FIRST]
    ordered = []
    visited = set()
    while stack:
        node = stack.pop()
        visited.add(node)
        # FIRST is a synthetic root and is not part of the output
        if node != FIRST:
            ordered.append(node)
        traversal = _get_traversal(graph.get(node, BeforeNodes()), pick_first)
        for successor in traversal:
            if successor is None:
                continue
            if successor in visited:
                raise ValueError('Graph has a cycle')
            in_degree[successor] -= 1
            if in_degree[successor] == 0:
                stack.append(successor)
    if len(ordered) != len(graph) - 1:
        raise ValueError('Graph has a cycle')
    return ordered
Topologically sorts a list match graph.
4,976
def sort_cyclic_graph_best_effort(graph, pick_first='head'):
    """Best-effort ordering for graphs that may contain cycles.

    Walks the preferred chain first, then the secondary chain,
    appending each not-yet-seen node.
    """
    ordered = []
    visited = set()
    if pick_first == 'head':
        first_attr, second_attr = 'head_node', 'update_node'
    else:
        first_attr, second_attr = 'update_node', 'head_node'

    def walk(attr):
        # follow one chain from FIRST, collecting unvisited nodes
        current = FIRST
        while current is not None:
            visited.add(current)
            current = getattr(graph[current], attr)
            if current not in visited and current is not None:
                ordered.append(current)

    walk(first_attr)
    walk(second_attr)
    return ordered
Fallback for cases in which the graph has cycles .
4,977
def get(url):
    """Fetch ``url`` and print the body, or the status code on failure."""
    writeln("Getting data from url", url)
    response = requests.get(url)
    if response.status_code != 200:
        writeln(str(response.status_code), response.reason)
    else:
        writeln(response.text)
Retrieve an url .
4,978
def post(url, var):
    """POST ``key=value`` pairs from ``var`` to ``url`` and print the result.

    :param var: iterable of ``key=value`` strings
    """
    # BUGFIX: split on the *first* '=' only, so values that themselves
    # contain '=' (e.g. base64 payloads) are not truncated.
    pairs = [item.split("=", 1) for item in var]
    data = {pair[0]: pair[1] for pair in pairs}
    writeln("Sending data to url", url)
    response = requests.post(url, data=data)
    if response.status_code == 200:
        writeln(response.text)
    else:
        writeln(str(response.status_code), response.reason)
Post data to an url .
4,979
def cast_bytes(s, encoding='utf8', errors='strict'):
    """Coerce str or bytes to bytes.

    :raises TypeError: for any other input type
    """
    if isinstance(s, str):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        return s
    raise TypeError("Expected unicode or bytes, got %r" % s)
cast str or bytes to bytes
4,980
def cast_str(s, encoding='utf8', errors='strict'):
    """Coerce bytes or str to str.

    :raises TypeError: for any other input type
    """
    if isinstance(s, str):
        return s
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    raise TypeError("Expected unicode or bytes, got %r" % s)
cast bytes or str to str
4,981
def cast_datetime(ts, fmt=None):
    """Convert a POSIX timestamp to a datetime, or to a formatted string
    when ``fmt`` is given."""
    moment = datetime.datetime.fromtimestamp(ts)
    return moment.strftime(fmt) if fmt else moment
cast timestamp to datetime or date str
4,982
def singleton_init_by(init_fn=None):
    """Class decorator factory: turn a class into a lazily initialised
    singleton *instance*.

    >>> from Redy.Magic.Classic import singleton

    The decorated class is replaced by its single instance (note the
    trailing ``cls_def()`` call), with ``init_fn`` -- when given --
    running right after the original ``__init__``.
    """
    if not init_fn:
        # no extra initialiser: keep the original __init__ untouched
        def wrap_init(origin_init):
            return origin_init
    else:
        def wrap_init(origin_init):
            def __init__(self):
                origin_init(self)
                init_fn(self)
            return __init__

    def inner(cls_def: type):
        # install identity-based isinstance() unless the class already
        # defines a custom (non-builtin, non-slot-wrapper) check
        if not hasattr(cls_def, '__instancecheck__') or isinstance(
                cls_def.__instancecheck__,
                (types.BuiltinMethodType, _slot_wrapper)):
            def __instancecheck__(self, instance):
                return instance is self
            cls_def.__instancecheck__ = __instancecheck__
        _origin_init = cls_def.__init__
        cls_def.__init__ = wrap_init(_origin_init)
        # NOTE: returns an instance, not the class
        return cls_def()
    return inner
>>> from Redy . Magic . Classic import singleton >>>
4,983
def const_return(func):
    """Memoise ``func``'s first result; later calls return it unchanged,
    ignoring their arguments.

    >>> from Redy.Magic.Classic import const_return
    """
    cache = [_undef]

    def ret_call(*args, **kwargs):
        if cache[0] is _undef:
            cache[0] = func(*args, **kwargs)
        return cache[0]
    return ret_call
>>> from Redy . Magic . Classic import const_return >>>
4,984
def execute(func: types.FunctionType):
    """Immediately invoke *func*, resolving each positional argument by name.

    Every parameter is looked up in ``func.__globals__``; when a name is
    absent there, default values are consumed from ``spec.defaults`` in
    order as a fallback.

    Raises:
        ValueError: when a parameter is found neither in the globals nor
            among the remaining defaults.

    NOTE(review): relies on module-level ``_undef`` sentinel and
    ``getfullargspec`` imported elsewhere in this file.  Defaults are
    consumed strictly positionally, not matched to their own parameter —
    confirm that is intentional.
    """
    spec = getfullargspec(func)
    default = spec.defaults
    arg_cursor = 0  # index of the next unused default value

    def get_item(name):
        nonlocal arg_cursor
        ctx = func.__globals__
        value = ctx.get(name, _undef)
        if value is _undef:
            try:
                value = default[arg_cursor]
                arg_cursor += 1
            except (TypeError, IndexError):
                # TypeError: spec.defaults is None (no defaults at all);
                # IndexError: defaults exhausted.
                raise ValueError(f"Current context has no variable `{name}`")
        return value

    return func(*(get_item(arg_name) for arg_name in spec.args))
>>> from Redy . Magic . Classic import execute >>> x = 1 >>>
4,985
def cast(cast_fn):
    """Decorator factory: pipe the decorated function's return value
    through *cast_fn* (e.g. ``@cast(list)`` materializes a generator)."""
    def inner(func):
        def call(*args, **kwargs):
            raw = func(*args, **kwargs)
            return cast_fn(raw)
        # Preserve the wrapped function's metadata (__name__, __doc__, ...).
        functools.update_wrapper(call, func)
        return call
    return inner
>>> from Redy . Magic . Classic import cast >>>
4,986
def insert(self, action: Action, where: 'Union[int, Delegate.Where]'):
    """Insert *action* into this delegate's action list.

    *where* is either a plain index, or a callable (``Delegate.Where``)
    that maps the current action list to an insertion index.
    """
    if isinstance(where, int):
        self.actions.insert(where, action)
        return
    # Resolve the insertion point from the current action list.
    position = where(self.actions)
    self.actions.insert(position, action)
add a new action with specific priority
4,987
def patch_to_conflict_set(patch):
    """Translate one dictdiffer patch into a set of json_merger Conflicts.

    REMOVE patches become REMOVE_FIELD conflicts (one per removed key);
    CHANGE and ADD patches become SET_FIELD conflicts carrying the new
    value(s).
    """
    patch_type, patched_key, value = patch
    # Normalize the patched key into a tuple path: dictdiffer emits either
    # a list of keys or a dotted string.
    if isinstance(patched_key, list):
        key_path = tuple(patched_key)
    else:
        key_path = tuple(part for part in patched_key.split('.') if part)
    conflicts = set()
    if patch_type == REMOVE:
        for key, _obj in value:
            conflicts.add(
                Conflict(ConflictType.REMOVE_FIELD, key_path + (key,), None))
    elif patch_type == CHANGE:
        _first_val, second_val = value
        conflicts.add(Conflict(ConflictType.SET_FIELD, key_path, second_val))
    elif patch_type == ADD:
        for key, obj in value:
            conflicts.add(
                Conflict(ConflictType.SET_FIELD, key_path + (key,), obj))
    return conflicts
Translates a dictdiffer conflict into a json_merger one .
4,988
def merge(self):
    """Perform the merge of head and update starting from root.

    Dict inputs are merged structurally (resetting root to a dict when
    needed); anything else is merged as base values.  Raises MergeError
    when any conflicts were recorded during the merge.
    """
    both_are_dicts = isinstance(self.head, dict) and isinstance(self.update, dict)
    if both_are_dicts:
        if not isinstance(self.root, dict):
            self.root = {}
        self._merge_dicts()
    else:
        self._merge_base_values()
    if self.conflict_set:
        raise MergeError('Dictdiffer Errors', self.conflicts)
Perform merge of head and update starting from root .
4,989
def chebyshev(point1, point2):
    """Distance between two 2D points under the Chebyshev (L-infinity)
    metric: the larger of the per-axis absolute differences."""
    delta_x = abs(point1[0] - point2[0])
    delta_y = abs(point1[1] - point2[1])
    return delta_x if delta_x > delta_y else delta_y
Computes distance between 2D points using the Chebyshev metric
4,990
def circlescan(x0, y0, r1, r2):
    """Scan pixels along concentric circles centered on (x0, y0).

    Circles of radius r1 through r2 (inclusive) are traced, stepping
    outward or inward depending on which radius is larger.  Each circle is
    generated by a midpoint-circle walk over one octant, replicated via 8
    rotation matrices (45-degree steps).

    Args:
        x0, y0: center point.
        r1: initial radius (must be non-negative).
        r2: final radius (must be non-negative).

    Yields:
        (x, y) coordinates; diagonal octants yield float coordinates
        (rotation coefficient 0.707107), axis-aligned octants integers.

    Raises:
        ValueError: if either radius is negative.
    """
    if r1 < 0:
        raise ValueError("Initial radius must be non-negative")
    if r2 < 0:
        raise ValueError("Final radius must be non-negative")
    # Integer pixels emitted for the previous radius; used to suppress
    # duplicates between adjacent circles after rounding.
    previous = []
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        if distance == 0:
            # Degenerate circle: just the center point.
            yield x0, y0
        else:
            a = 0.707107  # ~sqrt(2)/2: 45-degree rotation coefficient
            # Rotation matrices for the 8 octant start angles (k * 45 deg).
            rotations = {0: [[1, 0], [0, 1]],
                         1: [[a, a], [-a, a]],
                         2: [[0, 1], [-1, 0]],
                         3: [[-a, a], [-a, -a]],
                         4: [[-1, 0], [0, -1]],
                         5: [[-a, -a], [a, -a]],
                         6: [[0, -1], [1, 0]],
                         7: [[a, -a], [a, a]]}
            nangles = len(rotations)
            current = []
            for angle in range(nangles):
                # Midpoint circle algorithm over one octant, rotated by
                # the current angle.
                x = 0
                y = distance
                d = 1 - distance  # midpoint decision variable
                while x < y:
                    # Rotate the octant point, then translate to center.
                    xr = rotations[angle][0][0] * x + rotations[angle][0][1] * y
                    yr = rotations[angle][1][0] * x + rotations[angle][1][1] * y
                    xr = x0 + xr
                    yr = y0 + yr
                    point = (int(round(xr)), int(round(yr)))
                    # Skip pixels already emitted on the previous circle.
                    # NOTE(review): flattened source is ambiguous on whether
                    # current.append belongs inside this if — confirm
                    # against upstream.
                    if point not in previous:
                        yield xr, yr
                        current.append(point)
                    # Midpoint step: straight (d < 0) or diagonal.
                    if (d < 0):
                        d += 3 + 2 * x
                    else:
                        d += 5 - 2 * (y - x)
                        y -= 1
                    x += 1
            previous = current
Scan pixels in a circle pattern around a center point
4,991
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels row by row over the rectangle [xi, xf] x [yi, yf].

    The x coordinate varies fastest; the sweep direction along each axis
    is inferred from the endpoints, while the step magnitudes must be
    positive.

    Raises:
        ValueError: if stepx or stepy is not positive.
    """
    if stepx <= 0:
        raise ValueError("X-step must be positive")
    if stepy <= 0:
        raise ValueError("Y-step must be positive")
    x_step = stepx if xf >= xi else -stepx
    y_step = stepy if yf >= yi else -stepy
    columns = range(xi, xf + x_step, x_step)
    for y in range(yi, yf + y_step, y_step):
        for x in columns:
            yield (x, y)
Scan pixels in a grid pattern along the x - coordinate then y - coordinate
4,992
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in rings around (x0, y0), clockwise, under *metric*.

    Each ring of radius r1..r2 (inclusive) is walked by repeatedly trying
    the 8 neighbouring offsets in a fixed rotation order, accepting only
    steps that keep the point at exactly the ring's distance from the
    center.

    Args:
        x0, y0: center point.
        r1: initial radius (must be non-negative).
        r2: final radius (must be non-negative).
        metric: callable distance function over two 2D points
            (defaults to chebyshev).

    Yields:
        (x, y) coordinates along each ring.

    Raises:
        ValueError: if either radius is negative.
        TypeError: if metric is not callable.
    """
    if r1 < 0:
        raise ValueError("Initial radius must be non-negative")
    if r2 < 0:
        raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"):
        raise TypeError("Metric not callable")
    # The 8 neighbour offsets, tried in a fixed rotation order.
    direction = 0
    steps = {0: [1, 0],
             1: [1, -1],
             2: [0, -1],
             3: [-1, -1],
             4: [-1, 0],
             5: [-1, 1],
             6: [0, 1],
             7: [1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        # Each ring starts at the point offset by +distance in y.
        initial = [x0, y0 + distance]
        current = initial
        ntrys = 0  # consecutive directions rejected from the current point
        while True:
            if distance == 0:
                # Degenerate ring: just the center point.
                yield current[0], current[1]
                break
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Candidate step leaves the ring: rotate to next direction.
                ntrys += 1
                if ntrys == nsteps:
                    # Every direction fails: this ring cannot be walked.
                    break
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            current = nextpoint
            if current == initial:
                # Completed a full loop around the ring.
                break
        if ntrys == nsteps:
            # A ring was unwalkable under this metric: stop scanning.
            # NOTE(review): placement inferred from flattened source —
            # confirm this break belongs to the radius loop.
            break
Scan pixels in a ring pattern around a center point clockwise
4,993
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a boustrophedon (snake) pattern: sweep along x,
    step in y, and reverse the x direction after each row that ends on a
    horizontal boundary."""
    dx = 1 if xf >= xi else -1
    dy = 1 if yf >= yi else -1
    x = xi
    near, far = xi, xf
    for y in range(yi, yf + dy, dy):
        for x in range(near, far + dx, dx):
            yield x, y
        # Reverse the horizontal sweep when the row ended on a boundary.
        if x == near or x == far:
            dx = -dx
            near, far = far, near
Scan pixels in a snake pattern along the x - coordinate then y - coordinate
4,994
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
    """Scan pixels in a random walk pattern with the given step probabilities.

    Yields (x, y) positions indefinitely, starting at (x0, y0); each step
    moves one pixel in -x, +x, -y or +y with probability xn, xp, yn, yp
    respectively.  The probabilities are normalized to sum to 1.

    Raises:
        ValueError: if any probability is negative, or all four are zero.
    """
    # (Typo fix: the original messages read "probabilty".)
    if xn < 0:
        raise ValueError("Negative x probability must be non-negative")
    if xp < 0:
        raise ValueError("Positive x probability must be non-negative")
    if yn < 0:
        raise ValueError("Negative y probability must be non-negative")
    if yp < 0:
        raise ValueError("Positive y probability must be non-negative")
    total = xp + xn + yp + yn
    if total == 0:
        # Previously fell through to an opaque ZeroDivisionError.
        raise ValueError("At least one probability must be positive")
    # Normalize into cumulative thresholds over [0, 1].
    xn /= total
    xp /= total
    yn /= total
    yp /= total
    cxn = xn
    cxp = cxn + xp
    cyn = cxp + yn
    x, y = x0, y0
    while True:
        yield x, y
        roll = random.random()
        if roll <= cxn:
            x -= 1
        elif roll <= cxp:
            x += 1
        elif roll <= cyn:
            y -= 1
        else:
            y += 1
Scan pixels in a random walk pattern with given step probabilities . The random walk will continue indefinitely unless a skip transformation is used with the stop parameter set or a clip transformation is used with the abort parameter set to True . The probabilities are normalized to sum to 1 .
4,995
def validate(self):
    """Validate the client configuration.

    Checks that the access token is present, contains no whitespace, is
    exactly 64 characters long, and that base_url matches URL_REGEXP.

    Returns:
        True when the configuration is valid.

    Raises:
        ConfigurationError: describing the first failed check.
    """
    # (Message typo fixes only: "you access token" -> "your access token",
    # "it not a valid URI" -> "it is not a valid URI", "incldues" ->
    # "includes".  Validation logic unchanged.)
    if self.access_token is None:
        raise ConfigurationError(
            'No access token provided. '
            'Set your access token during client initialization using: '
            '"basecrm.Client(access_token= <YOUR_PERSONAL_ACCESS_TOKEN>)"')
    if re.search(r'\s', self.access_token):
        raise ConfigurationError(
            'Provided access token is invalid '
            'as it contains disallowed characters. '
            'Please double-check your access token.')
    if len(self.access_token) != 64:
        raise ConfigurationError(
            'Provided access token is invalid '
            'as it has invalid length. '
            'Please double-check your access token.')
    if not self.base_url or not re.match(self.URL_REGEXP, self.base_url):
        raise ConfigurationError(
            'Provided base url is invalid '
            'as it is not a valid URI. '
            'Please make sure it includes the schema part, '
            'both http and https are accepted, '
            'and the hierarchical part')
    return True
Validate the client configuration; return True or raise ConfigurationError.
4,996
def start(self, device_uuid):
    """Start a synchronization flow.

    Returns the new session payload, or None when the server answers
    HTTP 204 (nothing to synchronize).
    """
    headers = self.build_headers(device_uuid)
    status_code, _, session = self.http_client.post(
        '/sync/start', body=None, headers=headers)
    if status_code == 204:
        return None
    return session
Start synchronization flow
4,997
def fetch(self, device_uuid, session_id):
    """Fetch the next batch of items from the session's main queue.

    Returns an empty list when the queue is drained (HTTP 204).
    """
    url = "/sync/{session_id}/queues/main".format(session_id=session_id)
    status_code, _, root = self.http_client.get(
        url, params=None, headers=self.build_headers(device_uuid), raw=True)
    if status_code == 204:
        return []
    return root['items']
Get data from queue
4,998
def ack(self, device_uuid, ack_keys):
    """Acknowledge received data.

    Returns True when the server accepts the acknowledgement (HTTP 202).
    """
    payload = {'ack_keys': ack_keys}
    status_code, _, _ = self.http_client.post(
        '/sync/ack', body=payload, headers=self.build_headers(device_uuid))
    return status_code == 202
Acknowledge received data
4,999
def fetch(self, callback):
    """Perform a full synchronization flow.

    Starts a session, drains the main queue batch by batch, and
    acknowledges every item for which ``callback(meta, data)`` returns
    truthy.  Returns immediately when no session could be started.
    """
    session = self.client.sync.start(self.device_uuid)
    if session is None or 'id' not in session:
        return
    session_id = session['id']
    while True:
        batch = self.client.sync.fetch(self.device_uuid, session_id)
        if not batch:
            break
        keys_to_ack = [item['meta']['sync']['ack_key']
                       for item in batch
                       if callback(item['meta'], item['data'])]
        if keys_to_ack:
            self.client.sync.ack(self.device_uuid, keys_to_ack)
Perform a full synchronization flow .