idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
11,200
def clear_matplotlib_ticks(ax=None, axis="both"):
    """Remove the default tick marks from a matplotlib axes.

    ``axis`` selects which ticks to clear: "x"/"horizontal",
    "y"/"vertical", or "both".  Does nothing when ``ax`` is falsy.
    """
    if not ax:
        return
    which = axis.lower()
    if which in ("both", "x", "horizontal"):
        ax.set_xticks([], [])
    if which in ("both", "y", "vertical"):
        ax.set_yticks([], [])
Clears the default matplotlib axes or the one specified by the axis argument .
11,201
def scatter(points, ax=None, permutation=None, colorbar=False, colormap=None,
            vmin=0, vmax=1, scientific=False, cbarlabel=None, cb_kwargs=None,
            **kwargs):
    """Plot trajectory points where each point satisfies x + y + z = scale.

    ``points`` is a list or numpy array of 3-tuples; they are projected
    to 2-D and forwarded to ``Axes.scatter`` with ``kwargs``.  When
    ``colorbar`` is true and a ``colormap`` is supplied, a colorbar is
    attached via ``colorbar_hack``.  Returns the axes used.
    """
    if not ax:
        fig, ax = pyplot.subplots()
    xs, ys = project_sequence(points, permutation=permutation)
    ax.scatter(xs, ys, vmin=vmin, vmax=vmax, **kwargs)
    # Fixed: compare against None with `is not None` (PEP 8) instead of
    # `!= None`, which relies on the object's __ne__.
    if colorbar and (colormap is not None):
        if cb_kwargs is not None:
            colorbar_hack(ax, vmin, vmax, colormap, scientific=scientific,
                          cbarlabel=cbarlabel, **cb_kwargs)
        else:
            colorbar_hack(ax, vmin, vmax, colormap, scientific=scientific,
                          cbarlabel=cbarlabel)
    return ax
Plots trajectory points where each point satisfies x + y + z = scale . First argument is a list or numpy array of tuples of length 3 .
11,202
def _en_to_enth ( energy , concs , A , B , C ) : enth = abs ( energy - concs [ 0 ] * A - concs [ 1 ] * B - concs [ 2 ] * C ) return enth
Converts an energy to an enthalpy .
11,203
def _energy_to_enthalpy(energy):
    """Convert stored CE and VASP energies to formation enthalpies.

    ``energy`` is a list of ``[CE, VASP, concentration]`` entries; the
    first three entries are used as the pure A, B and C references.
    """
    pure = [(energy[idx][0], energy[idx][1]) for idx in range(3)]
    enthalpy = []
    for ce_en, vasp_en, counts in energy:
        total = sum(counts)
        fracs = [float(c) / total for c in counts]
        ce_enth = _en_to_enth(ce_en, fracs, pure[0][0], pure[1][0], pure[2][0])
        vasp_enth = _en_to_enth(vasp_en, fracs, pure[0][1], pure[1][1], pure[2][1])
        enthalpy.append([ce_enth, vasp_enth, counts])
    return enthalpy
Converts energy to enthalpy . This function take the energies stored in the energy array and converts them to formation enthalpy .
11,204
def _find_error ( vals ) : err_vals = [ ] for en in vals : c = en [ 2 ] conc = [ float ( i ) / sum ( c ) for i in c ] err = abs ( en [ 0 ] - en [ 1 ] ) err_vals . append ( [ conc , err ] ) return err_vals
Find the errors in the energy values .
11,205
def _read_data ( fname ) : energy = [ ] with open ( fname , 'r' ) as f : for line in f : CE = abs ( float ( line . strip ( ) . split ( ) [ 0 ] ) ) VASP = abs ( float ( line . strip ( ) . split ( ) [ 1 ] ) ) conc = [ i for i in line . strip ( ) . split ( ) [ 2 : ] ] conc_f = [ ] for c in conc : if '[' in c and ']' in c : conc_f . append ( int ( c [ 1 : - 1 ] ) ) elif '[' in c : conc_f . append ( int ( c [ 1 : - 1 ] ) ) elif ']' in c or ',' in c : conc_f . append ( int ( c [ : - 1 ] ) ) else : conc_f . append ( int ( c ) ) energy . append ( [ CE , VASP , conc_f ] ) return energy
Reads data from file .
11,206
def conc_err_plot(fname):
    """Plot CE prediction errors on a ternary concentration diagram.

    Reads the data in ``fname``, converts the energies to formation
    enthalpies, and scatters the per-concentration |CE - VASP| errors.
    """
    enthalpy = _energy_to_enthalpy(_read_data(fname))
    this_errors = _find_error(enthalpy)
    points = [(c[0] * 100, c[1] * 100, c[2] * 100) for c, _e in this_errors]
    colors = [err for _c, err in this_errors]
    scale = 100
    figure, tax = ternary.figure(scale=scale)
    tax.boundary(linewidth=1.0)
    tax.set_title("Errors in Convex Hull Predictions.", fontsize=20)
    tax.gridlines(multiple=10, color="blue")
    tax.scatter(points, vmax=max(colors), colormap=plt.cm.viridis,
                colorbar=True, c=colors, cmap=plt.cm.viridis)
    tax.show()
Plots the error in the CE data . This plots the error in the CE predictions within a ternary concentration diagram .
11,207
def _connect_callbacks(self):
    """Wire matplotlib resize/draw events to the redraw callback."""
    canvas = self.get_figure().canvas
    redraw = partial(mpl_redraw_callback, tax=self)
    for event_name in ('resize_event', 'draw_event'):
        canvas.mpl_connect(event_name, redraw)
Connect resize matplotlib callbacks .
11,208
def set_title(self, title, **kwargs):
    """Set the title on the underlying matplotlib AxesSubplot."""
    self.get_axes().set_title(title, **kwargs)
Sets the title on the underlying matplotlib AxesSubplot .
11,209
def left_axis_label(self, label, position=None, rotation=60, offset=0.08,
                    **kwargs):
    """Record the left-axis label (drawn later with the stored data).

    A default simplex position offset from the left edge is used when
    ``position`` is not given.
    """
    default_position = (-offset, 3. / 5, 2. / 5)
    self._labels["left"] = (label, position or default_position,
                            rotation, kwargs)
Sets the label on the left axis .
11,210
def right_axis_label(self, label, position=None, rotation=-60, offset=0.08,
                     **kwargs):
    """Record the right-axis label (drawn later with the stored data).

    A default simplex position offset from the right edge is used when
    ``position`` is not given.
    """
    default_position = (2. / 5 + offset, 3. / 5, 0)
    self._labels["right"] = (label, position or default_position,
                             rotation, kwargs)
Sets the label on the right axis .
11,211
def clear_matplotlib_ticks(self, axis="both"):
    """Clear the default matplotlib ticks on this plot's axes."""
    plotting.clear_matplotlib_ticks(ax=self.get_axes(), axis=axis)
Clears the default matplotlib ticks .
11,212
def get_ticks_from_axis_limits(self, multiple=1):
    """Compute scaled tick values for the bottom, left and right axes.

    Uses ``self._axis_limits`` and ``self._boundary_scale`` to build a
    tick list per axis, stored in ``self._ticks`` under 'b', 'l', 'r'.
    """
    # Fixed: numpy.linspace's `num` must be an integer; the original
    # passed the float `scale / multiple + 1`, which raises TypeError on
    # modern numpy.
    num_ticks = int(round(self._boundary_scale / float(multiple))) + 1
    for k in ['b', 'l', 'r']:
        lo = self._axis_limits[k][0]
        hi = self._axis_limits[k][1]
        self._ticks[k] = numpy.linspace(lo, hi, num_ticks).tolist()
Taking self . _axis_limits and self . _boundary_scale get the scaled ticks for all three axes and store them in self . _ticks under the keys b for bottom l for left and r for right axes .
11,213
def set_custom_ticks(self, locations=None, clockwise=False, multiple=1,
                     axes_colors=None, tick_formats=None, **kwargs):
    """Apply the ticks computed by ``get_ticks_from_axis_limits``."""
    for axis_key in ('b', 'l', 'r'):
        self.ticks(ticks=self._ticks[axis_key], locations=locations,
                   axis=axis_key, clockwise=clockwise, multiple=multiple,
                   axes_colors=axes_colors, tick_formats=tick_formats,
                   **kwargs)
Having called get_ticks_from_axis_limits set the custom ticks on the plot .
11,214
def _redraw_labels(self):
    """Redraw axis and corner labels, typically after draw/resize events."""
    ax = self.get_axes()
    # Remove the artists drawn on the previous pass before re-adding.
    for mpl_object in self._to_remove:
        mpl_object.remove()
    self._to_remove = []
    label_data = list(self._labels.values())
    label_data.extend(self._corner_labels.values())
    for label, position, rotation, kwargs in label_data:
        transform = ax.transAxes
        x, y = project_point(position)
        # Convert the requested angle into display space at this point.
        point = numpy.array([x, y]).reshape((1, 2))
        new_rotation = ax.transData.transform_angles(
            numpy.array((rotation,)), point)[0]
        text = ax.text(x, y, label, rotation=new_rotation,
                       transform=transform, horizontalalignment="center",
                       **kwargs)
        text.set_rotation_mode("anchor")
        self._to_remove.append(text)
Redraw axis labels typically after draw or resize events .
11,215
def convert_coordinates(self, points, axisorder='blr'):
    """Map data coordinates to simplex coordinates under axis limits."""
    return convert_coordinates_sequence(points, self._boundary_scale,
                                        self._axis_limits, axisorder)
Convert data coordinates to simplex coordinates for plotting in the case that axis limits have been applied .
11,216
def get_cmap(cmap=None):
    """Return a matplotlib colormap instance.

    Accepts an existing Colormap (returned unchanged) or a colormap
    name; anything else falls back to ``DEFAULT_COLOR_MAP_NAME``.
    """
    if isinstance(cmap, matplotlib.colors.Colormap):
        return cmap
    name = cmap if isinstance(cmap, str) else DEFAULT_COLOR_MAP_NAME
    return plt.get_cmap(name)
Loads a matplotlib colormap if specified or supplies the default .
11,217
def blend_value(data, i, j, k, keys=None):
    """Average the values at a triangle's three vertices.

    ``keys`` defaults to the lower-triangle vertices at ``(i, j, k)``;
    each key is truncated to the key length used in ``data``.  Returns
    ``None`` when any vertex is missing from ``data``.
    """
    key_size = len(list(data.keys())[0])
    if not keys:
        keys = triangle_coordinates(i, j, k)
    trimmed = [tuple(key[:key_size]) for key in keys]
    try:
        return sum(data[key] for key in trimmed) / 3.
    except KeyError:
        return None
Computes the average value of the three vertices of a triangle in the simplex triangulation where two of the vertices are on the lower horizontal .
11,218
def alt_blend_value(data, i, j, k):
    """Average the vertex values of an upper (inverted) triangle."""
    return blend_value(data, i, j, k,
                       keys=alt_triangle_coordinates(i, j, k))
Computes the average value of the three vertices of a triangle in the simplex triangulation where two of the vertices are on the upper horizontal .
11,219
def triangle_coordinates(i, j, k):
    """Vertices of the lower (upright) triangle at simplex point (i, j, k)."""
    return [(i, j, k),
            (i + 1, j, k - 1),
            (i, j + 1, k - 1)]
Computes coordinates of the constituent triangles of a triangulation for the simplex. These triangles are parallel to the lower axis on the lower side.
11,220
def alt_triangle_coordinates(i, j, k):
    """Vertices of the upper (inverted) triangle at simplex point (i, j, k)."""
    return [(i, j + 1, k - 1),
            (i + 1, j, k - 1),
            (i + 1, j + 1, k - 2)]
Computes coordinates of the constituent triangles of a triangulation for the simplex. These triangles are parallel to the lower axis on the upper side.
11,221
def generate_hexagon_deltas():
    """Build the additive vectors tracing each hexagonal heatmap cell.

    Keys are three-character signatures recording which of (i, j, k)
    are nonzero (see ``hexagon_coordinates``); values are the offset
    vectors added to the cell center.
    """
    zero = numpy.array([0, 0, 0])
    alpha = numpy.array([-1. / 3, 2. / 3, 0])
    delta_up = numpy.array([1. / 3, 1. / 3, 0])
    delta_down = numpy.array([2. / 3, -1. / 3, 0])
    i_vec = numpy.array([0, 1. / 2, -1. / 2])
    i_vec_down = numpy.array([1. / 2, -1. / 2, 0])
    delta_x = numpy.array([1. / 2, 0, -1. / 2])
    return {
        "100": [zero, -delta_x, -delta_down, -i_vec_down],
        "010": [zero, i_vec_down, -alpha, -i_vec],
        "001": [zero, i_vec, delta_up, delta_x],
        "011": [i_vec, delta_up, delta_down, -alpha, -i_vec],
        "101": [-delta_x, -delta_down, alpha, delta_up, delta_x],
        "110": [i_vec_down, -alpha, -delta_up, -delta_down, -i_vec_down],
        "111": [alpha, delta_up, delta_down, -alpha, -delta_up, -delta_down],
    }
Generates a dictionary of the necessary additive vectors to generate the hexagon points for the hexagonal heatmap .
11,222
def hexagon_coordinates(i, j, k):
    """Vertices of the heatmap hexagon centered at (i, j, k).

    The signature records which coordinates are nonzero, selecting the
    appropriate (possibly clipped) hexagon shape from the delta table.
    """
    signature = "".join("0" if value == 0 else "1" for value in (i, j, k))
    center = numpy.array([i, j, k])
    return numpy.array([center + delta
                        for delta in hexagon_deltas[signature]])
Computes coordinates of the constituent hexagons of a hexagonal heatmap .
11,223
def polygon_generator(data, scale, style, permutation=None):
    """Yield (projected vertices, value) pairs for heatmap polygons.

    ``style`` is 'h' (hexagonal), 'd' (dual-triangular) or 't'
    (triangular); entries whose value is ``None`` are skipped.  Called
    by ``heatmap``.
    """
    project = functools.partial(project_point, permutation=permutation)
    data_gen = data.items() if isinstance(data, dict) else data
    for key, value in data_gen:
        if value is None:
            continue
        i, j = key[0], key[1]
        k = scale - i - j
        if style == 'h':
            yield (map(project, hexagon_coordinates(i, j, k)), value)
        elif style == 'd':
            # The data value colors the upright triangle; the inverted
            # one gets the blended average of its vertices.
            if (i <= scale) and (j <= scale) and (k >= 0):
                yield (map(project, triangle_coordinates(i, j, k)), value)
            if (i < scale) and (j < scale) and (k >= 1):
                blended = blend_value(data, i, j, k)
                yield (map(project, alt_triangle_coordinates(i, j, k)), blended)
        elif style == 't':
            if (i < scale) and (j < scale) and (k > 0):
                blended = blend_value(data, i, j, k)
                yield (map(project, triangle_coordinates(i, j, k)), blended)
            if (i < scale) and (j < scale) and (k > 1):
                blended = alt_blend_value(data, i, j, k)
                yield (map(project, alt_triangle_coordinates(i, j, k)), blended)
Generator for the vertices of the polygon to be colored and its color depending on style . Called by heatmap .
11,224
def heatmap(data, scale, vmin=None, vmax=None, cmap=None, ax=None,
            scientific=False, style='triangular', colorbar=True,
            permutation=None, use_rgba=False, cbarlabel=None, cb_kwargs=None):
    """Plot a heatmap of the given values on the simplex.

    ``data`` maps simplex coordinates to scalar values, or to RGBA
    values when ``use_rgba`` is true.  Raises ValueError for an unknown
    ``style``.  Returns the axes used.
    """
    if not ax:
        fig, ax = pyplot.subplots()
    if use_rgba:
        # Values are already colors; normalize them to numpy arrays.
        for key, val in data.items():
            data[key] = numpy.array(val)
    else:
        cmap = get_cmap(cmap)
        if vmin is None:
            vmin = min(data.values())
        if vmax is None:
            vmax = max(data.values())
    style = style.lower()[0]
    if style not in ('t', 'h', 'd'):
        raise ValueError("Heatmap style must be 'triangular', 'dual-triangular', or 'hexagonal'")
    vertices_values = polygon_generator(data, scale, style,
                                        permutation=permutation)
    for vertices, value in vertices_values:
        if value is None:
            continue
        if use_rgba:
            color = value
        else:
            color = colormapper(value, vmin, vmax, cmap=cmap)
        xs, ys = unzip(vertices)
        ax.fill(xs, ys, facecolor=color, edgecolor=color)
    if not cb_kwargs:
        cb_kwargs = dict()
    if colorbar:
        colorbar_hack(ax, vmin, vmax, cmap, scientific=scientific,
                      cbarlabel=cbarlabel, **cb_kwargs)
    return ax
Plots heatmap of given color values .
11,225
def svg_polygon(coordinates, color):
    """Return an SVG <polygon> element string for the given vertices."""
    points = " ".join(",".join(map(str, vertex)) for vertex in coordinates)
    return '<polygon points="%s" style="fill:%s;stroke:%s;stroke-width:0"/>\n' % (
        points, color, color)
Create an svg triangle for the stationary heatmap .
11,226
def line(ax, p1, p2, permutation=None, **kwargs):
    """Draw a straight line on ``ax`` between simplex points p1 and p2."""
    start = project_point(p1, permutation=permutation)
    end = project_point(p2, permutation=permutation)
    ax.add_line(Line2D((start[0], end[0]), (start[1], end[1]), **kwargs))
Draws a line on ax from p1 to p2 .
11,227
def horizontal_line(ax, scale, i, **kwargs):
    """Draw the i-th line parallel to the lower (horizontal) axis."""
    start = (0, i, scale - i)
    end = (scale - i, i, 0)
    line(ax, start, end, **kwargs)
Draws the i - th horizontal line parallel to the lower axis .
11,228
def left_parallel_line(ax, scale, i, **kwargs):
    """Draw the i-th line parallel to the left axis."""
    start = (i, scale - i, 0)
    end = (i, 0, scale - i)
    line(ax, start, end, **kwargs)
Draws the i - th line parallel to the left axis .
11,229
def right_parallel_line(ax, scale, i, **kwargs):
    """Draw the i-th line parallel to the right axis."""
    start = (0, scale - i, i)
    end = (scale - i, 0, i)
    line(ax, start, end, **kwargs)
Draws the i - th line parallel to the right axis .
11,230
def boundary(ax, scale, axes_colors=None, **kwargs):
    """Draw the three edges of the simplex boundary and return ``ax``.

    ``axes_colors`` may override the color per side ('l', 'r', 'b');
    missing sides default to black.
    """
    if axes_colors is None:
        axes_colors = dict()
    for side in ('l', 'r', 'b'):
        axes_colors.setdefault(side, 'black')
    horizontal_line(ax, scale, 0, color=axes_colors['b'], **kwargs)
    left_parallel_line(ax, scale, 0, color=axes_colors['l'], **kwargs)
    right_parallel_line(ax, scale, 0, color=axes_colors['r'], **kwargs)
    return ax
Plots the boundary of the simplex . Creates and returns matplotlib axis if none given .
11,231
def gridlines(ax, scale, multiple=None, horizontal_kwargs=None,
              left_kwargs=None, right_kwargs=None, **kwargs):
    """Draw gridlines (boundary lines excluded) and return ``ax``.

    ``multiple`` is the gridline spacing (default 1); per-family kwargs
    are merged over the shared defaults.
    """
    kwargs.setdefault("linewidth", 0.5)
    kwargs.setdefault("linestyle", ':')
    horizontal_kwargs = merge_dicts(kwargs, horizontal_kwargs)
    left_kwargs = merge_dicts(kwargs, left_kwargs)
    right_kwargs = merge_dicts(kwargs, right_kwargs)
    if not multiple:
        multiple = 1.
    for i in arange(0, scale, multiple):
        horizontal_line(ax, scale, i, **horizontal_kwargs)
    for i in arange(0, scale + multiple, multiple):
        left_parallel_line(ax, scale, i, **left_kwargs)
        right_parallel_line(ax, scale, i, **right_kwargs)
    return ax
Plots grid lines excluding boundary .
11,232
def create_next_candidates(prev_candidates, length):
    """Return the apriori candidate itemsets of size ``length`` as a list.

    Candidates are ``length``-combinations of all items appearing in
    ``prev_candidates``.  For ``length >= 3`` a candidate is kept only
    if every (length-1)-subset was itself a previous candidate.
    """
    # Collect the distinct items via a set comprehension instead of a
    # manual nested loop, and drop the redundant `True if ... else False`.
    item_set = {item for candidate in prev_candidates for item in candidate}
    items = sorted(item_set)
    tmp_next_candidates = (frozenset(x) for x in combinations(items, length))
    if length < 3:
        # No pruning is applied below size 3 (matches the original
        # algorithm's behavior).
        return list(tmp_next_candidates)
    return [
        candidate for candidate in tmp_next_candidates
        if all(frozenset(x) in prev_candidates
               for x in combinations(candidate, length - 1))
    ]
Returns the apriori candidates as a list .
11,233
def gen_support_records(transaction_manager, min_support, **kwargs):
    """Yield SupportRecord objects for all sufficiently frequent itemsets.

    Keyword arguments: ``max_length`` caps the itemset size;
    ``_create_next_candidates`` is injectable for testing.
    """
    max_length = kwargs.get('max_length')
    _create_next_candidates = kwargs.get('_create_next_candidates',
                                         create_next_candidates)
    candidates = transaction_manager.initial_candidates()
    length = 1
    while candidates:
        frequent = set()
        for candidate in candidates:
            support = transaction_manager.calc_support(candidate)
            if support < min_support:
                continue
            candidate_set = frozenset(candidate)
            frequent.add(candidate_set)
            yield SupportRecord(candidate_set, support)
        length += 1
        if max_length and length > max_length:
            break
        candidates = _create_next_candidates(frequent, length)
Returns a generator of support records with given transactions .
11,234
def gen_ordered_statistics(transaction_manager, record):
    """Yield OrderedStatistic objects for each base/add split of a record."""
    items = record.items
    for combination_set in combinations(sorted(items), len(items) - 1):
        items_base = frozenset(combination_set)
        items_add = frozenset(items.difference(items_base))
        base_support = transaction_manager.calc_support(items_base)
        confidence = record.support / base_support
        lift = confidence / transaction_manager.calc_support(items_add)
        yield OrderedStatistic(frozenset(items_base), frozenset(items_add),
                               confidence, lift)
Returns a generator of ordered statistics as OrderedStatistic instances .
11,235
def filter_ordered_statistics(ordered_statistics, **kwargs):
    """Yield only the statistics meeting min_confidence and min_lift."""
    min_confidence = kwargs.get('min_confidence', 0.0)
    min_lift = kwargs.get('min_lift', 0.0)
    for stat in ordered_statistics:
        if stat.confidence >= min_confidence and stat.lift >= min_lift:
            yield stat
Filter OrderedStatistic objects .
11,236
def apriori(transactions, **kwargs):
    """Run the Apriori algorithm and yield RelationRecord results.

    Keyword arguments: ``min_support`` (> 0, default 0.1),
    ``min_confidence``, ``min_lift``, ``max_length``; the
    underscore-prefixed hooks are injectable for testing.
    """
    min_support = kwargs.get('min_support', 0.1)
    min_confidence = kwargs.get('min_confidence', 0.0)
    min_lift = kwargs.get('min_lift', 0.0)
    max_length = kwargs.get('max_length', None)
    if min_support <= 0:
        raise ValueError('minimum support must be > 0')
    _gen_support_records = kwargs.get(
        '_gen_support_records', gen_support_records)
    _gen_ordered_statistics = kwargs.get(
        '_gen_ordered_statistics', gen_ordered_statistics)
    _filter_ordered_statistics = kwargs.get(
        '_filter_ordered_statistics', filter_ordered_statistics)
    transaction_manager = TransactionManager.create(transactions)
    support_records = _gen_support_records(
        transaction_manager, min_support, max_length=max_length)
    for support_record in support_records:
        ordered_statistics = list(
            _filter_ordered_statistics(
                _gen_ordered_statistics(transaction_manager, support_record),
                min_confidence=min_confidence,
                min_lift=min_lift,
            )
        )
        # Skip itemsets for which every rule was filtered out.
        if not ordered_statistics:
            continue
        yield RelationRecord(support_record.items, support_record.support,
                             ordered_statistics)
Executes Apriori algorithm and returns a RelationRecord generator .
11,237
def load_transactions(input_file, **kwargs):
    """Yield transactions parsed from a delimited file object.

    Empty rows are normalized to [''] so every yielded transaction is
    non-empty.  ``delimiter`` defaults to a tab.
    """
    reader = csv.reader(input_file, delimiter=kwargs.get('delimiter', '\t'))
    for transaction in reader:
        yield transaction or ['']
Load transactions and returns a generator for transactions .
11,238
def dump_as_json(record, output_file):
    """Write a relation record to ``output_file`` as one JSON document."""
    def default_func(value):
        # frozensets are not JSON serializable; emit them sorted.
        if isinstance(value, frozenset):
            return sorted(value)
        raise TypeError(repr(value) + " is not JSON serializable")
    converted_record = record._replace(
        ordered_statistics=[x._asdict() for x in record.ordered_statistics])
    json.dump(converted_record._asdict(), output_file,
              default=default_func, ensure_ascii=False)
    output_file.write(os.linesep)
Dump a relation record as a JSON value.
11,239
def dump_as_two_item_tsv(record, output_file):
    """Write the 1-item-to-1-item relations of a record as TSV rows."""
    template = '{0}\t{1}\t{2:.8f}\t{3:.8f}\t{4:.8f}{5}'
    for stats in record.ordered_statistics:
        if len(stats.items_base) != 1 or len(stats.items_add) != 1:
            continue
        output_file.write(template.format(
            list(stats.items_base)[0], list(stats.items_add)[0],
            record.support, stats.confidence, stats.lift, os.linesep))
Dump a relation record as TSV only for 2 item relations .
11,240
def main(**kwargs):
    """Command-line entry point: run Apriori and emit each result."""
    _parse_args = kwargs.get('_parse_args', parse_args)
    _load_transactions = kwargs.get('_load_transactions', load_transactions)
    _apriori = kwargs.get('_apriori', apriori)
    args = _parse_args(sys.argv[1:])
    transactions = _load_transactions(
        chain(*args.input), delimiter=args.delimiter)
    records = _apriori(transactions,
                       max_length=args.max_length,
                       min_support=args.min_support,
                       min_confidence=args.min_confidence)
    for record in records:
        args.output_func(record, args.output)
Executes Apriori algorithm and print its result .
11,241
def add_transaction(self, transaction):
    """Record a transaction, indexing each item to this transaction id."""
    index_map = self.__transaction_index_map
    for item in transaction:
        if item not in index_map:
            # First sighting: remember insertion order, start an index set.
            self.__items.append(item)
            index_map[item] = set()
        index_map[item].add(self.__num_transaction)
    self.__num_transaction += 1
Add a transaction .
11,242
def calc_support(self, items):
    """Return the fraction of transactions containing every item in ``items``.

    The empty itemset has support 1.0 by convention; an item never seen
    in any transaction yields 0.0.
    """
    if not items:
        return 1.0
    if not self.num_transaction:
        return 0.0
    common = None
    for item in items:
        indexes = self.__transaction_index_map.get(item)
        if indexes is None:
            return 0.0
        common = indexes if common is None else common.intersection(indexes)
    return float(len(common)) / self.__num_transaction
Returns a support for items .
11,243
def render_payment_form(self):
    """Render the DirectPayment form for entering payment information."""
    self.context[self.form_context_name] = self.payment_form_cls()
    return TemplateResponse(self.request, self.payment_template,
                            self.context)
Display the DirectPayment for entering payment information .
11,244
def validate_payment_form(self):
    """Validate and process the DirectPayment form; re-render on failure."""
    warn_untested()
    form = self.payment_form_cls(self.request.POST)
    if form.is_valid():
        if form.process(self.request, self.item):
            return HttpResponseRedirect(self.success_url)
        self.context['errors'] = self.errors['processing']
    self.context[self.form_context_name] = form
    # Invalid form: record the generic form error unless a more
    # specific one was already set.
    self.context.setdefault("errors", self.errors['form'])
    return TemplateResponse(self.request, self.payment_template,
                            self.context)
Try to validate and then process the DirectPayment form .
11,245
def redirect_to_express(self):
    """First ExpressCheckout step: get a token and redirect to PayPal."""
    wpp = PayPalWPP(self.request)
    try:
        nvp_obj = wpp.setExpressCheckout(self.item)
    except PayPalFailure:
        warn_untested()
        self.context['errors'] = self.errors['paypal']
        return self.render_payment_form()
    return HttpResponseRedirect(express_endpoint_for_token(nvp_obj.token))
First step of ExpressCheckout . Redirect the request to PayPal using the data returned from setExpressCheckout .
11,246
def validate_confirm_form(self):
    """Final ExpressCheckout step: send the confirmed payment to PayPal."""
    wpp = PayPalWPP(self.request)
    pp_data = dict(token=self.request.POST['token'],
                   payerid=self.request.POST['PayerID'])
    self.item.update(pp_data)
    try:
        if self.is_recurring():
            warn_untested()
            nvp = wpp.createRecurringPaymentsProfile(self.item)
        else:
            nvp = wpp.doExpressCheckoutPayment(self.item)
        self.handle_nvp(nvp)
    except PayPalFailure:
        self.context['errors'] = self.errors['processing']
        return self.render_payment_form()
    return HttpResponseRedirect(self.success_url)
Third and final step of ExpressCheckout. The user has pressed the confirmation button and we can send the final confirmation to PayPal using the data from the POSTed form.
11,247
def init(self, request, paypal_request, paypal_response):
    """Populate this PayPalNVP instance from a request/response pair."""
    if request is not None:
        from paypal.pro.helpers import strip_ip_port
        self.ipaddress = strip_ip_port(request.META.get('REMOTE_ADDR', ''))
        if hasattr(request, "user") and request.user.is_authenticated:
            self.user = request.user
    else:
        self.ipaddress = ''
    # Store the request minus sensitive fields, and the full response.
    query_data = dict((k, v) for k, v in paypal_request.items()
                      if k not in self.RESTRICTED_FIELDS)
    self.query = urlencode(query_data)
    self.response = urlencode(paypal_response)
    ack = paypal_response.get('ack', False)
    if ack != "Success":
        if ack == "SuccessWithWarning":
            warn_untested()
            self.flag_info = paypal_response.get('l_longmessage0', '')
        else:
            self.set_flag(paypal_response.get('l_longmessage0', ''),
                          paypal_response.get('l_errorcode', ''))
Initialize a PayPalNVP instance from a HttpRequest .
11,248
def set_flag(self, info, code=None):
    """Mark this instance for investigation, appending ``info``.

    ``code`` optionally records a flag code alongside the message.
    """
    self.flag = True
    self.flag_info += info
    if code is not None:
        self.flag_code = code
Flag this instance for investigation .
11,249
def process(self, request, item):
    """Run a direct payment (or recurring profile) through PayPal WPP."""
    warn_untested()
    from paypal.pro.helpers import PayPalWPP
    wpp = PayPalWPP(request)
    params = model_to_dict(self, exclude=self.ADMIN_FIELDS)
    # Card fields are added back explicitly -- presumably excluded via
    # ADMIN_FIELDS above (TODO confirm against the model definition).
    params['acct'] = self.acct
    params['creditcardtype'] = self.creditcardtype
    params['expdate'] = self.expdate
    params['cvv2'] = self.cvv2
    params.update(item)
    if 'billingperiod' in params:
        return wpp.createRecurringPaymentsProfile(params, direct=True)
    return wpp.doDirectPayment(params)
Do a direct payment .
11,250
def posted_data_dict(self):
    """Return PayPal's posted data parsed into a plain dict, or None."""
    if not self.query:
        return None
    from django.http import QueryDict
    # Rough pre-parse just to discover the declared charset, if any.
    roughdecode = dict(item.split('=', 1) for item in self.query.split('&'))
    encoding = roughdecode.get('charset', None)
    if encoding is None:
        encoding = DEFAULT_ENCODING
    data = QueryDict(self.query.encode('ascii'), encoding=encoding)
    return data.dict()
All the data that PayPal posted to us as a correctly parsed dictionary of values .
11,251
def verify(self):
    """Run postback verification and flag suspicious transactions.

    Checks for an invalid payment_status and duplicate txn_id, then
    saves the instance.
    """
    self.response = self._postback().decode('ascii')
    self.clear_flag()
    self._verify_postback()
    if not self.flag and self.is_transaction():
        if self.payment_status not in self.PAYMENT_STATUS_CHOICES:
            self.set_flag("Invalid payment_status. (%s)" % self.payment_status)
        if duplicate_txn_id(self):
            self.set_flag("Duplicate txn_id. (%s)" % self.txn_id)
    self.save()
Verifies an IPN and a PDT . Checks for obvious signs of weirdness in the payment and flags appropriately .
11,252
def verify_secret(self, form_instance, secret):
    """Verify an IPN payment over SSL using EWP; flag on a bad secret."""
    warn_untested()
    if not check_secret(form_instance, secret):
        # Bug fix: the original wrote `set_flag("...(%s)") % secret`,
        # applying `%` to set_flag's return value (None) and raising
        # TypeError instead of recording the flag message.
        self.set_flag("Invalid secret. (%s)" % secret)
    self.save()
Verifies an IPN payment over SSL using EWP .
11,253
def initialize(self, request):
    """Capture the raw query and client IP needed for the later postback."""
    if request.method == 'GET':
        self.query = request.META.get('QUERY_STRING', '')
    elif request.method == 'POST':
        self.query = request.body.decode('ascii')
    self.ipaddress = request.META.get('REMOTE_ADDR', '')
Store the data we'll need to make the postback from the request object.
11,254
def _encrypt(self):
    """Sign and encrypt the form's field data for PayPal EWP via M2Crypto."""
    from M2Crypto import BIO, SMIME, X509
    # Assemble "name=value" lines, starting with our certificate id.
    plaintext = 'cert_id=%s\n' % self.cert_id
    for name, field in self.fields.items():
        value = None
        if name in self.initial:
            value = self.initial[name]
        elif field.initial is not None:
            value = field.initial
        if value is not None:
            plaintext += u'%s=%s\n' % (name, value)
    plaintext = plaintext.encode('utf-8')
    # Sign the payload with our private key / public cert pair.
    smime = SMIME.SMIME()
    smime.load_key_bio(BIO.openfile(self.private_cert),
                       BIO.openfile(self.public_cert))
    signed = smime.sign(BIO.MemoryBuffer(plaintext),
                        flags=SMIME.PKCS7_BINARY)
    # Encrypt the signed blob against PayPal's certificate.
    paypal_x509 = X509.load_cert_bio(BIO.openfile(self.paypal_cert))
    stack = X509.X509_Stack()
    stack.push(paypal_x509)
    smime.set_x509_stack(stack)
    smime.set_cipher(SMIME.Cipher('des_ede3_cbc'))
    signed_buf = BIO.MemoryBuffer()
    signed.write_der(signed_buf)
    encrypted = smime.encrypt(signed_buf, flags=SMIME.PKCS7_BINARY)
    out = BIO.MemoryBuffer()
    encrypted.write(out)
    return out.read().decode()
Use your key thing to encrypt things .
11,255
def paypal_time(time_obj=None):
    """Format a time (UTC now by default) for PayPal time fields."""
    warn_untested()
    if time_obj is None:
        time_obj = time.gmtime()
    return time.strftime(PayPalNVP.TIMESTAMP_FORMAT, time_obj)
Returns a time suitable for PayPal time fields .
11,256
def paypaltime2datetime(s):
    """Parse a PayPal timestamp string into a datetime.

    The result is timezone-aware (UTC) when ``settings.USE_TZ`` is on,
    naive otherwise.
    """
    naive = datetime.datetime.strptime(s, PayPalNVP.TIMESTAMP_FORMAT)
    if settings.USE_TZ:
        return timezone.make_aware(naive, timezone.utc)
    return naive
Convert a PayPal time string to a DateTime .
11,257
def express_endpoint_for_token(token, commit=False):
    """Build the Express Checkout redirect URL for ``token``.

    Pass ``commit=True`` if you will not prompt for confirmation when
    the user returns to your site.
    """
    pp_params = dict(token=token)
    if commit:
        pp_params['useraction'] = 'commit'
    return express_endpoint() % urlencode(pp_params)
Returns the PayPal Express Checkout endpoint for a token . Pass commit = True if you will not prompt for confirmation when the user returns to your site .
11,258
def strip_ip_port(ip_address):
    """Strip the port (and brackets) from an IPv4 or bracketed IPv6 address.

    Returns the bare address string; addresses without a recognizable
    port pass through unchanged.
    """
    # Bug fix: check the bracketed-IPv6 form first.  An IPv4-mapped
    # IPv6 address such as "[::ffff:1.2.3.4]:80" contains '.', so the
    # original's IPv4 branch ran first and mangled it.
    if ']:' in ip_address:
        # "[addr]:port" -- drop the port, then the surrounding brackets.
        cleaned_ip = ip_address.rpartition(':')[0][1:-1]
    elif '.' in ip_address:
        # IPv4, possibly "addr:port".
        cleaned_ip = ip_address.split(':')[0]
    else:
        cleaned_ip = ip_address
    return cleaned_ip
Strips the port from an IPv4 or IPv6 address returns a unicode object .
11,259
def doDirectPayment(self, params):
    """Call the PayPal DoDirectPayment API method.

    Raises PayPalFailure when the response is flagged.
    """
    defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"}
    required = ["creditcardtype", "acct", "expdate", "cvv2", "ipaddress",
                "firstname", "lastname", "street", "city", "state",
                "countrycode", "zip", "amt"]
    nvp_obj = self._fetch(params, required, defaults)
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
Call PayPal DoDirectPayment method .
11,260
def setExpressCheckout(self, params):
    """Initiate an Express Checkout transaction.

    Recurring-payment params are adapted first.  Returns the NVP
    record (check token/payerid to continue); raises PayPalFailure on a
    flagged response.
    """
    if self._is_recurring(params):
        params = self._recurring_setExpressCheckout_adapter(params)
    defaults = {"method": "SetExpressCheckout", "noshipping": 1}
    required = ["returnurl", "cancelurl", "paymentrequest_0_amt"]
    nvp_obj = self._fetch(params, required, defaults)
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
Initiates an Express Checkout transaction . Optionally the SetExpressCheckout API operation can set up billing agreements for reference transactions and recurring payments . Returns a NVP instance - check for token and payerid to continue!
11,261
def createRecurringPaymentsProfile(self, params, direct=False):
    """Call CreateRecurringPaymentsProfile; raise PayPalFailure on error.

    Set ``direct=True`` when called as part of a direct payment (card
    fields required) rather than an Express Checkout flow (token and
    payerid required).
    """
    defaults = {"method": "CreateRecurringPaymentsProfile"}
    required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"]
    # Bug fix: the original used `required + [...]` and discarded the
    # result, so the flow-specific fields were never actually required.
    if direct:
        required += ["creditcardtype", "acct", "expdate",
                     "firstname", "lastname"]
    else:
        required += ["token", "payerid"]
    nvp_obj = self._fetch(params, required, defaults)
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
Set direct to True to indicate that this is being called as a directPayment. Returns True if PayPal successfully creates the profile, otherwise False.
11,262
def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False):
    """Call ManageRecurringPaymentsProfileStatus.

    Requires ``profileid`` and ``action`` (Cancel, Suspend or
    Reactivate).  With ``fail_silently`` the specific "already
    inactive" cancel failure is swallowed instead of raised.
    """
    defaults = {"method": "ManageRecurringPaymentsProfileStatus"}
    required = ["profileid", "action"]
    nvp_obj = self._fetch(params, required, defaults)
    harmless = 'Invalid profile status for cancel action; profile should be active or suspended'
    if nvp_obj.flag and not (fail_silently and nvp_obj.flag_info == harmless):
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
Requires profileid and action params . Action must be either Cancel Suspend or Reactivate .
11,263
def _recurring_setExpressCheckout_adapter ( self , params ) : params [ 'l_billingtype0' ] = "RecurringPayments" params [ 'l_billingagreementdescription0' ] = params [ 'desc' ] REMOVE = [ "billingfrequency" , "billingperiod" , "profilestartdate" , "desc" ] for k in params . keys ( ) : if k in REMOVE : del params [ k ] return params
The recurring payment interface to SEC is different than the recurring payment interface to ECP . This adapts a normal call to look like a SEC call .
11,264
def _fetch(self, params, required, defaults):
    """Make the NVP request and store the response.

    ``defaults`` (the API method name etc.) is overlaid with the caller's
    ``params``, validated against ``required``, POSTed to PayPal, and the
    whole exchange is persisted as a ``PayPalNVP`` row, which is returned.
    """
    # Caller-supplied params win over the per-call defaults.
    defaults.update(params)
    pp_params = self._check_and_update_params(required, defaults)
    pp_string = self.signature + urlencode(pp_params)
    response = self._request(pp_string)
    response_params = self._parse_response(response)
    log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults))
    log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params))
    # Merge request + response fields, keeping only those the PayPalNVP
    # model actually declares.
    nvp_params = {}
    tmpd = defaults.copy()
    tmpd.update(response_params)
    for k, v in tmpd.items():
        if k in self.NVP_FIELDS:
            nvp_params[str(k)] = v
    # PayPal timestamps arrive as strings; convert for model storage.
    if 'timestamp' in nvp_params:
        nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp'])
    nvp_obj = PayPalNVP(**nvp_params)
    nvp_obj.init(self.request, params, response_params)
    nvp_obj.save()
    return nvp_obj
Make the NVP request and store the response .
11,265
def _request(self, data):
    """POST the raw NVP payload string to the PayPal endpoint.

    Kept as its own method so tests can mock the network call easily.
    """
    http_response = requests.post(self.endpoint, data=data.encode("ascii"))
    return http_response.content
Moved out to make testing easier .
11,266
def _check_and_update_params ( self , required , params ) : for r in required : if r not in params : raise PayPalError ( "Missing required param: %s" % r ) return ( dict ( ( k . upper ( ) , v ) for k , v in params . items ( ) ) )
Ensure all required parameters were passed to the API call and format them correctly .
11,267
def _parse_response(self, response):
    """Turn the urlencoded PayPal response body into a plain dict.

    Keys are lower-cased for consistent lookups downstream.
    """
    parsed = QueryDict(response, encoding='UTF-8').dict()
    return dict((key.lower(), value) for key, value in parsed.items())
Turn the PayPal response into a dict
11,268
def _postback(self):
    """Perform PayPal PDT postback validation.

    Sends the transaction ID plus the identity token back to PayPal,
    which responds with SUCCESS or FAILED; the raw body is returned.
    """
    payload = dict(cmd="_notify-synch", at=IDENTITY_TOKEN, tx=self.tx)
    return requests.post(self.get_endpoint(), data=payload).content
Perform PayPal PDT Postback validation. Sends the transaction ID and business token to PayPal, which responds with SUCCESS or FAILED.
11,269
def duplicate_txn_id(ipn_obj):
    """Return True when an earlier unflagged IPN record shares this
    txn_id and its payment_status has not changed.

    (Historically this only detected a Pending -> Completed transition;
    it now treats any unchanged status as a duplicate.)
    """
    manager = ipn_obj.__class__._default_manager
    previous = (
        manager.filter(txn_id=ipn_obj.txn_id)
        .exclude(id=ipn_obj.id)
        .exclude(flag=True)
        .order_by('-created_at')[:1]
    )
    if len(previous) > 0:
        return previous[0].payment_status == ipn_obj.payment_status
    return False
Returns True if a record with this transaction id exists and its payment_status has not changed . This function has been completely changed from its previous implementation where it used to specifically only check for a Pending - > Completed transition .
11,270
def make_secret(form_instance, secret_fields=None):
    """Return a secret for use in an EWP form or an IPN verification.

    The secret is a SHA-1 digest of ``settings.SECRET_KEY`` concatenated
    with the values of ``secret_fields`` pulled from the form.  Should
    only be used over SSL.
    """
    warn_untested()
    if secret_fields is None:
        secret_fields = ['business', 'item_name']
    data = ""
    for name in secret_fields:
        # Bound/validated forms expose cleaned_data; unbound forms fall
        # back to form-level then field-level initial values.
        if hasattr(form_instance, 'cleaned_data'):
            if name in form_instance.cleaned_data:
                data += unicode(form_instance.cleaned_data[name])
        else:
            if name in form_instance.initial:
                data += unicode(form_instance.initial[name])
            elif name in form_instance.fields and form_instance.fields[name].initial is not None:
                data += unicode(form_instance.fields[name].initial)
    # NOTE(review): ``unicode`` exists only on Python 2 -- confirm this
    # module still targets Python 2 or provides a compat alias.
    secret = get_sha1_hexdigest(settings.SECRET_KEY, data)
    return secret
Returns a secret for use in a EWP form or an IPN verification based on a selection of variables in params . Should only be used with SSL .
11,271
def is_number(self):
    """Strip every non-digit from ``self.number`` (in place) and return
    True if at least one digit remains."""
    digits_only = re.sub(r'[^\d]', '', self.number)
    self.number = digits_only
    return digits_only.isdigit()
True if there is at least one digit in number .
11,272
def is_mod10(self):
    """Return True when ``self.number`` passes the Luhn (mod-10) check."""
    checksum = 0
    # Walk digits right-to-left; every second digit is doubled and the
    # digits of each product are summed into the checksum.
    for position, digit in enumerate(reversed(self.number)):
        multiplier = 2 if position % 2 else 1
        product = multiplier * int(digit)
        checksum += sum(int(ch) for ch in str(product))
    return checksum % 10 == 0
Returns True if number is valid according to mod10 .
11,273
def get_type(self):
    """Return the name of the first card whose pattern matches
    ``self.number``, or None when nothing matches."""
    matches = (name for name, pattern in CARDS.items() if pattern.match(self.number))
    return next(matches, None)
Return the type if it matches one of the cards .
11,274
def verify(self):
    """Return the detected card type when the number is a valid,
    non-test, Luhn-passing number; otherwise None."""
    looks_valid = self.is_number() and not self.is_test() and self.is_mod10()
    return self.get_type() if looks_valid else None
Returns the card type if valid else None .
11,275
def clean(self, value):
    """Validate a credit card number, stashing its detected type on
    ``self.card_type``.

    Dashes and spaces are stripped before validation; raises
    ``ValidationError`` for an unrecognized or invalid number.
    Empty/None values are returned untouched.
    """
    if not value:
        return value
    normalized = value.replace('-', '').replace(' ', '')
    self.card_type = verify_credit_card(normalized)
    if self.card_type is None:
        raise forms.ValidationError("Invalid credit card number.")
    return normalized
Raises a ValidationError if the card is not valid and stashes card type .
11,276
def process(self, request, item):
    """Process a PayPal direct payment.

    Builds WPP parameters from the cleaned form data plus ``item`` and
    submits either a one-off DoDirectPayment or, when ``billingperiod``
    is present, a recurring payments profile.

    Returns True on success, False when PayPal rejects the payment.
    """
    warn_untested()
    from paypal.pro.helpers import PayPalWPP
    wpp = PayPalWPP(request)
    params = self.cleaned_data
    params['creditcardtype'] = self.fields['acct'].card_type
    # PayPal expects the expiry date formatted as MMYYYY.
    params['expdate'] = self.cleaned_data['expdate'].strftime("%m%Y")
    params['ipaddress'] = request.META.get("REMOTE_ADDR", "")
    params.update(item)
    try:
        # The presence of a billing period marks a recurring payment.
        if 'billingperiod' not in params:
            wpp.doDirectPayment(params)
        else:
            wpp.createRecurringPaymentsProfile(params, direct=True)
    except PayPalFailure:
        return False
    return True
Process a PayPal direct payment .
11,277
def _postback(self):
    """Perform PayPal IPN postback validation.

    Echoes the received query string back to PayPal with the
    ``_notify-validate`` command and returns the raw response body.
    """
    payload = b"cmd=_notify-validate&" + self.query.encode("ascii")
    return requests.post(self.get_endpoint(), data=payload).content
Perform PayPal Postback validation .
11,278
def send_signals(self):
    """Shout for the world to hear whether a txn was successful.

    Dispatches ``invalid_ipn_received`` for flagged transactions and
    ``valid_ipn_received`` otherwise.
    """
    signal = invalid_ipn_received if self.flag else valid_ipn_received
    signal.send(sender=self)
Shout for the world to hear whether a txn was successful .
11,279
def visit_delete_stmt(element, compiler, **kwargs):
    """Adds redshift-dialect specific compilation rule for the DELETE
    statement.

    Redshift requires tables referenced in the WHERE clause (other than
    the deletion target itself) to be listed in a USING clause.
    """
    whereclause = ''
    usingclause = ''
    delete_stmt_table = compiler.process(element.table, asfrom=True, **kwargs)
    whereclause_tuple = element.get_children()
    if whereclause_tuple:
        usingclause_tables = []
        whereclause = ' WHERE {clause}'.format(
            clause=compiler.process(*whereclause_tuple, **kwargs))
        # Collect every table mentioned by a column in the WHERE clause.
        whereclause_columns = gen_columns_from_children(element)
        for col in whereclause_columns:
            table = compiler.process(col.table, asfrom=True, **kwargs)
            # Only foreign tables go into USING, each listed once.
            if table != delete_stmt_table and table not in usingclause_tables:
                usingclause_tables.append(table)
        if usingclause_tables:
            usingclause = ' USING {clause}'.format(
                clause=', '.join(usingclause_tables))
    return 'DELETE FROM {table}{using}{where}'.format(
        table=delete_stmt_table, using=usingclause, where=whereclause)
Adds redshift - dialect specific compilation rule for the delete statement .
11,280
def get_columns(self, connection, table_name, schema=None, **kw):
    """Return information about columns in `table_name`."""
    raw_columns = self._get_redshift_columns(connection, table_name, schema, **kw)
    if not self._domains:
        # Domain types are loaded once per dialect and cached.
        self._domains = self._load_domains(connection)
    return [
        self._get_column_info(
            name=col.name,
            format_type=col.format_type,
            default=col.default,
            notnull=col.notnull,
            domains=self._domains,
            enums=[],
            schema=col.schema,
            encode=col.encode,
        )
        for col in raw_columns
    ]
Return information about columns in table_name .
11,281
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
    """Return information about the primary key constraint on
    `table_name`."""
    all_constraints = self._get_redshift_constraints(
        connection, table_name, schema, **kw)
    pks = [c for c in all_constraints if c.contype == 'p']
    if not pks:
        # No primary key defined.
        return {'constrained_columns': [], 'name': ''}
    pk = pks[0]
    # Parse the column list out of the PRIMARY KEY (...) definition text.
    match = PRIMARY_KEY_RE.match(pk.condef)
    columns = SQL_IDENTIFIER_RE.findall(match.group('columns'))
    return {
        'constrained_columns': columns,
        'name': pk.conname,
    }
Return information about the primary key constraint on table_name .
11,282
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
    """Return information about foreign keys in `table_name`."""
    constraints = self._get_redshift_constraints(connection, table_name, schema, **kw)
    fk_constraints = [c for c in constraints if c.contype == 'f']
    # Group constraint rows by name; each entry holds its key columns
    # and the full textual constraint definition.
    uniques = defaultdict(lambda: defaultdict(dict))
    for con in fk_constraints:
        uniques[con.conname]["key"] = con.conkey
        uniques[con.conname]["condef"] = con.condef
    fkeys = []
    for conname, attrs in uniques.items():
        # Parse FOREIGN KEY (...) REFERENCES schema.table (...) text.
        m = FOREIGN_KEY_RE.match(attrs['condef'])
        colstring = m.group('referred_columns')
        referred_columns = SQL_IDENTIFIER_RE.findall(colstring)
        referred_table = m.group('referred_table')
        referred_schema = m.group('referred_schema')
        colstring = m.group('columns')
        constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
        fkey_d = {
            'name': conname,
            'constrained_columns': constrained_columns,
            'referred_schema': referred_schema,
            'referred_table': referred_table,
            'referred_columns': referred_columns,
        }
        fkeys.append(fkey_d)
    return fkeys
Return information about foreign keys in table_name .
11,283
def get_table_names(self, connection, schema=None, **kw):
    """Return a list of table names for `schema`."""
    # 'r' = ordinary relation (table), in pg_class.relkind terms.
    return self._get_table_or_view_names('r', connection, schema, **kw)
Return a list of table names for schema .
11,284
def get_view_names(self, connection, schema=None, **kw):
    """Return a list of view names for `schema`."""
    # 'v' = view, in pg_class.relkind terms.
    return self._get_table_or_view_names('v', connection, schema, **kw)
Return a list of view names for schema .
11,285
def get_unique_constraints(self, connection, table_name, schema=None, **kw):
    """Return information about unique constraints in `table_name`."""
    all_constraints = self._get_redshift_constraints(
        connection, table_name, schema, **kw)
    grouped = defaultdict(lambda: defaultdict(dict))
    # One row arrives per (constraint, column); fold them by name.
    for constraint in all_constraints:
        if constraint.contype != 'u':
            continue
        grouped[constraint.conname]["key"] = constraint.conkey
        grouped[constraint.conname]["cols"][constraint.attnum] = constraint.attname
    return [
        {'name': None,
         'column_names': [info["cols"][i] for i in info["key"]]}
        for info in grouped.values()
    ]
Return information about unique constraints in table_name .
11,286
def get_table_options(self, connection, table_name, schema, **kw):
    """Return a dictionary of options specified when `table_name` was
    created (diststyle, distkey, sortkey, interleaved sortkey)."""
    table = self._get_redshift_relation(connection, table_name, schema, **kw)
    columns = self._get_redshift_columns(connection, table_name, schema, **kw)
    # Sort-key columns carry their 1-based position; order by magnitude.
    sortkey_cols = sorted(
        (col for col in columns if col.sortkey),
        key=lambda col: abs(int(col.sortkey)),
    )
    sortkey = [col.name for col in sortkey_cols]
    interleaved_sortkey = None
    # Negative sortkey positions mark an interleaved sort key.
    if any(int(col.sortkey) < 0 for col in sortkey_cols):
        interleaved_sortkey, sortkey = sortkey, None
    distkeys = [col.name for col in columns if col.distkey]
    return {
        'redshift_diststyle': table.diststyle,
        'redshift_distkey': distkeys[0] if distkeys else None,
        'redshift_sortkey': sortkey,
        'redshift_interleaved_sortkey': interleaved_sortkey,
    }
Return a dictionary of options specified when the table of the given name was created .
11,287
def create_connect_args(self, *args, **kwargs):
    """Build DB-API compatible connection arguments.

    Adds Redshift-appropriate SSL defaults (full certificate
    verification against the bundled Redshift CA certificate) unless
    the caller overrides them.
    """
    default_args = {
        'sslmode': 'verify-full',
        'sslrootcert': pkg_resources.resource_filename(
            __name__,
            'redshift-ca-bundle.crt',
        ),
    }
    cargs, cparams = super(RedshiftDialect, self).create_connect_args(
        *args, **kwargs)
    # Caller-supplied connection parameters win over the SSL defaults.
    default_args.update(cparams)
    return cargs, default_args
Build DB - API compatible connection arguments .
11,288
def visit_unload_from_select(element, compiler, **kw):
    """Returns the actual sql query for the UnloadFromSelect class.

    NOTE(review): the UNLOAD statement template string literal was lost
    from this copy of the file (``template =`` had no value); it is
    restored here from the upstream sqlalchemy-redshift implementation --
    confirm against the project's history.
    """
    template = """
       UNLOAD (:select) TO :unload_location
       CREDENTIALS :credentials
       {manifest}
       {header}
       {delimiter}
       {encrypted}
       {fixed_width}
       {gzip}
       {add_quotes}
       {escape}
       {null}
       {allow_overwrite}
       {parallel}
       {region}
       {max_file_size}
    """
    el = element
    qs = template.format(
        manifest='MANIFEST' if el.manifest else '',
        header='HEADER' if el.header else '',
        delimiter=(
            'DELIMITER AS :delimiter' if el.delimiter is not None else ''
        ),
        encrypted='ENCRYPTED' if el.encrypted else '',
        fixed_width='FIXEDWIDTH AS :fixed_width' if el.fixed_width else '',
        gzip='GZIP' if el.gzip else '',
        add_quotes='ADDQUOTES' if el.add_quotes else '',
        escape='ESCAPE' if el.escape else '',
        null='NULL AS :null_as' if el.null is not None else '',
        allow_overwrite='ALLOWOVERWRITE' if el.allow_overwrite else '',
        parallel='PARALLEL OFF' if not el.parallel else '',
        region='REGION :region' if el.region is not None else '',
        max_file_size=(
            'MAXFILESIZE :max_file_size MB'
            if el.max_file_size is not None else ''
        ),
    )
    query = sa.text(qs)
    if el.delimiter is not None:
        query = query.bindparams(sa.bindparam(
            'delimiter', value=element.delimiter, type_=sa.String,
        ))
    if el.fixed_width:
        query = query.bindparams(sa.bindparam(
            'fixed_width',
            value=_process_fixed_width(el.fixed_width),
            type_=sa.String,
        ))
    if el.null is not None:
        query = query.bindparams(sa.bindparam(
            'null_as', value=el.null, type_=sa.String,
        ))
    if el.region is not None:
        query = query.bindparams(sa.bindparam(
            'region', value=el.region, type_=sa.String,
        ))
    if el.max_file_size is not None:
        # MAXFILESIZE takes mebibytes; the attribute is stored in bytes.
        max_file_size_mib = float(el.max_file_size) / 1024 / 1024
        query = query.bindparams(sa.bindparam(
            'max_file_size', value=max_file_size_mib, type_=sa.Float,
        ))
    return compiler.process(
        query.bindparams(
            sa.bindparam(
                'credentials', value=el.credentials, type_=sa.String,
            ),
            sa.bindparam(
                'unload_location', value=el.unload_location, type_=sa.String,
            ),
            sa.bindparam(
                'select',
                # Inline the SELECT's own bind values so it can be
                # embedded as a string parameter of UNLOAD.
                value=compiler.process(el.select, literal_binds=True),
                type_=sa.String,
            ),
        ),
        **kw
    )
Returns the actual sql query for the UnloadFromSelect class .
11,289
def visit_create_library_command(element, compiler, **kw):
    """Returns the actual sql query for the CreateLibraryCommand class.

    NOTE(review): the CREATE LIBRARY statement template string literal
    was lost from this copy of the file (``query =`` had no value); it
    is restored here from the upstream sqlalchemy-redshift
    implementation -- confirm against the project's history.
    """
    query = """
        CREATE {or_replace} LIBRARY {name}
        LANGUAGE plpythonu
        FROM :location
        WITH CREDENTIALS AS :credentials
        {region}
    """
    bindparams = [
        sa.bindparam(
            'location', value=element.location, type_=sa.String,
        ),
        sa.bindparam(
            'credentials', value=element.credentials, type_=sa.String,
        ),
    ]
    if element.region is not None:
        bindparams.append(sa.bindparam(
            'region', value=element.region, type_=sa.String,
        ))
    # Quote the library name per the dialect's identifier rules.
    quoted_lib_name = compiler.preparer.quote_identifier(element.library_name)
    query = query.format(
        name=quoted_lib_name,
        or_replace='OR REPLACE' if element.replace else '',
        region='REGION :region' if element.region else '',
    )
    return compiler.process(sa.text(query).bindparams(*bindparams), **kw)
Returns the actual sql query for the CreateLibraryCommand class .
11,290
def find_proxy(url, host=None):
    """Find the proxy string for the given url and host.

    If ``host`` is None it is extracted from ``url``.  Raises
    ``URLError`` when the url cannot be parsed.
    """
    if host is None:
        m = _URL_REGEX.match(url)
        if not m:
            raise URLError(url)
        # BUGFIX: compare with == rather than ``is`` -- identity
        # comparison of ints relies on CPython small-int caching and is
        # a SyntaxWarning on Python 3.8+.
        if len(m.groups()) == 1:
            host = m.group(1)
        else:
            raise URLError(url)
    return _pacparser.find_proxy(url, host)
Finds the proxy string for the given url and host. If host is not defined, it is extracted from the url.
11,291
def just_find_proxy(pacfile, url, host=None):
    """Wrapper around init, parse_pac, find_proxy and cleanup.

    This is the function to call if you want to find the proxy for just
    one url.  Raises IOError when the PAC file does not exist.
    """
    if not os.path.isfile(pacfile):
        raise IOError('Pac file does not exist: {}'.format(pacfile))
    init()
    try:
        parse_pac(pacfile)
        return find_proxy(url, host)
    finally:
        # BUGFIX: always release the parser context, even when parsing
        # or the proxy lookup raises (the original leaked it on error).
        cleanup()
This function is a wrapper around init parse_pac find_proxy and cleanup . This is the function to call if you want to find proxy just for one url .
11,292
def split_data(iterable, pred):
    """Partition `iterable` into two lists using `pred`.

    Items for which ``pred(item)`` is truthy go into the first list,
    all others into the second; returns ``(matching, rest)``.
    """
    matching, rest = [], []
    for item in iterable:
        bucket = matching if pred(item) else rest
        bucket.append(item)
    return matching, rest
Split data from iterable into two lists . Each element is passed to function pred ; elements for which pred returns True are put into yes list other elements are put into no list .
11,293
def match_url(self, url, options=None):
    """Return whether this rule matches the URL under the given options.

    Raises ValueError when the rule requires an option that was not
    supplied by the caller.
    """
    options = options or {}
    for name, expected in self.options.items():
        if name == 'match-case':
            # Case sensitivity is baked into the compiled regex.
            continue
        if name not in options:
            raise ValueError("Rule requires option %s" % name)
        if name == 'domain':
            if not self._domain_matches(options['domain']):
                return False
        elif options[name] != expected:
            return False
    return self._url_matches(url)
Return if this rule matches the URL .
11,294
def matching_supported(self, options=None):
    """Return whether this rule can produce a meaningful match result
    given the options dict.

    Comment and HTML rules are never matchable; otherwise every option
    the rule depends on must be present in ``options``.
    """
    if self.is_comment or self.is_html_rule:
        return False
    provided = set((options or {}).keys())
    return provided.issuperset(self._options_keys)
Return whether this rule can return meaningful result given the options dict . If some options are missing then rule shouldn t be matched against and this function returns False .
11,295
def rule_to_regex(cls, rule):
    """Convert an AdBlock filter rule to a regular expression string."""
    if not rule:
        return rule
    # /regex/ rules are already regular expressions: strip the slashes.
    if rule.startswith('/') and rule.endswith('/'):
        if len(rule) > 1:
            rule = rule[1:-1]
        else:
            raise AdblockParsingError('Invalid rule')
        return rule
    # Escape regex metacharacters that are literal in AdBlock syntax.
    rule = re.sub(r"([.$+?{}()\[\]\\])", r"\\\1", rule)
    # ^ is AdBlock's "separator" placeholder; * is a wildcard.
    rule = rule.replace("^", "(?:[^\w\d_\-.%]|$)")
    rule = rule.replace("*", ".*")
    # A trailing | anchors the end of the address.
    if rule[-1] == '|':
        rule = rule[:-1] + '$'
    # || anchors the start of a (sub)domain; a single | anchors the
    # start of the address.
    if rule[:2] == '||':
        if len(rule) > 2:
            rule = r"^(?:[^:/?#]+:)?(?://(?:[^/?#]*\.)?)?" + rule[2:]
    elif rule[0] == '|':
        rule = '^' + rule[1:]
    # Escape remaining | characters, leaving the |$ end-anchor alone.
    # NOTE(review): the replacement r"\|" also consumes the character
    # matched by [^$]; upstream adblockparser uses r"\\|" here -- this
    # literal may have been mangled in extraction, confirm upstream.
    rule = re.sub("(\|)[^$]", r"\|", rule)
    return rule
Convert AdBlock rule to a regular expression .
11,296
def tokenize(self, data_source, callback=None):
    """Read frames from `data_source` and detect valid token sequences.

    Frames are read one at a time until the source returns None.  If
    `callback` is supplied it becomes the delivery sink and nothing is
    returned; otherwise the accumulated tokens are returned and the
    internal token store is reset.
    """
    self._reinitialize()
    if callback is not None:
        self._deliver = callback
    while True:
        frame = data_source.read()
        if frame is None:
            # Source exhausted.
            break
        self._current_frame += 1
        self._process(frame)
    # Flush any token still being accumulated at end of stream.
    self._post_process()
    if callback is None:
        _ret = self._tokens
        self._tokens = None
        return _ret
Read data from data_source one frame a time and process the read frames in order to detect sequences of frames that make up valid tokens .
11,297
def read(self):
    """Return the next character from the buffer, or None when the
    buffer is exhausted."""
    if self._current >= len(self._data):
        return None
    char = self._data[self._current]
    self._current += 1
    return char
Read one character from buffer .
11,298
def set_data(self, data):
    """Set a new data buffer and reset the read position.

    Raises ValueError when `data` is not a string.
    """
    # NOTE(review): ``basestring`` exists only on Python 2 -- confirm
    # this module targets Python 2 or defines a compat alias.
    if not isinstance(data, basestring):
        raise ValueError("data must an instance of basestring")
    self._data = data
    self._current = 0
Set a new data buffer .
11,299
def set_data(self, data_buffer):
    """Set new data for this audio stream and reset the read position.

    Raises ValueError when the buffer length is not a whole number of
    frames (sample_width * channels bytes each).
    """
    frame_size = self.sample_width * self.channels
    if len(data_buffer) % frame_size != 0:
        raise ValueError("length of data_buffer must be a multiple of (sample_width * channels)")
    self._buffer = data_buffer
    self._index = 0
    # Defensive: treat a None buffer as empty.
    self._left = 0 if self._buffer is None else len(self._buffer)
Set new data for this audio stream .