| column | type | length / range |
|---|---|---|
| idx | int64 | 0 – 63k |
| question | string | 61 – 4.03k characters |
| target | string | 6 – 1.23k characters |
3,500

```python
def check_url(url):
    result = {"url": url}
    try:
        response = requests.get(url)
        result["status"] = response.status_code
        result["reason"] = response.reason
        response.raise_for_status()
        result["alive"] = True
    except AttributeError as err:
        if err.message == "'NoneType' object has no attribute 'encode'":
            result["alive"] = False
            result["reason"] = "Invalid URL"
            result["status"] = None
        else:
            raise
    except requests.exceptions.RequestException as err:
        result["alive"] = False
        if "reason" not in result:
            result["reason"] = str(err)
        if "status" not in result:
            result["status"] = None
    assert "url" in result
    assert result.get("alive") in (True, False)
    assert "status" in result
    assert "reason" in result
    return result
```

Check whether the given URL is dead or alive.
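For context, a minimal standalone sketch of the same dead-or-alive check, simplified from the row above (`is_alive` is a hypothetical name, not part of the dataset):

```python
import requests

def is_alive(url, timeout=5):
    # Simplified version of check_url above: any successful (2xx) response
    # counts as alive; any request error counts as dead.
    try:
        requests.get(url, timeout=timeout).raise_for_status()
        return True
    except requests.exceptions.RequestException:
        return False

print(is_alive("https://example.com"))
```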
3,501

```python
def upsert_result(client_site_url, apikey, resource_id, result):
    url = client_site_url + u"deadoralive/upsert"
    params = result.copy()
    params["resource_id"] = resource_id
    requests.post(url, headers=dict(Authorization=apikey), params=params)
```

Post the given link check result to the client site.
3,502

```python
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check,
                         get_url_for_id, check_url, upsert_result):
    logger = _get_logger()
    resource_ids = get_resource_ids_to_check(client_site_url, apikey)
    for resource_id in resource_ids:
        try:
            url = get_url_for_id(client_site_url, apikey, resource_id)
        except CouldNotGetURLError:
            logger.info(u"This link checker was not authorized to access "
                        "resource {0}, skipping.".format(resource_id))
            continue
        result = check_url(url)
        status = result["status"]
        reason = result["reason"]
        if result["alive"]:
            logger.info(u"Checking URL {0} of resource {1} succeeded with "
                        "status {2}:".format(url, resource_id, status))
        else:
            logger.info(u"Checking URL {0} of resource {1} failed with error "
                        "{2}:".format(url, resource_id, reason))
        upsert_result(client_site_url, apikey, resource_id=resource_id,
                      result=result)
```

Get links from the client site, check them, and post the results back.
3,503

```python
def remove(self, member):
    if isinstance(member, ZipInfo):
        zinfo = member
    else:
        zinfo = self.getinfo(member)
    zlen = len(zinfo.FileHeader()) + zinfo.compress_size
    fileidx = self.filelist.index(zinfo)
    fileofs = sum([len(self.filelist[f].FileHeader()) +
                   self.filelist[f].compress_size
                   for f in xrange(0, fileidx)])
    self.fp.seek(fileofs + zlen)
    after = self.fp.read()
    self.fp.seek(fileofs)
    self.fp.write(after)
    self.fp.seek(-zlen, 2)
    self.fp.truncate()
    self._didModify = True
    self.filelist.remove(zinfo)
    del self.NameToInfo[member]
```

Remove a member from the archive.
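The core trick in `remove` above is a byte shuffle: read everything after the member, write it back over the member's bytes, then truncate. A minimal sketch of that operation on an in-memory file:

```python
import io

# Drop zlen bytes starting at fileofs by copying the tail back and
# truncating, mirroring the seek/read/write/truncate sequence above.
buf = io.BytesIO(b"AAABBBCCC")
fileofs, zlen = 3, 3          # pretend "BBB" is the member being removed
buf.seek(fileofs + zlen)
after = buf.read()
buf.seek(fileofs)
buf.write(after)
buf.truncate()
print(buf.getvalue())  # b'AAACCC'
```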
3,504

```python
def import_class(class_path):
    module_name, class_name = class_path.rsplit(".", 1)
    module = import_module(module_name)
    claz = getattr(module, class_name)
    return claz
```

Imports the class for the given class name.
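A self-contained usage sketch of the same dotted-path lookup; resolving a standard-library class here is just an illustration:

```python
from importlib import import_module

def import_class(class_path):
    # Same logic as the row above: split "pkg.module.Class" and resolve it.
    module_name, class_name = class_path.rsplit(".", 1)
    return getattr(import_module(module_name), class_name)

OrderedDict = import_class("collections.OrderedDict")
print(OrderedDict)  # <class 'collections.OrderedDict'>
```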
3,505

```python
def _executor(self):
    if self.EXECUTE_PARALLEL is False:
        executor_path = "batch_requests.concurrent.executor.SequentialExecutor"
        executor_class = import_class(executor_path)
        return executor_class()
    else:
        executor_path = self.CONCURRENT_EXECUTOR
        executor_class = import_class(executor_path)
        return executor_class(self.NUM_WORKERS)
```

Creating an ExecutorPool is a costly operation. The executor needs to be instantiated only once.
3,506

```python
def make_label(self, path):
    from datetime import datetime
    from StringIO import StringIO
    path = path.lstrip("/")
    bucket, label = path.split("/", 1)
    bucket = self.ofs._require_bucket(bucket)
    key = self.ofs._get_key(bucket, label)
    if key is None:
        key = bucket.new_key(label)
        self.ofs._update_key_metadata(key, {'_creation_time': str(datetime.utcnow())})
        key.set_contents_from_file(StringIO(''))
        key.close()
```

This borrows too much from the internals of ofs; maybe expose different parts of the API?
3,507

```python
def get_proxy_config(self, headers, path):
    self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
    from pprint import pprint
    pprint(headers)
    host = self.ofs.conn.server_name()
    return host, headers
```

Stub: this really needs to be a call to the remote RESTful interface to get the appropriate host and headers to use for this upload.
3,508

```python
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
    url = 'https://api.kkbox.com/v1.1/mood-stations'
    url += '?' + url_parse.urlencode({'territory': terr})
    return self.http._post_data(url, None, self.http._headers_with_access_token())
```

Fetches all mood stations.
3,509

```python
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
    url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id
    url += '?' + url_parse.urlencode({'territory': terr})
    return self.http._post_data(url, None, self.http._headers_with_access_token())
```

Fetches a mood station by given ID.
3,510

```python
def fetch_data(self, url):
    return self.http._post_data(url, None, self.http._headers_with_access_token())
```

Fetches data from a specific URL.
3,511

```python
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
    url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
    url += '?' + url_parse.urlencode({'territory': terr})
    return self.http._post_data(url, None, self.http._headers_with_access_token())
```

Fetches a shared playlist by given ID.
3,512

```python
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
    url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
    res = self.get_request(url)
    return FirewallRule(**res['firewall_rule'])
```

Return a FirewallRule object based on server uuid and rule position.
3,513

```python
def get_firewall_rules(self, server):
    server_uuid, server_instance = uuid_and_instance(server)
    url = '/server/{0}/firewall_rule'.format(server_uuid)
    res = self.get_request(url)
    return [
        FirewallRule(server=server_instance, **firewall_rule)
        for firewall_rule in res['firewall_rules']['firewall_rule']
    ]
```

Return all FirewallRule objects based on a server instance or uuid.
3,514

```python
def create_firewall_rule(self, server, firewall_rule_body):
    server_uuid, server_instance = uuid_and_instance(server)
    url = '/server/{0}/firewall_rule'.format(server_uuid)
    body = {'firewall_rule': firewall_rule_body}
    res = self.post_request(url, body)
    return FirewallRule(server=server_instance, **res['firewall_rule'])
```

Create a new firewall rule for a given server uuid.
3,515

```python
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
    url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
    return self.request('DELETE', url)
```

Delete a firewall rule based on a server uuid and rule position.
3,516

```python
def configure_firewall(self, server, firewall_rule_bodies):
    server_uuid, server_instance = uuid_and_instance(server)
    return [
        self.create_firewall_rule(server_uuid, rule)
        for rule in firewall_rule_bodies
    ]
```

Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies.
3,517

```python
def post(self, data):
    uri = '{}/sinkhole'.format(self.client.remote)
    self.logger.debug(uri)
    if PYVERSION == 2:
        try:
            data = data.decode('utf-8')
        except Exception:
            data = data.decode('latin-1')
    data = {'message': data}
    body = self.client.post(uri, data)
    return body
```

POSTs a raw SMTP message to the Sinkhole API.
3,518

```python
def pre_process_method_headers(method, headers):
    method = method.lower()
    _wsgi_headers = [
        "content_length", "content_type", "query_string", "remote_addr",
        "remote_host", "remote_user", "request_method", "server_name",
        "server_port",
    ]
    _transformed_headers = {}
    for header, value in headers.items():
        header = header.replace("-", "_")
        header = ("http_{header}".format(header=header)
                  if header.lower() not in _wsgi_headers else header)
        _transformed_headers.update({header.upper(): value})
    return method, _transformed_headers
```

Returns the lowercased method. Capitalizes headers, prepends HTTP_, and changes - to _.
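To illustrate the transformation above, here is the rename applied to one non-WSGI header (a hypothetical `X-Custom-Token`):

```python
# A header not in the reserved WSGI list gets "-" -> "_", an "http_"
# prefix, and uppercasing, as pre_process_method_headers does above.
header = "X-Custom-Token"
wsgi_key = ("http_" + header.replace("-", "_")).upper()
print(wsgi_key)  # HTTP_X_CUSTOM_TOKEN
```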
3,519

```python
def headers_to_include_from_request(curr_request):
    return {
        h: v for h, v in curr_request.META.items()
        if h in _settings.HEADERS_TO_INCLUDE
    }
```

Define headers that need to be included from the current request.
3,520

```python
def get_wsgi_request_object(curr_request, method, url, headers, body):
    x_headers = headers_to_include_from_request(curr_request)
    method, t_headers = pre_process_method_headers(method, headers)
    if "CONTENT_TYPE" not in t_headers:
        t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})
    x_headers.update(t_headers)
    content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)
    _request_factory = BatchRequestFactory()
    _request_provider = getattr(_request_factory, method)
    secure = _settings.USE_HTTPS
    request = _request_provider(url, data=body, secure=secure,
                                content_type=content_type, **x_headers)
    return request
```

Based on the given request parameters, constructs and returns the WSGI request object.
3,521

```python
def _base_environ(self, **request):
    environ = {
        'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
        'PATH_INFO': str('/'),
        'REMOTE_ADDR': str('127.0.0.1'),
        'REQUEST_METHOD': str('GET'),
        'SCRIPT_NAME': str(''),
        'SERVER_NAME': str('localhost'),
        'SERVER_PORT': str('8000'),
        'SERVER_PROTOCOL': str('HTTP/1.1'),
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': str('http'),
        'wsgi.input': FakePayload(b''),
        'wsgi.errors': self.errors,
        'wsgi.multiprocess': True,
        'wsgi.multithread': True,
        'wsgi.run_once': False,
    }
    environ.update(self.defaults)
    environ.update(request)
    return environ
```

Override the default values for the WSGI environment variables.
3,522

```python
def request(self, method, endpoint, body=None, timeout=-1):
    if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
        raise Exception('Invalid/Forbidden HTTP method')
    url = '/' + self.api_v + endpoint
    headers = {
        'Authorization': self.token,
        'Content-Type': 'application/json'
    }
    if body:
        json_body_or_None = json.dumps(body)
    else:
        json_body_or_None = None
    call_timeout = timeout if timeout != -1 else self.timeout
    APIcall = getattr(requests, method.lower())
    res = APIcall('https://api.upcloud.com' + url, data=json_body_or_None,
                  headers=headers, timeout=call_timeout)
    if res.text:
        res_json = res.json()
    else:
        res_json = {}
    return self.__error_middleware(res, res_json)
```

Perform a request with a given body to a given endpoint in UpCloud's API.
3,523

```python
def post_request(self, endpoint, body=None, timeout=-1):
    return self.request('POST', endpoint, body, timeout)
```

Perform a POST request to a given endpoint in UpCloud's API.
3,524

```python
def __error_middleware(self, res, res_json):
    if res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]:
        err_dict = res_json.get('error', {})
        raise UpCloudAPIError(error_code=err_dict.get('error_code'),
                              error_message=err_dict.get('error_message'))
    return res_json
```

Middleware that raises an exception when the HTTP status code is an error code.
3,525

```python
def put_stream(self, bucket, label, stream_object, params={}):
    self.claim_bucket(bucket)
    self.connection.put_object(bucket, label, stream_object,
                               headers=self._convert_to_meta(params))
```

Create a new file in Swift object storage.
3,526

```python
def get(self, q, limit=None):
    uri = '{}/predict?q={}'.format(self.client.remote, q)
    self.logger.debug(uri)
    body = self.client.get(uri)
    return body['score']
```

Performs a search against the predict endpoint.
3,527

```python
def list_labels(self, bucket):
    for name in self.z.namelist():
        container, label = self._nf(name.encode("utf-8"))
        if container == bucket and label != MD_FILE:
            yield label
```

List labels for the given bucket. Due to zipfiles' inherent arbitrary ordering, this is an expensive operation, as it walks the entire archive searching for individual buckets.
3,528

```python
def list_buckets(self):
    buckets = set()
    for name in self.z.namelist():
        bucket, _ = self._nf(name)
        if bucket not in buckets:
            buckets.add(bucket)
            yield bucket
```

List all buckets managed by this OFS instance. Like list_labels, this also walks the entire archive, yielding the bucket names. A local set is retained so that duplicates aren't returned, so this will temporarily pull the entire list into memory even though this is a generator, and will slow as more buckets are added to the set.
3,529

```python
def del_stream(self, bucket, label):
    if self.exists(bucket, label):
        name = self._zf(bucket, label)
        self._del_stream(name)
```

Delete a bitstream. This needs more testing; file deletion in a zipfile is problematic. An alternate method is to create a second zipfile without the files in question, which is not a nice method for large zip archives.
3,530

```python
def update_metadata(self, bucket, label, params):
    if self.mode != "r":
        try:
            payload = self._get_bucket_md(bucket)
        except OFSFileNotFound:
            payload = {}
            for l in self.list_labels(bucket):
                payload[l] = {}
                payload[l]['_label'] = l
            if not self.quiet:
                print("Had to create md file for %s" % bucket)
        except OFSException as e:
            raise OFSException(e)
        if label not in payload:
            payload[label] = {}
        payload[label].update(params)
        self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'),
                        params={}, replace=True, add_md=False)
        return payload[label]
    else:
        raise OFSException("Cannot update MD in archive in 'r' mode")
```

Update the metadata with the provided dictionary of params.
3,531

```python
def del_metadata_keys(self, bucket, label, keys):
    if self.mode != "r":
        try:
            payload = self._get_bucket_md(bucket)
        except OFSFileNotFound:
            raise OFSFileNotFound("Couldn't find a md file for %s bucket" % bucket)
        except OFSException as e:
            raise OFSException(e)
        if payload.has_key(label):
            for key in [x for x in keys if payload[label].has_key(x)]:
                del payload[label][key]
            self.put_stream(bucket, MD_FILE, json.dumps(payload),
                            params={}, replace=True, add_md=False)
    else:
        raise OFSException("Cannot update MD in archive in 'r' mode")
```

Delete the metadata corresponding to the specified keys.
3,532

```python
def get_response(wsgi_request):
    service_start_time = datetime.now()
    view, args, kwargs = resolve(wsgi_request.path_info)
    kwargs.update({"request": wsgi_request})
    try:
        resp = view(*args, **kwargs)
    except Exception as exc:
        resp = HttpResponseServerError(content=exc.message)
    headers = dict(resp._headers.values())
    d_resp = {"status_code": resp.status_code,
              "reason_phrase": resp.reason_phrase,
              "headers": headers}
    try:
        d_resp.update({"body": resp.content})
    except ContentNotRenderedError:
        resp.render()
        d_resp.update({"body": resp.content})
    if _settings.ADD_DURATION_HEADER:
        d_resp['headers'].update(
            {_settings.DURATION_HEADER_NAME:
             (datetime.now() - service_start_time).seconds})
    return d_resp
```

Given a WSGI request, makes a call to a corresponding view function and returns the response.
3,533

```python
def get_wsgi_requests(request):
    valid_http_methods = ["get", "post", "put", "patch", "delete",
                          "head", "options", "connect", "trace"]
    requests = json.loads(request.body)
    if type(requests) not in (list, tuple):
        raise BadBatchRequest("The body of batch request should always be list!")
    no_requests = len(requests)
    if no_requests > _settings.MAX_LIMIT:
        raise BadBatchRequest("You can batch maximum of %d requests." %
                              (_settings.MAX_LIMIT))

    def construct_wsgi_from_data(data):
        url = data.get("url", None)
        method = data.get("method", None)
        if url is None or method is None:
            raise BadBatchRequest("Request definition should have url, method defined.")
        if method.lower() not in valid_http_methods:
            raise BadBatchRequest("Invalid request method.")
        body = data.get("body", "")
        headers = data.get("headers", {})
        return get_wsgi_request_object(request, method, url, headers, body)

    return [construct_wsgi_from_data(data) for data in requests]
```

For the given batch request, extract the individual requests and create a WSGIRequest object for each.
3,534

```python
def handle_batch_requests(request, *args, **kwargs):
    batch_start_time = datetime.now()
    try:
        wsgi_requests = get_wsgi_requests(request)
    except BadBatchRequest as brx:
        return HttpResponseBadRequest(content=brx.message)
    response = execute_requests(wsgi_requests)
    resp = HttpResponse(content=json.dumps(response),
                        content_type="application/json")
    if _settings.ADD_DURATION_HEADER:
        resp.__setitem__(_settings.DURATION_HEADER_NAME,
                         str((datetime.now() - batch_start_time).seconds))
    return resp
```

A view function to handle the overall processing of batch requests.
3,535

```python
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
    url = 'https://api.kkbox.com/v1.1/search'
    url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
    if len(types) > 0:
        url += '&type=' + ','.join(types)
    return self.http._post_data(url, None, self.http._headers_with_access_token())
```

Searches within KKBOX's database.
3,536

```python
def _create_ip_address_objs(ip_addresses, cloud_manager):
    if 'ip_addresses' in ip_addresses:
        ip_addresses = ip_addresses['ip_addresses']
    if 'ip_address' in ip_addresses:
        ip_addresses = ip_addresses['ip_address']
    return [IPAddress(cloud_manager=cloud_manager, **ip_addr)
            for ip_addr in ip_addresses]
```

Create IPAddress objects from API response data. Also associates CloudManager with the objects.
3,537

```python
def _reset(self, **kwargs):
    super(Tag, self)._reset(**kwargs)
    self._api_name = self.name
    if 'server' in self.servers:
        self.servers = kwargs['servers']['server']
    if self.servers and isinstance(self.servers[0], six.string_types):
        self.servers = [Server(uuid=server, populated=False)
                        for server in self.servers]
```

Reset the object's attributes.
3,538

```python
def _get(self, uri, params={}):
    if not uri.startswith(self.remote):
        uri = '{}{}'.format(self.remote, uri)
    return self._make_request(uri, params)
```

HTTP GET function.
3,539

```python
def _post(self, uri, data):
    if not uri.startswith(self.remote):
        uri = '{}/{}'.format(self.remote, uri)
    self.logger.debug(uri)
    return self._make_request(uri, data=data)
```

HTTP POST function.
3,540

```python
def modify_server(self, UUID, **kwargs):
    body = dict()
    body['server'] = {}
    for arg in kwargs:
        if arg not in Server.updateable_fields:
            Exception('{0} is not an updateable field'.format(arg))
        body['server'][arg] = kwargs[arg]
    res = self.request('PUT', '/server/{0}'.format(UUID), body)
    server = res['server']
    IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
                                                    cloud_manager=self)
    storages = Storage._create_storage_objs(server.pop('storage_devices'),
                                            cloud_manager=self)
    return Server(server, ip_addresses=IPAddresses, storage_devices=storages,
                  populated=True, cloud_manager=self)
```

modify_server allows updating the server's updateable_fields.
3,541

```python
def _internal_convert(inp):
    try:
        mn = struct.unpack('>BBBB', inp.read(4))
    except struct.error:
        raise FormatError(struct.error)
    if mn[0] != 0 or mn[1] != 0:
        msg = ("Incorrect first two bytes of the magic number: " +
               "0x{0:02X} 0x{1:02X}".format(mn[0], mn[1]))
        raise FormatError(msg)
    dtype_code = mn[2]
    if dtype_code not in _DATA_TYPES_IDX:
        msg = "Incorrect data type code: 0x{0:02X}".format(dtype_code)
        raise FormatError(msg)
    dims = int(mn[3])
    dtype, dtype_s, el_size = _DATA_TYPES_IDX[dtype_code]
    try:
        dims_sizes = struct.unpack('>' + 'I' * dims, inp.read(4 * dims))
    except struct.error as e:
        raise FormatError('Dims sizes: {0}'.format(e))
    full_length = reduce(operator.mul, dims_sizes, 1)
    try:
        result_array = numpy.frombuffer(
            inp.read(full_length * el_size),
            dtype=numpy.dtype(dtype)).reshape(dims_sizes)
    except ValueError as e:
        raise FormatError('Error creating numpy array: {0}'.format(e))
    if len(inp.read(1)) > 0:
        raise FormatError('Superfluous data detected.')
    return result_array
```

Converts a file in IDX format provided by a file-like input into a numpy.ndarray and returns it.
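A runnable sketch of the IDX header layout the function above parses: two zero bytes, a data-type code (0x08 is unsigned byte in the IDX convention), a dimension count, then big-endian dimension sizes and the raw data. The variable names here are illustrative, not from the dataset:

```python
import struct
import numpy as np

# Hand-build a tiny IDX payload: magic number (0, 0, dtype=0x08, dims=1),
# one dimension of size 3, then three unsigned bytes.
payload = (struct.pack(">BBBB", 0, 0, 0x08, 1)
           + struct.pack(">I", 3)
           + bytes([7, 8, 9]))
magic = struct.unpack(">BBBB", payload[:4])
dims = magic[3]
sizes = struct.unpack(">" + "I" * dims, payload[4:4 + 4 * dims])
arr = np.frombuffer(payload[4 + 4 * dims:], dtype=np.uint8).reshape(sizes)
print(arr)  # [7 8 9]
```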
3,542

```python
def convert_to_string(ndarr):
    with contextlib.closing(BytesIO()) as bytesio:
        _internal_write(bytesio, ndarr)
        return bytesio.getvalue()
```

Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and returns it.
3,543

```python
def get_OS_UUID(cls, os):
    if os in cls.templates:
        return cls.templates[os]
    uuid_regexp = '^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'
    if re.search(uuid_regexp, os):
        return os
    raise Exception((
        "Invalid OS -- valid options are: 'CentOS 6.5', 'CentOS 7.0', "
        "'Debian 7.8', 'Debian 8.0', 'Ubuntu 12.04', 'Ubuntu 14.04', 'Ubuntu 16.04', "
        "'Windows 2008', 'Windows 2012'"))
```

Validate Storage OS and its UUID.
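The UUID fallback in `get_OS_UUID` is a plain regular-expression check; in isolation:

```python
import re

# Same loose lowercase-hex UUID pattern as above.
uuid_regexp = '^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'
print(bool(re.search(uuid_regexp, '01000000-0000-4000-8000-000030060200')))  # True
print(bool(re.search(uuid_regexp, 'CentOS 6.5')))                            # False
```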
3,544

```python
def execute(self, requests, resp_generator, *args, **kwargs):
    result_futures = [self.executor_pool.submit(resp_generator, req,
                                                *args, **kwargs)
                      for req in requests]
    resp = [res_future.result() for res_future in result_futures]
    return resp
```

Calls the resp_generator for all the requests in parallel in an asynchronous way.
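The submit-then-collect pattern used by this parallel executor, sketched with the standard-library `concurrent.futures` pool (the worker function is a stand-in):

```python
from concurrent.futures import ThreadPoolExecutor

def square(x):  # stand-in for resp_generator
    return x * x

with ThreadPoolExecutor(max_workers=4) as pool:
    # Submit everything first, then gather results in submission order,
    # mirroring the execute() method above.
    futures = [pool.submit(square, n) for n in range(5)]
    print([f.result() for f in futures])  # [0, 1, 4, 9, 16]
```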
3,545

```python
def execute(self, requests, resp_generator, *args, **kwargs):
    return [resp_generator(request) for request in requests]
```

Calls the resp_generator for all the requests in sequential order.
3,546

```python
def setup_logging(args):
    loglevel = logging.WARNING
    if args.verbose:
        loglevel = logging.INFO
    if args.debug:
        loglevel = logging.DEBUG
    console = logging.StreamHandler()
    logging.getLogger('').setLevel(loglevel)
    console.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger('').addHandler(console)
```

Sets up basic logging.
3,547

```python
def get_ips(self):
    res = self.get_request('/ip_address')
    IPs = IPAddress._create_ip_address_objs(res['ip_addresses'],
                                            cloud_manager=self)
    return IPs
```

Get all IPAddress objects from the API.
3,548

```python
def new(self, user, name, description=None):
    uri = self.client.remote + '/users/{0}/feeds'.format(user)
    data = {'feed': {'name': name, 'description': description}}
    resp = self.client.post(uri, data)
    return resp
```

Creates a new Feed object.
3,549

```python
def delete(self, user, name):
    uri = self.client.remote + '/users/{}/feeds/{}'.format(user, name)
    resp = self.client.session.delete(uri)
    return resp.status_code
```

Removes a feed.
3,550

```python
def index(self, user):
    uri = self.client.remote + '/users/{0}/feeds'.format(user)
    return self.client.get(uri)
```

Returns a list of Feeds from the API.
3,551

```python
def show(self, user, name, limit=None, lasttime=None):
    uri = self.client.remote + '/users/{0}/feeds/{1}'.format(user, name)
    return self.client.get(uri, params={'limit': limit, 'lasttime': lasttime})
```

Returns a specific Feed from the API.
3,552

```python
def build_object(self, obj):
    try:
        build_path = self.get_build_path(obj)
        self.request = self.create_request(build_path)
        self.request.user = AnonymousUser()
        self.set_kwargs(obj)
        self.build_file(build_path, self.get_content())
    except Http404:
        self.unbuild_object(obj)
```

Override django-bakery to skip profiles that raise 404.
3,553

```python
def make_schedule_row(schedule_day, slot, seen_items):
    row = ScheduleRow(schedule_day, slot)
    skip = {}
    expanding = {}
    all_items = list(slot.scheduleitem_set
                         .select_related('talk', 'page', 'venue').all())
    for item in all_items:
        if item in seen_items:
            seen_items[item]['rowspan'] += 1
            skip[item.venue] = seen_items[item]
            continue
        scheditem = {'item': item, 'rowspan': 1, 'colspan': 1}
        row.items[item.venue] = scheditem
        seen_items[item] = scheditem
        if item.expand:
            expanding[item.venue] = []
    empty = []
    expanding_right = None
    skipping = 0
    skip_item = None
    for venue in schedule_day.venues:
        if venue in skip:
            skipping = 1
            skip_item = skip[venue]
            continue
        if venue in expanding:
            item = row.items[venue]
            for empty_venue in empty:
                row.items.pop(empty_venue)
                item['colspan'] += 1
            empty = []
            expanding_right = item
        elif venue in row.items:
            empty = []
            expanding_right = None
        elif expanding_right:
            expanding_right['colspan'] += 1
        elif skipping > 0 and skipping < skip_item['colspan']:
            skipping += 1
        else:
            skipping = 0
            empty.append(venue)
            row.items[venue] = {'item': None, 'rowspan': 1, 'colspan': 1}
    return row
```

Create a row for the schedule table.
3,554

```python
def generate_schedule(today=None):
    schedule_days = {}
    seen_items = {}
    for slot in Slot.objects.all().order_by('end_time', 'start_time', 'day'):
        day = slot.get_day()
        if today and day != today:
            continue
        schedule_day = schedule_days.get(day)
        if schedule_day is None:
            schedule_day = schedule_days[day] = ScheduleDay(day)
        row = make_schedule_row(schedule_day, slot, seen_items)
        schedule_day.rows.append(row)
    return sorted(schedule_days.values(), key=lambda x: x.day.date)
```

Helper function which creates an ordered list of schedule days.
3,555

```python
def get_context_data(self, **kwargs):
    context = super(ScheduleXmlView, self).get_context_data(**kwargs)
    if self.request.GET.get('render_description', None) == '1':
        context['render_description'] = True
    else:
        context['render_description'] = False
    return context
```

Allow adding a render_description parameter.
3,556

```python
def get(self, request):
    calendar = Calendar()
    site = get_current_site(request)
    calendar.add('prodid', '-//%s Schedule//%s//' % (site.name, site.domain))
    calendar.add('version', '2.0')
    for item in ScheduleItem.objects.all():
        sched_event = Event()
        sched_event.add('dtstamp', item.last_updated)
        sched_event.add('summary', item.get_title())
        sched_event.add('location', item.venue.name)
        sched_event.add('dtstart', item.get_start_datetime())
        sched_event.add('duration',
                        datetime.timedelta(minutes=item.get_duration_minutes()))
        sched_event.add('class', 'PUBLIC')
        sched_event.add('uid', '%s@%s' % (item.pk, site.domain))
        calendar.add_component(sched_event)
    response = HttpResponse(calendar.to_ical(), content_type="text/calendar")
    response['Content-Disposition'] = 'attachment; filename=schedule.ics'
    return response
```

Create an iCal file from the schedule.
3,557

```python
def build_object(self, obj):
    if not obj.exclude_from_static:
        super(ShowPage, self).build_object(obj)
```

Override django-bakery to skip pages marked exclude_from_static.
3,558

```python
def build_object(self, obj):
    try:
        super(TalkView, self).build_object(obj)
    except PermissionDenied:
        self.unbuild_object(obj)
```

Override django-bakery to skip talks that raise 403.
3,559

```python
def get_object(self, *args, **kwargs):
    object_ = super(TalkView, self).get_object(*args, **kwargs)
    if not object_.can_view(self.request.user):
        raise PermissionDenied
    return object_
```

Only talk owners can see talks unless they've been accepted.
3,560

```python
def render_to_response(self, *args, **kwargs):
    if self.request.path != self.object.get_absolute_url():
        return HttpResponseRedirect(self.object.get_absolute_url())
    return super(TalkView, self).render_to_response(*args, **kwargs)
```

Canonicalize the URL if the slug changed.
3,561

```python
def delete(self, request, *args, **kwargs):
    talk = self.get_object()
    talk.status = WITHDRAWN
    talk.save()
    revisions.set_user(self.request.user)
    revisions.set_comment("Talk Withdrawn")
    return HttpResponseRedirect(self.success_url)
```

Override delete to only withdraw.
3,562

```python
def order_results_by(*fields):
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kw):
            result = f(*args, **kw)
            return result.order_by(*fields)
        return wrapper
    return decorator
```

A decorator that applies an ordering to the QuerySet returned by a function.
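A self-contained usage sketch of the decorator; `FakeQuerySet` is a stand-in for a Django QuerySet, just enough to show where `order_by` gets applied:

```python
import functools

class FakeQuerySet(list):
    def order_by(self, *fields):
        print("ordering by", fields)
        return self

def order_results_by(*fields):
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kw):
            return f(*args, **kw).order_by(*fields)
        return wrapper
    return decorator

@order_results_by("name", "-date")
def get_items():
    return FakeQuerySet([1, 2, 3])

get_items()  # prints: ordering by ('name', '-date')
```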
3,563

```python
def cache_result(cache_key, timeout):
    def decorator(f):
        cache_name = settings.WAFER_CACHE

        @functools.wraps(f)
        def wrapper(*args, **kw):
            cache = caches[cache_name]
            result = cache.get(cache_key)
            if result is None:
                result = f(*args, **kw)
                cache.set(cache_key, result, timeout)
            return result

        def invalidate():
            cache = caches[cache_name]
            cache.delete(cache_key)

        wrapper.invalidate = invalidate
        return wrapper
    return decorator
```

A decorator for caching the result of a function.
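The same cache-or-compute-with-invalidation shape, sketched without Django's cache framework (a plain dict stands in for the cache backend; names are illustrative):

```python
import functools

def cache_result(cache_key):
    cache = {}  # stand-in for Django's caches[cache_name]

    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kw):
            if cache_key not in cache:
                cache[cache_key] = f(*args, **kw)
            return cache[cache_key]

        wrapper.invalidate = lambda: cache.pop(cache_key, None)
        return wrapper
    return decorator

@cache_result("answer")
def compute():
    print("computing...")
    return 42

compute()             # prints "computing...", returns 42
compute()             # cached, no print
compute.invalidate()  # drop the cached value
compute()             # recomputes
```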
3,564

```python
def build_queryset(self):
    paths = [(os.path.join(self.build_prefix, 'index.html'), {})]
    self.request = None
    queryset = self.get_queryset()
    paginator = self.get_paginator(queryset, self.get_paginate_by(queryset))
    for page in paginator.page_range:
        paths.append((os.path.join(self.build_prefix, 'page', '%d' % page,
                                   'index.html'), {'page': page}))
    for build_path, kwargs in paths:
        self.request = self.create_request(build_path)
        self.request.user = AnonymousUser()
        self.kwargs = kwargs
        self.prep_directory(build_path)
        target_path = os.path.join(settings.BUILD_DIR, build_path)
        self.build_file(target_path, self.get_content())
```

Override django-bakery's build logic to fake pagination.
3,565

```python
def site_info(request):
    site = get_current_site(request)
    context = {
        'WAFER_CONFERENCE_NAME': site.name,
        'WAFER_CONFERENCE_DOMAIN': site.domain,
    }
    return context
```

Expose the site's info to templates.
3,566

```python
def navigation_info(request):
    if request.GET.get('wafer_hide_navigation') == "1":
        nav_class = "wafer-invisible"
    else:
        nav_class = "wafer-visible"
    context = {
        'WAFER_NAVIGATION_VISIBILITY': nav_class,
    }
    return context
```

Expose whether to display the navigation header and footer.
3,567

```python
def registration_settings(request):
    context = {}
    for setting in ('WAFER_SSO',
                    'WAFER_HIDE_LOGIN',
                    'WAFER_REGISTRATION_OPEN',
                    'WAFER_REGISTRATION_MODE',
                    'WAFER_TALKS_OPEN',
                    'WAFER_VIDEO_LICENSE'):
        context[setting] = getattr(settings, setting, None)
    return context
```

Expose selected settings to templates.
3,568

```python
def profiles(self):
    limit = []
    if self.is_admin():
        limit.append(_("Administrator"))
    limit.sort()
    return limit
```

Return the roles this person is related to.
3,569

```python
def set_logging_level(args):
    "Computes and sets the logging level from the parsed arguments."
    root_logger = logging.getLogger()
    level = logging.INFO
    logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
    if "verbose" in args and args.verbose is not None:
        logging.getLogger('requests.packages.urllib3').setLevel(0)
        if args.verbose > 1:
            level = 5
        elif args.verbose > 0:
            level = logging.DEBUG
        else:
            logging.critical("verbose is an unexpected value. (%s) exiting.",
                             args.verbose)
            sys.exit(2)
    elif "quiet" in args and args.quiet is not None:
        if args.quiet > 1:
            level = logging.ERROR
        elif args.quiet > 0:
            level = logging.WARNING
        else:
            logging.critical("quiet is an unexpected value. (%s) exiting.",
                             args.quiet)
    if level is not None:
        root_logger.setLevel(level)
    if args.silence_urllib3:
        requests.packages.urllib3.disable_warnings()
```

Computes and sets the logging level from the parsed arguments.
3,570

```python
def debug(self, msg):
    if self.__debug is not False:
        if self.__debug is None:
            debug_filename = getattr(settings, "AD_DEBUG_FILE", None)
            if debug_filename:
                self.__debug = open(settings.AD_DEBUG_FILE, 'a')
            else:
                self.__debug = False
        if self.__debug:
            self.__debug.write("{}\n".format(msg))
            self.__debug.flush()
```

Handle the debugging to a file.
3,571

```python
def authenticate(self, *args, **kwargs):
    username = kwargs.get("username", None)
    password = kwargs.get("password", None)
    authorization = self.ldap_link(username, password, mode='LOGIN')
    if authorization:
        user = self.get_or_create_user(username, password)
        if user:
            user.is_active = True
            user.save()
    else:
        user = User.objects.filter(username=username).first()
        if user and not user.is_staff:
            if authorization is False or getattr(settings, "AD_LOCK_UNAUTHORIZED", False):
                user.is_active = False
                user.save()
        user = None
    return user
```

Authenticate the user against LDAP.
3,572

```python
def get_or_create_user(self, username, password):
    info = self.get_ad_info(username, password)
    self.debug("INFO found: {}".format(info))
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        user = User(username=username)
    user.first_name = info.get('first_name', '')
    user.last_name = info.get('last_name', '')
    user.email = info.get('email', '')
    is_admin = False
    for domain in info['groups']:
        if 'Domain Admins' in info['groups'][domain]:
            is_admin = True
            break
    user.is_staff = is_admin
    user.is_superuser = is_admin
    user.set_password(password)
    user = self.validate(user, info)
    if user:
        self.debug("User got validated!")
        user.save()
        self.synchronize(user, info)
    else:
        self.debug("User didn't pass validation!")
    return user
```

Get or create the given user.
3,573

```python
def synchronize(self, user, info):
    self.debug("Synchronize!")
    user.groups.clear()
    for domain in info['groups']:
        for groupname in info['groups'][domain]:
            group = Group.objects.filter(name=groupname).first()
            if group:
                user.groups.add(group)
```

It tries to do a group synchronization if possible. This method should be redeclared by the developer.
3,574

```python
def update_schedule_items(*args, **kw):
    slot = kw.pop('instance', None)
    if not slot:
        return
    for item in slot.scheduleitem_set.all():
        item.save(update_fields=['last_updated'])
    next_slot = slot.slot_set.all()
    if next_slot.count():
        for item in next_slot[0].scheduleitem_set.all():
            item.save(update_fields=['last_updated'])
```

We save all the schedule items associated with this slot, so the last_updated time is updated to reflect any changes to the timing of the slots.
3,575

```python
def make_diff(current, revision):
    the_diff = []
    dmp = diff_match_patch()
    for field in (set(current.field_dict.keys()) |
                  set(revision.field_dict.keys())):
        if field == 'id' or field.endswith('_rendered'):
            continue
        missing_field = False
        try:
            cur_val = current.field_dict[field] or ""
        except KeyError:
            cur_val = "No such field in latest version\n"
            missing_field = True
        try:
            old_val = revision.field_dict[field] or ""
        except KeyError:
            old_val = "No such field in old version\n"
            missing_field = True
        if missing_field:
            diffs = [(dmp.DIFF_DELETE, old_val), (dmp.DIFF_INSERT, cur_val)]
            patch = dmp.diff_prettyHtml(diffs)
        elif isinstance(cur_val, Markup):
            if cur_val.raw == old_val.raw:
                continue
            diffs = dmp.diff_main(old_val.raw, cur_val.raw)
            patch = dmp.diff_prettyHtml(diffs)
        elif cur_val == old_val:
            continue
        else:
            diffs = dmp.diff_main(force_text(old_val), force_text(cur_val))
            patch = dmp.diff_prettyHtml(diffs)
        the_diff.append((field, patch))
    the_diff.sort()
    return the_diff
```

Create the difference between the current revision and a previous version.
3,576

```python
def compare_view(self, request, object_id, version_id, extra_context=None):
    opts = self.model._meta
    object_id = unquote(object_id)
    current = Version.objects.get_for_object_reference(self.model, object_id)[0]
    revision = Version.objects.get_for_object_reference(
        self.model, object_id).filter(id=version_id)[0]
    the_diff = make_diff(current, revision)
    context = {
        "title": _("Comparing current %(model)s with revision created %(date)s") % {
            'model': current,
            'date': get_date(revision),
        },
        "opts": opts,
        "compare_list_url": reverse(
            "%s:%s_%s_comparelist" % (self.admin_site.name,
                                      opts.app_label, opts.model_name),
            args=(quote(object_id),)),
        "diff_list": the_diff,
    }
    extra_context = extra_context or {}
    context.update(extra_context)
    return render(request,
                  self.compare_template or self._get_template_list("compare.html"),
                  context)
```

Actually compare two versions.
3,577

```python
def comparelist_view(self, request, object_id, extra_context=None):
    opts = self.model._meta
    object_id = unquote(object_id)
    current = get_object_or_404(self.model, pk=object_id)
    action_list = [
        {
            "revision": version.revision,
            "url": reverse(
                "%s:%s_%s_compare" % (self.admin_site.name,
                                      opts.app_label, opts.model_name),
                args=(quote(version.object_id), version.id)),
        }
        for version in self._reversion_order_version_queryset(
            Version.objects.get_for_object_reference(
                self.model, object_id).select_related("revision__user"))
    ]
    context = {
        "action_list": action_list,
        "opts": opts,
        "object_id": quote(object_id),
        "original": current,
    }
    extra_context = extra_context or {}
    context.update(extra_context)
    return render(request,
                  self.compare_list_template or
                  self._get_template_list("compare_list.html"),
                  context)
```

Allow selecting versions to compare.
3,578

```python
def grv(struct, position):
    if position == 'year':
        size = 4
    else:
        size = 2
    if struct[position][2]:
        rightnow = str(struct[position][0]).zfill(size)
    else:
        if position == 'year':
            rightnow = ' _'
        else:
            rightnow = '__'
    return rightnow
```

This function helps to convert date information for showing proper filtering.
3,579

```python
def get_template_names(self):
    template_model = getattr(
        self, 'template_model',
        "{0}/{1}_{2}".format(self._appname.lower(), self._modelname.lower(),
                             self.get_template_names_key))
    template_model_ext = getattr(self, 'template_model_ext', 'html')
    templates = get_template(template_model, self.user, self.language,
                             template_model_ext, raise_error=False)
    if type(templates) == list:
        templates.append("codenerix/{0}.html".format(self.get_template_names_key))
    return templates
```

Build the list of templates related to this user.
3,580

```python
def get_context_data(self, **kwargs):
    context = super(GenBase, self).get_context_data(**kwargs)
    if hasattr(self, 'html_head'):
        context['html_head'] = self.html_head(self.object)
    if hasattr(self, 'gentrans'):
        context['gentranslate'] = self.gentrans.copy()
        context['gentranslate'].update(self.gentranslate)
    else:
        context['gentranslate'] = self.gentranslate
    return context
```

Set a base context.
3,581

```python
def get_context_data(self, **kwargs):
    context = super(GenList, self).get_context_data(**kwargs)
    context.update(self.__context)
    context['now'] = epochdate(time.time())
    context['profile'] = self.profile
    context['vtable'] = getattr(self, 'vtable', False)
    context['export_excel'] = getattr(self, 'export_excel', True)
    context['export_name'] = getattr(self, 'export_name', 'list')
    context['ngincludes'] = getattr(self, 'ngincludes', {})
    if 'table' not in context['ngincludes'].keys():
        context['ngincludes']['table'] = \
            "{}codenerix/partials/table.html".format(settings.STATIC_URL)
    context['linkadd'] = getattr(self, 'linkadd',
                                 self.auth_permission('add') or
                                 getattr(self, 'public', False))
    context['linkedit'] = getattr(self, 'linkedit',
                                  self.auth_permission('change') or
                                  getattr(self, 'public', False))
    context['show_details'] = getattr(self, 'show_details', False)
    context['show_modal'] = getattr(self, 'show_modal', False)
    context['search_filter_button'] = getattr(self, 'search_filter_button', False)
    if not self.json_worker:
        template_base = getattr(self, 'template_base', 'base/base')
        template_base_ext = getattr(self, 'template_base_ext', 'html')
        context['template_base'] = get_template(template_base, self.user,
                                                self.language,
                                                extension=template_base_ext)
    object_id = kwargs.get('object_id', None)
    try:
        object_id = int(object_id)
    except Exception:
        pass
    try:
        unicode('codenerix')
        unicodetest = unicode
    except NameError:
        unicodetest = str
    if isinstance(object_id, str) or isinstance(object_id, unicodetest):
        context['object_name'] = object_id
        object_obj = None
    else:
        if object_id:
            obj = context['obj']
            object_obj = get_object_or_404(obj, pk=object_id)
        else:
            object_obj = None
    context['object_obj'] = object_obj
    context.update(self.extra_context)
    return context
```

Generic list view with validation included and object transferring support.
3,582

```python
def get_context_json(self, context):
    answer = {}
    answer['meta'] = self.__jcontext_metadata(context)
    answer['filter'] = self.__jcontext_filter(context)
    answer['table'] = {}
    answer['table']['head'] = self.__jcontext_tablehead(context)
    answer['table']['body'] = None
    answer['table']['header'] = None
    answer['table']['summary'] = None
    return answer
```

Return a base answer for a JSON answer.
3,583

```python
def get_form(self, form_class=None):
    formobj = super(GenModify, self).get_form(form_class)
    selfgroups = getattr(self, "form_groups", None)
    if selfgroups:
        if type(selfgroups) == list:
            formobj.__groups__ = lambda: selfgroups
        else:
            formobj.__groups__ = selfgroups
    else:
        selfgroups = getattr(self, "__groups__", None)
        if selfgroups:
            formobj.__groups__ = selfgroups
    return formobj
```

Set form groups to the groups specified in the view, if defined.
3,584

```python
def get_parent(self, directory):
    assert settings.PAGE_DIR.startswith('/')
    assert settings.PAGE_DIR.endswith('/')
    parents = directory[len(settings.PAGE_DIR):]
    page = None
    if parents:
        for slug in parents.split('/'):
            page = Page.objects.get(parent=page, slug=slug)
    return page
```

Given a directory name, return the Page representing it in the menu hierarchy.
3,585

```python
def wafer_sso_url(context, sso_method):
    request = context.request
    url = reverse(getattr(views, '%s_login' % sso_method))
    if 'next' in request.GET:
        url += '?' + urlencode({'next': request.GET['next']})
    return url
```

Return the correct URL to SSO with the given method.
3,586

```python
def authorize(args):
    oauth2_instance = oauth2.build_oauth2(args.app, args)
    oauth2_instance.build_authorizer()
    logging.info('Application "%s" authorized!', args.app)
```

Authorizes Coursera's OAuth2 client for using coursera.org API servers for a specific application.
3,587

```python
def check_auth(args):
    oauth2_instance = oauth2.build_oauth2(args.app, args)
    auth = oauth2_instance.build_authorizer()
    my_profile_url = ('https://api.coursera.org/api/externalBasicProfiles.v1?'
                      'q=me&fields=name')
    r = requests.get(my_profile_url, auth=auth)
    if r.status_code != 200:
        logging.error('Received response code %s from the basic profile API.',
                      r.status_code)
        logging.debug('Response body:\n%s', r.text)
        sys.exit(1)
    try:
        external_id = r.json()['elements'][0]['id']
    except:
        logging.error('Could not parse the external id out of the response body %s',
                      r.text)
        external_id = None
    try:
        name = r.json()['elements'][0]['name']
    except:
        logging.error('Could not parse the name out of the response body %s',
                      r.text)
        name = None
    if not args.quiet > 0:
        print 'Name: %s' % name
        print 'External ID: %s' % external_id
    if name is None or external_id is None:
        sys.exit(1)
```

Checks courseraoauth2client's connectivity to the coursera.org API servers for a specific application.
3,588

```python
def quintic_bucket_warp(x, n, l1, l2, l3, x0, w1, w2, w3):
    x1 = x0 - w2 / 2.0 - w1 / 2.0
    x2 = x0 + w2 / 2.0 + w3 / 2.0
    x_shift_1 = 2.0 * (x - x1) / w1
    x_shift_3 = 2.0 * (x - x2) / w3
    if n == 0:
        return (
            l1 * (x <= (x1 - w1 / 2.0)) +
            (0.5 * (l2 - l1) * (3.0 / 8.0 * x_shift_1 ** 5 -
                                5.0 / 4.0 * x_shift_1 ** 3 +
                                15.0 / 8.0 * x_shift_1) +
             (l1 + l2) / 2.0) * ((x > (x1 - w1 / 2.0)) & (x < (x1 + w1 / 2.0))) +
            l2 * ((x >= (x1 + w1 / 2.0)) & (x <= x2 - w3 / 2.0)) +
            (0.5 * (l3 - l2) * (3.0 / 8.0 * x_shift_3 ** 5 -
                                5.0 / 4.0 * x_shift_3 ** 3 +
                                15.0 / 8.0 * x_shift_3) +
             (l2 + l3) / 2.0) * ((x > (x2 - w3 / 2.0)) & (x < (x2 + w3 / 2.0))) +
            l3 * (x >= (x2 + w3 / 2.0))
        )
    elif n == 1:
        return (
            (0.5 * (l2 - l1) * (5.0 * 3.0 / 8.0 * x_shift_1 ** 4 -
                                3.0 * 5.0 / 4.0 * x_shift_1 ** 2 +
                                15.0 / 8.0) / w1) *
            ((x > (x1 - w1 / 2.0)) & (x < (x1 + w1 / 2.0))) +
            (0.5 * (l3 - l2) * (5.0 * 3.0 / 8.0 * x_shift_3 ** 4 -
                                3.0 * 5.0 / 4.0 * x_shift_3 ** 2 +
                                15.0 / 8.0) / w3) *
            ((x > (x2 - w3 / 2.0)) & (x < (x2 + w3 / 2.0)))
        )
    else:
        raise NotImplementedError("Only up to first derivatives are supported!")
```

Warps the length scale with a piecewise quintic bucket shape.
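The smoothing kernel inside both branches above is the quintic polynomial p(x) = (3/8)x^5 - (5/4)x^3 + (15/8)x, which maps [-1, 1] onto [-1, 1] with zero first derivative at the endpoints, so each bucket edge blends smoothly. A quick endpoint check:

```python
# Endpoint check for the quintic smoothstep used in quintic_bucket_warp.
def p(x):
    return 3.0 / 8.0 * x**5 - 5.0 / 4.0 * x**3 + 15.0 / 8.0 * x

def dp(x):  # first derivative
    return 15.0 / 8.0 * x**4 - 15.0 / 4.0 * x**2 + 15.0 / 8.0

print(p(-1.0), p(0.0), p(1.0))  # -1.0 0.0 1.0
print(dp(-1.0), dp(1.0))        # 0.0 0.0 (flat at both edges)
```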
3,589

```python
def sso(user, desired_username, name, email, profile_fields=None):
    if not user:
        if not settings.REGISTRATION_OPEN:
            raise SSOError('Account registration is closed')
        user = _create_desired_user(desired_username)
        _configure_user(user, name, email, profile_fields)
    if not user.is_active:
        raise SSOError('Account disabled')
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    return user
```

Create a user from the parameters if the provided user is None. Then log the user in and return it.
3,590

```python
def debit(self, amount, credit_account, description, debit_memo="",
          credit_memo="", datetime=None):
    assert amount >= 0
    return self.post(amount, credit_account, description,
                     self_memo=debit_memo, other_memo=credit_memo,
                     datetime=datetime)
```

Post a debit of amount and a credit of -amount against this account and credit_account, respectively.
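The double-entry invariant behind `debit`/`credit`: every posting writes `+amount` to one account and `-amount` to the other, so the whole ledger always sums to zero. A minimal sketch (names are illustrative):

```python
ledger = []

def post(self_account, other_account, amount):
    # One positive and one negative entry per transaction.
    ledger.append((self_account, amount))
    ledger.append((other_account, -amount))

post("cash", "revenue", 100)
post("expenses", "cash", 30)
assert sum(value for _, value in ledger) == 0
print(ledger)
```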
3,591

```python
def credit(self, amount, debit_account, description, debit_memo="",
           credit_memo="", datetime=None):
    assert amount >= 0
    return self.post(-amount, debit_account, description,
                     self_memo=credit_memo, other_memo=debit_memo,
                     datetime=datetime)
```

Post a credit of amount and a debit of -amount against this account and debit_account, respectively.
3,592

```python
def post(self, amount, other_account, description, self_memo="",
         other_memo="", datetime=None):
    tx = self._new_transaction()
    if datetime:
        tx.t_stamp = datetime
    tx.description = description
    tx.save()
    a1 = self._make_ae(self._DEBIT_IN_DB() * amount, self_memo, tx)
    a1.save()
    a2 = other_account._make_ae(-self._DEBIT_IN_DB() * amount, other_memo, tx)
    a2.save()
    return (a1, a2)
```

Post a transaction of amount against this account and the negative amount against other_account.
3,593

```python
def totals(self, start=None, end=None):
    qs = self._entries_range(start=start, end=end)
    qs_positive = qs.filter(amount__gt=Decimal("0.00")).all().aggregate(Sum('amount'))
    qs_negative = qs.filter(amount__lt=Decimal("0.00")).all().aggregate(Sum('amount'))
    positives = (qs_positive['amount__sum']
                 if qs_positive['amount__sum'] is not None else 0)
    negatives = (-qs_negative['amount__sum']
                 if qs_negative['amount__sum'] is not None else 0)
    if self._DEBIT_IN_DB() > 0:
        debits = positives
        credits = negatives
    else:
        debits = negatives
        credits = positives
    net = debits - credits
    if self._positive_credit():
        net = -net
    return self.Totals(credits, debits, net)
```

Returns a Totals object containing the sum of all debits, credits, and net change over the period of time from start to end.
3,594

```python
def ledger(self, start=None, end=None):
    DEBIT_IN_DB = self._DEBIT_IN_DB()
    flip = 1
    if self._positive_credit():
        flip *= -1
    qs = self._entries_range(start=start, end=end)
    qs = qs.order_by("transaction__t_stamp", "transaction__tid")
    balance = Decimal("0.00")
    if start:
        balance = self.balance(start)
    if not qs:
        return []

    def helper(balance_in):
        balance = balance_in
        for e in qs.all():
            amount = e.amount * DEBIT_IN_DB
            o_balance = balance
            balance += flip * amount
            yield LedgerEntry(amount, e, o_balance, balance)

    return helper(balance)
```

Returns a list of entries for this account.
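The generator in `ledger` keeps a running balance, yielding each entry together with its before and after balances. The same shape in isolation (names are illustrative):

```python
def running_balance(amounts, opening=0):
    # Mirrors the helper() generator above: yield (amount, before, after).
    balance = opening
    for amount in amounts:
        before, balance = balance, balance + amount
        yield amount, before, balance

print(list(running_balance([100, -30, 5])))
# [(100, 0, 100), (-30, 100, 70), (5, 70, 75)]
```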
3,595

```python
def find_overlapping_slots(all_slots):
    overlaps = set([])
    for slot in all_slots:
        start = slot.get_start_time()
        end = slot.end_time
        for other_slot in all_slots:
            if other_slot.pk == slot.pk:
                continue
            if other_slot.get_day() != slot.get_day():
                continue
            other_start = other_slot.get_start_time()
            other_end = other_slot.end_time
            if start <= other_start and other_start < end:
                overlaps.add(slot)
                overlaps.add(other_slot)
            elif start < other_end and other_end <= end:
                overlaps.add(slot)
                overlaps.add(other_slot)
    return overlaps
```

Find any slots that overlap.
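At its core, the pairwise test above is the standard interval-overlap predicate: the two branches together amount to checking that one slot starts before the other ends on the same day. A standalone sketch of that predicate:

```python
def overlaps(start_a, end_a, start_b, end_b):
    # Half-open intervals: back-to-back slots do not overlap.
    return start_a < end_b and start_b < end_a

print(overlaps(9.0, 10.0, 9.5, 11.0))   # True  (partial overlap)
print(overlaps(9.0, 10.0, 10.0, 11.0))  # False (adjacent slots)
```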
3,596

```python
def find_non_contiguous(all_items):
    non_contiguous = []
    for item in all_items:
        if item.slots.count() < 2:
            continue
        last_slot = None
        for slot in item.slots.all().order_by('end_time'):
            if last_slot:
                if last_slot.end_time != slot.get_start_time():
                    non_contiguous.append(item)
                    break
            last_slot = slot
    return non_contiguous
```

Find any items that have slots that aren't contiguous.
3,597

```python
def find_invalid_venues(all_items):
    venues = {}
    for item in all_items:
        valid = False
        item_days = list(item.venue.days.all())
        for slot in item.slots.all():
            for day in item_days:
                if day == slot.get_day():
                    valid = True
                    break
        if not valid:
            venues.setdefault(item.venue, [])
            venues[item.venue].append(item)
    return venues.items()
```

Find venues assigned slots that aren't on the allowed list of days.
3,598

```python
def check_schedule():
    all_items = prefetch_schedule_items()
    for validator, _type, _msg in SCHEDULE_ITEM_VALIDATORS:
        if validator(all_items):
            return False
    all_slots = prefetch_slots()
    for validator, _type, _msg in SLOT_VALIDATORS:
        if validator(all_slots):
            return False
    return True
```

Helper routine to easily test if the schedule is valid.
3,599

```python
def validate_schedule():
    all_items = prefetch_schedule_items()
    errors = []
    for validator, _type, msg in SCHEDULE_ITEM_VALIDATORS:
        if validator(all_items):
            errors.append(msg)
    all_slots = prefetch_slots()
    for validator, _type, msg in SLOT_VALIDATORS:
        if validator(all_slots):
            errors.append(msg)
    return errors
```

Helper routine to report issues with the schedule.