idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
57,400
def sg_arg_def(**kwargs):
    r"""Defines command line options as TensorFlow flags.

    Each keyword maps an option name to either a default value or a
    ``(default, help_text)`` tuple/list; the flag type is inferred from
    the exact Python type of the default value.
    """
    for opt_name, spec in kwargs.items():
        # Unpack optional (default, comment) pairs; otherwise the help
        # text falls back to the option name itself.
        if type(spec) is tuple or type(spec) is list:
            default, comment = spec[0], spec[1]
        else:
            default, comment = spec, opt_name
        # Exact type checks (not isinstance) so bool is not mistaken for int.
        if type(default) is str:
            tf.app.flags.DEFINE_string(opt_name, default, comment)
        elif type(default) is int:
            tf.app.flags.DEFINE_integer(opt_name, default, comment)
        elif type(default) is float:
            tf.app.flags.DEFINE_float(opt_name, default, comment)
        elif type(default) is bool:
            tf.app.flags.DEFINE_bool(opt_name, default, comment)
57,401
def sg_summary_loss(tensor, prefix='losses', name=None):
    r"""Registers `tensor` in the summary report as a loss.

    A scalar summary of the mean and a histogram of the raw values are
    written under ``<prefix>/<name>``.
    """
    head = '' if prefix is None else prefix + '/'
    if name is None:
        full_name = head + _pretty_name(tensor)
    else:
        full_name = head + name
    _scalar(full_name, tf.reduce_mean(tensor))
    _histogram(full_name + '-h', tensor)
57,402
def sg_summary_gradient(tensor, gradient, prefix=None, name=None):
    r"""Registers `tensor`'s gradient in the summary report.

    Writes the mean absolute gradient as a scalar and the absolute
    gradient values as a histogram under ``<name>/grad``.
    """
    head = '' if prefix is None else prefix + '/'
    if name is None:
        full_name = head + _pretty_name(tensor)
    else:
        full_name = head + name
    abs_grad = tf.abs(gradient)
    _scalar(full_name + '/grad', tf.reduce_mean(abs_grad))
    _histogram(full_name + '/grad-h', abs_grad)
57,403
def sg_summary_activation(tensor, prefix=None, name=None):
    r"""Registers `tensor` in the summary report as an activation.

    The scalar is the fraction of strictly-positive entries (activation
    ratio); the histogram shows the raw activation values.
    """
    head = '' if prefix is None else prefix + '/'
    if name is None:
        full_name = head + _pretty_name(tensor)
    else:
        full_name = head + name
    positive_ratio = tf.reduce_mean(
        tf.cast(tf.greater(tensor, 0), tf.sg_floatx))
    _scalar(full_name + '/ratio', positive_ratio)
    _histogram(full_name + '/ratio-h', tensor)
57,404
def sg_summary_param(tensor, prefix=None, name=None):
    r"""Registers `tensor` in the summary report as a parameter.

    Writes the mean absolute value as a scalar and the absolute values
    as a histogram under ``<name>/abs``.
    """
    head = '' if prefix is None else prefix + '/'
    if name is None:
        full_name = head + _pretty_name(tensor)
    else:
        full_name = head + name
    abs_vals = tf.abs(tensor)
    _scalar(full_name + '/abs', tf.reduce_mean(abs_vals))
    _histogram(full_name + '/abs-h', abs_vals)
57,405
def sg_summary_image(tensor, prefix=None, name=None):
    r"""Registers `tensor` in the summary report as an image."""
    head = '' if prefix is None else prefix + '/'
    full_name = (head + _pretty_name(tensor)) if name is None else (head + name)
    # Only write the summary from the first (non-reused) variable scope.
    if not tf.get_variable_scope().reuse:
        tf.summary.image(full_name + '-im', tensor)
57,406
def sg_summary_audio(tensor, sample_rate=16000, prefix=None, name=None):
    r"""Registers `tensor` in the summary report as audio at the given
    sample rate."""
    head = '' if prefix is None else prefix + '/'
    full_name = (head + _pretty_name(tensor)) if name is None else (head + name)
    # Only write the summary from the first (non-reused) variable scope.
    if not tf.get_variable_scope().reuse:
        tf.summary.audio(full_name + '-au', tensor, sample_rate)
57,407
def sg_train(**kwargs):
    r"""Trains the model.

    Accepts training options as keyword arguments; ``loss`` is
    mandatory. Optimizer name, learning rate, betas, variable category
    and epoch size default via ``tf.sg_opt``.
    """
    opt = tf.sg_opt(kwargs)
    assert opt.loss is not None, 'loss is mandatory.'
    # default training options
    opt += tf.sg_opt(optim='MaxProp', lr=0.001, beta1=0.9, beta2=0.99,
                     category='', ep_size=100000)
    # BUG FIX: the learning rate was hard-coded to 0.001 here, which
    # silently ignored a caller-supplied `lr`; pass the resolved option.
    train_op = sg_optim(opt.loss, optim=opt.optim, lr=opt.lr,
                        beta1=opt.beta1, beta2=opt.beta2,
                        category=opt.category)
    # when multiple losses are given, monitor the first one
    loss_ = opt.loss
    if isinstance(opt.loss, (tuple, list)):
        loss_ = opt.loss[0]

    @sg_train_func
    def train_func(sess, arg):
        return sess.run([loss_, train_op])[0]

    # run the training loop
    train_func(**opt)
57,408
def sg_restore(sess, save_path, category=''):
    r"""Restores previously saved variables whose names start with any
    of the given category prefixes.

    `category` may be a single prefix string or a tuple/list of them.
    """
    if not isinstance(category, (tuple, list)):
        category = [category]
    # map variable name (without the trailing ':0') -> variable object
    var_list = {}
    for cat in category:
        for var in tf.global_variables():
            if var.name.startswith(cat):
                var_list[var.name[:-2]] = var
    tf.train.Saver(var_list).restore(sess, save_path)
57,409
def sg_regularizer_loss(scale=1.0):
    r"""Returns the mean of the collected regularization losses,
    multiplied by `scale`."""
    reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    return scale * tf.reduce_mean(reg_losses)
57,410
def sg_densenet_layer(x, opt):
    r"""Applies the basic architecture of a densenet layer.

    Runs `opt.num` dense (bottleneck) steps, concatenating each step's
    output onto the running feature map, then optionally applies a
    transition (1x1 conv + average pool) when `opt.trans` is set.
    """
    assert opt.dim is not None, 'dim is mandatory.'
    assert opt.num is not None, 'num is mandatory.'
    # default option values
    opt += tf.sg_opt(stride=1, act='relu', trans=True)

    def _layer_name(index):
        # returns None when no base name was supplied (mirrors opt.name)
        return opt.name if opt.name is None else opt.name + '_%d' % index

    with tf.sg_context(bias=False, reuse=opt.reuse):
        out = x
        for step in range(opt.num):
            # bottleneck: BN/act -> 1x1 conv (dim/4) -> 3x3 conv (dim)
            grown = (out
                     .sg_bypass(act=opt.act, bn=True,
                                name=_layer_name(3 * step + 1))
                     .sg_conv(dim=opt.dim // 4, size=1, act=opt.act, bn=True,
                              name=_layer_name(3 * step + 2))
                     .sg_conv(dim=opt.dim, size=3,
                              name=_layer_name(3 * step + 3)))
            # dense connectivity: concatenate along the channel axis
            out = tf.concat([grown, out], 3)
        if opt.trans:
            # transition layer: BN/act -> 1x1 conv -> average pooling
            out = (out
                   .sg_bypass(act=opt.act, bn=True,
                              name=_layer_name(3 * step + 4))
                   .sg_conv(size=1, name=_layer_name(3 * step + 5))
                   .sg_pool(avg=True))
    return out
57,411
def deep_merge_dict(a, b):
    """Deep-merges dictionary `b` into a copy of dictionary `a`.

    Nested dicts are merged recursively; any non-dict value in `b`
    overwrites the corresponding value from `a`. Neither input is
    modified in place.

    Raises TypeError when either argument is not a dict.
    """
    if not isinstance(a, dict):
        raise TypeError("a must be a dict, but found %s" % a.__class__.__name__)
    if not isinstance(b, dict):
        raise TypeError("b must be a dict, but found %s" % b.__class__.__name__)
    merged = copy(a)
    for key, val in iteritems(copy(b)):
        if isinstance(val, dict):
            # recurse, treating a missing/non-dict base value as empty
            base = merged[key] if key in merged and isinstance(merged[key], dict) else {}
            merged[key] = deep_merge_dict(base, val)
        else:
            merged[key] = val
    return merged
57,412
def copy_file_if_modified(src_path, dest_path):
    """Copies `src_path` to `dest_path` only when the destination is
    missing or differs in size or mtime.

    Intended as an optimisation for large asset trees. An existing
    directory at `dest_path` is removed first.
    """
    if os.path.isdir(dest_path):
        shutil.rmtree(dest_path)
    if not os.path.exists(dest_path):
        needs_copy = True
    else:
        src_stat = os.stat(src_path)
        dest_stat = os.stat(dest_path)
        needs_copy = (src_stat[stat.ST_SIZE] != dest_stat[stat.ST_SIZE]
                      or src_stat[stat.ST_MTIME] != dest_stat[stat.ST_MTIME])
    if needs_copy:
        # copy2 preserves metadata so the mtime comparison stays valid
        shutil.copy2(src_path, dest_path)
57,413
def get_url_file_ext(url):
    """Attempts to extract the file extension from the given URL.

    Query strings and fragments are ignored (FIX: previously
    ``file.txt?v=2`` yielded ``.txt?v=2``), and a bare dotfile name
    such as ``.gitignore`` is treated as its own extension.
    """
    filename = url.split('/')[-1]
    # drop any query string or fragment so "file.txt?v=2" -> "file.txt"
    filename = filename.split('?')[0].split('#')[0]
    name, ext = os.path.splitext(filename)
    # a dotfile has no splitext extension; use the whole name instead
    if not ext and name and name[0] == '.':
        ext = name
    return ext
57,414
def generate_quickstart(project_path):
    """Generates all of the basic paths for a Statik project within the
    given project path, creating the project path itself if needed."""
    ensure_path_exists(project_path)
    ensure_file_exists(os.path.join(project_path, "config.yml"),
                       DEFAULT_CONFIG_CONTENT)
    # standard project sub-folders
    for subdir in ('models', 'data', 'themes', 'templates',
                   'templatetags', 'views', 'assets'):
        ensure_path_exists(os.path.join(project_path, subdir))
57,415
def get_project_config_file(path, default_config_file_name):
    """Resolves the project path and config file path from `path`.

    A directory implies ``<dir>/<default_config_file_name>``; a path
    ending in ``.yml`` is used as the config file directly. Returns
    ``(None, None)`` for anything else.
    """
    project_path, config_file_path = None, None
    path = os.path.abspath(path)
    if os.path.isdir(path):
        project_path = path
        config_file_path = os.path.join(project_path, default_config_file_name)
        logger.debug("Using default project configuration file path: %s",
                     config_file_path)
    elif path.endswith(".yml"):
        project_path = os.path.dirname(path)
        config_file_path = path
        logger.debug("Using custom project configuration file path: %s",
                     config_file_path)
    return project_path, config_file_path
57,416
def strip_el_text(el, max_depth=0, cur_depth=0):
    """Recursively extracts whitespace-stripped plain text from the XML
    etree element `el`, descending at most `max_depth` levels."""
    text = strip_str(el.text if el.text is not None else "")
    if cur_depth < max_depth:
        for child in el:
            text += " " + strip_el_text(child, max_depth=max_depth,
                                        cur_depth=cur_depth + 1)
    else:
        # at the depth limit, still pick up trailing text that follows
        # the last child element
        children = list(el)
        if children is not None and len(children) > 0:
            if children[-1].tail is not None:
                text += " " + strip_str(children[-1].tail)
    # include this element's own tail for nested elements only
    if cur_depth > 0:
        if el.tail is not None:
            text += " " + strip_str(el.tail)
    return strip_str(text)
57,417
def find_first_file_with_ext(base_paths, prefix, exts):
    """Returns the first ``(base_path, ext)`` combination for which a
    file named ``prefix + ext`` actually exists, or ``(None, None)``
    when nothing matches."""
    for base_path in base_paths:
        for ext in exts:
            candidate = os.path.join(base_path, "%s%s" % (prefix, ext))
            if os.path.exists(candidate) and os.path.isfile(candidate):
                logger.debug("Found first file with relevant extension: %s",
                             candidate)
                return base_path, ext
    logger.debug("No files found for prefix %s, extensions %s",
                 prefix, ", ".join(exts))
    return None, None
57,418
def find_duplicates_in_array(array):
    """Runs through the array and returns the elements that occur more
    than once, in order of first repetition.

    FIX: the previous implementation used list membership tests,
    making it O(n^2); sets give O(n). The original already required
    hashable items (it called ``set(array)``), so this is safe.
    """
    duplicates = []
    seen = set()
    flagged = set()
    for item in array:
        if item in seen:
            if item not in flagged:
                flagged.add(item)
                duplicates.append(item)
        else:
            seen.add(item)
    return duplicates
57,419
def read_requirements(filename):
    """Parses a pip requirements file into a list of requirement
    strings.

    Blank lines and comments are skipped. VCS links (containing
    ``#egg=name``) are recorded in DEPENDENCY_LINKS and replaced by
    their egg name. Raises ValueError for a VCS link with no egg name.
    """
    requirements = []
    for line in read_file(filename):
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        # VCS requirement such as "git+https://...#egg=pkg"
        if '+' in line[:4]:
            repo_link, egg_name = line.split('#egg=')
            if not egg_name:
                raise ValueError('Unknown requirement: {0}'.format(line))
            DEPENDENCY_LINKS.append(line)
            line = egg_name
        requirements.append(line)
    return requirements
57,420
def find_additional_rels(self, all_models):
    """Scans all other models for relationship fields pointing back at
    this model (via `back_populates`) and records them in
    `self.additional_rels`."""
    for model_name, model in iteritems(all_models):
        if model_name == self.name:
            continue
        for field_name in model.field_names:
            field = model.fields[field_name]
            is_rel = isinstance(field, (StatikForeignKeyField,
                                        StatikManyToManyField))
            if (field.field_type == self.name
                    and field.back_populates is not None
                    and is_rel):
                self.additional_rels[field.back_populates] = {
                    'to_model': model_name,
                    'back_populates': field_name,
                    # many-to-many needs an association (secondary) table
                    'secondary': (model_name, field.field_type)
                        if isinstance(field, StatikManyToManyField) else None
                }
                logger.debug(
                    'Additional relationship %s.%s -> %s (%s)',
                    self.name, field.back_populates, model_name,
                    self.additional_rels[field.back_populates]
                )
57,421
def create_db(self, models):
    """Creates the in-memory SQLite database from the model
    configuration, then loads all model data.

    Raises StatikError when SQLAlchemy fails to create the tables.
    """
    self.tables = {
        model_name: self.create_model_table(model)
        for model_name, model in iteritems(models)
    }
    logger.debug("Creating %d database table(s)...", len(self.tables))
    try:
        self.Base.metadata.create_all(self.engine)
    except Exception as exc:
        raise StatikError(
            message="Failed to create in-memory data model.",
            orig_exc=exc
        )
    self.load_all_model_data(models)
57,422
def sort_models(self):
    """Orders the model names so every model appears after the models
    it references, letting data load in dependency order.

    Starts from SQLAlchemy's sorted tables and repeatedly reorders
    until a pass produces no change.
    """
    model_names = [table.name
                   for table in self.Base.metadata.sorted_tables
                   if table.name in self.models]
    logger.debug("Unsorted models: %s", model_names)
    model_count = len(model_names)
    sort_round = 0
    changed = True
    while changed:
        sort_round += 1
        logger.debug('Sorting round: %d (%s)', sort_round, model_names)
        reordered = []
        for idx in range(model_count):
            model = self.models[model_names[idx]]
            # every referenced (foreign) model must come first
            for foreign_model_name in model.foreign_models:
                if foreign_model_name not in reordered:
                    reordered.append(foreign_model_name)
            if model.name not in reordered:
                reordered.append(model.name)
        changed = (model_names != reordered)
        model_names = reordered
    logger.debug("Sorted models: %s (%d rounds)", model_names, sort_round)
    return model_names
57,423
def create_model_table(self, model):
    """Creates the database table class for the given model, wrapping
    any failure in a ModelError."""
    try:
        return db_model_factory(self.Base, model, self.models)
    except Exception as exc:
        raise ModelError(
            model.name,
            message="failed to create in-memory table.",
            orig_exc=exc,
            context=self.error_context
        )
57,424
def load_model_data(self, path, model):
    """Loads the data for `model` from the directory `path`: a
    collection file (``_all.yml``) when present, then the individual
    data files; commits the session afterwards."""
    if os.path.isdir(path):
        # optional collection file holding many instances at once
        if os.path.isfile(os.path.join(path, '_all.yml')):
            self.load_model_data_collection(path, model)
        self.load_model_data_from_files(path, model)
    self.session.commit()
57,425
def query(self, query, additional_locals=None, safe_mode=False):
    """Executes the given query against the database.

    A dict query is parsed safely via MLAlchemy; a string query is
    executed as raw Python and must evaluate to the query result.

    SECURITY NOTE: the string path exec()s arbitrary Python — never
    feed it untrusted input; `safe_mode` enforces the dict-only path
    by raising SafetyViolationError for strings.
    """
    logger.debug("Attempting to execute database query: %s", query)
    if safe_mode and not isinstance(query, dict):
        raise SafetyViolationError(context=self.error_context)
    if isinstance(query, dict):
        logger.debug("Executing query in safe mode (MLAlchemy)")
        return mlalchemy.parse_query(query).to_sqlalchemy(
            self.session, self.tables).all()
    logger.debug("Executing unsafe query (Python exec())")
    # BUG FIX: the previous code wrote into locals() inside a function,
    # which is an implementation detail of CPython (< 3.13) and breaks
    # under PEP 667. Use an explicit namespace dict instead.
    exec_locals = dict(locals())
    if additional_locals is not None:
        exec_locals.update(additional_locals)
    exec(compile('result = %s' % query.strip(), '<string>', 'exec'),
         globals(), exec_locals)
    return exec_locals['result']
57,426
def generate(input_path, output_path=None, in_memory=False, safe_mode=False,
             error_context=None):
    """Executes the Statik site generator using the given parameters,
    returning the project's generation result."""
    project = StatikProject(input_path, safe_mode=safe_mode,
                            error_context=error_context)
    return project.generate(output_path=output_path, in_memory=in_memory)
57,427
def generate(self, output_path=None, in_memory=False):
    """Executes the Statik project generator.

    Returns the nested output dict when `in_memory` is True; otherwise
    writes the files to `output_path` and returns the file count.
    Raises StatikError (wrapping any unexpected exception) on failure.
    The database is shut down in all cases.
    """
    result = dict() if in_memory else 0
    logger.info("Generating Statik build...")
    try:
        if output_path is None and not in_memory:
            raise InternalError(
                "If project is not to be generated in-memory, an output path must be specified"
            )
        self.error_context.update(filename=self.config_file_path)
        self.config = self.config or StatikConfig(self.config_file_path)
        # FIX: the original if/else on config.encoding logged the exact
        # same message in both branches; collapsed to one call.
        logger.debug("Using encoding: %s", self.config.encoding)
        self.error_context.clear()
        self.models = self.load_models()
        self.template_engine = StatikTemplateEngine(self)
        if self.config.external_database is not None:
            self.config.external_database.write_files(output_path, self.models)
        self.views = self.load_views()
        if not self.views:
            raise NoViewsError()
        self.db = self.load_db_data(self.models)
        self.project_context = self.load_project_context()
        in_memory_result = self.process_views()
        if in_memory:
            result = in_memory_result
        else:
            file_count = self.dump_in_memory_result(in_memory_result, output_path)
            logger.info('Wrote %d output file(s) to folder: %s',
                        file_count, output_path)
            self.copy_assets(output_path)
            result = file_count
        logger.info("Success!")
    except StatikError as exc:
        logger.debug(traceback.format_exc())
        logger.error(exc.render())
        raise exc
    except Exception as exc:
        logger.debug(traceback.format_exc())
        _exc = StatikError(
            message="Failed to build project. Run Statik in verbose mode (-v) to see " +
                    "additional traceback information about this error.",
            orig_exc=exc,
            context=self.error_context
        )
        logger.error(_exc.render())
        raise _exc
    finally:
        # best-effort cleanup; never mask the original error
        try:
            if self.db is not None:
                self.db.shutdown()
        except Exception as e:
            logger.exception("Unable to clean up properly: %s", e)
    return result
57,428
def load_views(self):
    """Loads all YAML view definitions from the project's views folder
    into a name -> StatikView mapping.

    Raises MissingProjectFolderError when the views folder is absent.
    """
    view_path = os.path.join(self.path, StatikProject.VIEWS_DIR)
    logger.debug("Loading views from: %s", view_path)
    if not os.path.isdir(view_path):
        raise MissingProjectFolderError(StatikProject.VIEWS_DIR)
    view_files = list_files(view_path, ['yml', 'yaml'])
    logger.debug("Found %d view(s) in project", len(view_files))
    views = {}
    for view_file in view_files:
        view_name = extract_filename(view_file)
        views[view_name] = StatikView(
            filename=os.path.join(view_path, view_file),
            encoding=self.config.encoding,
            name=view_name,
            models=self.models,
            template_engine=self.template_engine,
            error_context=self.error_context
        )
    return views
57,429
def process_views(self):
    """Processes all loaded views, deep-merging each view's rendered
    output into a single result tree.

    StatikErrors propagate unchanged; anything else becomes ViewError.
    """
    output = {}
    logger.debug("Processing %d view(s)...", len(self.views))
    for view_name, view in iteritems(self.views):
        try:
            rendered = view.process(self.db, safe_mode=self.safe_mode,
                                    extra_context=self.project_context)
            output = deep_merge_dict(output, rendered)
        except StatikError as exc:
            raise exc
        except Exception as exc:
            raise ViewError(
                message="Failed to render view \"%s\"." % view_name,
                orig_exc=exc
            )
    return output
57,430
def dump_in_memory_result(self, result, output_path):
    """Recursively writes the nested in-memory `result` tree into files
    under `output_path`, returning the number of files written."""
    file_count = 0
    logger.debug("Dumping in-memory processing results to output folder: %s",
                 output_path)
    for key, value in iteritems(result):
        cur_output_path = os.path.join(output_path, key)
        if isinstance(value, dict):
            # sub-tree: recurse into the corresponding sub-folder
            file_count += self.dump_in_memory_result(value, cur_output_path)
        else:
            if not os.path.isdir(output_path):
                os.makedirs(output_path)
            filename = os.path.join(output_path, key)
            logger.debug("Writing output file: %s", filename)
            with open(filename, 'wt', encoding=self.config.encoding) as f:
                f.write(value)
            file_count += 1
    return file_count
57,431
def copy_assets(self, output_path):
    """Copies theme and project asset trees into the output folder.

    Source paths that do not exist are skipped with an info log.
    """
    src_paths = []
    # theme assets come first, then the project's own asset folder
    if self.config.theme is not None:
        src_paths.append(os.path.join(self.path, StatikProject.THEMES_DIR,
                                      self.config.theme,
                                      StatikProject.ASSETS_DIR))
    if os.path.isabs(self.config.assets_src_path):
        src_paths.append(self.config.assets_src_path)
    else:
        src_paths.append(os.path.join(self.path, self.config.assets_src_path))
    for src_path in src_paths:
        if os.path.exists(src_path) and os.path.isdir(src_path):
            dest_path = self.config.assets_dest_path
            if not os.path.isabs(dest_path):
                dest_path = os.path.join(output_path, dest_path)
            asset_count = copy_tree(src_path, dest_path)
            logger.info("Copied %s asset(s) from %s to %s",
                        asset_count, src_path, dest_path)
        else:
            logger.info("Missing assets source path - skipping copying of assets: %s",
                        src_path)
57,432
def autogen(project_path):
    """Autogenerates the quickstart scaffolding plus views and
    templates for all the models in the project."""
    generate_quickstart(project_path)
    project = StatikProject(project_path)
    project.config = StatikConfig(project.config_file_path)
    models = list(project.load_models().values())

    logger.info('Creating view and template for home page (index.html).')
    generate_yaml_file(
        os.path.join(project_path, StatikProject.VIEWS_DIR, 'index.yaml'),
        {'path': '/', 'template': 'index'}
    )
    generate_index_file(
        os.path.join(project_path, StatikProject.TEMPLATES_DIR, 'index.jinja2')
    )

    for model in models:
        logger.info('Creating view and template for model: %s' % model.name)
        generate_yaml_file(
            os.path.join(project_path, StatikProject.VIEWS_DIR,
                         '%s.yaml' % model.name),
            {
                'path': {
                    'template': '/%s/{{ %s.pk }}' % (model.name, model.name),
                    'for-each': {
                        '%s' % model.name:
                            'session.query(%s).all()' % model.name
                    }
                },
                'template': ('%s' % model.name),
            }
        )
        generate_model_file(
            os.path.join(project_path, StatikProject.TEMPLATES_DIR,
                         '%s.jinja2' % model.name),
            project, model, model.fields.values()
        )
57,433
def generate_yaml_file(filename, contents):
    """Serialises `contents` to `filename` as block-style YAML."""
    with open(filename, 'w') as output_file:
        output_file.write(yaml.dump(contents, default_flow_style=False))
57,434
def generate_index_file(filename):
    """Constructs a default home page for the project by copying the
    bundled index template to `filename`.

    FIX: the template file handle was previously opened without being
    closed; both files now use context managers.
    """
    template_path = os.path.join(os.path.dirname(__file__),
                                 'templates/index_page.html')
    with open(template_path, 'r') as template_file:
        content = template_file.read()
    with open(filename, 'w') as output_file:
        output_file.write(content)
57,435
def generate_model_file(filename, project, model, fields):
    """Creates a webpage for a given instance of a model by rendering
    the bundled model-page template with the model's fields.

    FIX: the template source file is now read via a context manager
    instead of a leaked open() handle.
    """
    # expose each field's concrete class name to the template
    for field in fields:
        field.type = field.__class__.__name__
    template_path = os.path.join(os.path.dirname(__file__),
                                 'templates/model_page.html')
    with open(template_path, 'r') as template_file:
        content = template_file.read()
    engine = StatikTemplateEngine(project)
    template = engine.create_template(content)
    context = {'model': model, 'fields': fields}
    context.update(dict(project.config.context_static))
    rendered = template.render(context)
    with open(filename, 'w') as output_file:
        output_file.write(rendered)
57,436
def build_dynamic(self, db, extra=None, safe_mode=False):
    """Evaluates each dynamic-context query against `db` and returns
    the resulting variable -> value mapping."""
    return {
        var: db.query(query, safe_mode=safe_mode, additional_locals=extra)
        for var, query in iteritems(self.dynamic)
    }
57,437
def build_for_each(self, db, safe_mode=False, extra=None):
    """Evaluates each for-each query against `db` and returns the
    resulting variable -> value mapping."""
    return {
        var: db.query(query, additional_locals=extra, safe_mode=safe_mode)
        for var, query in iteritems(self.for_each)
    }
57,438
def build(self, db=None, safe_mode=False, for_each_inst=None, extra=None):
    """Builds the template-rendering context dictionary by layering
    initial, static, dynamic, for-each and extra values — later layers
    override earlier ones."""
    result = copy(self.initial)
    result.update(self.static)
    if self.dynamic:
        result.update(self.build_dynamic(db, extra=extra, safe_mode=safe_mode))
    if self.for_each and for_each_inst:
        result.update(self.build_for_each(db, safe_mode=safe_mode, extra=extra))
    if isinstance(extra, dict):
        result.update(extra)
    return result
57,439
def template_exception_handler(fn, error_context, filename=None):
    """Calls `fn`, converting any template-related error into a Statik
    TemplateError; returns fn()'s result on success."""
    error_message = None
    if filename:
        error_context.update(filename=filename)
    try:
        return fn()
    except jinja2.TemplateSyntaxError as exc:
        # syntax errors carry their own location information
        error_context.update(filename=exc.filename, line_no=exc.lineno)
        error_message = exc.message
    except jinja2.TemplateError as exc:
        error_message = exc.message
    except Exception as exc:
        error_message = "%s" % exc
    raise TemplateError(message=error_message, context=error_context)
57,440
def create_template(self, s, provider_name=None):
    """Creates a template from the string `s` using the named provider,
    or the highest-precedence provider when none is given."""
    if provider_name is None:
        provider_name = self.supported_providers[0]
    # the provider lookup stays inside the lambda so its errors are
    # also converted by the exception handler
    return template_exception_handler(
        lambda: self.get_provider(provider_name).create_template(s),
        self.error_context
    )
57,441
def construct_field(model_name, field_name, field_type, all_models, **kwargs):
    """Builds a field instance from its declared type string.

    The type string may be ``Type``, ``Type[]`` (many-to-many) or
    ``Type -> backref`` / ``Type[] -> backref``. Raises
    InvalidFieldTypeError for an unknown base type.
    """
    type_parts = field_type.split('->')
    base_type = type_parts[0].strip().split('[]')[0].strip()
    back_populates = type_parts[1].strip() if len(type_parts) > 1 else None
    error_context = kwargs.pop('error_context', StatikErrorContext())
    field_kwargs = copy(kwargs)
    field_kwargs['back_populates'] = back_populates
    if base_type not in FIELD_TYPES and base_type not in all_models:
        raise InvalidFieldTypeError(model_name, field_name,
                                    context=error_context)
    if base_type in FIELD_TYPES:
        return FIELD_TYPES[base_type](field_name, **field_kwargs)
    # relationship to another model: [] marks a many-to-many
    if type_parts[0].strip().endswith('[]'):
        return StatikManyToManyField(field_name, base_type, **field_kwargs)
    return StatikForeignKeyField(field_name, base_type, **field_kwargs)
57,442
def paginate(db_query, items_per_page, offset=0, start_page=1):
    """Instantiates a Paginator over the given database query."""
    return Paginator(db_query, items_per_page,
                     offset=offset, start_page=start_page)
57,443
def render_reverse(self, inst=None, context=None):
    """Renders this path's URL, collapsing a trailing ``index.html`` or
    ``index.htm`` segment into a bare trailing slash."""
    rendered = self.render(inst=inst, context=context)
    segments = rendered.split('/')
    if segments[-1] in ['index.html', 'index.htm']:
        return '/'.join(segments[:-1]) + '/'
    return rendered
57,444
def create(cls, path, template_engine=None, output_filename=None,
           output_ext=None, view_name=None):
    """Factory: picks the StatikView path subclass matching the type of
    `path` — a dict gives a complex path, a string a simple one.

    Raises ValueError for any other structure.
    """
    if isinstance(path, dict):
        return StatikViewComplexPath(
            path, template_engine,
            output_filename=output_filename,
            output_ext=output_ext,
            view_name=view_name
        )
    # NOTE(review): `basestring` is Python 2 — presumably supplied by a
    # py2/py3 compatibility import elsewhere in this module; confirm.
    elif isinstance(path, basestring):
        return StatikViewSimplePath(
            path,
            output_filename=output_filename,
            output_ext=output_ext,
            view_name=view_name
        )
    else:
        raise ValueError("Unrecognised structure for \"path\" configuration in view: %s" % view_name)
57,445
def render(self, context, db=None, safe_mode=False, extra_context=None):
    """Renders one output page per instance returned by this view's
    path query, returning a deep-merged {path segment: content} tree.

    Raises MissingParameterError when no database is supplied.
    """
    if not db:
        raise MissingParameterError("db", context=self.error_context)
    rendered_views = dict()
    path_instances = db.query(self.path.query, safe_mode=safe_mode)
    extra_ctx = copy(extra_context) if extra_context else dict()
    for inst in path_instances:
        # expose the current instance under the path's variable name
        extra_ctx.update({self.path.variable: inst})
        ctx = context.build(db=db, safe_mode=safe_mode,
                            for_each_inst=inst, extra=extra_ctx)
        inst_path = self.path.render(inst=inst, context=ctx)
        rendered_view = self.template.render(ctx)
        rendered_views = deep_merge_dict(
            rendered_views,
            dict_from_path(inst_path, final_value=rendered_view)
        )
    return rendered_views
57,446
def render(self, db, safe_mode=False, extra_context=None):
    """Renders this view against the given database by delegating to
    its renderer."""
    return self.renderer.render(self.context, db, safe_mode=safe_mode,
                                extra_context=extra_context)
57,447
def _validate_number_of_layers ( self , number_of_layers ) : if number_of_layers <= 0 : raise SquashError ( "Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers ) if number_of_layers > len ( self . old_image_layers ) : raise SquashError ( "Cannot squash %s layers, the %s image contains only %s layers" % ( number_of_layers , self . image , len ( self . old_image_layers ) ) )
Makes sure that the specified number of layers to squash is a valid number
57,448
def _files_in_layers(self, layers, directory):
    """Builds a mapping of layer id -> list of normalised file names
    contained in that layer's ``layer.tar`` archive."""
    files = {}
    for layer in layers:
        self.log.debug("Generating list of files in layer '%s'..." % layer)
        tar_file = os.path.join(directory, layer, "layer.tar")
        with tarfile.open(tar_file, 'r', format=tarfile.PAX_FORMAT) as tar:
            files[layer] = [self._normalize_path(name)
                            for name in tar.getnames()]
        self.log.debug("Done, found %s files" % len(files[layer]))
    return files
57,449
def _prepare_tmp_directory(self, tmp_dir):
    """Creates the temporary working directory for layer processing.

    An explicitly requested directory must not already exist (raises
    SquashError); otherwise a fresh mkdtemp directory is used.
    """
    if tmp_dir:
        if os.path.exists(tmp_dir):
            raise SquashError(
                "The '%s' directory already exists, please remove it before you proceed" % tmp_dir)
        os.makedirs(tmp_dir)
    else:
        tmp_dir = tempfile.mkdtemp(prefix="docker-squash-")
    self.log.debug("Using %s as the temporary directory" % tmp_dir)
    return tmp_dir
57,450
def _layers_to_squash ( self , layers , from_layer ) : to_squash = [ ] to_leave = [ ] should_squash = True for l in reversed ( layers ) : if l == from_layer : should_squash = False if should_squash : to_squash . append ( l ) else : to_leave . append ( l ) to_squash . reverse ( ) to_leave . reverse ( ) return to_squash , to_leave
Prepares a list of layer IDs that should be squashed
57,451
def _save_image(self, image_id, directory):
    """Saves (exports and extracts) the image into `directory`,
    retrying up to three times before raising SquashError."""
    for attempt in [0, 1, 2]:
        self.log.info("Saving image %s to %s directory..." % (image_id, directory))
        self.log.debug("Try #%s..." % (attempt + 1))
        try:
            image = self.docker.get_image(image_id)
            if docker.version_info[0] < 3:
                # old docker-py returns an HTTPResponse we can stream directly
                self.log.debug("Extracting image using HTTPResponse object directly")
                self._extract_tar(image, directory)
            else:
                # newer docker-py yields raw chunks; pump them through a
                # pipe into a separate extractor thread
                self.log.debug("Extracting image using iterator over raw data")
                fd_r, fd_w = os.pipe()
                r = os.fdopen(fd_r, 'rb')
                w = os.fdopen(fd_w, 'wb')
                extracter = threading.Thread(target=self._extract_tar,
                                             args=(r, directory))
                extracter.start()
                for chunk in image:
                    w.write(chunk)
                    w.flush()
                w.close()
                extracter.join()
                r.close()
            self.log.info("Image saved!")
            return True
        except Exception as e:
            self.log.exception(e)
            self.log.warn("An error occured while saving the %s image, retrying..." % image_id)
    # all retries exhausted
    raise SquashError("Couldn't save %s image!" % image_id)
57,452
def _unpack ( self , tar_file , directory ) : self . log . info ( "Unpacking %s tar file to %s directory" % ( tar_file , directory ) ) with tarfile . open ( tar_file , 'r' ) as tar : tar . extractall ( path = directory ) self . log . info ( "Archive unpacked!" )
Unpacks tar archive to selected directory
57,453
def _parse_image_name ( self , image ) : if ':' in image and '/' not in image . split ( ':' ) [ - 1 ] : image_tag = image . split ( ':' ) [ - 1 ] image_name = image [ : - ( len ( image_tag ) + 1 ) ] else : image_tag = "latest" image_name = image return ( image_name , image_tag )
Parses the provided image name and splits it in the name and tag part if possible . If no tag is provided latest is used .
57,454
def _dump_json ( self , data , new_line = False ) : json_data = json . dumps ( data , separators = ( ',' , ':' ) ) if new_line : json_data = "%s\n" % json_data sha = hashlib . sha256 ( json_data . encode ( 'utf-8' ) ) . hexdigest ( ) return json_data , sha
Helper function to marshal object into JSON string . Additionally a sha256sum of the created JSON string is generated .
57,455
def _move_layers ( self , layers , src , dest ) : for layer in layers : layer_id = layer . replace ( 'sha256:' , '' ) self . log . debug ( "Moving unmodified layer '%s'..." % layer_id ) shutil . move ( os . path . join ( src , layer_id ) , dest )
This moves all the layers that should be copied as - is . In other words - all layers that are not meant to be squashed will be moved from the old image to the new image untouched .
57,456
def _marker_files ( self , tar , members ) : marker_files = { } self . log . debug ( "Searching for marker files in '%s' archive..." % tar . name ) for member in members : if '.wh.' in member . name : self . log . debug ( "Found '%s' marker file" % member . name ) marker_files [ member ] = tar . extractfile ( member ) self . log . debug ( "Done, found %s files" % len ( marker_files ) ) return marker_files
Searches for marker files in the specified archive .
57,457
def _add_markers(self, markers, tar, files_in_layers, added_symlinks):
    """Add back whiteout markers that were not added to the squashed layer
    AND whose target files can be found in layers we do not squash.

    markers: {TarInfo: file object} mapping of marker files
    tar: the squashed-layer archive being written
    files_in_layers: {layer: [normalized paths]} for the layers kept as-is
    added_symlinks: symlink paths already added (markers under them are skipped)
    """
    if markers:
        self.log.debug("Marker files to add: %s" % [o.name for o in markers.keys()])
    else:
        # No marker files at all - nothing to do
        return
    # Everything already present in the squashed archive (normalized)
    tar_files = [self._normalize_path(x) for x in tar.getnames()]
    for marker, marker_file in six.iteritems(markers):
        # The file the marker whites out (strip the '.wh.' prefix)
        actual_file = marker.name.replace('.wh.', '')
        normalized_file = self._normalize_path(actual_file)
        should_be_added_back = False
        if self._file_should_be_skipped(normalized_file, added_symlinks):
            self.log.debug("Skipping '%s' marker file, this file is on a symlink path" % normalized_file)
            continue
        if normalized_file in tar_files:
            self.log.debug("Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file)
            continue
        if files_in_layers:
            # Only re-add the marker if the whited-out file still exists
            # in one of the layers we keep
            for files in files_in_layers.values():
                if normalized_file in files:
                    should_be_added_back = True
                    break
        else:
            # No unsquashed layers to check against - keep the marker
            should_be_added_back = True
        if should_be_added_back:
            self.log.debug("Adding '%s' marker file back..." % marker.name)
            tar.addfile(tarfile.TarInfo(name=marker.name), marker_file)
            # Remember it so the same marker is not added twice
            tar_files.append(normalized_file)
        else:
            self.log.debug("Skipping '%s' marker file..." % marker.name)
This method is responsible for adding back all markers that were not added to the squashed layer AND files they refer to can be found in layers we do not squash .
57,458
def _proc_pax(self, filetar):
    """Process an extended or global pax header (POSIX.1-2001).

    Parses the "%d %s=%s\\n" records from the header payload, then reads
    the next member header and, for extended (per-file) headers, patches
    that member with the parsed information. Returns the next TarInfo.
    """
    # Read the full header payload.
    buf = filetar.fileobj.read(self._block(self.size))
    # A global header affects all following files; an extended header
    # only the next one - hence copy() in the latter case.
    if self.type == tarfile.XGLTYPE:
        pax_headers = filetar.pax_headers
    else:
        pax_headers = filetar.pax_headers.copy()
    # Each record is "<length> <keyword>=<value>\n" where <length>
    # counts the whole record including itself.
    regex = re.compile(r"(\d+) ([^=]+)=", re.U)
    pos = 0
    while True:
        match = regex.match(buf, pos)
        if not match:
            break
        length, keyword = match.groups()
        length = int(length)
        # Value spans from just after '=' to just before the trailing '\n'.
        value = buf[match.end(2) + 1:match.start(1) + length - 1]
        try:
            keyword = keyword.decode("utf8")
        except Exception:
            pass
        try:
            value = value.decode("utf8")
        except Exception:
            pass
        pax_headers[keyword] = value
        pos += length
    # Fetch the next header that this pax header describes.
    try:
        next = self.fromtarfile(filetar)
    except tarfile.HeaderError:
        raise tarfile.SubsequentHeaderError("missing or bad subsequent header")
    if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE):
        # Patch the TarInfo object with the extended header info.
        next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors)
        next.offset = self.offset
        if "size" in pax_headers:
            # If the extended header replaced the size field, recompute
            # the offset where the following header starts.
            offset = next.offset_data
            if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES:
                offset += next._block(next.size)
            filetar.offset = offset
    return next
Process an extended or global header as described in POSIX . 1 - 2001 .
57,459
def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE):
    """Return a POSIX.1-2001 extended or global header sequence that
    contains a list of keyword/value pairs. The values must be unicode
    objects.

    NOTE(review): uses dict.iteritems(), i.e. this is Python 2
    compatibility code - presumably a backport of tarfile internals.
    """
    records = []
    for keyword, value in pax_headers.iteritems():
        try:
            keyword = keyword.encode("utf8")
        except Exception:
            pass
        try:
            value = value.encode("utf8")
        except Exception:
            pass
        # A record is "<length> <keyword>=<value>\n" and <length> counts
        # the record itself, so it must be found by fixed-point iteration:
        # l is len(" keyword=value\n"), p converges on the full length.
        l = len(keyword) + len(value) + 3
        n = p = 0
        while True:
            n = l + len(str(p))
            if n == p:
                break
            p = n
        records.append("%d %s=%s\n" % (p, keyword, value))
    records = "".join(records)
    # Pax headers are stored as a pseudo-file named "././@PaxHeader".
    info = {}
    info["name"] = "././@PaxHeader"
    info["type"] = type
    info["size"] = len(records)
    info["magic"] = tarfile.POSIX_MAGIC
    # Header block followed by the padded record payload.
    return cls._create_header(info, tarfile.USTAR_FORMAT) + cls._create_payload(records)
Return a POSIX . 1 - 2001 extended or global header sequence that contains a list of keyword value pairs . The values must be unicode objects .
57,460
def _read_json_file ( self , json_file ) : self . log . debug ( "Reading '%s' JSON file..." % json_file ) with open ( json_file , 'r' ) as f : return json . load ( f , object_pairs_hook = OrderedDict )
Helper function to read JSON file as OrderedDict
57,461
def _read_layer_paths ( self , old_image_config , old_image_manifest , layers_to_move ) : current_manifest_layer = 0 layer_paths_to_move = [ ] layer_paths_to_squash = [ ] for i , layer in enumerate ( old_image_config [ 'history' ] ) : if not layer . get ( 'empty_layer' , False ) : layer_id = old_image_manifest [ 'Layers' ] [ current_manifest_layer ] . rsplit ( '/' ) [ 0 ] if len ( layers_to_move ) > i : layer_paths_to_move . append ( layer_id ) else : layer_paths_to_squash . append ( layer_id ) current_manifest_layer += 1 return layer_paths_to_squash , layer_paths_to_move
In the v2 format, layer ids are not the same as the ids used in the exported tar archive to name directories for layers. These ids can be found in the configuration files saved with the image - we need to read them.
57,462
def _generate_squashed_layer_path_id(self):
    """Compute the id used to name the squashed layer's directory in the
    archive: the sha256 of the layer's v1-style metadata."""
    # Start from the old image config and adapt it to describe the
    # new squashed layer
    v1_metadata = OrderedDict(self.old_image_config)
    v1_metadata['created'] = self.date
    # These keys are not part of v1 layer metadata
    for key in 'history', 'rootfs', 'container':
        v1_metadata.pop(key, None)
    # Pop 'os' and re-insert it after 'layer_id' to preserve key order
    operating_system = v1_metadata.pop('os', None)
    v1_metadata['layer_id'] = "sha256:%s" % self.chain_ids[-1]
    if operating_system:
        v1_metadata['os'] = operating_system
    if self.layer_paths_to_move:
        if self.layer_paths_to_squash:
            parent = self.layer_paths_to_move[-1]
        else:
            parent = self.layer_paths_to_move[0]
        v1_metadata['parent'] = "sha256:%s" % parent
    v1_metadata['config']['Image'] = self.squash_id if self.squash_id else ""
    # The directory name is the sha256 of the serialized metadata
    return self._dump_json(v1_metadata)[1]
This function generates the id used to name the directory to store the squashed layer content in the archive .
57,463
def write_local_file(self, outputfile, path):
    """Copy *outputfile*'s full content to *path* on the local filesystem."""
    self.logger.info("Writing file to %s", path)
    # Rewind so the whole file is copied, not just the remainder
    outputfile.seek(0)
    with open(path, 'wb') as destination:
        copyfileobj(outputfile, destination)
Write file to the desired path .
57,464
def _cleanup_old_backups(self, database=None, servername=None):
    """Delegate old-backup cleanup to the storage backend.

    The storage decides how many backups to keep based on its settings.
    """
    self.storage.clean_old_backups(
        encrypted=self.encrypt,
        compressed=self.compress,
        content_type=self.content_type,
        database=database,
        servername=servername,
    )
Clean up old backups, keeping the number of backups specified by DBBACKUP_CLEANUP_KEEP and any backups that occur on the first of the month.
57,465
def _save_new_backup(self, database):
    """Dump *database*, optionally compress and/or encrypt the dump, then
    write it to storage or to a local path."""
    self.logger.info("Backing Up Database: %s", database['NAME'])
    filename = self.connector.generate_filename(self.servername)
    outputfile = self.connector.create_dump()
    if self.compress:
        outputfile, filename = utils.compress_file(outputfile, filename)
    if self.encrypt:
        outputfile, filename = utils.encrypt_file(outputfile, filename)
    # An explicitly requested filename overrides the generated one
    filename = self.filename if self.filename else filename
    self.logger.debug("Backup size: %s", utils.handle_size(outputfile))
    outputfile.seek(0)
    if self.path is None:
        self.write_to_storage(outputfile, filename)
    else:
        self.write_local_file(outputfile, self.path)
Save a new backup file .
57,466
def _explore_storage ( self ) : path = '' dirs = [ path ] while dirs : path = dirs . pop ( ) subdirs , files = self . media_storage . listdir ( path ) for media_filename in files : yield os . path . join ( path , media_filename ) dirs . extend ( [ os . path . join ( path , subdir ) for subdir in subdirs ] )
Generator of all files contained in media storage .
57,467
def _create_tar(self, name):
    """Build a TAR (gzipped when self.compress is set) of all files in
    media storage.

    Returns the spooled temporary file holding the archive. Fixes two
    resource issues in the original: each storage file is now closed
    after being added, and the tar file is closed even when adding a
    member raises.
    """
    fileobj = utils.create_spooled_temporary_file()
    mode = 'w:gz' if self.compress else 'w'
    tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode)
    try:
        for media_filename in self._explore_storage():
            tarinfo = tarfile.TarInfo(media_filename)
            media_file = self.media_storage.open(media_filename)
            try:
                # NOTE(review): assumes the storage file object supports
                # len(); verify against the storage backend in use
                tarinfo.size = len(media_file)
                tar_file.addfile(tarinfo, media_file)
            finally:
                # Close every storage file to avoid leaking handles when
                # backing up many media files
                media_file.close()
    finally:
        tar_file.close()
    return fileobj
Create TAR file .
57,468
def backup_mediafiles(self):
    """Create a media backup tarball and write it to storage or local path."""
    extension = "tar%s" % ('.gz' if self.compress else '')
    filename = utils.filename_generate(extension,
                                       servername=self.servername,
                                       content_type=self.content_type)
    tarball = self._create_tar(filename)
    if self.encrypt:
        tarball, filename = utils.encrypt_file(tarball, filename)
    self.logger.debug("Backup size: %s", utils.handle_size(tarball))
    tarball.seek(0)
    if self.path is None:
        self.write_to_storage(tarball, filename)
    else:
        self.write_local_file(tarball, self.path)
Create backup file and write it to storage .
57,469
def bytes_to_str(byteVal, decimals=1):
    """Render *byteVal* as a human-readable size string.

    Uses the (unit, size) pairs in the module-level BYTES table, largest
    first; falls back to plain bytes when no unit matches.
    """
    for unit, unit_size in BYTES:
        if byteVal >= unit_size:
            if decimals == 0:
                return '%s %s' % (int(round(byteVal / unit_size, 0)), unit)
            return '%s %s' % (round(byteVal / unit_size, decimals), unit)
    return '%s B' % byteVal
Convert bytes to a human readable string .
57,470
def mail_admins(subject, message, fail_silently=False, connection=None,
                html_message=None):
    """Send *message* to the addresses listed in settings.ADMINS."""
    if not settings.ADMINS:
        # Nobody configured to receive the mail
        return
    recipients = [admin[1] for admin in settings.ADMINS]
    mail = EmailMultiAlternatives(
        '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
        message, settings.SERVER_EMAIL, recipients,
        connection=connection)
    if html_message:
        mail.attach_alternative(html_message, 'text/html')
    mail.send(fail_silently=fail_silently)
Sends a message to the admins as defined by the DBBACKUP_ADMINS setting .
57,471
def create_spooled_temporary_file(filepath=None, fileobj=None):
    """Create a spooled temporary file, optionally pre-filled.

    If *filepath* is given, that file is opened here, copied in, and
    closed (the original implementation leaked this handle). If
    *fileobj* is given, its content is copied from position 0; a
    caller-supplied fileobj is NOT closed, since we do not own it.

    Returns the spooled temporary file.
    """
    spooled_file = tempfile.SpooledTemporaryFile(
        max_size=settings.TMP_FILE_MAX_SIZE,
        dir=settings.TMP_DIR)
    if filepath:
        fileobj = open(filepath, 'r+b')
        try:
            fileobj.seek(0)
            copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
        finally:
            fileobj.close()
    elif fileobj is not None:
        fileobj.seek(0)
        copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
    return spooled_file
Create a spooled temporary file. If filepath or fileobj is defined, its content will be copied into the temporary file.
57,472
def compress_file(inputfile, filename):
    """Gzip *inputfile* into a new spooled file.

    Returns (compressed_file, filename + '.gz').
    """
    outputfile = create_spooled_temporary_file()
    with gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb") as gz:
        inputfile.seek(0)
        copyfileobj(inputfile, gz, settings.TMP_FILE_READ_SIZE)
    return outputfile, filename + '.gz'
Compress input file using gzip and change its name .
57,473
def uncompress_file(inputfile, filename):
    """Gunzip *inputfile* into a new spooled file and strip the '.gz' suffix.

    Returns (outputfile, new_basename). Fix: only a *trailing* '.gz' is
    removed; the original used str.replace, which would also mangle a
    '.gz' occurring mid-name (e.g. 'a.gz.tar' became 'a.tar').
    """
    zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
    try:
        outputfile = create_spooled_temporary_file(fileobj=zipfile)
    finally:
        zipfile.close()
    new_basename = os.path.basename(filename)
    if new_basename.endswith('.gz'):
        new_basename = new_basename[:-3]
    return outputfile, new_basename
Uncompress this file using gzip and change its name .
57,474
def timestamp(value):
    """Format datetime *value* using settings.DATE_FORMAT.

    Aware datetimes are converted to local time first; naive ones are
    used as-is.
    """
    localized = value if timezone.is_naive(value) else timezone.localtime(value)
    return localized.strftime(settings.DATE_FORMAT)
Return the timestamp of a datetime . datetime object .
57,475
def datefmt_to_regex(datefmt):
    """Translate a strftime format string into a compiled capturing regex.

    Each strftime directive is replaced per the module-level
    PATTERN_MATCHNG table.
    """
    pattern = datefmt
    for directive, regex_fragment in PATTERN_MATCHNG:
        pattern = pattern.replace(directive, regex_fragment)
    return re.compile(r'(%s)' % pattern)
Convert a strftime format string to a regex .
57,476
def filename_to_date(filename, datefmt=None):
    """Parse a datetime out of *filename*.

    Returns None when the filename carries no recognizable date string.
    """
    datefmt = datefmt or settings.DATE_FORMAT
    datestring = filename_to_datestring(filename, datefmt)
    if datestring is None:
        return None
    return datetime.strptime(datestring, datefmt)
Return a datetime from a file name .
57,477
def filename_generate(extension, database_name='', servername=None,
                      content_type='db', wildcard=None):
    """Build a new backup filename from the configured template."""
    if content_type == 'db':
        # Reduce a path-like database name to its bare base name
        if '/' in database_name:
            database_name = os.path.basename(database_name)
        if '.' in database_name:
            database_name = database_name.split('.')[0]
        template = settings.FILENAME_TEMPLATE
    elif content_type == 'media':
        template = settings.MEDIA_FILENAME_TEMPLATE
    else:
        template = settings.FILENAME_TEMPLATE
    params = {
        'servername': servername or settings.HOSTNAME,
        'datetime': wildcard or datetime.now().strftime(settings.DATE_FORMAT),
        'databasename': database_name,
        'extension': extension,
        'content_type': content_type,
    }
    # The template may be a callable or a str.format template
    filename = template(**params) if callable(template) else template.format(**params)
    filename = REG_FILENAME_CLEAN.sub('-', filename)
    if filename.startswith('-'):
        filename = filename[1:]
    return filename
Create a new backup filename .
57,478
def get_storage(path=None, options=None):
    """Instantiate the configured Storage class.

    *path* and *options* override the DBBACKUP_STORAGE settings; raises
    ImproperlyConfigured when no storage class is configured at all.
    """
    path = path or settings.STORAGE
    options = options or settings.STORAGE_OPTIONS
    if not path:
        raise ImproperlyConfigured('You must specify a storage class using '
                                   'DBBACKUP_STORAGE settings.')
    return Storage(path, **options)
Get the specified storage configured with options .
57,479
def list_backups(self, encrypted=None, compressed=None, content_type=None,
                 database=None, servername=None):
    """List stored backup files matching the given filters.

    Filters left as None are ignored. content_type must be 'db',
    'media' or None; anything else raises TypeError.
    """
    if content_type not in ('db', 'media', None):
        msg = "Bad content_type %s, must be 'db', 'media', or None" % (content_type)
        raise TypeError(msg)
    # Only files carrying a parseable date string count as backups
    files = [f for f in self.list_directory() if utils.filename_to_datestring(f)]
    if encrypted is not None:
        files = [f for f in files if ('.gpg' in f) == encrypted]
    if compressed is not None:
        files = [f for f in files if ('.gz' in f) == compressed]
    # Media backups are tarballs; database backups are not
    if content_type == 'media':
        files = [f for f in files if '.tar' in f]
    elif content_type == 'db':
        files = [f for f in files if '.tar' not in f]
    if database:
        files = [f for f in files if database in f]
    if servername:
        files = [f for f in files if servername in f]
    return files
List stored files matching the given filters. If a filter is None it won't be used. content_type must be 'db' for database backups or 'media' for media backups.
57,480
def get_older_backup(self, encrypted=None, compressed=None, content_type=None,
                     database=None, servername=None):
    """Return the name of the oldest backup matching the filters.

    Raises FileNotFound when nothing matches.
    """
    candidates = self.list_backups(encrypted=encrypted, compressed=compressed,
                                   content_type=content_type,
                                   database=database, servername=servername)
    if not candidates:
        raise FileNotFound("There's no backup file available.")
    # Oldest = smallest embedded date
    return min(candidates, key=utils.filename_to_date)
Return the oldest backup's file name.
57,481
def clean_old_backups(self, encrypted=None, compressed=None, content_type=None,
                      database=None, servername=None, keep_number=None):
    """Delete old backups, retaining the *keep_number* most recent ones.

    Files accepted by settings.CLEANUP_KEEP_FILTER are never deleted.
    """
    if keep_number is None:
        keep_number = (settings.CLEANUP_KEEP if content_type == 'db'
                       else settings.CLEANUP_KEEP_MEDIA)
    keep_filter = settings.CLEANUP_KEEP_FILTER
    files = self.list_backups(encrypted=encrypted, compressed=compressed,
                              content_type=content_type, database=database,
                              servername=servername)
    # Newest first; everything past the first keep_number is a candidate
    files = sorted(files, key=utils.filename_to_date, reverse=True)
    for filename in files[keep_number:]:
        if keep_filter(filename):
            continue
        self.delete_file(filename)
Delete older backups, keeping only the number defined.
57,482
def _get_database(self, options):
    """Resolve the database to act on.

    Returns (database_name, database_settings). Raises CommandError when
    the name is ambiguous or unknown.
    """
    database_name = options.get('database')
    if not database_name:
        if len(settings.DATABASES) > 1:
            errmsg = "Because this project contains more than one database, you" \
                     " must specify the --database option."
            raise CommandError(errmsg)
        database_name = list(settings.DATABASES.keys())[0]
    if database_name not in settings.DATABASES:
        raise CommandError("Database %s does not exist." % database_name)
    return database_name, settings.DATABASES[database_name]
Get the database to restore .
57,483
def _restore_backup(self):
    """Fetch the backup file, optionally decrypt and uncompress it, then
    hand it to the database connector for restoring."""
    input_filename, input_file = self._get_backup_file(
        database=self.database_name, servername=self.servername)
    self.logger.info("Restoring backup for database '%s' and server '%s'",
                     self.database_name, self.servername)
    self.logger.info("Restoring: %s" % input_filename)
    if self.decrypt:
        unencrypted_file, input_filename = utils.unencrypt_file(
            input_file, input_filename, self.passphrase)
        input_file.close()
        input_file = unencrypted_file
    if self.uncompress:
        uncompressed_file, input_filename = utils.uncompress_file(
            input_file, input_filename)
        input_file.close()
        input_file = uncompressed_file
    self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file))
    if self.interactive:
        self._ask_confirmation()
    input_file.seek(0)
    self.connector = get_connector(self.database_name)
    self.connector.restore_dump(input_file)
Restore the specified database .
57,484
def get_connector(database_name=None):
    """Build a connector instance for *database_name*.

    Falls back to Django's default database alias; the connector class
    is looked up via settings.CONNECTORS or CONNECTOR_MAPPING by engine.
    """
    from django.db import connections, DEFAULT_DB_ALIAS
    database_name = database_name or DEFAULT_DB_ALIAS
    connection = connections[database_name]
    engine = connection.settings_dict['ENGINE']
    connector_settings = settings.CONNECTORS.get(database_name, {})
    connector_path = connector_settings.get('CONNECTOR', CONNECTOR_MAPPING[engine])
    # Split "pkg.module.ClassName" into module path and class name
    module_path, _, connector_name = connector_path.rpartition('.')
    module = import_module(module_path)
    connector = getattr(module, connector_name)
    return connector(database_name, **connector_settings)
Get a connector from its database key in setttings .
57,485
def settings(self):
    """Database settings merged with per-connector overrides (computed once
    and cached on the instance)."""
    if not hasattr(self, '_settings'):
        merged = self.connection.settings_dict.copy()
        merged.update(settings.CONNECTORS.get(self.database_name, {}))
        self._settings = merged
    return self._settings
Mix of database and connector settings .
57,486
def run_command(self, command, stdin=None, env=None):
    """Run *command* in a subprocess.

    Returns (stdout, stderr) spooled temporary files, both rewound.
    Raises CommandConnectorError on a non-zero exit status or an
    OS-level launch failure.
    """
    cmd = shlex.split(command)
    stdout = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
                                  dir=settings.TMP_DIR)
    stderr = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
                                  dir=settings.TMP_DIR)
    full_env = os.environ.copy() if self.use_parent_env else {}
    full_env.update(self.env)
    full_env.update(env or {})
    try:
        if isinstance(stdin, (ContentFile, SFTPStorageFile)):
            # These storage file types cannot be handed to Popen directly;
            # feed their content through a pipe instead
            process = Popen(cmd, stdin=PIPE, stdout=stdout, stderr=stderr,
                            env=full_env)
            process.communicate(input=stdin.read())
        else:
            process = Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr,
                            env=full_env)
            process.wait()
        if process.poll():
            stderr.seek(0)
            raise exceptions.CommandConnectorError(
                "Error running: {}\n{}".format(
                    command, stderr.read().decode('utf-8')))
        stdout.seek(0)
        stderr.seek(0)
        return stdout, stderr
    except OSError as err:
        raise exceptions.CommandConnectorError(
            "Error running: {}\n{}".format(command, str(err)))
Launch a shell command line .
57,487
def _assign_zones(self):
    """Populate self.zones with the four RainCloudyFaucetZone objects
    managed by this faucet (ids 1 through 4), skipping duplicates."""
    for zone_id in range(1, 5):
        zone = RainCloudyFaucetZone(parent=self._parent,
                                    controller=self._controller,
                                    faucet=self,
                                    zone_id=zone_id)
        if zone not in self.zones:
            self.zones.append(zone)
Assign all RainCloudyFaucetZone managed by faucet .
57,488
def _find_zone_by_id ( self , zone_id ) : if not self . zones : return None zone = list ( filter ( lambda zone : zone . id == zone_id , self . zones ) ) return zone [ 0 ] if zone else None
Return zone by id .
57,489
def _set_zone_name(self, zoneid, name):
    """Rename zone *zoneid* to *name*.

    Zone ids are 1-based in this API but the remote endpoint expects
    0-based ids, hence the decrement.
    """
    payload = {
        '_set_zone_name': 'Set Name',
        'select_zone': str(zoneid - 1),
        'zone_name': name,
    }
    self._controller.post(payload)
Private method to override zone name .
57,490
def _set_watering_time(self, zoneid, value):
    """Set the manual watering time for zone *zoneid*.

    *value* must be one of MANUAL_WATERING_ALLOWED; 0 maps to 'OFF' and
    'on' maps to the maximum watering duration.
    """
    if value not in MANUAL_WATERING_ALLOWED:
        raise ValueError('Valid options are: {}'.format(
            ', '.join(map(str, MANUAL_WATERING_ALLOWED))))
    if isinstance(value, int) and value == 0:
        value = 'OFF'
    elif isinstance(value, str):
        value = value.upper()
        if value == 'ON':
            value = MAX_WATERING_MINUTES
    ddata = self.preupdate()
    ddata['zone{}_select_manual_mode'.format(zoneid)] = value
    self.submit_action(ddata)
Private method to set watering_time per zone .
57,491
def watering_time(self):
    """Return this zone's watering time: the larger of the automatic and
    manual watering time values."""
    mode = self._attributes['rain_delay_mode'][self.id - 1]
    return max(mode['auto_watering_time'], mode['manual_watering_time'])
Return watering_time from zone .
57,492
def _set_rain_delay(self, zoneid, value):
    """Set the rain-delay option for zone *zoneid*.

    Accepts an int in 0..MAX_RAIN_DELAY_DAYS or the string 'off'.
    Returns True on success, None when the value is invalid.
    """
    # The endpoint uses 0-based zone ids
    zoneid -= 1
    if isinstance(value, int):
        if value > MAX_RAIN_DELAY_DAYS or value < 0:
            return None
        elif value == 0:
            value = 'off'
        elif value == 1:
            value = '1day'
        elif value >= 2:
            value = str(value) + 'days'
    elif isinstance(value, str):
        # Only the literal 'off' is accepted as a string
        if value.lower() != 'off':
            return None
    ddata = self.preupdate()
    ddata['zone{}_rain_delay_select'.format(zoneid)] = value
    self.submit_action(ddata)
    return True
Generic method to set auto_watering program .
57,493
def _set_auto_watering(self, zoneid, value):
    """Enable or disable the auto-watering program for zone *zoneid*.

    Returns True on success, None when *value* is not a bool.
    """
    if not isinstance(value, bool):
        return None
    ddata = self.preupdate()
    attr = 'zone{}_program_toggle'.format(zoneid)
    try:
        if not value:
            # Absence of the toggle key means "off"
            ddata.pop(attr)
        else:
            ddata[attr] = 'on'
    except KeyError:
        # Key already absent - nothing to do
        pass
    self.submit_action(ddata)
    return True
Private method to set auto_watering program .
57,494
def auto_watering(self):
    """Whether this zone is configured for automatic watering."""
    zone_key = "zone{}".format(self.id)
    return find_program_status(self._parent.html['home'], zone_key)
Return if zone is configured to automatic watering .
57,495
def _to_dict ( self ) : return { 'auto_watering' : getattr ( self , "auto_watering" ) , 'droplet' : getattr ( self , "droplet" ) , 'is_watering' : getattr ( self , "is_watering" ) , 'name' : getattr ( self , "name" ) , 'next_cycle' : getattr ( self , "next_cycle" ) , 'rain_delay' : getattr ( self , "rain_delay" ) , 'watering_time' : getattr ( self , "watering_time" ) , }
Method to build zone dict .
57,496
def preupdate(self, force_refresh=True):
    """Return a dict with all current options prior to submitting a request.

    Starts from the MANUAL_OP_DATA template and fills in the current
    controller/faucet selection plus every zone's program toggle,
    manual watering time and rain delay, so that submitting the request
    does not reset options the user did not change.
    """
    ddata = MANUAL_OP_DATA.copy()
    # Force a refresh to work with the most recent state
    if force_refresh:
        self.update()
    ddata['select_controller'] = self._parent.controllers.index(self._controller)
    ddata['select_faucet'] = self._controller.faucets.index(self._faucet)
    # Preserve each zone's auto-watering program toggle
    for zone in self._faucet.zones:
        attr = 'zone{}_program_toggle'.format(zone.id)
        if zone.auto_watering:
            ddata[attr] = 'on'
    # Preserve each zone's manual watering time
    for zone in self._faucet.zones:
        attr = 'zone{}_select_manual_mode'.format(zone.id)
        if zone.watering_time and attr in ddata.keys():
            ddata[attr] = zone.watering_time
    # Preserve each zone's rain delay
    # NOTE(review): this field uses 0-based zone ids (zone.id - 1) unlike
    # the two fields above - presumably a quirk of the remote form
    for zone in self._faucet.zones:
        attr = 'zone{}_rain_delay_select'.format(zone.id - 1)
        value = zone.rain_delay
        if value and attr in ddata.keys():
            # The endpoint expects '1day' / 'Ndays' strings
            if int(value) >= 2 and int(value) <= 7:
                value = str(value) + 'days'
            else:
                value = str(value) + 'day'
            ddata[attr] = value
    return ddata
Return a dict with all current options prior submitting request .
57,497
def submit_action(self, ddata):
    """POST *ddata* to the home endpoint (referer set to the same URL)."""
    self._controller.post(ddata, url=HOME_ENDPOINT, referer=HOME_ENDPOINT)
Post data .
57,498
def controller(self):
    """Return the single linked controller.

    Raises AttributeError when no controllers are assigned and TypeError
    when more than one is present.
    """
    if not hasattr(self, 'controllers'):
        raise AttributeError("There is no controller assigned.")
    if len(self.controllers) > 1:
        raise TypeError("Only one controller per account.")
    return self.controllers[0]
Show current linked controllers .
57,499
def _assign_faucets(self, faucets):
    """Create a RainCloudyFaucet for every id in *faucets* and append it
    to self.faucets; *faucets* must be non-empty."""
    if not faucets:
        raise TypeError("Controller does not have a faucet assigned.")
    self.faucets.extend(RainCloudyFaucet(self._parent, self, faucet_id)
                        for faucet_id in faucets)
Assign RainCloudyFaucet objects to self . faucets .