Dataset columns (value-length ranges as reported by the dataset viewer):
repository_name: string (length 7 to 55)
func_path_in_repository: string (length 4 to 223)
func_name: string (length 1 to 134)
whole_func_string: string (length 75 to 104k)
language: string (1 distinct value)
func_code_string: string (length 75 to 104k)
func_code_tokens: list (length 19 to 28.4k)
func_documentation_string: string (length 1 to 46.9k)
func_documentation_tokens: list (length 1 to 1.97k)
split_name: string (1 distinct value)
func_code_url: string (length 87 to 315)
Josef-Friedrich/phrydy
phrydy/mediafile.py
MediaFile.bitdepth
def bitdepth(self): """The number of bits per sample in the audio encoding (an int). Only available for certain file formats (zero where unavailable). """ if hasattr(self.mgfile.info, 'bits_per_sample'): return self.mgfile.info.bits_per_sample return 0
python
def bitdepth(self): """The number of bits per sample in the audio encoding (an int). Only available for certain file formats (zero where unavailable). """ if hasattr(self.mgfile.info, 'bits_per_sample'): return self.mgfile.info.bits_per_sample return 0
[ "def", "bitdepth", "(", "self", ")", ":", "if", "hasattr", "(", "self", ".", "mgfile", ".", "info", ",", "'bits_per_sample'", ")", ":", "return", "self", ".", "mgfile", ".", "info", ".", "bits_per_sample", "return", "0" ]
The number of bits per sample in the audio encoding (an int). Only available for certain file formats (zero where unavailable).
[ "The", "number", "of", "bits", "per", "sample", "in", "the", "audio", "encoding", "(", "an", "int", ")", ".", "Only", "available", "for", "certain", "file", "formats", "(", "zero", "where", "unavailable", ")", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/mediafile.py#L2172-L2179
Josef-Friedrich/phrydy
phrydy/mediafile.py
MediaFile.channels
def channels(self): """The number of channels in the audio (an int).""" if hasattr(self.mgfile.info, 'channels'): return self.mgfile.info.channels return 0
python
def channels(self): """The number of channels in the audio (an int).""" if hasattr(self.mgfile.info, 'channels'): return self.mgfile.info.channels return 0
[ "def", "channels", "(", "self", ")", ":", "if", "hasattr", "(", "self", ".", "mgfile", ".", "info", ",", "'channels'", ")", ":", "return", "self", ".", "mgfile", ".", "info", ".", "channels", "return", "0" ]
The number of channels in the audio (an int).
[ "The", "number", "of", "channels", "in", "the", "audio", "(", "an", "int", ")", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/mediafile.py#L2182-L2186
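The two MediaFile properties above (bitdepth and channels) use the same guard-then-default pattern. A minimal sketch of the same idea with getattr; the `info` object is a hypothetical stand-in for something like mgfile.info and is not part of the dataset row:

```python
def audio_property(info, name, default=0):
    """Return info.<name> if the attribute exists, otherwise a default (0)."""
    return getattr(info, name, default)

# Hypothetical usage:
# bitdepth = audio_property(mgfile.info, 'bits_per_sample')
# channels = audio_property(mgfile.info, 'channels')
```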
Josef-Friedrich/phrydy
phrydy/mediafile.py
MediaFile.bitrate
def bitrate(self): """The number of bits per seconds used in the audio coding (an int). If this is provided explicitly by the compressed file format, this is a precise reflection of the encoding. Otherwise, it is estimated from the on-disk file size. In this case, some imprecision is possible because the file header is incorporated in the file size. """ if hasattr(self.mgfile.info, 'bitrate') and self.mgfile.info.bitrate: # Many formats provide it explicitly. return self.mgfile.info.bitrate else: # Otherwise, we calculate bitrate from the file size. (This # is the case for all of the lossless formats.) if not self.length: # Avoid division by zero if length is not available. return 0 size = os.path.getsize(self.path) return int(size * 8 / self.length)
python
def bitrate(self): """The number of bits per seconds used in the audio coding (an int). If this is provided explicitly by the compressed file format, this is a precise reflection of the encoding. Otherwise, it is estimated from the on-disk file size. In this case, some imprecision is possible because the file header is incorporated in the file size. """ if hasattr(self.mgfile.info, 'bitrate') and self.mgfile.info.bitrate: # Many formats provide it explicitly. return self.mgfile.info.bitrate else: # Otherwise, we calculate bitrate from the file size. (This # is the case for all of the lossless formats.) if not self.length: # Avoid division by zero if length is not available. return 0 size = os.path.getsize(self.path) return int(size * 8 / self.length)
[ "def", "bitrate", "(", "self", ")", ":", "if", "hasattr", "(", "self", ".", "mgfile", ".", "info", ",", "'bitrate'", ")", "and", "self", ".", "mgfile", ".", "info", ".", "bitrate", ":", "# Many formats provide it explicitly.", "return", "self", ".", "mgfile", ".", "info", ".", "bitrate", "else", ":", "# Otherwise, we calculate bitrate from the file size. (This", "# is the case for all of the lossless formats.)", "if", "not", "self", ".", "length", ":", "# Avoid division by zero if length is not available.", "return", "0", "size", "=", "os", ".", "path", ".", "getsize", "(", "self", ".", "path", ")", "return", "int", "(", "size", "*", "8", "/", "self", ".", "length", ")" ]
The number of bits per seconds used in the audio coding (an int). If this is provided explicitly by the compressed file format, this is a precise reflection of the encoding. Otherwise, it is estimated from the on-disk file size. In this case, some imprecision is possible because the file header is incorporated in the file size.
[ "The", "number", "of", "bits", "per", "seconds", "used", "in", "the", "audio", "coding", "(", "an", "int", ")", ".", "If", "this", "is", "provided", "explicitly", "by", "the", "compressed", "file", "format", "this", "is", "a", "precise", "reflection", "of", "the", "encoding", ".", "Otherwise", "it", "is", "estimated", "from", "the", "on", "-", "disk", "file", "size", ".", "In", "this", "case", "some", "imprecision", "is", "possible", "because", "the", "file", "header", "is", "incorporated", "in", "the", "file", "size", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/mediafile.py#L2189-L2207
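The docstring explains that when the container does not report a bitrate, it is estimated from the on-disk size. A standalone sketch of that fallback calculation (hypothetical function name and arguments, not the library's API):

```python
import os

def estimated_bitrate(path, length_seconds):
    """Estimate bits per second from file size, as a fallback when the
    format does not report a bitrate explicitly."""
    if not length_seconds:
        # Avoid division by zero when the duration is unknown.
        return 0
    size_bytes = os.path.getsize(path)  # includes headers, so only approximate
    return int(size_bytes * 8 / length_seconds)

# e.g. estimated_bitrate("song.flac", 200.0) -> rough average bitrate in bit/s
```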
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
fromfits
def fromfits(infilename, hdu = 0, verbose = True): """ Reads a FITS file and returns a 2D numpy array of the data. Use hdu to specify which HDU you want (default = primary = 0) """ pixelarray, hdr = pyfits.getdata(infilename, hdu, header=True) pixelarray = np.asarray(pixelarray).transpose() pixelarrayshape = pixelarray.shape if verbose : print "FITS import shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) print "FITS file BITPIX : %s" % (hdr["BITPIX"]) print "Internal array type :", pixelarray.dtype.name return pixelarray, hdr
python
def fromfits(infilename, hdu = 0, verbose = True): """ Reads a FITS file and returns a 2D numpy array of the data. Use hdu to specify which HDU you want (default = primary = 0) """ pixelarray, hdr = pyfits.getdata(infilename, hdu, header=True) pixelarray = np.asarray(pixelarray).transpose() pixelarrayshape = pixelarray.shape if verbose : print "FITS import shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) print "FITS file BITPIX : %s" % (hdr["BITPIX"]) print "Internal array type :", pixelarray.dtype.name return pixelarray, hdr
[ "def", "fromfits", "(", "infilename", ",", "hdu", "=", "0", ",", "verbose", "=", "True", ")", ":", "pixelarray", ",", "hdr", "=", "pyfits", ".", "getdata", "(", "infilename", ",", "hdu", ",", "header", "=", "True", ")", "pixelarray", "=", "np", ".", "asarray", "(", "pixelarray", ")", ".", "transpose", "(", ")", "pixelarrayshape", "=", "pixelarray", ".", "shape", "if", "verbose", ":", "print", "\"FITS import shape : (%i, %i)\"", "%", "(", "pixelarrayshape", "[", "0", "]", ",", "pixelarrayshape", "[", "1", "]", ")", "print", "\"FITS file BITPIX : %s\"", "%", "(", "hdr", "[", "\"BITPIX\"", "]", ")", "print", "\"Internal array type :\"", ",", "pixelarray", ".", "dtype", ".", "name", "return", "pixelarray", ",", "hdr" ]
Reads a FITS file and returns a 2D numpy array of the data. Use hdu to specify which HDU you want (default = primary = 0)
[ "Reads", "a", "FITS", "file", "and", "returns", "a", "2D", "numpy", "array", "of", "the", "data", ".", "Use", "hdu", "to", "specify", "which", "HDU", "you", "want", "(", "default", "=", "primary", "=", "0", ")" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L639-L654
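The function above is Python 2 (print statements) and uses the deprecated pyfits package. A hedged Python 3 sketch of the same read using astropy.io.fits, which offers the same getdata(..., header=True) call; the transpose mirrors the original's axis convention:

```python
from astropy.io import fits
import numpy as np

def fromfits_py3(infilename, hdu=0, verbose=True):
    """Read a FITS HDU into a 2D numpy array plus its header."""
    pixelarray, hdr = fits.getdata(infilename, hdu, header=True)
    pixelarray = np.asarray(pixelarray).transpose()
    if verbose:
        print("FITS import shape : (%i, %i)" % pixelarray.shape)
        print("FITS file BITPIX : %s" % hdr["BITPIX"])
        print("Internal array type :", pixelarray.dtype.name)
    return pixelarray, hdr
```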
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
tofits
def tofits(outfilename, pixelarray, hdr = None, verbose = True): """ Takes a 2D numpy array and write it into a FITS file. If you specify a header (pyfits format, as returned by fromfits()) it will be used for the image. You can give me boolean numpy arrays, I will convert them into 8 bit integers. """ pixelarrayshape = pixelarray.shape if verbose : print "FITS export shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) if pixelarray.dtype.name == "bool": pixelarray = np.cast["uint8"](pixelarray) if os.path.isfile(outfilename): os.remove(outfilename) if hdr == None: # then a minimal header will be created hdu = pyfits.PrimaryHDU(pixelarray.transpose()) else: # this if else is probably not needed but anyway ... hdu = pyfits.PrimaryHDU(pixelarray.transpose(), hdr) hdu.writeto(outfilename) if verbose : print "Wrote %s" % outfilename
python
def tofits(outfilename, pixelarray, hdr = None, verbose = True): """ Takes a 2D numpy array and write it into a FITS file. If you specify a header (pyfits format, as returned by fromfits()) it will be used for the image. You can give me boolean numpy arrays, I will convert them into 8 bit integers. """ pixelarrayshape = pixelarray.shape if verbose : print "FITS export shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) if pixelarray.dtype.name == "bool": pixelarray = np.cast["uint8"](pixelarray) if os.path.isfile(outfilename): os.remove(outfilename) if hdr == None: # then a minimal header will be created hdu = pyfits.PrimaryHDU(pixelarray.transpose()) else: # this if else is probably not needed but anyway ... hdu = pyfits.PrimaryHDU(pixelarray.transpose(), hdr) hdu.writeto(outfilename) if verbose : print "Wrote %s" % outfilename
[ "def", "tofits", "(", "outfilename", ",", "pixelarray", ",", "hdr", "=", "None", ",", "verbose", "=", "True", ")", ":", "pixelarrayshape", "=", "pixelarray", ".", "shape", "if", "verbose", ":", "print", "\"FITS export shape : (%i, %i)\"", "%", "(", "pixelarrayshape", "[", "0", "]", ",", "pixelarrayshape", "[", "1", "]", ")", "if", "pixelarray", ".", "dtype", ".", "name", "==", "\"bool\"", ":", "pixelarray", "=", "np", ".", "cast", "[", "\"uint8\"", "]", "(", "pixelarray", ")", "if", "os", ".", "path", ".", "isfile", "(", "outfilename", ")", ":", "os", ".", "remove", "(", "outfilename", ")", "if", "hdr", "==", "None", ":", "# then a minimal header will be created ", "hdu", "=", "pyfits", ".", "PrimaryHDU", "(", "pixelarray", ".", "transpose", "(", ")", ")", "else", ":", "# this if else is probably not needed but anyway ...", "hdu", "=", "pyfits", ".", "PrimaryHDU", "(", "pixelarray", ".", "transpose", "(", ")", ",", "hdr", ")", "hdu", ".", "writeto", "(", "outfilename", ")", "if", "verbose", ":", "print", "\"Wrote %s\"", "%", "outfilename" ]
Takes a 2D numpy array and write it into a FITS file. If you specify a header (pyfits format, as returned by fromfits()) it will be used for the image. You can give me boolean numpy arrays, I will convert them into 8 bit integers.
[ "Takes", "a", "2D", "numpy", "array", "and", "write", "it", "into", "a", "FITS", "file", ".", "If", "you", "specify", "a", "header", "(", "pyfits", "format", "as", "returned", "by", "fromfits", "()", ")", "it", "will", "be", "used", "for", "the", "image", ".", "You", "can", "give", "me", "boolean", "numpy", "arrays", "I", "will", "convert", "them", "into", "8", "bit", "integers", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L656-L680
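A matching hedged sketch of the writer in Python 3 with astropy; overwrite=True replaces the manual os.remove, and the boolean-to-8-bit cast uses astype:

```python
from astropy.io import fits
import numpy as np

def tofits_py3(outfilename, pixelarray, hdr=None, verbose=True):
    """Write a 2D numpy array to a FITS file, optionally with a header."""
    if pixelarray.dtype == bool:
        pixelarray = pixelarray.astype(np.uint8)  # FITS has no boolean image type
    hdu = fits.PrimaryHDU(pixelarray.transpose(), header=hdr)
    hdu.writeto(outfilename, overwrite=True)
    if verbose:
        print("Wrote %s" % outfilename)
```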
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
subsample
def subsample(a): # this is more a generic function then a method ... """ Returns a 2x2-subsampled version of array a (no interpolation, just cutting pixels in 4). The version below is directly from the scipy cookbook on rebinning : U{http://www.scipy.org/Cookbook/Rebinning} There is ndimage.zoom(cutout.array, 2, order=0, prefilter=False), but it makes funny borders. """ """ # Ouuwww this is slow ... outarray = np.zeros((a.shape[0]*2, a.shape[1]*2), dtype=np.float64) for i in range(a.shape[0]): for j in range(a.shape[1]): outarray[2*i,2*j] = a[i,j] outarray[2*i+1,2*j] = a[i,j] outarray[2*i,2*j+1] = a[i,j] outarray[2*i+1,2*j+1] = a[i,j] return outarray """ # much better : newshape = (2*a.shape[0], 2*a.shape[1]) slices = [slice(0,old, float(old)/new) for old,new in zip(a.shape,newshape) ] coordinates = np.mgrid[slices] indices = coordinates.astype('i') #choose the biggest smaller integer index return a[tuple(indices)]
python
def subsample(a): # this is more a generic function then a method ... """ Returns a 2x2-subsampled version of array a (no interpolation, just cutting pixels in 4). The version below is directly from the scipy cookbook on rebinning : U{http://www.scipy.org/Cookbook/Rebinning} There is ndimage.zoom(cutout.array, 2, order=0, prefilter=False), but it makes funny borders. """ """ # Ouuwww this is slow ... outarray = np.zeros((a.shape[0]*2, a.shape[1]*2), dtype=np.float64) for i in range(a.shape[0]): for j in range(a.shape[1]): outarray[2*i,2*j] = a[i,j] outarray[2*i+1,2*j] = a[i,j] outarray[2*i,2*j+1] = a[i,j] outarray[2*i+1,2*j+1] = a[i,j] return outarray """ # much better : newshape = (2*a.shape[0], 2*a.shape[1]) slices = [slice(0,old, float(old)/new) for old,new in zip(a.shape,newshape) ] coordinates = np.mgrid[slices] indices = coordinates.astype('i') #choose the biggest smaller integer index return a[tuple(indices)]
[ "def", "subsample", "(", "a", ")", ":", "# this is more a generic function then a method ...", "\"\"\"\n # Ouuwww this is slow ...\n outarray = np.zeros((a.shape[0]*2, a.shape[1]*2), dtype=np.float64)\n for i in range(a.shape[0]):\n for j in range(a.shape[1]): \n outarray[2*i,2*j] = a[i,j]\n outarray[2*i+1,2*j] = a[i,j]\n outarray[2*i,2*j+1] = a[i,j]\n outarray[2*i+1,2*j+1] = a[i,j]\n return outarray\n \"\"\"", "# much better :", "newshape", "=", "(", "2", "*", "a", ".", "shape", "[", "0", "]", ",", "2", "*", "a", ".", "shape", "[", "1", "]", ")", "slices", "=", "[", "slice", "(", "0", ",", "old", ",", "float", "(", "old", ")", "/", "new", ")", "for", "old", ",", "new", "in", "zip", "(", "a", ".", "shape", ",", "newshape", ")", "]", "coordinates", "=", "np", ".", "mgrid", "[", "slices", "]", "indices", "=", "coordinates", ".", "astype", "(", "'i'", ")", "#choose the biggest smaller integer index", "return", "a", "[", "tuple", "(", "indices", ")", "]" ]
Returns a 2x2-subsampled version of array a (no interpolation, just cutting pixels in 4). The version below is directly from the scipy cookbook on rebinning : U{http://www.scipy.org/Cookbook/Rebinning} There is ndimage.zoom(cutout.array, 2, order=0, prefilter=False), but it makes funny borders.
[ "Returns", "a", "2x2", "-", "subsampled", "version", "of", "array", "a", "(", "no", "interpolation", "just", "cutting", "pixels", "in", "4", ")", ".", "The", "version", "below", "is", "directly", "from", "the", "scipy", "cookbook", "on", "rebinning", ":", "U", "{", "http", ":", "//", "www", ".", "scipy", ".", "org", "/", "Cookbook", "/", "Rebinning", "}", "There", "is", "ndimage", ".", "zoom", "(", "cutout", ".", "array", "2", "order", "=", "0", "prefilter", "=", "False", ")", "but", "it", "makes", "funny", "borders", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L685-L709
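The mgrid slicing trick above duplicates every pixel into a 2x2 block (the floored half-step indices pick each source pixel twice per axis). Under that reading, an equivalent and arguably clearer sketch uses np.repeat on both axes (my phrasing, not the repo's):

```python
import numpy as np

def subsample_repeat(a):
    """2x2 'subsample': copy each pixel of a into a 2x2 block (no interpolation)."""
    return np.repeat(np.repeat(a, 2, axis=0), 2, axis=1)

# a = np.array([[1, 2], [3, 4]])
# subsample_repeat(a) -> [[1, 1, 2, 2],
#                         [1, 1, 2, 2],
#                         [3, 3, 4, 4],
#                         [3, 3, 4, 4]]
```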
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
rebin2x2
def rebin2x2(a): """ Wrapper around rebin that actually rebins 2 by 2 """ inshape = np.array(a.shape) if not (inshape % 2 == np.zeros(2)).all(): # Modulo check to see if size is even raise RuntimeError, "I want even image shapes !" return rebin(a, inshape/2)
python
def rebin2x2(a): """ Wrapper around rebin that actually rebins 2 by 2 """ inshape = np.array(a.shape) if not (inshape % 2 == np.zeros(2)).all(): # Modulo check to see if size is even raise RuntimeError, "I want even image shapes !" return rebin(a, inshape/2)
[ "def", "rebin2x2", "(", "a", ")", ":", "inshape", "=", "np", ".", "array", "(", "a", ".", "shape", ")", "if", "not", "(", "inshape", "%", "2", "==", "np", ".", "zeros", "(", "2", ")", ")", ".", "all", "(", ")", ":", "# Modulo check to see if size is even", "raise", "RuntimeError", ",", "\"I want even image shapes !\"", "return", "rebin", "(", "a", ",", "inshape", "/", "2", ")" ]
Wrapper around rebin that actually rebins 2 by 2
[ "Wrapper", "around", "rebin", "that", "actually", "rebins", "2", "by", "2" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L734-L742
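The rebin function itself is defined elsewhere in cosmics.py and is not shown in this section; assuming it block-averages in the usual scipy-cookbook way, a self-contained sketch of a 2x2 rebin via reshape and mean:

```python
import numpy as np

def rebin2x2_sketch(a):
    """Block-average a 2D array 2x2 (assumes a cookbook-style averaging rebin)."""
    h, w = a.shape
    if h % 2 or w % 2:
        raise RuntimeError("I want even image shapes !")
    return a.reshape(h // 2, 2, w // 2, 2).mean(axis=(1, 3))
```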
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.labelmask
def labelmask(self, verbose = None): """ Finds and labels the cosmic "islands" and returns a list of dicts containing their positions. This is made on purpose for visualizations a la f2n.drawstarslist, but could be useful anyway. """ if verbose == None: verbose = self.verbose if verbose: print "Labeling mask pixels ..." # We morphologicaly dilate the mask to generously connect "sparse" cosmics : #dilstruct = np.ones((5,5)) dilmask = ndimage.morphology.binary_dilation(self.mask, structure=dilstruct, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) # origin = 0 means center (labels, n) = ndimage.measurements.label(dilmask) #print "Number of cosmic ray hits : %i" % n #tofits(labels, "labels.fits", verbose = False) slicecouplelist = ndimage.measurements.find_objects(labels) # Now we have a huge list of couples of numpy slice objects giving a frame around each object # For plotting purposes, we want to transform this into the center of each object. if len(slicecouplelist) != n: # This never happened, but you never know ... raise RuntimeError, "Mega error in labelmask !" centers = [[(tup[0].start + tup[0].stop)/2.0, (tup[1].start + tup[1].stop)/2.0] for tup in slicecouplelist] # We also want to know how many pixels where affected by each cosmic ray. # Why ? Dunno... it's fun and available in scipy :-) sizes = ndimage.measurements.sum(self.mask.ravel(), labels.ravel(), np.arange(1,n+1,1)) retdictlist = [{"name":"%i" % size, "x":center[0], "y":center[1]} for (size, center) in zip(sizes, centers)] if verbose: print "Labeling done" return retdictlist
python
def labelmask(self, verbose = None): """ Finds and labels the cosmic "islands" and returns a list of dicts containing their positions. This is made on purpose for visualizations a la f2n.drawstarslist, but could be useful anyway. """ if verbose == None: verbose = self.verbose if verbose: print "Labeling mask pixels ..." # We morphologicaly dilate the mask to generously connect "sparse" cosmics : #dilstruct = np.ones((5,5)) dilmask = ndimage.morphology.binary_dilation(self.mask, structure=dilstruct, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) # origin = 0 means center (labels, n) = ndimage.measurements.label(dilmask) #print "Number of cosmic ray hits : %i" % n #tofits(labels, "labels.fits", verbose = False) slicecouplelist = ndimage.measurements.find_objects(labels) # Now we have a huge list of couples of numpy slice objects giving a frame around each object # For plotting purposes, we want to transform this into the center of each object. if len(slicecouplelist) != n: # This never happened, but you never know ... raise RuntimeError, "Mega error in labelmask !" centers = [[(tup[0].start + tup[0].stop)/2.0, (tup[1].start + tup[1].stop)/2.0] for tup in slicecouplelist] # We also want to know how many pixels where affected by each cosmic ray. # Why ? Dunno... it's fun and available in scipy :-) sizes = ndimage.measurements.sum(self.mask.ravel(), labels.ravel(), np.arange(1,n+1,1)) retdictlist = [{"name":"%i" % size, "x":center[0], "y":center[1]} for (size, center) in zip(sizes, centers)] if verbose: print "Labeling done" return retdictlist
[ "def", "labelmask", "(", "self", ",", "verbose", "=", "None", ")", ":", "if", "verbose", "==", "None", ":", "verbose", "=", "self", ".", "verbose", "if", "verbose", ":", "print", "\"Labeling mask pixels ...\"", "# We morphologicaly dilate the mask to generously connect \"sparse\" cosmics :", "#dilstruct = np.ones((5,5))", "dilmask", "=", "ndimage", ".", "morphology", ".", "binary_dilation", "(", "self", ".", "mask", ",", "structure", "=", "dilstruct", ",", "iterations", "=", "1", ",", "mask", "=", "None", ",", "output", "=", "None", ",", "border_value", "=", "0", ",", "origin", "=", "0", ",", "brute_force", "=", "False", ")", "# origin = 0 means center", "(", "labels", ",", "n", ")", "=", "ndimage", ".", "measurements", ".", "label", "(", "dilmask", ")", "#print \"Number of cosmic ray hits : %i\" % n", "#tofits(labels, \"labels.fits\", verbose = False)", "slicecouplelist", "=", "ndimage", ".", "measurements", ".", "find_objects", "(", "labels", ")", "# Now we have a huge list of couples of numpy slice objects giving a frame around each object", "# For plotting purposes, we want to transform this into the center of each object.", "if", "len", "(", "slicecouplelist", ")", "!=", "n", ":", "# This never happened, but you never know ...", "raise", "RuntimeError", ",", "\"Mega error in labelmask !\"", "centers", "=", "[", "[", "(", "tup", "[", "0", "]", ".", "start", "+", "tup", "[", "0", "]", ".", "stop", ")", "/", "2.0", ",", "(", "tup", "[", "1", "]", ".", "start", "+", "tup", "[", "1", "]", ".", "stop", ")", "/", "2.0", "]", "for", "tup", "in", "slicecouplelist", "]", "# We also want to know how many pixels where affected by each cosmic ray.", "# Why ? Dunno... it's fun and available in scipy :-)", "sizes", "=", "ndimage", ".", "measurements", ".", "sum", "(", "self", ".", "mask", ".", "ravel", "(", ")", ",", "labels", ".", "ravel", "(", ")", ",", "np", ".", "arange", "(", "1", ",", "n", "+", "1", ",", "1", ")", ")", "retdictlist", "=", "[", "{", "\"name\"", ":", "\"%i\"", "%", "size", ",", "\"x\"", ":", "center", "[", "0", "]", ",", "\"y\"", ":", "center", "[", "1", "]", "}", "for", "(", "size", ",", "center", ")", "in", "zip", "(", "sizes", ",", "centers", ")", "]", "if", "verbose", ":", "print", "\"Labeling done\"", "return", "retdictlist" ]
Finds and labels the cosmic "islands" and returns a list of dicts containing their positions. This is made on purpose for visualizations a la f2n.drawstarslist, but could be useful anyway.
[ "Finds", "and", "labels", "the", "cosmic", "islands", "and", "returns", "a", "list", "of", "dicts", "containing", "their", "positions", ".", "This", "is", "made", "on", "purpose", "for", "visualizations", "a", "la", "f2n", ".", "drawstarslist", "but", "could", "be", "useful", "anyway", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L154-L185
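The core of labelmask is scipy's connected-component labelling followed by turning each object's bounding slices into a centre. A stripped-down hedged sketch of just that step (no dilation, no per-island pixel counts):

```python
from scipy import ndimage

def island_centers(mask):
    """Label connected True regions of a boolean mask and return their centres."""
    labels, n = ndimage.label(mask)
    slices = ndimage.find_objects(labels)
    # Centre of each bounding box, in array (row, column) coordinates.
    return [((s0.start + s0.stop) / 2.0, (s1.start + s1.stop) / 2.0)
            for (s0, s1) in slices]
```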
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.getdilatedmask
def getdilatedmask(self, size=3): """ Returns a morphologically dilated copy of the current mask. size = 3 or 5 decides how to dilate. """ if size == 3: dilmask = ndimage.morphology.binary_dilation(self.mask, structure=growkernel, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) elif size == 5: dilmask = ndimage.morphology.binary_dilation(self.mask, structure=dilstruct, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) else: dismask = self.mask.copy() return dilmask
python
def getdilatedmask(self, size=3): """ Returns a morphologically dilated copy of the current mask. size = 3 or 5 decides how to dilate. """ if size == 3: dilmask = ndimage.morphology.binary_dilation(self.mask, structure=growkernel, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) elif size == 5: dilmask = ndimage.morphology.binary_dilation(self.mask, structure=dilstruct, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) else: dismask = self.mask.copy() return dilmask
[ "def", "getdilatedmask", "(", "self", ",", "size", "=", "3", ")", ":", "if", "size", "==", "3", ":", "dilmask", "=", "ndimage", ".", "morphology", ".", "binary_dilation", "(", "self", ".", "mask", ",", "structure", "=", "growkernel", ",", "iterations", "=", "1", ",", "mask", "=", "None", ",", "output", "=", "None", ",", "border_value", "=", "0", ",", "origin", "=", "0", ",", "brute_force", "=", "False", ")", "elif", "size", "==", "5", ":", "dilmask", "=", "ndimage", ".", "morphology", ".", "binary_dilation", "(", "self", ".", "mask", ",", "structure", "=", "dilstruct", ",", "iterations", "=", "1", ",", "mask", "=", "None", ",", "output", "=", "None", ",", "border_value", "=", "0", ",", "origin", "=", "0", ",", "brute_force", "=", "False", ")", "else", ":", "dismask", "=", "self", ".", "mask", ".", "copy", "(", ")", "return", "dilmask" ]
Returns a morphologically dilated copy of the current mask. size = 3 or 5 decides how to dilate.
[ "Returns", "a", "morphologically", "dilated", "copy", "of", "the", "current", "mask", ".", "size", "=", "3", "or", "5", "decides", "how", "to", "dilate", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L188-L200
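Note that the else branch above assigns dismask (apparently a typo for dilmask) and then returns dilmask, which would raise NameError for any size other than 3 or 5. A hedged sketch of the presumably intended behaviour with modern scipy; growkernel and dilstruct are module-level kernels in cosmics.py, stood in for here by plain 3x3 and 5x5 structures:

```python
import numpy as np
from scipy import ndimage

def dilated_mask(mask, size=3):
    """Return a morphologically dilated copy of a boolean mask (3x3 or 5x5
    structure), or an unchanged copy for any other size."""
    if size == 3:
        structure = np.ones((3, 3), dtype=bool)  # stand-in for growkernel
    elif size == 5:
        structure = np.ones((5, 5), dtype=bool)  # stand-in for dilstruct
    else:
        return mask.copy()
    return ndimage.binary_dilation(mask, structure=structure, iterations=1)
```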
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.clean
def clean(self, mask = None, verbose = None): """ Given the mask, we replace the actual problematic pixels with the masked 5x5 median value. This mimics what is done in L.A.Cosmic, but it's a bit harder to do in python, as there is no readymade masked median. So for now we do a loop... Saturated stars, if calculated, are also masked : they are not "cleaned", but their pixels are not used for the interpolation. We will directly change self.cleanimage. Instead of using the self.mask, you can supply your own mask as argument. This might be useful to apply this cleaning function iteratively. But for the true L.A.Cosmic, we don't use this, i.e. we use the full mask at each iteration. """ if verbose == None: verbose = self.verbose if mask == None: mask = self.mask if verbose: print "Cleaning cosmic affected pixels ..." # So... mask is a 2D array containing False and True, where True means "here is a cosmic" # We want to loop through these cosmics one by one. cosmicindices = np.argwhere(mask) # This is a list of the indices of cosmic affected pixels. #print cosmicindices # We put cosmic ray pixels to np.Inf to flag them : self.cleanarray[mask] = np.Inf # Now we want to have a 2 pixel frame of Inf padding around our image. w = self.cleanarray.shape[0] h = self.cleanarray.shape[1] padarray = np.zeros((w+4,h+4))+np.Inf padarray[2:w+2,2:h+2] = self.cleanarray.copy() # that copy is important, we need 2 independent arrays # The medians will be evaluated in this padarray, skipping the np.Inf. # Now in this copy called padarray, we also put the saturated stars to np.Inf, if available : if self.satstars is not None: padarray[2:w+2,2:h+2][self.satstars] = np.Inf # Viva python, I tested this one, it works... # A loop through every cosmic pixel : for cosmicpos in cosmicindices: x = cosmicpos[0] y = cosmicpos[1] cutout = padarray[x:x+5, y:y+5].ravel() # remember the shift due to the padding ! #print cutout # Now we have our 25 pixels, some of them are np.Inf, and we want to take the median goodcutout = cutout[cutout != np.Inf] #print np.alen(goodcutout) if np.alen(goodcutout) >= 25 : # This never happened, but you never know ... raise RuntimeError, "Mega error in clean !" elif np.alen(goodcutout) > 0 : replacementvalue = np.median(goodcutout) else : # i.e. no good pixels : Shit, a huge cosmic, we will have to improvise ... print "OH NO, I HAVE A HUUUUUUUGE COSMIC !!!!!" replacementvalue = self.guessbackgroundlevel() # We update the cleanarray, # but measure the medians in the padarray, so to not mix things up... self.cleanarray[x, y] = replacementvalue # That's it. if verbose: print "Cleaning done" # FYI, that's how the LACosmic cleaning looks in iraf : """ imarith(outmask,"+",finalsel,outmask) imreplace(outmask,1,lower=1,upper=INDEF) # ok so outmask = 1 are the cosmics imcalc(outmask,inputmask,"(1.-10000.*im1)",verb-) imarith(oldoutput,"*",inputmask,inputmask) median(inputmask,med5,5,5,zloreject=-9999,zhi=INDEF,verb-) imarith(outmask,"*",med5,med5) if (i>1) imdel(output) imcalc(oldoutput//","//outmask//","//med5,output,"(1.-im2)*im1+im3",verb-) # = merging to full mask inputmask = 1.0 - 10000.0 * finalsel # So this is 1.0, but cosmics are very negative inputmask = oldoutput * inputmask # orig image, with very negative cosmics med5 = median of inputmask, but rejecting these negative cosmics # i dunno how to do this in python -> had to do the loop med5 = finalsel * med5 # we keep only the cosmics of this median # actual replacement : output = (1.0 - outmask)*oldoutput + med5 # ok """
python
def clean(self, mask = None, verbose = None): """ Given the mask, we replace the actual problematic pixels with the masked 5x5 median value. This mimics what is done in L.A.Cosmic, but it's a bit harder to do in python, as there is no readymade masked median. So for now we do a loop... Saturated stars, if calculated, are also masked : they are not "cleaned", but their pixels are not used for the interpolation. We will directly change self.cleanimage. Instead of using the self.mask, you can supply your own mask as argument. This might be useful to apply this cleaning function iteratively. But for the true L.A.Cosmic, we don't use this, i.e. we use the full mask at each iteration. """ if verbose == None: verbose = self.verbose if mask == None: mask = self.mask if verbose: print "Cleaning cosmic affected pixels ..." # So... mask is a 2D array containing False and True, where True means "here is a cosmic" # We want to loop through these cosmics one by one. cosmicindices = np.argwhere(mask) # This is a list of the indices of cosmic affected pixels. #print cosmicindices # We put cosmic ray pixels to np.Inf to flag them : self.cleanarray[mask] = np.Inf # Now we want to have a 2 pixel frame of Inf padding around our image. w = self.cleanarray.shape[0] h = self.cleanarray.shape[1] padarray = np.zeros((w+4,h+4))+np.Inf padarray[2:w+2,2:h+2] = self.cleanarray.copy() # that copy is important, we need 2 independent arrays # The medians will be evaluated in this padarray, skipping the np.Inf. # Now in this copy called padarray, we also put the saturated stars to np.Inf, if available : if self.satstars is not None: padarray[2:w+2,2:h+2][self.satstars] = np.Inf # Viva python, I tested this one, it works... # A loop through every cosmic pixel : for cosmicpos in cosmicindices: x = cosmicpos[0] y = cosmicpos[1] cutout = padarray[x:x+5, y:y+5].ravel() # remember the shift due to the padding ! #print cutout # Now we have our 25 pixels, some of them are np.Inf, and we want to take the median goodcutout = cutout[cutout != np.Inf] #print np.alen(goodcutout) if np.alen(goodcutout) >= 25 : # This never happened, but you never know ... raise RuntimeError, "Mega error in clean !" elif np.alen(goodcutout) > 0 : replacementvalue = np.median(goodcutout) else : # i.e. no good pixels : Shit, a huge cosmic, we will have to improvise ... print "OH NO, I HAVE A HUUUUUUUGE COSMIC !!!!!" replacementvalue = self.guessbackgroundlevel() # We update the cleanarray, # but measure the medians in the padarray, so to not mix things up... self.cleanarray[x, y] = replacementvalue # That's it. if verbose: print "Cleaning done" # FYI, that's how the LACosmic cleaning looks in iraf : """ imarith(outmask,"+",finalsel,outmask) imreplace(outmask,1,lower=1,upper=INDEF) # ok so outmask = 1 are the cosmics imcalc(outmask,inputmask,"(1.-10000.*im1)",verb-) imarith(oldoutput,"*",inputmask,inputmask) median(inputmask,med5,5,5,zloreject=-9999,zhi=INDEF,verb-) imarith(outmask,"*",med5,med5) if (i>1) imdel(output) imcalc(oldoutput//","//outmask//","//med5,output,"(1.-im2)*im1+im3",verb-) # = merging to full mask inputmask = 1.0 - 10000.0 * finalsel # So this is 1.0, but cosmics are very negative inputmask = oldoutput * inputmask # orig image, with very negative cosmics med5 = median of inputmask, but rejecting these negative cosmics # i dunno how to do this in python -> had to do the loop med5 = finalsel * med5 # we keep only the cosmics of this median # actual replacement : output = (1.0 - outmask)*oldoutput + med5 # ok """
[ "def", "clean", "(", "self", ",", "mask", "=", "None", ",", "verbose", "=", "None", ")", ":", "if", "verbose", "==", "None", ":", "verbose", "=", "self", ".", "verbose", "if", "mask", "==", "None", ":", "mask", "=", "self", ".", "mask", "if", "verbose", ":", "print", "\"Cleaning cosmic affected pixels ...\"", "# So... mask is a 2D array containing False and True, where True means \"here is a cosmic\"", "# We want to loop through these cosmics one by one.", "cosmicindices", "=", "np", ".", "argwhere", "(", "mask", ")", "# This is a list of the indices of cosmic affected pixels.", "#print cosmicindices", "# We put cosmic ray pixels to np.Inf to flag them :", "self", ".", "cleanarray", "[", "mask", "]", "=", "np", ".", "Inf", "# Now we want to have a 2 pixel frame of Inf padding around our image.", "w", "=", "self", ".", "cleanarray", ".", "shape", "[", "0", "]", "h", "=", "self", ".", "cleanarray", ".", "shape", "[", "1", "]", "padarray", "=", "np", ".", "zeros", "(", "(", "w", "+", "4", ",", "h", "+", "4", ")", ")", "+", "np", ".", "Inf", "padarray", "[", "2", ":", "w", "+", "2", ",", "2", ":", "h", "+", "2", "]", "=", "self", ".", "cleanarray", ".", "copy", "(", ")", "# that copy is important, we need 2 independent arrays", "# The medians will be evaluated in this padarray, skipping the np.Inf.", "# Now in this copy called padarray, we also put the saturated stars to np.Inf, if available :", "if", "self", ".", "satstars", "is", "not", "None", ":", "padarray", "[", "2", ":", "w", "+", "2", ",", "2", ":", "h", "+", "2", "]", "[", "self", ".", "satstars", "]", "=", "np", ".", "Inf", "# Viva python, I tested this one, it works...", "# A loop through every cosmic pixel :", "for", "cosmicpos", "in", "cosmicindices", ":", "x", "=", "cosmicpos", "[", "0", "]", "y", "=", "cosmicpos", "[", "1", "]", "cutout", "=", "padarray", "[", "x", ":", "x", "+", "5", ",", "y", ":", "y", "+", "5", "]", ".", "ravel", "(", ")", "# remember the shift due to the padding !", "#print cutout", "# Now we have our 25 pixels, some of them are np.Inf, and we want to take the median", "goodcutout", "=", "cutout", "[", "cutout", "!=", "np", ".", "Inf", "]", "#print np.alen(goodcutout)", "if", "np", ".", "alen", "(", "goodcutout", ")", ">=", "25", ":", "# This never happened, but you never know ...", "raise", "RuntimeError", ",", "\"Mega error in clean !\"", "elif", "np", ".", "alen", "(", "goodcutout", ")", ">", "0", ":", "replacementvalue", "=", "np", ".", "median", "(", "goodcutout", ")", "else", ":", "# i.e. 
no good pixels : Shit, a huge cosmic, we will have to improvise ...", "print", "\"OH NO, I HAVE A HUUUUUUUGE COSMIC !!!!!\"", "replacementvalue", "=", "self", ".", "guessbackgroundlevel", "(", ")", "# We update the cleanarray,", "# but measure the medians in the padarray, so to not mix things up...", "self", ".", "cleanarray", "[", "x", ",", "y", "]", "=", "replacementvalue", "# That's it.", "if", "verbose", ":", "print", "\"Cleaning done\"", "# FYI, that's how the LACosmic cleaning looks in iraf :", "\"\"\"\n imarith(outmask,\"+\",finalsel,outmask)\n imreplace(outmask,1,lower=1,upper=INDEF) # ok so outmask = 1 are the cosmics\n imcalc(outmask,inputmask,\"(1.-10000.*im1)\",verb-)\n imarith(oldoutput,\"*\",inputmask,inputmask)\n median(inputmask,med5,5,5,zloreject=-9999,zhi=INDEF,verb-)\n imarith(outmask,\"*\",med5,med5)\n if (i>1) imdel(output)\n imcalc(oldoutput//\",\"//outmask//\",\"//med5,output,\"(1.-im2)*im1+im3\",verb-)\n \n # =\n \n merging to full mask\n inputmask = 1.0 - 10000.0 * finalsel # So this is 1.0, but cosmics are very negative\n inputmask = oldoutput * inputmask # orig image, with very negative cosmics\n med5 = median of inputmask, but rejecting these negative cosmics\n # i dunno how to do this in python -> had to do the loop\n med5 = finalsel * med5 # we keep only the cosmics of this median\n # actual replacement :\n output = (1.0 - outmask)*oldoutput + med5 # ok \n \"\"\"" ]
Given the mask, we replace the actual problematic pixels with the masked 5x5 median value. This mimics what is done in L.A.Cosmic, but it's a bit harder to do in python, as there is no readymade masked median. So for now we do a loop... Saturated stars, if calculated, are also masked : they are not "cleaned", but their pixels are not used for the interpolation. We will directly change self.cleanimage. Instead of using the self.mask, you can supply your own mask as argument. This might be useful to apply this cleaning function iteratively. But for the true L.A.Cosmic, we don't use this, i.e. we use the full mask at each iteration.
[ "Given", "the", "mask", "we", "replace", "the", "actual", "problematic", "pixels", "with", "the", "masked", "5x5", "median", "value", ".", "This", "mimics", "what", "is", "done", "in", "L", ".", "A", ".", "Cosmic", "but", "it", "s", "a", "bit", "harder", "to", "do", "in", "python", "as", "there", "is", "no", "readymade", "masked", "median", ".", "So", "for", "now", "we", "do", "a", "loop", "...", "Saturated", "stars", "if", "calculated", "are", "also", "masked", ":", "they", "are", "not", "cleaned", "but", "their", "pixels", "are", "not", "used", "for", "the", "interpolation", ".", "We", "will", "directly", "change", "self", ".", "cleanimage", ".", "Instead", "of", "using", "the", "self", ".", "mask", "you", "can", "supply", "your", "own", "mask", "as", "argument", ".", "This", "might", "be", "useful", "to", "apply", "this", "cleaning", "function", "iteratively", ".", "But", "for", "the", "true", "L", ".", "A", ".", "Cosmic", "we", "don", "t", "use", "this", "i", ".", "e", ".", "we", "use", "the", "full", "mask", "at", "each", "iteration", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L203-L294
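The cleaning loop replaces each flagged pixel with the median of the finite values in its Inf-padded 5x5 neighbourhood. A compact hedged sketch of that per-pixel step, omitting the saturated-star handling and the fallback to a guessed background level:

```python
import numpy as np

def masked_median_clean(image, mask):
    """Replace mask==True pixels with the median of the finite values in
    their 5x5 neighbourhood (Inf-padded, as in cosmicsimage.clean)."""
    clean = image.astype(float)
    clean[mask] = np.inf                      # flag cosmic-affected pixels
    h, w = clean.shape
    pad = np.full((h + 4, w + 4), np.inf)     # 2-pixel Inf frame around the image
    pad[2:h + 2, 2:w + 2] = clean
    for x, y in np.argwhere(mask):
        cutout = pad[x:x + 5, y:y + 5].ravel()  # window centred on (x, y) after padding
        good = cutout[np.isfinite(cutout)]
        if good.size:
            clean[x, y] = np.median(good)
        # else: the real implementation falls back to a background estimate
    return clean
```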
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.findsatstars
def findsatstars(self, verbose = None): """ Uses the satlevel to find saturated stars (not cosmics !), and puts the result as a mask in self.satstars. This can then be used to avoid these regions in cosmic detection and cleaning procedures. Slow ... """ if verbose == None: verbose = self.verbose if verbose: print "Detecting saturated stars ..." # DETECTION satpixels = self.rawarray > self.satlevel # the candidate pixels # We build a smoothed version of the image to look for large stars and their support : m5 = ndimage.filters.median_filter(self.rawarray, size=5, mode='mirror') # We look where this is above half the satlevel largestruct = m5 > (self.satlevel/2.0) # The rough locations of saturated stars are now : satstarscenters = np.logical_and(largestruct, satpixels) if verbose: print "Building mask of saturated stars ..." # BUILDING THE MASK # The subtility is that we want to include all saturated pixels connected to these saturated stars... # I haven't found a better solution then the double loop # We dilate the satpixels alone, to ensure connectivity in glitchy regions and to add a safety margin around them. #dilstruct = np.array([[0,1,0], [1,1,1], [0,1,0]]) dilsatpixels = ndimage.morphology.binary_dilation(satpixels, structure=dilstruct, iterations=2, mask=None, output=None, border_value=0, origin=0, brute_force=False) # It turns out it's better to think large and do 2 iterations... # We label these : (dilsatlabels, nsat) = ndimage.measurements.label(dilsatpixels) #tofits(dilsatlabels, "test.fits") if verbose: print "We have %i saturated stars." % nsat # The ouput, False for now : outmask = np.zeros(self.rawarray.shape) for i in range(1,nsat+1): # we go through the islands of saturated pixels thisisland = dilsatlabels == i # gives us a boolean array # Does this intersect with satstarscenters ? overlap = np.logical_and(thisisland, satstarscenters) if np.sum(overlap) > 0: outmask = np.logical_or(outmask, thisisland) # we add thisisland to the mask self.satstars = np.cast['bool'](outmask) if verbose: print "Mask of saturated stars done"
python
def findsatstars(self, verbose = None): """ Uses the satlevel to find saturated stars (not cosmics !), and puts the result as a mask in self.satstars. This can then be used to avoid these regions in cosmic detection and cleaning procedures. Slow ... """ if verbose == None: verbose = self.verbose if verbose: print "Detecting saturated stars ..." # DETECTION satpixels = self.rawarray > self.satlevel # the candidate pixels # We build a smoothed version of the image to look for large stars and their support : m5 = ndimage.filters.median_filter(self.rawarray, size=5, mode='mirror') # We look where this is above half the satlevel largestruct = m5 > (self.satlevel/2.0) # The rough locations of saturated stars are now : satstarscenters = np.logical_and(largestruct, satpixels) if verbose: print "Building mask of saturated stars ..." # BUILDING THE MASK # The subtility is that we want to include all saturated pixels connected to these saturated stars... # I haven't found a better solution then the double loop # We dilate the satpixels alone, to ensure connectivity in glitchy regions and to add a safety margin around them. #dilstruct = np.array([[0,1,0], [1,1,1], [0,1,0]]) dilsatpixels = ndimage.morphology.binary_dilation(satpixels, structure=dilstruct, iterations=2, mask=None, output=None, border_value=0, origin=0, brute_force=False) # It turns out it's better to think large and do 2 iterations... # We label these : (dilsatlabels, nsat) = ndimage.measurements.label(dilsatpixels) #tofits(dilsatlabels, "test.fits") if verbose: print "We have %i saturated stars." % nsat # The ouput, False for now : outmask = np.zeros(self.rawarray.shape) for i in range(1,nsat+1): # we go through the islands of saturated pixels thisisland = dilsatlabels == i # gives us a boolean array # Does this intersect with satstarscenters ? overlap = np.logical_and(thisisland, satstarscenters) if np.sum(overlap) > 0: outmask = np.logical_or(outmask, thisisland) # we add thisisland to the mask self.satstars = np.cast['bool'](outmask) if verbose: print "Mask of saturated stars done"
[ "def", "findsatstars", "(", "self", ",", "verbose", "=", "None", ")", ":", "if", "verbose", "==", "None", ":", "verbose", "=", "self", ".", "verbose", "if", "verbose", ":", "print", "\"Detecting saturated stars ...\"", "# DETECTION", "satpixels", "=", "self", ".", "rawarray", ">", "self", ".", "satlevel", "# the candidate pixels", "# We build a smoothed version of the image to look for large stars and their support :", "m5", "=", "ndimage", ".", "filters", ".", "median_filter", "(", "self", ".", "rawarray", ",", "size", "=", "5", ",", "mode", "=", "'mirror'", ")", "# We look where this is above half the satlevel", "largestruct", "=", "m5", ">", "(", "self", ".", "satlevel", "/", "2.0", ")", "# The rough locations of saturated stars are now :", "satstarscenters", "=", "np", ".", "logical_and", "(", "largestruct", ",", "satpixels", ")", "if", "verbose", ":", "print", "\"Building mask of saturated stars ...\"", "# BUILDING THE MASK", "# The subtility is that we want to include all saturated pixels connected to these saturated stars...", "# I haven't found a better solution then the double loop", "# We dilate the satpixels alone, to ensure connectivity in glitchy regions and to add a safety margin around them.", "#dilstruct = np.array([[0,1,0], [1,1,1], [0,1,0]])", "dilsatpixels", "=", "ndimage", ".", "morphology", ".", "binary_dilation", "(", "satpixels", ",", "structure", "=", "dilstruct", ",", "iterations", "=", "2", ",", "mask", "=", "None", ",", "output", "=", "None", ",", "border_value", "=", "0", ",", "origin", "=", "0", ",", "brute_force", "=", "False", ")", "# It turns out it's better to think large and do 2 iterations...", "# We label these :", "(", "dilsatlabels", ",", "nsat", ")", "=", "ndimage", ".", "measurements", ".", "label", "(", "dilsatpixels", ")", "#tofits(dilsatlabels, \"test.fits\")", "if", "verbose", ":", "print", "\"We have %i saturated stars.\"", "%", "nsat", "# The ouput, False for now :", "outmask", "=", "np", ".", "zeros", "(", "self", ".", "rawarray", ".", "shape", ")", "for", "i", "in", "range", "(", "1", ",", "nsat", "+", "1", ")", ":", "# we go through the islands of saturated pixels", "thisisland", "=", "dilsatlabels", "==", "i", "# gives us a boolean array", "# Does this intersect with satstarscenters ?", "overlap", "=", "np", ".", "logical_and", "(", "thisisland", ",", "satstarscenters", ")", "if", "np", ".", "sum", "(", "overlap", ")", ">", "0", ":", "outmask", "=", "np", ".", "logical_or", "(", "outmask", ",", "thisisland", ")", "# we add thisisland to the mask", "self", ".", "satstars", "=", "np", ".", "cast", "[", "'bool'", "]", "(", "outmask", ")", "if", "verbose", ":", "print", "\"Mask of saturated stars done\"" ]
Uses the satlevel to find saturated stars (not cosmics !), and puts the result as a mask in self.satstars. This can then be used to avoid these regions in cosmic detection and cleaning procedures. Slow ...
[ "Uses", "the", "satlevel", "to", "find", "saturated", "stars", "(", "not", "cosmics", "!", ")", "and", "puts", "the", "result", "as", "a", "mask", "in", "self", ".", "satstars", ".", "This", "can", "then", "be", "used", "to", "avoid", "these", "regions", "in", "cosmic", "detection", "and", "cleaning", "procedures", ".", "Slow", "..." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L296-L351
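The detection step pairs a raw saturation threshold with a 5x5 median above half the saturation level; a minimal sketch of just that criterion (the dilation and connectivity-growing loop are omitted):

```python
import numpy as np
from scipy import ndimage

def saturated_star_seeds(raw, satlevel):
    """Pixels above satlevel that also sit on a bright 5x5-median plateau."""
    satpixels = raw > satlevel
    m5 = ndimage.median_filter(raw, size=5, mode='mirror')
    return np.logical_and(m5 > satlevel / 2.0, satpixels)
```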
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.getsatstars
def getsatstars(self, verbose = None): """ Returns the mask of saturated stars after finding them if not yet done. Intended mainly for external use. """ if verbose == None: verbose = self.verbose if not self.satlevel > 0: raise RuntimeError, "Cannot determine satstars : you gave satlevel <= 0 !" if self.satstars == None: self.findsatstars(verbose = verbose) return self.satstars
python
def getsatstars(self, verbose = None): """ Returns the mask of saturated stars after finding them if not yet done. Intended mainly for external use. """ if verbose == None: verbose = self.verbose if not self.satlevel > 0: raise RuntimeError, "Cannot determine satstars : you gave satlevel <= 0 !" if self.satstars == None: self.findsatstars(verbose = verbose) return self.satstars
[ "def", "getsatstars", "(", "self", ",", "verbose", "=", "None", ")", ":", "if", "verbose", "==", "None", ":", "verbose", "=", "self", ".", "verbose", "if", "not", "self", ".", "satlevel", ">", "0", ":", "raise", "RuntimeError", ",", "\"Cannot determine satstars : you gave satlevel <= 0 !\"", "if", "self", ".", "satstars", "==", "None", ":", "self", ".", "findsatstars", "(", "verbose", "=", "verbose", ")", "return", "self", ".", "satstars" ]
Returns the mask of saturated stars after finding them if not yet done. Intended mainly for external use.
[ "Returns", "the", "mask", "of", "saturated", "stars", "after", "finding", "them", "if", "not", "yet", "done", ".", "Intended", "mainly", "for", "external", "use", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L353-L364
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.guessbackgroundlevel
def guessbackgroundlevel(self): """ Estimates the background level. This could be used to fill pixels in large cosmics. """ if self.backgroundlevel == None: self.backgroundlevel = np.median(self.rawarray.ravel()) return self.backgroundlevel
python
def guessbackgroundlevel(self): """ Estimates the background level. This could be used to fill pixels in large cosmics. """ if self.backgroundlevel == None: self.backgroundlevel = np.median(self.rawarray.ravel()) return self.backgroundlevel
[ "def", "guessbackgroundlevel", "(", "self", ")", ":", "if", "self", ".", "backgroundlevel", "==", "None", ":", "self", ".", "backgroundlevel", "=", "np", ".", "median", "(", "self", ".", "rawarray", ".", "ravel", "(", ")", ")", "return", "self", ".", "backgroundlevel" ]
Estimates the background level. This could be used to fill pixels in large cosmics.
[ "Estimates", "the", "background", "level", ".", "This", "could", "be", "used", "to", "fill", "pixels", "in", "large", "cosmics", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L382-L388
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.lacosmiciteration
def lacosmiciteration(self, verbose = None): """ Performs one iteration of the L.A.Cosmic algorithm. It operates on self.cleanarray, and afterwards updates self.mask by adding the newly detected cosmics to the existing self.mask. Cleaning is not made automatically ! You have to call clean() after each iteration. This way you can run it several times in a row to to L.A.Cosmic "iterations". See function lacosmic, that mimics the full iterative L.A.Cosmic algorithm. Returns a dict containing - niter : the number of cosmic pixels detected in this iteration - nnew : among these, how many were not yet in the mask - itermask : the mask of pixels detected in this iteration - newmask : the pixels detected that were not yet in the mask If findsatstars() was called, we exclude these regions from the search. """ if verbose == None: verbose = self.verbose if verbose: print "Convolving image with Laplacian kernel ..." # We subsample, convolve, clip negative values, and rebin to original size subsam = subsample(self.cleanarray) conved = signal.convolve2d(subsam, laplkernel, mode="same", boundary="symm") cliped = conved.clip(min=0.0) #cliped = np.abs(conved) # unfortunately this does not work to find holes as well ... lplus = rebin2x2(cliped) if verbose: print "Creating noise model ..." # We build a custom noise map, so to compare the laplacian to m5 = ndimage.filters.median_filter(self.cleanarray, size=5, mode='mirror') # We keep this m5, as I will use it later for the interpolation. m5clipped = m5.clip(min=0.00001) # As we will take the sqrt noise = (1.0/self.gain) * np.sqrt(self.gain*m5clipped + self.readnoise*self.readnoise) if verbose: print "Calculating Laplacian signal to noise ratio ..." # Laplacian signal to noise ratio : s = lplus / (2.0 * noise) # the 2.0 is from the 2x2 subsampling # This s is called sigmap in the original lacosmic.cl # We remove the large structures (s prime) : sp = s - ndimage.filters.median_filter(s, size=5, mode='mirror') if verbose: print "Selecting candidate cosmic rays ..." # Candidate cosmic rays (this will include stars + HII regions) candidates = sp > self.sigclip nbcandidates = np.sum(candidates) if verbose: print " %5i candidate pixels" % nbcandidates # At this stage we use the saturated stars to mask the candidates, if available : if self.satstars is not None: if verbose: print "Masking saturated stars ..." candidates = np.logical_and(np.logical_not(self.satstars), candidates) nbcandidates = np.sum(candidates) if verbose: print " %5i candidate pixels not part of saturated stars" % nbcandidates if verbose: print "Building fine structure image ..." # We build the fine structure image : m3 = ndimage.filters.median_filter(self.cleanarray, size=3, mode='mirror') m37 = ndimage.filters.median_filter(m3, size=7, mode='mirror') f = m3 - m37 # In the article that's it, but in lacosmic.cl f is divided by the noise... # Ok I understand why, it depends on if you use sp/f or L+/f as criterion. # There are some differences between the article and the iraf implementation. # So I will stick to the iraf implementation. f = f / noise f = f.clip(min=0.01) # as we will divide by f. like in the iraf version. if verbose: print "Removing suspected compact bright objects ..." # Now we have our better selection of cosmics : cosmics = np.logical_and(candidates, sp/f > self.objlim) # Note the sp/f and not lplus/f ... due to the f = f/noise above. 
nbcosmics = np.sum(cosmics) if verbose: print " %5i remaining candidate pixels" % nbcosmics # What follows is a special treatment for neighbors, with more relaxed constains. if verbose: print "Finding neighboring pixels affected by cosmic rays ..." # We grow these cosmics a first time to determine the immediate neighborhod : growcosmics = np.cast['bool'](signal.convolve2d(np.cast['float32'](cosmics), growkernel, mode="same", boundary="symm")) # From this grown set, we keep those that have sp > sigmalim # so obviously not requiring sp/f > objlim, otherwise it would be pointless growcosmics = np.logical_and(sp > self.sigclip, growcosmics) # Now we repeat this procedure, but lower the detection limit to sigmalimlow : finalsel = np.cast['bool'](signal.convolve2d(np.cast['float32'](growcosmics), growkernel, mode="same", boundary="symm")) finalsel = np.logical_and(sp > self.sigcliplow, finalsel) # Again, we have to kick out pixels on saturated stars : if self.satstars is not None: if verbose: print "Masking saturated stars ..." finalsel = np.logical_and(np.logical_not(self.satstars), finalsel) nbfinal = np.sum(finalsel) if verbose: print " %5i pixels detected as cosmics" % nbfinal # Now the replacement of the cosmics... # we outsource this to the function clean(), as for some purposes the cleaning might not even be needed. # Easy way without masking would be : #self.cleanarray[finalsel] = m5[finalsel] # We find how many cosmics are not yet known : newmask = np.logical_and(np.logical_not(self.mask), finalsel) nbnew = np.sum(newmask) # We update the mask with the cosmics we have found : self.mask = np.logical_or(self.mask, finalsel) # We return # (used by function lacosmic) return {"niter":nbfinal, "nnew":nbnew, "itermask":finalsel, "newmask":newmask}
python
def lacosmiciteration(self, verbose = None): """ Performs one iteration of the L.A.Cosmic algorithm. It operates on self.cleanarray, and afterwards updates self.mask by adding the newly detected cosmics to the existing self.mask. Cleaning is not made automatically ! You have to call clean() after each iteration. This way you can run it several times in a row to to L.A.Cosmic "iterations". See function lacosmic, that mimics the full iterative L.A.Cosmic algorithm. Returns a dict containing - niter : the number of cosmic pixels detected in this iteration - nnew : among these, how many were not yet in the mask - itermask : the mask of pixels detected in this iteration - newmask : the pixels detected that were not yet in the mask If findsatstars() was called, we exclude these regions from the search. """ if verbose == None: verbose = self.verbose if verbose: print "Convolving image with Laplacian kernel ..." # We subsample, convolve, clip negative values, and rebin to original size subsam = subsample(self.cleanarray) conved = signal.convolve2d(subsam, laplkernel, mode="same", boundary="symm") cliped = conved.clip(min=0.0) #cliped = np.abs(conved) # unfortunately this does not work to find holes as well ... lplus = rebin2x2(cliped) if verbose: print "Creating noise model ..." # We build a custom noise map, so to compare the laplacian to m5 = ndimage.filters.median_filter(self.cleanarray, size=5, mode='mirror') # We keep this m5, as I will use it later for the interpolation. m5clipped = m5.clip(min=0.00001) # As we will take the sqrt noise = (1.0/self.gain) * np.sqrt(self.gain*m5clipped + self.readnoise*self.readnoise) if verbose: print "Calculating Laplacian signal to noise ratio ..." # Laplacian signal to noise ratio : s = lplus / (2.0 * noise) # the 2.0 is from the 2x2 subsampling # This s is called sigmap in the original lacosmic.cl # We remove the large structures (s prime) : sp = s - ndimage.filters.median_filter(s, size=5, mode='mirror') if verbose: print "Selecting candidate cosmic rays ..." # Candidate cosmic rays (this will include stars + HII regions) candidates = sp > self.sigclip nbcandidates = np.sum(candidates) if verbose: print " %5i candidate pixels" % nbcandidates # At this stage we use the saturated stars to mask the candidates, if available : if self.satstars is not None: if verbose: print "Masking saturated stars ..." candidates = np.logical_and(np.logical_not(self.satstars), candidates) nbcandidates = np.sum(candidates) if verbose: print " %5i candidate pixels not part of saturated stars" % nbcandidates if verbose: print "Building fine structure image ..." # We build the fine structure image : m3 = ndimage.filters.median_filter(self.cleanarray, size=3, mode='mirror') m37 = ndimage.filters.median_filter(m3, size=7, mode='mirror') f = m3 - m37 # In the article that's it, but in lacosmic.cl f is divided by the noise... # Ok I understand why, it depends on if you use sp/f or L+/f as criterion. # There are some differences between the article and the iraf implementation. # So I will stick to the iraf implementation. f = f / noise f = f.clip(min=0.01) # as we will divide by f. like in the iraf version. if verbose: print "Removing suspected compact bright objects ..." # Now we have our better selection of cosmics : cosmics = np.logical_and(candidates, sp/f > self.objlim) # Note the sp/f and not lplus/f ... due to the f = f/noise above. 
nbcosmics = np.sum(cosmics) if verbose: print " %5i remaining candidate pixels" % nbcosmics # What follows is a special treatment for neighbors, with more relaxed constains. if verbose: print "Finding neighboring pixels affected by cosmic rays ..." # We grow these cosmics a first time to determine the immediate neighborhod : growcosmics = np.cast['bool'](signal.convolve2d(np.cast['float32'](cosmics), growkernel, mode="same", boundary="symm")) # From this grown set, we keep those that have sp > sigmalim # so obviously not requiring sp/f > objlim, otherwise it would be pointless growcosmics = np.logical_and(sp > self.sigclip, growcosmics) # Now we repeat this procedure, but lower the detection limit to sigmalimlow : finalsel = np.cast['bool'](signal.convolve2d(np.cast['float32'](growcosmics), growkernel, mode="same", boundary="symm")) finalsel = np.logical_and(sp > self.sigcliplow, finalsel) # Again, we have to kick out pixels on saturated stars : if self.satstars is not None: if verbose: print "Masking saturated stars ..." finalsel = np.logical_and(np.logical_not(self.satstars), finalsel) nbfinal = np.sum(finalsel) if verbose: print " %5i pixels detected as cosmics" % nbfinal # Now the replacement of the cosmics... # we outsource this to the function clean(), as for some purposes the cleaning might not even be needed. # Easy way without masking would be : #self.cleanarray[finalsel] = m5[finalsel] # We find how many cosmics are not yet known : newmask = np.logical_and(np.logical_not(self.mask), finalsel) nbnew = np.sum(newmask) # We update the mask with the cosmics we have found : self.mask = np.logical_or(self.mask, finalsel) # We return # (used by function lacosmic) return {"niter":nbfinal, "nnew":nbnew, "itermask":finalsel, "newmask":newmask}
[ "def", "lacosmiciteration", "(", "self", ",", "verbose", "=", "None", ")", ":", "if", "verbose", "==", "None", ":", "verbose", "=", "self", ".", "verbose", "if", "verbose", ":", "print", "\"Convolving image with Laplacian kernel ...\"", "# We subsample, convolve, clip negative values, and rebin to original size", "subsam", "=", "subsample", "(", "self", ".", "cleanarray", ")", "conved", "=", "signal", ".", "convolve2d", "(", "subsam", ",", "laplkernel", ",", "mode", "=", "\"same\"", ",", "boundary", "=", "\"symm\"", ")", "cliped", "=", "conved", ".", "clip", "(", "min", "=", "0.0", ")", "#cliped = np.abs(conved) # unfortunately this does not work to find holes as well ...", "lplus", "=", "rebin2x2", "(", "cliped", ")", "if", "verbose", ":", "print", "\"Creating noise model ...\"", "# We build a custom noise map, so to compare the laplacian to", "m5", "=", "ndimage", ".", "filters", ".", "median_filter", "(", "self", ".", "cleanarray", ",", "size", "=", "5", ",", "mode", "=", "'mirror'", ")", "# We keep this m5, as I will use it later for the interpolation.", "m5clipped", "=", "m5", ".", "clip", "(", "min", "=", "0.00001", ")", "# As we will take the sqrt", "noise", "=", "(", "1.0", "/", "self", ".", "gain", ")", "*", "np", ".", "sqrt", "(", "self", ".", "gain", "*", "m5clipped", "+", "self", ".", "readnoise", "*", "self", ".", "readnoise", ")", "if", "verbose", ":", "print", "\"Calculating Laplacian signal to noise ratio ...\"", "# Laplacian signal to noise ratio :", "s", "=", "lplus", "/", "(", "2.0", "*", "noise", ")", "# the 2.0 is from the 2x2 subsampling", "# This s is called sigmap in the original lacosmic.cl", "# We remove the large structures (s prime) :", "sp", "=", "s", "-", "ndimage", ".", "filters", ".", "median_filter", "(", "s", ",", "size", "=", "5", ",", "mode", "=", "'mirror'", ")", "if", "verbose", ":", "print", "\"Selecting candidate cosmic rays ...\"", "# Candidate cosmic rays (this will include stars + HII regions)", "candidates", "=", "sp", ">", "self", ".", "sigclip", "nbcandidates", "=", "np", ".", "sum", "(", "candidates", ")", "if", "verbose", ":", "print", "\" %5i candidate pixels\"", "%", "nbcandidates", "# At this stage we use the saturated stars to mask the candidates, if available :", "if", "self", ".", "satstars", "is", "not", "None", ":", "if", "verbose", ":", "print", "\"Masking saturated stars ...\"", "candidates", "=", "np", ".", "logical_and", "(", "np", ".", "logical_not", "(", "self", ".", "satstars", ")", ",", "candidates", ")", "nbcandidates", "=", "np", ".", "sum", "(", "candidates", ")", "if", "verbose", ":", "print", "\" %5i candidate pixels not part of saturated stars\"", "%", "nbcandidates", "if", "verbose", ":", "print", "\"Building fine structure image ...\"", "# We build the fine structure image :", "m3", "=", "ndimage", ".", "filters", ".", "median_filter", "(", "self", ".", "cleanarray", ",", "size", "=", "3", ",", "mode", "=", "'mirror'", ")", "m37", "=", "ndimage", ".", "filters", ".", "median_filter", "(", "m3", ",", "size", "=", "7", ",", "mode", "=", "'mirror'", ")", "f", "=", "m3", "-", "m37", "# In the article that's it, but in lacosmic.cl f is divided by the noise...", "# Ok I understand why, it depends on if you use sp/f or L+/f as criterion.", "# There are some differences between the article and the iraf implementation.", "# So I will stick to the iraf implementation.", "f", "=", "f", "/", "noise", "f", "=", "f", ".", "clip", "(", "min", "=", "0.01", ")", "# as we will divide by f. 
like in the iraf version.", "if", "verbose", ":", "print", "\"Removing suspected compact bright objects ...\"", "# Now we have our better selection of cosmics :", "cosmics", "=", "np", ".", "logical_and", "(", "candidates", ",", "sp", "/", "f", ">", "self", ".", "objlim", ")", "# Note the sp/f and not lplus/f ... due to the f = f/noise above.", "nbcosmics", "=", "np", ".", "sum", "(", "cosmics", ")", "if", "verbose", ":", "print", "\" %5i remaining candidate pixels\"", "%", "nbcosmics", "# What follows is a special treatment for neighbors, with more relaxed constains.", "if", "verbose", ":", "print", "\"Finding neighboring pixels affected by cosmic rays ...\"", "# We grow these cosmics a first time to determine the immediate neighborhod :", "growcosmics", "=", "np", ".", "cast", "[", "'bool'", "]", "(", "signal", ".", "convolve2d", "(", "np", ".", "cast", "[", "'float32'", "]", "(", "cosmics", ")", ",", "growkernel", ",", "mode", "=", "\"same\"", ",", "boundary", "=", "\"symm\"", ")", ")", "# From this grown set, we keep those that have sp > sigmalim", "# so obviously not requiring sp/f > objlim, otherwise it would be pointless", "growcosmics", "=", "np", ".", "logical_and", "(", "sp", ">", "self", ".", "sigclip", ",", "growcosmics", ")", "# Now we repeat this procedure, but lower the detection limit to sigmalimlow :", "finalsel", "=", "np", ".", "cast", "[", "'bool'", "]", "(", "signal", ".", "convolve2d", "(", "np", ".", "cast", "[", "'float32'", "]", "(", "growcosmics", ")", ",", "growkernel", ",", "mode", "=", "\"same\"", ",", "boundary", "=", "\"symm\"", ")", ")", "finalsel", "=", "np", ".", "logical_and", "(", "sp", ">", "self", ".", "sigcliplow", ",", "finalsel", ")", "# Again, we have to kick out pixels on saturated stars :", "if", "self", ".", "satstars", "is", "not", "None", ":", "if", "verbose", ":", "print", "\"Masking saturated stars ...\"", "finalsel", "=", "np", ".", "logical_and", "(", "np", ".", "logical_not", "(", "self", ".", "satstars", ")", ",", "finalsel", ")", "nbfinal", "=", "np", ".", "sum", "(", "finalsel", ")", "if", "verbose", ":", "print", "\" %5i pixels detected as cosmics\"", "%", "nbfinal", "# Now the replacement of the cosmics...", "# we outsource this to the function clean(), as for some purposes the cleaning might not even be needed.", "# Easy way without masking would be :", "#self.cleanarray[finalsel] = m5[finalsel]", "# We find how many cosmics are not yet known :", "newmask", "=", "np", ".", "logical_and", "(", "np", ".", "logical_not", "(", "self", ".", "mask", ")", ",", "finalsel", ")", "nbnew", "=", "np", ".", "sum", "(", "newmask", ")", "# We update the mask with the cosmics we have found :", "self", ".", "mask", "=", "np", ".", "logical_or", "(", "self", ".", "mask", ",", "finalsel", ")", "# We return", "# (used by function lacosmic)", "return", "{", "\"niter\"", ":", "nbfinal", ",", "\"nnew\"", ":", "nbnew", ",", "\"itermask\"", ":", "finalsel", ",", "\"newmask\"", ":", "newmask", "}" ]
Performs one iteration of the L.A.Cosmic algorithm. It operates on self.cleanarray, and afterwards updates self.mask by adding the newly detected cosmics to the existing self.mask. Cleaning is not done automatically! You have to call clean() after each iteration. This way you can run it several times in a row to do L.A.Cosmic "iterations". See function lacosmic, which mimics the full iterative L.A.Cosmic algorithm. Returns a dict containing - niter : the number of cosmic pixels detected in this iteration - nnew : among these, how many were not yet in the mask - itermask : the mask of pixels detected in this iteration - newmask : the pixels detected that were not yet in the mask If findsatstars() was called, we exclude these regions from the search.
[ "Performs", "one", "iteration", "of", "the", "L", ".", "A", ".", "Cosmic", "algorithm", ".", "It", "operates", "on", "self", ".", "cleanarray", "and", "afterwards", "updates", "self", ".", "mask", "by", "adding", "the", "newly", "detected", "cosmics", "to", "the", "existing", "self", ".", "mask", ".", "Cleaning", "is", "not", "made", "automatically", "!", "You", "have", "to", "call", "clean", "()", "after", "each", "iteration", ".", "This", "way", "you", "can", "run", "it", "several", "times", "in", "a", "row", "to", "to", "L", ".", "A", ".", "Cosmic", "iterations", ".", "See", "function", "lacosmic", "that", "mimics", "the", "full", "iterative", "L", ".", "A", ".", "Cosmic", "algorithm", ".", "Returns", "a", "dict", "containing", "-", "niter", ":", "the", "number", "of", "cosmic", "pixels", "detected", "in", "this", "iteration", "-", "nnew", ":", "among", "these", "how", "many", "were", "not", "yet", "in", "the", "mask", "-", "itermask", ":", "the", "mask", "of", "pixels", "detected", "in", "this", "iteration", "-", "newmask", ":", "the", "pixels", "detected", "that", "were", "not", "yet", "in", "the", "mask", "If", "findsatstars", "()", "was", "called", "we", "exclude", "these", "regions", "from", "the", "search", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L391-L531
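A hedged sketch of driving lacosmiciteration() by hand, calling clean() between passes as the docstring requires; the cosmicsimage constructor keywords and the rawarray input are assumptions, not taken from this record.

# rawarray is a placeholder 2D numpy image; constructor keywords are assumed
c = cosmicsimage(rawarray, gain=2.2, readnoise=10.0, sigclip=5.0, sigfrac=0.3, objlim=5.0)
for _ in range(4):
    res = c.lacosmiciteration(verbose=True)   # detect cosmics on the current clean array
    c.clean()                                 # replace the masked pixels before the next pass
    if res["niter"] == 0:                     # stop once an iteration finds nothing
        break
total_mask = c.mask                           # boolean map of every cosmic found so far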
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/cosmics.py
cosmicsimage.run
def run(self, maxiter = 4, verbose = False): """ Full artillery :-) - Find saturated stars - Run maxiter L.A.Cosmic iterations (stops if no more cosmics are found) Stops if no cosmics are found or if maxiter is reached. """ if self.satlevel > 0 and self.satstars == None: self.findsatstars(verbose=True) print "Starting %i L.A.Cosmic iterations ..." % maxiter for i in range(1, maxiter+1): print "Iteration %i" % i iterres = self.lacosmiciteration(verbose=verbose) print "%i cosmic pixels (%i new)" % (iterres["niter"], iterres["nnew"]) #self.clean(mask = iterres["mask"]) # No, we want clean to operate on really clean pixels only ! # Thus we always apply it on the full mask, as lacosmic does : self.clean(verbose=verbose) # But note that for huge cosmics, one might want to revise this. # Thats why I added a feature to skip saturated stars ! if iterres["niter"] == 0: break
python
def run(self, maxiter = 4, verbose = False): """ Full artillery :-) - Find saturated stars - Run maxiter L.A.Cosmic iterations (stops if no more cosmics are found) Stops if no cosmics are found or if maxiter is reached. """ if self.satlevel > 0 and self.satstars == None: self.findsatstars(verbose=True) print "Starting %i L.A.Cosmic iterations ..." % maxiter for i in range(1, maxiter+1): print "Iteration %i" % i iterres = self.lacosmiciteration(verbose=verbose) print "%i cosmic pixels (%i new)" % (iterres["niter"], iterres["nnew"]) #self.clean(mask = iterres["mask"]) # No, we want clean to operate on really clean pixels only ! # Thus we always apply it on the full mask, as lacosmic does : self.clean(verbose=verbose) # But note that for huge cosmics, one might want to revise this. # Thats why I added a feature to skip saturated stars ! if iterres["niter"] == 0: break
[ "def", "run", "(", "self", ",", "maxiter", "=", "4", ",", "verbose", "=", "False", ")", ":", "if", "self", ".", "satlevel", ">", "0", "and", "self", ".", "satstars", "==", "None", ":", "self", ".", "findsatstars", "(", "verbose", "=", "True", ")", "print", "\"Starting %i L.A.Cosmic iterations ...\"", "%", "maxiter", "for", "i", "in", "range", "(", "1", ",", "maxiter", "+", "1", ")", ":", "print", "\"Iteration %i\"", "%", "i", "iterres", "=", "self", ".", "lacosmiciteration", "(", "verbose", "=", "verbose", ")", "print", "\"%i cosmic pixels (%i new)\"", "%", "(", "iterres", "[", "\"niter\"", "]", ",", "iterres", "[", "\"nnew\"", "]", ")", "#self.clean(mask = iterres[\"mask\"]) # No, we want clean to operate on really clean pixels only !", "# Thus we always apply it on the full mask, as lacosmic does :", "self", ".", "clean", "(", "verbose", "=", "verbose", ")", "# But note that for huge cosmics, one might want to revise this.", "# Thats why I added a feature to skip saturated stars !", "if", "iterres", "[", "\"niter\"", "]", "==", "0", ":", "break" ]
Full artillery :-) - Find saturated stars - Run maxiter L.A.Cosmic iterations (stops if no more cosmics are found) Stops if no cosmics are found or if maxiter is reached.
[ "Full", "artillery", ":", "-", ")", "-", "Find", "saturated", "stars", "-", "Run", "maxiter", "L", ".", "A", ".", "Cosmic", "iterations", "(", "stops", "if", "no", "more", "cosmics", "are", "found", ")", "Stops", "if", "no", "cosmics", "are", "found", "or", "if", "maxiter", "is", "reached", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/cosmics.py#L591-L617
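A hedged end-to-end sketch of the full pipeline; the module name and the fromfits/tofits helpers are assumptions about the surrounding file, not shown in this record.

import cosmics  # assumed module name for this file
array, header = cosmics.fromfits("input.fits")            # assumed FITS helper in the same module
c = cosmics.cosmicsimage(array, gain=2.2, readnoise=10.0,
                         sigclip=5.0, sigfrac=0.3, objlim=5.0, satlevel=50000.0)
c.run(maxiter=4)                                          # findsatstars() is triggered internally when satlevel > 0
cosmics.tofits("clean.fits", c.cleanarray, header)        # cleaned image
cosmics.tofits("mask.fits", c.mask.astype(int), header)   # 1 where a cosmic was detected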
shotastage/mirage-django-lts
mirage/proj/environ.py
MirageEnvironment.search_project_root
def search_project_root(): """ Search your Django project root. returns: - path:string Django project root path """ while True: current = os.getcwd() if pathlib.Path("Miragefile.py").is_file() or pathlib.Path("Miragefile").is_file(): return current elif os.getcwd() == "/": raise FileNotFoundError else: os.chdir("../")
python
def search_project_root(): """ Search your Django project root. returns: - path:string Django project root path """ while True: current = os.getcwd() if pathlib.Path("Miragefile.py").is_file() or pathlib.Path("Miragefile").is_file(): return current elif os.getcwd() == "/": raise FileNotFoundError else: os.chdir("../")
[ "def", "search_project_root", "(", ")", ":", "while", "True", ":", "current", "=", "os", ".", "getcwd", "(", ")", "if", "pathlib", ".", "Path", "(", "\"Miragefile.py\"", ")", ".", "is_file", "(", ")", "or", "pathlib", ".", "Path", "(", "\"Miragefile\"", ")", ".", "is_file", "(", ")", ":", "return", "current", "elif", "os", ".", "getcwd", "(", ")", "==", "\"/\"", ":", "raise", "FileNotFoundError", "else", ":", "os", ".", "chdir", "(", "\"../\"", ")" ]
Search your Django project root. returns: - path:string Django project root path
[ "Search", "your", "Django", "project", "root", "." ]
train
https://github.com/shotastage/mirage-django-lts/blob/4e32dd48fff4b191abb90813ce3cc5ef0654a2ab/mirage/proj/environ.py#L61-L78
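The same upward-walk idea, sketched as a side-effect-free helper (the original changes the process cwd with os.chdir); the function name and marker handling here are illustrative, not part of mirage's API.

import pathlib

def find_marker_root(start=".", markers=("Miragefile", "Miragefile.py")):
    # Walk from start up to the filesystem root and return the first
    # directory containing one of the marker files, or None if none is found.
    current = pathlib.Path(start).resolve()
    for candidate in (current, *current.parents):
        if any((candidate / marker).is_file() for marker in markers):
            return str(candidate)
    return None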
shotastage/mirage-django-lts
mirage/proj/environ.py
MirageEnvironment.search_app_root
def search_app_root(): """ Search your Django application root returns: - (String) Django application root path """ while True: current = os.getcwd() if pathlib.Path("apps.py").is_file(): return current elif pathlib.Path.cwd() == "/": raise FileNotFoundError else: os.chdir("../")
python
def search_app_root(): """ Search your Django application root returns: - (String) Django application root path """ while True: current = os.getcwd() if pathlib.Path("apps.py").is_file(): return current elif pathlib.Path.cwd() == "/": raise FileNotFoundError else: os.chdir("../")
[ "def", "search_app_root", "(", ")", ":", "while", "True", ":", "current", "=", "os", ".", "getcwd", "(", ")", "if", "pathlib", ".", "Path", "(", "\"apps.py\"", ")", ".", "is_file", "(", ")", ":", "return", "current", "elif", "pathlib", ".", "Path", ".", "cwd", "(", ")", "==", "\"/\"", ":", "raise", "FileNotFoundError", "else", ":", "os", ".", "chdir", "(", "\"../\"", ")" ]
Search your Django application root returns: - (String) Django application root path
[ "Search", "your", "Django", "application", "root" ]
train
https://github.com/shotastage/mirage-django-lts/blob/4e32dd48fff4b191abb90813ce3cc5ef0654a2ab/mirage/proj/environ.py#L81-L97
shotastage/mirage-django-lts
mirage/proj/environ.py
MirageEnvironment.in_app
def in_app() -> bool: """ Judge where current working directory is in Django application or not. returns: - (Bool) cwd is in app dir returns True """ try: MirageEnvironment.set_import_root() import apps if os.path.isfile("apps.py"): return True else: return False except ImportError: return False except: return False
python
def in_app() -> bool: """ Judge where current working directory is in Django application or not. returns: - (Bool) cwd is in app dir returns True """ try: MirageEnvironment.set_import_root() import apps if os.path.isfile("apps.py"): return True else: return False except ImportError: return False except: return False
[ "def", "in_app", "(", ")", "->", "bool", ":", "try", ":", "MirageEnvironment", ".", "set_import_root", "(", ")", "import", "apps", "if", "os", ".", "path", ".", "isfile", "(", "\"apps.py\"", ")", ":", "return", "True", "else", ":", "return", "False", "except", "ImportError", ":", "return", "False", "except", ":", "return", "False" ]
Judge whether the current working directory is inside a Django application or not. returns: - (Bool) True if cwd is in an app dir
[ "Judge", "where", "current", "working", "directory", "is", "in", "Django", "application", "or", "not", "." ]
train
https://github.com/shotastage/mirage-django-lts/blob/4e32dd48fff4b191abb90813ce3cc5ef0654a2ab/mirage/proj/environ.py#L137-L154
kajala/django-jutil
jutil/cache.py
update_cached_fields
def update_cached_fields(*args): """ Calls update_cached_fields() for each object passed in as argument. Supports also iterable objects by checking __iter__ attribute. :param args: List of objects :return: None """ for a in args: if a is not None: if hasattr(a, '__iter__'): for e in a: e.update_cached_fields() else: a.update_cached_fields()
python
def update_cached_fields(*args): """ Calls update_cached_fields() for each object passed in as argument. Supports also iterable objects by checking __iter__ attribute. :param args: List of objects :return: None """ for a in args: if a is not None: if hasattr(a, '__iter__'): for e in a: e.update_cached_fields() else: a.update_cached_fields()
[ "def", "update_cached_fields", "(", "*", "args", ")", ":", "for", "a", "in", "args", ":", "if", "a", "is", "not", "None", ":", "if", "hasattr", "(", "a", ",", "'__iter__'", ")", ":", "for", "e", "in", "a", ":", "e", ".", "update_cached_fields", "(", ")", "else", ":", "a", ".", "update_cached_fields", "(", ")" ]
Calls update_cached_fields() for each object passed in as an argument. Also supports iterable objects by checking the __iter__ attribute. :param args: List of objects :return: None
[ "Calls", "update_cached_fields", "()", "for", "each", "object", "passed", "in", "as", "argument", ".", "Supports", "also", "iterable", "objects", "by", "checking", "__iter__", "attribute", ".", ":", "param", "args", ":", "List", "of", "objects", ":", "return", ":", "None" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/cache.py#L49-L62
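A hedged illustration of how the helper treats single objects, iterables and None; Invoice is a hypothetical model whose instances implement update_cached_fields().

invoice = Invoice.objects.get(pk=1)                 # hypothetical CachedFieldsMixin model
update_cached_fields(invoice)                       # single object: called directly
update_cached_fields(Invoice.objects.all(), None)   # queryset is iterated, None is skipped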
kajala/django-jutil
jutil/cache.py
CachedFieldsMixin.update_cached_fields_pre_save
def update_cached_fields_pre_save(self, update_fields: list): """ Call on pre_save signal for objects (to automatically refresh on save). :param update_fields: list of fields to update """ if self.id and update_fields is None: self.update_cached_fields(commit=False, exceptions=False)
python
def update_cached_fields_pre_save(self, update_fields: list): """ Call on pre_save signal for objects (to automatically refresh on save). :param update_fields: list of fields to update """ if self.id and update_fields is None: self.update_cached_fields(commit=False, exceptions=False)
[ "def", "update_cached_fields_pre_save", "(", "self", ",", "update_fields", ":", "list", ")", ":", "if", "self", ".", "id", "and", "update_fields", "is", "None", ":", "self", ".", "update_cached_fields", "(", "commit", "=", "False", ",", "exceptions", "=", "False", ")" ]
Call on pre_save signal for objects (to automatically refresh on save). :param update_fields: list of fields to update
[ "Call", "on", "pre_save", "signal", "for", "objects", "(", "to", "automatically", "refresh", "on", "save", ")", ".", ":", "param", "update_fields", ":", "list", "of", "fields", "to", "update" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/cache.py#L40-L46
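A sketch of wiring the hook into Django's pre_save signal so cached fields refresh automatically on plain save(); the Invoice model is hypothetical.

from django.db.models.signals import pre_save
from django.dispatch import receiver

@receiver(pre_save, sender=Invoice)   # Invoice is a hypothetical CachedFieldsMixin model
def refresh_cached_fields(sender, instance, update_fields=None, **kwargs):
    # Mirrors the guard in update_cached_fields_pre_save(): only existing rows
    # saved without an explicit update_fields list are refreshed.
    instance.update_cached_fields_pre_save(update_fields)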
novopl/peltak
src/peltak/extra/gitflow/logic/hotfix.py
start
def start(name): # type: (str) -> None """ Start working on a new hotfix. This will create a new branch off master called hotfix/<name>. Args: name (str): The name of the new feature. """ hotfix_branch = 'hotfix/' + common.to_branch_name(name) master = conf.get('git.master_branch', 'master') common.assert_on_branch(master) common.git_checkout(hotfix_branch, create=True)
python
def start(name): # type: (str) -> None """ Start working on a new hotfix. This will create a new branch off master called hotfix/<name>. Args: name (str): The name of the new feature. """ hotfix_branch = 'hotfix/' + common.to_branch_name(name) master = conf.get('git.master_branch', 'master') common.assert_on_branch(master) common.git_checkout(hotfix_branch, create=True)
[ "def", "start", "(", "name", ")", ":", "# type: (str) -> None", "hotfix_branch", "=", "'hotfix/'", "+", "common", ".", "to_branch_name", "(", "name", ")", "master", "=", "conf", ".", "get", "(", "'git.master_branch'", ",", "'master'", ")", "common", ".", "assert_on_branch", "(", "master", ")", "common", ".", "git_checkout", "(", "hotfix_branch", ",", "create", "=", "True", ")" ]
Start working on a new hotfix. This will create a new branch off master called hotfix/<name>. Args: name (str): The name of the new hotfix.
[ "Start", "working", "on", "a", "new", "hotfix", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/gitflow/logic/hotfix.py#L30-L44
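A hedged sketch of what common.to_branch_name() presumably does to the description before the branch is created; the slug rule shown is an assumption, not the actual peltak implementation.

import re

def to_branch_name(name):
    # Assumed behaviour: lower-case the description and dash-separate the words.
    return re.sub(r"[^a-z0-9]+", "-", name.lower()).strip("-")

print("hotfix/" + to_branch_name("Fix crash on empty config"))
# -> hotfix/fix-crash-on-empty-config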
novopl/peltak
src/peltak/extra/gitflow/logic/hotfix.py
finish
def finish(): # type: () -> None """ Merge current feature into develop. """ pretend = context.get('pretend', False) if not pretend and (git.staged() or git.unstaged()): log.err( "You have uncommitted changes in your repo!\n" "You need to stash them before you merge the hotfix branch" ) sys.exit(1) develop = conf.get('git.devel_branch', 'develop') master = conf.get('git.master_branch', 'master') branch = git.current_branch(refresh=True) common.assert_branch_type('hotfix') # Merge hotfix into master common.git_checkout(master) common.git_pull(master) common.git_merge(master, branch.name) # Merge hotfix into develop common.git_checkout(develop) common.git_pull(develop) common.git_merge(develop, branch.name) # Cleanup common.git_branch_delete(branch.name) common.git_prune() common.git_checkout(master)
python
def finish(): # type: () -> None """ Merge current feature into develop. """ pretend = context.get('pretend', False) if not pretend and (git.staged() or git.unstaged()): log.err( "You have uncommitted changes in your repo!\n" "You need to stash them before you merge the hotfix branch" ) sys.exit(1) develop = conf.get('git.devel_branch', 'develop') master = conf.get('git.master_branch', 'master') branch = git.current_branch(refresh=True) common.assert_branch_type('hotfix') # Merge hotfix into master common.git_checkout(master) common.git_pull(master) common.git_merge(master, branch.name) # Merge hotfix into develop common.git_checkout(develop) common.git_pull(develop) common.git_merge(develop, branch.name) # Cleanup common.git_branch_delete(branch.name) common.git_prune() common.git_checkout(master)
[ "def", "finish", "(", ")", ":", "# type: () -> None", "pretend", "=", "context", ".", "get", "(", "'pretend'", ",", "False", ")", "if", "not", "pretend", "and", "(", "git", ".", "staged", "(", ")", "or", "git", ".", "unstaged", "(", ")", ")", ":", "log", ".", "err", "(", "\"You have uncommitted changes in your repo!\\n\"", "\"You need to stash them before you merge the hotfix branch\"", ")", "sys", ".", "exit", "(", "1", ")", "develop", "=", "conf", ".", "get", "(", "'git.devel_branch'", ",", "'develop'", ")", "master", "=", "conf", ".", "get", "(", "'git.master_branch'", ",", "'master'", ")", "branch", "=", "git", ".", "current_branch", "(", "refresh", "=", "True", ")", "common", ".", "assert_branch_type", "(", "'hotfix'", ")", "# Merge hotfix into master", "common", ".", "git_checkout", "(", "master", ")", "common", ".", "git_pull", "(", "master", ")", "common", ".", "git_merge", "(", "master", ",", "branch", ".", "name", ")", "# Merge hotfix into develop", "common", ".", "git_checkout", "(", "develop", ")", "common", ".", "git_pull", "(", "develop", ")", "common", ".", "git_merge", "(", "develop", ",", "branch", ".", "name", ")", "# Cleanup", "common", ".", "git_branch_delete", "(", "branch", ".", "name", ")", "common", ".", "git_prune", "(", ")", "common", ".", "git_checkout", "(", "master", ")" ]
Merge the current hotfix into master and develop.
[ "Merge", "current", "feature", "into", "develop", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/gitflow/logic/hotfix.py#L76-L108
novopl/peltak
src/peltak/extra/gitflow/logic/hotfix.py
merged
def merged(): # type: () -> None """ Cleanup a remotely merged branch. """ develop = conf.get('git.devel_branch', 'develop') master = conf.get('git.master_branch', 'master') branch = git.current_branch(refresh=True) common.assert_branch_type('hotfix') # Pull master with the merged hotfix common.git_checkout(master) common.git_pull(master) # Merge to develop common.git_checkout(develop) common.git_pull(develop) common.git_merge(develop, branch.name) # Cleanup common.git_branch_delete(branch.name) common.git_prune() common.git_checkout(master)
python
def merged(): # type: () -> None """ Cleanup a remotely merged branch. """ develop = conf.get('git.devel_branch', 'develop') master = conf.get('git.master_branch', 'master') branch = git.current_branch(refresh=True) common.assert_branch_type('hotfix') # Pull master with the merged hotfix common.git_checkout(master) common.git_pull(master) # Merge to develop common.git_checkout(develop) common.git_pull(develop) common.git_merge(develop, branch.name) # Cleanup common.git_branch_delete(branch.name) common.git_prune() common.git_checkout(master)
[ "def", "merged", "(", ")", ":", "# type: () -> None", "develop", "=", "conf", ".", "get", "(", "'git.devel_branch'", ",", "'develop'", ")", "master", "=", "conf", ".", "get", "(", "'git.master_branch'", ",", "'master'", ")", "branch", "=", "git", ".", "current_branch", "(", "refresh", "=", "True", ")", "common", ".", "assert_branch_type", "(", "'hotfix'", ")", "# Pull master with the merged hotfix", "common", ".", "git_checkout", "(", "master", ")", "common", ".", "git_pull", "(", "master", ")", "# Merge to develop", "common", ".", "git_checkout", "(", "develop", ")", "common", ".", "git_pull", "(", "develop", ")", "common", ".", "git_merge", "(", "develop", ",", "branch", ".", "name", ")", "# Cleanup", "common", ".", "git_branch_delete", "(", "branch", ".", "name", ")", "common", ".", "git_prune", "(", ")", "common", ".", "git_checkout", "(", "master", ")" ]
Cleanup a remotely merged branch.
[ "Cleanup", "a", "remotely", "merged", "branch", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/gitflow/logic/hotfix.py#L111-L133
Varkal/chuda
chuda/shell.py
ShellCommand.run
def run(self): """ Run the shell command Returns: ShellCommand: return this ShellCommand instance for chaining """ if not self.block: self.output = [] self.error = [] self.thread = threading.Thread(target=self.run_non_blocking) self.thread.start() else: self.__create_process() self.process.wait() if self._stdout is not None: self.output = self.process.stdout.read().decode("utf-8") if self._stderr is not None: self.error = self.process.stderr.read().decode("utf-8") self.return_code = self.process.returncode return self
python
def run(self): """ Run the shell command Returns: ShellCommand: return this ShellCommand instance for chaining """ if not self.block: self.output = [] self.error = [] self.thread = threading.Thread(target=self.run_non_blocking) self.thread.start() else: self.__create_process() self.process.wait() if self._stdout is not None: self.output = self.process.stdout.read().decode("utf-8") if self._stderr is not None: self.error = self.process.stderr.read().decode("utf-8") self.return_code = self.process.returncode return self
[ "def", "run", "(", "self", ")", ":", "if", "not", "self", ".", "block", ":", "self", ".", "output", "=", "[", "]", "self", ".", "error", "=", "[", "]", "self", ".", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "run_non_blocking", ")", "self", ".", "thread", ".", "start", "(", ")", "else", ":", "self", ".", "__create_process", "(", ")", "self", ".", "process", ".", "wait", "(", ")", "if", "self", ".", "_stdout", "is", "not", "None", ":", "self", ".", "output", "=", "self", ".", "process", ".", "stdout", ".", "read", "(", ")", ".", "decode", "(", "\"utf-8\"", ")", "if", "self", ".", "_stderr", "is", "not", "None", ":", "self", ".", "error", "=", "self", ".", "process", ".", "stderr", ".", "read", "(", ")", ".", "decode", "(", "\"utf-8\"", ")", "self", ".", "return_code", "=", "self", ".", "process", ".", "returncode", "return", "self" ]
Run the shell command Returns: ShellCommand: return this ShellCommand instance for chaining
[ "Run", "the", "shell", "command" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L51-L72
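A hedged usage sketch of the two modes run() supports; the constructor keywords mirror the attributes the method reads (command, logger, block), but the exact signature is an assumption.

import logging
logger = logging.getLogger("shell")   # placeholder logger

# Blocking: run() waits, then output/error hold decoded stdout/stderr strings.
ls = ShellCommand(command="ls -l", logger=logger, block=True).run()
print(ls.return_code, ls.output)

# Non-blocking: run() starts a background thread and returns immediately;
# output and error are lists that fill up line by line.
tail = ShellCommand(command="tail -f app.log", logger=logger, block=False).run()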
Varkal/chuda
chuda/shell.py
ShellCommand.send
def send(self, value): """ Send text to stdin. Can only be used on non blocking commands Args: value (str): the text to write on stdin Raises: TypeError: If command is blocking Returns: ShellCommand: return this ShellCommand instance for chaining """ if not self.block and self._stdin is not None: self.writer.write("{}\n".format(value)) return self else: raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
python
def send(self, value): """ Send text to stdin. Can only be used on non blocking commands Args: value (str): the text to write on stdin Raises: TypeError: If command is blocking Returns: ShellCommand: return this ShellCommand instance for chaining """ if not self.block and self._stdin is not None: self.writer.write("{}\n".format(value)) return self else: raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
[ "def", "send", "(", "self", ",", "value", ")", ":", "if", "not", "self", ".", "block", "and", "self", ".", "_stdin", "is", "not", "None", ":", "self", ".", "writer", ".", "write", "(", "\"{}\\n\"", ".", "format", "(", "value", ")", ")", "return", "self", "else", ":", "raise", "TypeError", "(", "NON_BLOCKING_ERROR_MESSAGE", ")" ]
Send text to stdin. Can only be used on non blocking commands Args: value (str): the text to write on stdin Raises: TypeError: If command is blocking Returns: ShellCommand: return this ShellCommand instance for chaining
[ "Send", "text", "to", "stdin", ".", "Can", "only", "be", "used", "on", "non", "blocking", "commands" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L114-L129
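A sketch of the interactive pattern send() enables on a non-blocking command, reusing the placeholder logger from the run() sketch above; the child process is illustrative.

repl = ShellCommand(command="python -i", logger=logger, block=False).run()
repl.send("print(6 * 7)")   # writes "print(6 * 7)\n" to the child's stdin
repl.send("exit()")         # would raise TypeError if the command were blocking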
Varkal/chuda
chuda/shell.py
ShellCommand.poll_output
def poll_output(self): """ Append lines from stdout to self.output. Returns: list: The lines added since last call """ if self.block: return self.output new_list = self.output[self.old_output_size:] self.old_output_size += len(new_list) return new_list
python
def poll_output(self): """ Append lines from stdout to self.output. Returns: list: The lines added since last call """ if self.block: return self.output new_list = self.output[self.old_output_size:] self.old_output_size += len(new_list) return new_list
[ "def", "poll_output", "(", "self", ")", ":", "if", "self", ".", "block", ":", "return", "self", ".", "output", "new_list", "=", "self", ".", "output", "[", "self", ".", "old_output_size", ":", "]", "self", ".", "old_output_size", "+=", "len", "(", "new_list", ")", "return", "new_list" ]
Append lines from stdout to self.output. Returns: list: The lines added since last call
[ "Append", "lines", "from", "stdout", "to", "self", ".", "output", "." ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L131-L143
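A hedged polling loop built on poll_output() together with is_running(); only the lines produced since the previous call come back each time (placeholder logger as in the run() sketch above).

import time

proc = ShellCommand(command="ping -c 3 127.0.0.1", logger=logger, block=False).run()
while proc.is_running():
    for line in proc.poll_output():    # new stdout lines since the last poll
        print("stdout:", line)
    time.sleep(0.2)
for line in proc.poll_output():        # drain anything that arrived after the loop exited
    print("stdout:", line)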
Varkal/chuda
chuda/shell.py
ShellCommand.poll_error
def poll_error(self): """ Append lines from stderr to self.errors. Returns: list: The lines added since last call """ if self.block: return self.error new_list = self.error[self.old_error_size:] self.old_error_size += len(new_list) return new_list
python
def poll_error(self): """ Append lines from stderr to self.errors. Returns: list: The lines added since last call """ if self.block: return self.error new_list = self.error[self.old_error_size:] self.old_error_size += len(new_list) return new_list
[ "def", "poll_error", "(", "self", ")", ":", "if", "self", ".", "block", ":", "return", "self", ".", "error", "new_list", "=", "self", ".", "error", "[", "self", ".", "old_error_size", ":", "]", "self", ".", "old_error_size", "+=", "len", "(", "new_list", ")", "return", "new_list" ]
Append lines from stderr to self.error. Returns: list: The lines added since last call
[ "Append", "lines", "from", "stderr", "to", "self", ".", "errors", "." ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L145-L157
Varkal/chuda
chuda/shell.py
ShellCommand.kill
def kill(self): """ Kill the current non blocking command Raises: TypeError: If command is blocking """ if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) try: self.process.kill() except ProcessLookupError as exc: self.logger.debug(exc)
python
def kill(self): """ Kill the current non blocking command Raises: TypeError: If command is blocking """ if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) try: self.process.kill() except ProcessLookupError as exc: self.logger.debug(exc)
[ "def", "kill", "(", "self", ")", ":", "if", "self", ".", "block", ":", "raise", "TypeError", "(", "NON_BLOCKING_ERROR_MESSAGE", ")", "try", ":", "self", ".", "process", ".", "kill", "(", ")", "except", "ProcessLookupError", "as", "exc", ":", "self", ".", "logger", ".", "debug", "(", "exc", ")" ]
Kill the current non blocking command Raises: TypeError: If command is blocking
[ "Kill", "the", "current", "non", "blocking", "command" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L159-L172
Varkal/chuda
chuda/shell.py
ShellCommand.wait_for
def wait_for(self, pattern, timeout=None): """ Block until a pattern have been found in stdout and stderr Args: pattern(:class:`~re.Pattern`): The pattern to search timeout(int): Maximum number of second to wait. If None, wait infinitely Raises: TimeoutError: When timeout is reach """ should_continue = True if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) def stop(signum, frame): # pylint: disable=W0613 nonlocal should_continue if should_continue: raise TimeoutError() if timeout: signal.signal(signal.SIGALRM, stop) signal.alarm(timeout) while should_continue: output = self.poll_output() + self.poll_error() filtered = [line for line in output if re.match(pattern, line)] if filtered: should_continue = False
python
def wait_for(self, pattern, timeout=None): """ Block until a pattern have been found in stdout and stderr Args: pattern(:class:`~re.Pattern`): The pattern to search timeout(int): Maximum number of second to wait. If None, wait infinitely Raises: TimeoutError: When timeout is reach """ should_continue = True if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) def stop(signum, frame): # pylint: disable=W0613 nonlocal should_continue if should_continue: raise TimeoutError() if timeout: signal.signal(signal.SIGALRM, stop) signal.alarm(timeout) while should_continue: output = self.poll_output() + self.poll_error() filtered = [line for line in output if re.match(pattern, line)] if filtered: should_continue = False
[ "def", "wait_for", "(", "self", ",", "pattern", ",", "timeout", "=", "None", ")", ":", "should_continue", "=", "True", "if", "self", ".", "block", ":", "raise", "TypeError", "(", "NON_BLOCKING_ERROR_MESSAGE", ")", "def", "stop", "(", "signum", ",", "frame", ")", ":", "# pylint: disable=W0613", "nonlocal", "should_continue", "if", "should_continue", ":", "raise", "TimeoutError", "(", ")", "if", "timeout", ":", "signal", ".", "signal", "(", "signal", ".", "SIGALRM", ",", "stop", ")", "signal", ".", "alarm", "(", "timeout", ")", "while", "should_continue", ":", "output", "=", "self", ".", "poll_output", "(", ")", "+", "self", ".", "poll_error", "(", ")", "filtered", "=", "[", "line", "for", "line", "in", "output", "if", "re", ".", "match", "(", "pattern", ",", "line", ")", "]", "if", "filtered", ":", "should_continue", "=", "False" ]
Block until a pattern has been found in stdout and stderr Args: pattern(:class:`~re.Pattern`): The pattern to search for timeout(int): Maximum number of seconds to wait. If None, wait indefinitely Raises: TimeoutError: When the timeout is reached
[ "Block", "until", "a", "pattern", "have", "been", "found", "in", "stdout", "and", "stderr" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L174-L203
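A sketch of gating on log output with wait_for(), then cleaning up with kill() on failure; the server command and pattern are illustrative. Since the timeout uses SIGALRM, it only works on Unix and in the main thread.

import re

server = ShellCommand(command="python -m http.server 8000", logger=logger, block=False).run()
try:
    server.wait_for(re.compile(r"Serving HTTP"), timeout=10)   # busy-polls stdout and stderr
    print("server is up")
except TimeoutError:
    server.kill()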
Varkal/chuda
chuda/shell.py
ShellCommand.is_running
def is_running(self): """ Check if the command is currently running Returns: bool: True if running, else False """ if self.block: return False return self.thread.is_alive() or self.process.poll() is None
python
def is_running(self): """ Check if the command is currently running Returns: bool: True if running, else False """ if self.block: return False return self.thread.is_alive() or self.process.poll() is None
[ "def", "is_running", "(", "self", ")", ":", "if", "self", ".", "block", ":", "return", "False", "return", "self", ".", "thread", ".", "is_alive", "(", ")", "or", "self", ".", "process", ".", "poll", "(", ")", "is", "None" ]
Check if the command is currently running Returns: bool: True if running, else False
[ "Check", "if", "the", "command", "is", "currently", "running" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L205-L215
Varkal/chuda
chuda/shell.py
ShellCommand.print_live_output
def print_live_output(self): ''' Block and print the output of the command Raises: TypeError: If command is blocking ''' if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) else: while self.thread.is_alive() or self.old_output_size < len(self.output) or self.old_error_size < len(self.error): if self._stdout is not None and len(self.output) > self.old_output_size: while self.old_output_size < len(self.output): self.logger.info(self.output[self.old_output_size]) self.old_output_size += 1 if self._stderr is not None and len(self.error) > self.old_error_size: while self.old_error_size < len(self.error): self.logger.error(self.error[self.old_error_size]) self.old_error_size += 1
python
def print_live_output(self): ''' Block and print the output of the command Raises: TypeError: If command is blocking ''' if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) else: while self.thread.is_alive() or self.old_output_size < len(self.output) or self.old_error_size < len(self.error): if self._stdout is not None and len(self.output) > self.old_output_size: while self.old_output_size < len(self.output): self.logger.info(self.output[self.old_output_size]) self.old_output_size += 1 if self._stderr is not None and len(self.error) > self.old_error_size: while self.old_error_size < len(self.error): self.logger.error(self.error[self.old_error_size]) self.old_error_size += 1
[ "def", "print_live_output", "(", "self", ")", ":", "if", "self", ".", "block", ":", "raise", "TypeError", "(", "NON_BLOCKING_ERROR_MESSAGE", ")", "else", ":", "while", "self", ".", "thread", ".", "is_alive", "(", ")", "or", "self", ".", "old_output_size", "<", "len", "(", "self", ".", "output", ")", "or", "self", ".", "old_error_size", "<", "len", "(", "self", ".", "error", ")", ":", "if", "self", ".", "_stdout", "is", "not", "None", "and", "len", "(", "self", ".", "output", ")", ">", "self", ".", "old_output_size", ":", "while", "self", ".", "old_output_size", "<", "len", "(", "self", ".", "output", ")", ":", "self", ".", "logger", ".", "info", "(", "self", ".", "output", "[", "self", ".", "old_output_size", "]", ")", "self", ".", "old_output_size", "+=", "1", "if", "self", ".", "_stderr", "is", "not", "None", "and", "len", "(", "self", ".", "error", ")", ">", "self", ".", "old_error_size", ":", "while", "self", ".", "old_error_size", "<", "len", "(", "self", ".", "error", ")", ":", "self", ".", "logger", ".", "error", "(", "self", ".", "error", "[", "self", ".", "old_error_size", "]", ")", "self", ".", "old_error_size", "+=", "1" ]
Block and print the output of the command Raises: TypeError: If command is blocking
[ "Block", "and", "print", "the", "output", "of", "the", "command" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L224-L243
Varkal/chuda
chuda/shell.py
Runner.run
def run(self, command, block=True, cwd=None, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE): """ Create an instance of :class:`~ShellCommand` and run it Args: command (str): :class:`~ShellCommand` block (bool): See :class:`~ShellCommand` cwd (str): Override the runner cwd. Useb by the :class:`~ShellCommand` instance """ if cwd is None: cwd = self.cwd return ShellCommand(command=command, logger=self.logger, block=block, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr).run()
python
def run(self, command, block=True, cwd=None, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE): """ Create an instance of :class:`~ShellCommand` and run it Args: command (str): :class:`~ShellCommand` block (bool): See :class:`~ShellCommand` cwd (str): Override the runner cwd. Useb by the :class:`~ShellCommand` instance """ if cwd is None: cwd = self.cwd return ShellCommand(command=command, logger=self.logger, block=block, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr).run()
[ "def", "run", "(", "self", ",", "command", ",", "block", "=", "True", ",", "cwd", "=", "None", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", ":", "if", "cwd", "is", "None", ":", "cwd", "=", "self", ".", "cwd", "return", "ShellCommand", "(", "command", "=", "command", ",", "logger", "=", "self", ".", "logger", ",", "block", "=", "block", ",", "cwd", "=", "cwd", ",", "stdin", "=", "stdin", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ")", ".", "run", "(", ")" ]
Create an instance of :class:`~ShellCommand` and run it Args: command (str): The shell command to run (see :class:`~ShellCommand`) block (bool): See :class:`~ShellCommand` cwd (str): Override the runner cwd. Used by the :class:`~ShellCommand` instance
[ "Create", "an", "instance", "of", ":", "class", ":", "~ShellCommand", "and", "run", "it" ]
train
https://github.com/Varkal/chuda/blob/0d93b716dede35231c21be97bcc19a656655983f/chuda/shell.py#L259-L271
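A hedged sketch of the Runner mixin in use; the surrounding command class, and the logger/cwd attributes Runner is assumed to read, are not shown in this record.

import logging

class BuildCommand(Runner):                    # Runner mixed into a hypothetical command class
    logger = logging.getLogger("build")
    cwd = "/tmp"                               # run() falls back to this when cwd is not passed

    def main(self):
        result = self.run("make all", block=True)    # returns the finished ShellCommand
        if result.return_code != 0:
            self.logger.error(result.error)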
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
bces
def bces(x1, x2, x1err=[], x2err=[], cerr=[], logify=True, model='yx', \ bootstrap=5000, verbose='normal', full_output=True): """ Bivariate, Correlated Errors and intrinsic Scatter (BCES) translated from the FORTRAN code by Christina Bird and Matthew Bershady (Akritas & Bershady, 1996) Linear regression in the presence of heteroscedastic errors on both variables and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation model : {'yx', 'xy', 'bi', 'orth'} BCES model with which to calculate regression. See Notes below for details. bootstrap : False or int (default 5000) get the errors from bootstrap resampling instead of the analytical prescription? if bootstrap is an int, it is the number of bootstrap resamplings verbose : str (default 'normal') Verbose level. Options are {'quiet', 'normal', 'debug'} full_output : bool (default True) If True, return also the covariance between the normalization and slope of the regression. Returns ------- a : tuple of length 2 Best-fit normalization and its uncertainty (a, da) b : tuple of length 2 Best-fit slope and its uncertainty (b, db) Optional outputs ---------------- cov_ab : 2x2 array of floats covariance between a and b. Returned if full_output is set to True. Notes ----- If verbose is normal or debug, the results from all the BCES models will be printed (still, only the one selected in *model* will be returned). the *model* parameter: -'yx' stands for BCES(Y|X) -'xy' stands for BCES(X|Y) -'bi' stands for BCES Bisector -'orth' stands for BCES Orthogonal """ def _bess_bootstrap(npts, x1, x2, x1err, x2err, cerr,nsim): ##added by Gerrit, July 2014 ##Unfortunately I needed a copy of the _bess function for bootstrapping. 
#Would be nicer if those two could be combined """ Do the entire regression calculation for 4 slopes: OLS(Y|X), OLS(X|Y), bisector, orthogonal """ #calculate sigma's for datapoints using length of confidence intervals sig11var = numpy.sum(x1err ** 2,axis=1,keepdims=True) / npts sig22var = numpy.sum(x2err ** 2,axis=1,keepdims=True) / npts sig12var = numpy.sum(cerr,axis=1,keepdims=True) / npts # calculate means and variances x1av = numpy.mean(x1,axis=1,keepdims=True) x1var = x1.var(axis=1,keepdims=True) x2av = numpy.mean(x2,axis=1,keepdims=True) x2var = x2.var(axis=1,keepdims=True) covar_x1x2 = numpy.mean((x1-numpy.mean(x1,axis=1,keepdims=True)) * \ (x2-numpy.mean(x2,axis=1,keepdims=True)), axis=1,keepdims=True) # compute the regression slopes for OLS(X2|X1), OLS(X1|X2), # bisector and orthogonal if model == 'yx': modelint = 1 else: modelint = 4 b = numpy.zeros((modelint,nsim)) b[0] = ((covar_x1x2 - sig12var) / (x1var - sig11var)).flatten() if model != 'yx': b[1] = ((x2var - sig22var) / (covar_x1x2 - sig12var)).flatten() b[2] = ((b[0] * b[1] - 1 + numpy.sqrt((1 + b[0] ** 2) * \ (1 + b[1] ** 2))) / (b[0] + b[1])).flatten() b[3] = 0.5 * ((b[1] - 1 / b[0]) + numpy.sign(covar_x1x2).flatten()* \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) # compute intercepts for above 4 cases: a = x2av.flatten() - b * x1av.flatten() # set up variables to calculate standard deviations of slope and # intercept xi = [] xi.append(((x1 - x1av) * (x2 - b[0].reshape(nsim,1) * x1 - \ a[0].reshape(nsim,1)) + \ b[0].reshape(nsim,1) * x1err ** 2) / \ (x1var - sig11var)) if model != 'yx': xi.append(((x2 - x2av) * (x2 - b[1].reshape(nsim,1) * x1 - \ a[1].reshape(nsim,1)) + x2err ** 2) / \ covar_x1x2) xi.append((xi[0] * (1 + b[1].reshape(nsim,1) ** 2) + \ xi[1] * (1 + b[0].reshape(nsim,1) ** 2)) / \ ((b[0].reshape(nsim,1) + \ b[1].reshape(nsim,1)) * \ numpy.sqrt((1 + b[0].reshape(nsim,1) ** 2) * \ (1 + b[1].reshape(nsim,1) ** 2)))) xi.append((xi[0] / b[0].reshape(nsim,1) ** 2 + xi[1]) * \ b[3].reshape(nsim,1) / \ numpy.sqrt(4 + (b[1].reshape(nsim,1) - \ 1 / b[0].reshape(nsim,1)) ** 2)) zeta = [] for i in xrange(modelint): zeta.append(x2 - b[i].reshape(nsim,1) * x1 - x1av * xi[i]) # calculate variance for all a and b bvar = numpy.zeros((4,nsim)) avar = numpy.zeros((4,nsim)) for i in xrange(modelint): bvar[i] = xi[i].var(axis=1,keepdims=False)/ npts avar[i] = zeta[i].var(axis=1,keepdims=False) / npts return a, b, avar, bvar, xi, zeta def _bess(npts, x1, x2, x1err, x2err, cerr): """ Do the entire regression calculation for 4 slopes: OLS(Y|X), OLS(X|Y), bisector, orthogonal """ # calculate sigma's for datapoints using length of confidence # intervals sig11var = sum(x1err ** 2) / npts sig22var = sum(x2err ** 2) / npts sig12var = sum(cerr) / npts # calculate means and variances x1av = numpy.average(x1) x1var = numpy.std(x1) ** 2 x2av = numpy.average(x2) x2var = numpy.std(x2) ** 2 covar_x1x2 = sum((x1 - x1av) * (x2 - x2av)) / npts # compute the regression slopes for OLS(X2|X1), OLS(X1|X2), # bisector and orthogonal b = numpy.zeros(4) b[0] = (covar_x1x2 - sig12var) / (x1var - sig11var) b[1] = (x2var - sig22var) / (covar_x1x2 - sig12var) b[2] = (b[0] * b[1] - 1 + numpy.sqrt((1 + b[0] ** 2) * \ (1 + b[1] ** 2))) / (b[0] + b[1]) b[3] = 0.5 * ((b[1] - 1 / b[0]) + numpy.sign(covar_x1x2) * \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) # compute intercepts for above 4 cases: a = x2av - b * x1av # set up variables to calculate standard deviations of slope # and intercept xi = [] xi.append(((x1 - x1av) * \ (x2 - b[0] * x1 - a[0]) + b[0] * x1err ** 2) 
/ \ (x1var - sig11var)) xi.append(((x2 - x2av) * (x2 - b[1] * x1 - a[1]) + x2err ** 2) / \ covar_x1x2) xi.append((xi[0] * (1 + b[1] ** 2) + xi[1] * (1 + b[0] ** 2)) / \ ((b[0] + b[1]) * \ numpy.sqrt((1 + b[0] ** 2) * (1 + b[1] ** 2)))) xi.append((xi[0] / b[0] ** 2 + xi[1]) * b[3] / \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) zeta = [] for i in xrange(4): zeta.append(x2 - b[i]*x1 - x1av*xi[i]) # calculate variance for all a and b bvar = numpy.zeros(4) avar = numpy.zeros(4) for i in xrange(4): bvar[i] = numpy.std(xi[i]) ** 2 / npts avar[i] = numpy.std(zeta[i]) ** 2 / npts return a, b, avar, bvar, xi, zeta def _bootspbec(npts, x, y, xerr, yerr, cerr): """ Bootstrap samples """ j = numpy.random.randint(npts, size = npts) xboot = x[j] xerrboot = xerr[j] yboot = y[j] yerrboot = yerr[j] cerrboot = cerr[j] return xboot, yboot, xerrboot, yerrboot, cerrboot # ---- Main routine starts here ---- # # convert to numpy arrays just in case x1 = numpy.array(x1) x2 = numpy.array(x2) x1err = numpy.array(x1err) x2err = numpy.array(x2err) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) cerr = numpy.array(cerr) models = [['yx', 'xy', 'bi', 'orth'], ['BCES(Y|X)', 'BCES(X|Y)', 'BCES Bisector', 'BCES Orthogonal']] # which to return? j = models[0].index(model) npts = len(x1) # are the errors defined? if len(x1err) == 0: x1err = numpy.zeros(npts) if len(x2err) == 0: x2err = numpy.zeros(npts) if len(cerr) == 0: cerr = numpy.zeros(npts) if verbose == 'debug': print 'x1 =', x1 print 'x1err =', x1err print 'x2 =', x2 print 'x2err =', x2err print 'cerr =', cerr print '\n ** Returning values for', models[1][j], '**' if bootstrap is not False: print ' with errors from %d bootstrap resamplings' %bootstrap print '' # calculate nominal fits bessresults = _bess(npts, x1, x2, x1err, x2err, cerr) (a, b, avar, bvar, xi, zeta) = bessresults # covariance between normalization and slope if full_output: covar_ab = numpy.cov(xi[j], zeta[j]) if bootstrap is not False: # make bootstrap simulated datasets, and compute averages and # standard deviations of regression coefficients asum = numpy.zeros(4) assum = numpy.zeros(4) bsum = numpy.zeros(4) bssum = numpy.zeros(4) sda = numpy.zeros(4) sdb = numpy.zeros(4) for i in xrange(bootstrap): samples = _bootspbec(npts, x1, x2, x1err, x2err, cerr) (x1sim, x2sim, x1errsim, x2errsim, cerrsim) = samples besssim = _bess(npts, x1sim, x2sim, x1errsim, x2errsim, cerrsim) (asim, bsim, avarsim, bvarsim, xi, zeta) = besssim asum += asim assum += asim ** 2 bsum += bsim bssum += bsim ** 2 aavg = asum / bootstrap bavg = bsum / bootstrap for i in range(4): sdtest = assum[i] - bootstrap * aavg[i] ** 2 if sdtest > 0: sda[i] = numpy.sqrt(sdtest / (bootstrap - 1)) sdtest = bssum[i] - bootstrap * bavg[i] ** 2 if sdtest > 0: sdb[i] = numpy.sqrt(sdtest / (bootstrap - 1)) if verbose in ('normal', 'debug'): print '%s B err(B)' %('Fit'.ljust(19)), print ' A err(A)' for i in range(4): print '%s %9.2e +/- %8.2e %10.3e +/- %9.3e' \ %(models[1][i].ljust(16), b[i], numpy.sqrt(bvar[i]), a[i], numpy.sqrt(avar[i])) if bootstrap is not False: print '%s %9.2e +/- %8.2e %10.3e +/- %9.3e' \ %('bootstrap'.ljust(16), bavg[i], sdb[i], aavg[i], sda[i]) print '' if verbose == 'debug': print 'cov[%s] =' %models[model] print covar_ab if bootstrap is not False: if full_output: return (a[j], sda[j]), (b[j], sdb[j]), covar_ab else: return (a[j], sda[j]), (b[j], sdb[j]) if full_output: out = ((a[j], numpy.sqrt(avar[j])), (b[j], numpy.sqrt(bvar[j])), covar_ab) else: out = ((a[j], numpy.sqrt(avar[j])), (b[j], numpy.sqrt(bvar[j]))) 
return out
python
def bces(x1, x2, x1err=[], x2err=[], cerr=[], logify=True, model='yx', \ bootstrap=5000, verbose='normal', full_output=True): """ Bivariate, Correlated Errors and intrinsic Scatter (BCES) translated from the FORTRAN code by Christina Bird and Matthew Bershady (Akritas & Bershady, 1996) Linear regression in the presence of heteroscedastic errors on both variables and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation model : {'yx', 'xy', 'bi', 'orth'} BCES model with which to calculate regression. See Notes below for details. bootstrap : False or int (default 5000) get the errors from bootstrap resampling instead of the analytical prescription? if bootstrap is an int, it is the number of bootstrap resamplings verbose : str (default 'normal') Verbose level. Options are {'quiet', 'normal', 'debug'} full_output : bool (default True) If True, return also the covariance between the normalization and slope of the regression. Returns ------- a : tuple of length 2 Best-fit normalization and its uncertainty (a, da) b : tuple of length 2 Best-fit slope and its uncertainty (b, db) Optional outputs ---------------- cov_ab : 2x2 array of floats covariance between a and b. Returned if full_output is set to True. Notes ----- If verbose is normal or debug, the results from all the BCES models will be printed (still, only the one selected in *model* will be returned). the *model* parameter: -'yx' stands for BCES(Y|X) -'xy' stands for BCES(X|Y) -'bi' stands for BCES Bisector -'orth' stands for BCES Orthogonal """ def _bess_bootstrap(npts, x1, x2, x1err, x2err, cerr,nsim): ##added by Gerrit, July 2014 ##Unfortunately I needed a copy of the _bess function for bootstrapping. 
#Would be nicer if those two could be combined """ Do the entire regression calculation for 4 slopes: OLS(Y|X), OLS(X|Y), bisector, orthogonal """ #calculate sigma's for datapoints using length of confidence intervals sig11var = numpy.sum(x1err ** 2,axis=1,keepdims=True) / npts sig22var = numpy.sum(x2err ** 2,axis=1,keepdims=True) / npts sig12var = numpy.sum(cerr,axis=1,keepdims=True) / npts # calculate means and variances x1av = numpy.mean(x1,axis=1,keepdims=True) x1var = x1.var(axis=1,keepdims=True) x2av = numpy.mean(x2,axis=1,keepdims=True) x2var = x2.var(axis=1,keepdims=True) covar_x1x2 = numpy.mean((x1-numpy.mean(x1,axis=1,keepdims=True)) * \ (x2-numpy.mean(x2,axis=1,keepdims=True)), axis=1,keepdims=True) # compute the regression slopes for OLS(X2|X1), OLS(X1|X2), # bisector and orthogonal if model == 'yx': modelint = 1 else: modelint = 4 b = numpy.zeros((modelint,nsim)) b[0] = ((covar_x1x2 - sig12var) / (x1var - sig11var)).flatten() if model != 'yx': b[1] = ((x2var - sig22var) / (covar_x1x2 - sig12var)).flatten() b[2] = ((b[0] * b[1] - 1 + numpy.sqrt((1 + b[0] ** 2) * \ (1 + b[1] ** 2))) / (b[0] + b[1])).flatten() b[3] = 0.5 * ((b[1] - 1 / b[0]) + numpy.sign(covar_x1x2).flatten()* \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) # compute intercepts for above 4 cases: a = x2av.flatten() - b * x1av.flatten() # set up variables to calculate standard deviations of slope and # intercept xi = [] xi.append(((x1 - x1av) * (x2 - b[0].reshape(nsim,1) * x1 - \ a[0].reshape(nsim,1)) + \ b[0].reshape(nsim,1) * x1err ** 2) / \ (x1var - sig11var)) if model != 'yx': xi.append(((x2 - x2av) * (x2 - b[1].reshape(nsim,1) * x1 - \ a[1].reshape(nsim,1)) + x2err ** 2) / \ covar_x1x2) xi.append((xi[0] * (1 + b[1].reshape(nsim,1) ** 2) + \ xi[1] * (1 + b[0].reshape(nsim,1) ** 2)) / \ ((b[0].reshape(nsim,1) + \ b[1].reshape(nsim,1)) * \ numpy.sqrt((1 + b[0].reshape(nsim,1) ** 2) * \ (1 + b[1].reshape(nsim,1) ** 2)))) xi.append((xi[0] / b[0].reshape(nsim,1) ** 2 + xi[1]) * \ b[3].reshape(nsim,1) / \ numpy.sqrt(4 + (b[1].reshape(nsim,1) - \ 1 / b[0].reshape(nsim,1)) ** 2)) zeta = [] for i in xrange(modelint): zeta.append(x2 - b[i].reshape(nsim,1) * x1 - x1av * xi[i]) # calculate variance for all a and b bvar = numpy.zeros((4,nsim)) avar = numpy.zeros((4,nsim)) for i in xrange(modelint): bvar[i] = xi[i].var(axis=1,keepdims=False)/ npts avar[i] = zeta[i].var(axis=1,keepdims=False) / npts return a, b, avar, bvar, xi, zeta def _bess(npts, x1, x2, x1err, x2err, cerr): """ Do the entire regression calculation for 4 slopes: OLS(Y|X), OLS(X|Y), bisector, orthogonal """ # calculate sigma's for datapoints using length of confidence # intervals sig11var = sum(x1err ** 2) / npts sig22var = sum(x2err ** 2) / npts sig12var = sum(cerr) / npts # calculate means and variances x1av = numpy.average(x1) x1var = numpy.std(x1) ** 2 x2av = numpy.average(x2) x2var = numpy.std(x2) ** 2 covar_x1x2 = sum((x1 - x1av) * (x2 - x2av)) / npts # compute the regression slopes for OLS(X2|X1), OLS(X1|X2), # bisector and orthogonal b = numpy.zeros(4) b[0] = (covar_x1x2 - sig12var) / (x1var - sig11var) b[1] = (x2var - sig22var) / (covar_x1x2 - sig12var) b[2] = (b[0] * b[1] - 1 + numpy.sqrt((1 + b[0] ** 2) * \ (1 + b[1] ** 2))) / (b[0] + b[1]) b[3] = 0.5 * ((b[1] - 1 / b[0]) + numpy.sign(covar_x1x2) * \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) # compute intercepts for above 4 cases: a = x2av - b * x1av # set up variables to calculate standard deviations of slope # and intercept xi = [] xi.append(((x1 - x1av) * \ (x2 - b[0] * x1 - a[0]) + b[0] * x1err ** 2) 
/ \ (x1var - sig11var)) xi.append(((x2 - x2av) * (x2 - b[1] * x1 - a[1]) + x2err ** 2) / \ covar_x1x2) xi.append((xi[0] * (1 + b[1] ** 2) + xi[1] * (1 + b[0] ** 2)) / \ ((b[0] + b[1]) * \ numpy.sqrt((1 + b[0] ** 2) * (1 + b[1] ** 2)))) xi.append((xi[0] / b[0] ** 2 + xi[1]) * b[3] / \ numpy.sqrt(4 + (b[1] - 1 / b[0]) ** 2)) zeta = [] for i in xrange(4): zeta.append(x2 - b[i]*x1 - x1av*xi[i]) # calculate variance for all a and b bvar = numpy.zeros(4) avar = numpy.zeros(4) for i in xrange(4): bvar[i] = numpy.std(xi[i]) ** 2 / npts avar[i] = numpy.std(zeta[i]) ** 2 / npts return a, b, avar, bvar, xi, zeta def _bootspbec(npts, x, y, xerr, yerr, cerr): """ Bootstrap samples """ j = numpy.random.randint(npts, size = npts) xboot = x[j] xerrboot = xerr[j] yboot = y[j] yerrboot = yerr[j] cerrboot = cerr[j] return xboot, yboot, xerrboot, yerrboot, cerrboot # ---- Main routine starts here ---- # # convert to numpy arrays just in case x1 = numpy.array(x1) x2 = numpy.array(x2) x1err = numpy.array(x1err) x2err = numpy.array(x2err) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) cerr = numpy.array(cerr) models = [['yx', 'xy', 'bi', 'orth'], ['BCES(Y|X)', 'BCES(X|Y)', 'BCES Bisector', 'BCES Orthogonal']] # which to return? j = models[0].index(model) npts = len(x1) # are the errors defined? if len(x1err) == 0: x1err = numpy.zeros(npts) if len(x2err) == 0: x2err = numpy.zeros(npts) if len(cerr) == 0: cerr = numpy.zeros(npts) if verbose == 'debug': print 'x1 =', x1 print 'x1err =', x1err print 'x2 =', x2 print 'x2err =', x2err print 'cerr =', cerr print '\n ** Returning values for', models[1][j], '**' if bootstrap is not False: print ' with errors from %d bootstrap resamplings' %bootstrap print '' # calculate nominal fits bessresults = _bess(npts, x1, x2, x1err, x2err, cerr) (a, b, avar, bvar, xi, zeta) = bessresults # covariance between normalization and slope if full_output: covar_ab = numpy.cov(xi[j], zeta[j]) if bootstrap is not False: # make bootstrap simulated datasets, and compute averages and # standard deviations of regression coefficients asum = numpy.zeros(4) assum = numpy.zeros(4) bsum = numpy.zeros(4) bssum = numpy.zeros(4) sda = numpy.zeros(4) sdb = numpy.zeros(4) for i in xrange(bootstrap): samples = _bootspbec(npts, x1, x2, x1err, x2err, cerr) (x1sim, x2sim, x1errsim, x2errsim, cerrsim) = samples besssim = _bess(npts, x1sim, x2sim, x1errsim, x2errsim, cerrsim) (asim, bsim, avarsim, bvarsim, xi, zeta) = besssim asum += asim assum += asim ** 2 bsum += bsim bssum += bsim ** 2 aavg = asum / bootstrap bavg = bsum / bootstrap for i in range(4): sdtest = assum[i] - bootstrap * aavg[i] ** 2 if sdtest > 0: sda[i] = numpy.sqrt(sdtest / (bootstrap - 1)) sdtest = bssum[i] - bootstrap * bavg[i] ** 2 if sdtest > 0: sdb[i] = numpy.sqrt(sdtest / (bootstrap - 1)) if verbose in ('normal', 'debug'): print '%s B err(B)' %('Fit'.ljust(19)), print ' A err(A)' for i in range(4): print '%s %9.2e +/- %8.2e %10.3e +/- %9.3e' \ %(models[1][i].ljust(16), b[i], numpy.sqrt(bvar[i]), a[i], numpy.sqrt(avar[i])) if bootstrap is not False: print '%s %9.2e +/- %8.2e %10.3e +/- %9.3e' \ %('bootstrap'.ljust(16), bavg[i], sdb[i], aavg[i], sda[i]) print '' if verbose == 'debug': print 'cov[%s] =' %models[model] print covar_ab if bootstrap is not False: if full_output: return (a[j], sda[j]), (b[j], sdb[j]), covar_ab else: return (a[j], sda[j]), (b[j], sdb[j]) if full_output: out = ((a[j], numpy.sqrt(avar[j])), (b[j], numpy.sqrt(bvar[j])), covar_ab) else: out = ((a[j], numpy.sqrt(avar[j])), (b[j], numpy.sqrt(bvar[j]))) 
return out
[ "def", "bces", "(", "x1", ",", "x2", ",", "x1err", "=", "[", "]", ",", "x2err", "=", "[", "]", ",", "cerr", "=", "[", "]", ",", "logify", "=", "True", ",", "model", "=", "'yx'", ",", "bootstrap", "=", "5000", ",", "verbose", "=", "'normal'", ",", "full_output", "=", "True", ")", ":", "def", "_bess_bootstrap", "(", "npts", ",", "x1", ",", "x2", ",", "x1err", ",", "x2err", ",", "cerr", ",", "nsim", ")", ":", "##added by Gerrit, July 2014", "##Unfortunately I needed a copy of the _bess function for bootstrapping.", "#Would be nicer if those two could be combined", "\"\"\"\n Do the entire regression calculation for 4 slopes:\n OLS(Y|X), OLS(X|Y), bisector, orthogonal\n \"\"\"", "#calculate sigma's for datapoints using length of confidence intervals", "sig11var", "=", "numpy", ".", "sum", "(", "x1err", "**", "2", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "/", "npts", "sig22var", "=", "numpy", ".", "sum", "(", "x2err", "**", "2", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "/", "npts", "sig12var", "=", "numpy", ".", "sum", "(", "cerr", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "/", "npts", "# calculate means and variances", "x1av", "=", "numpy", ".", "mean", "(", "x1", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "x1var", "=", "x1", ".", "var", "(", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "x2av", "=", "numpy", ".", "mean", "(", "x2", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "x2var", "=", "x2", ".", "var", "(", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "covar_x1x2", "=", "numpy", ".", "mean", "(", "(", "x1", "-", "numpy", ".", "mean", "(", "x1", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", ")", "*", "(", "x2", "-", "numpy", ".", "mean", "(", "x2", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", ")", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "# compute the regression slopes for OLS(X2|X1), OLS(X1|X2), ", "# bisector and orthogonal", "if", "model", "==", "'yx'", ":", "modelint", "=", "1", "else", ":", "modelint", "=", "4", "b", "=", "numpy", ".", "zeros", "(", "(", "modelint", ",", "nsim", ")", ")", "b", "[", "0", "]", "=", "(", "(", "covar_x1x2", "-", "sig12var", ")", "/", "(", "x1var", "-", "sig11var", ")", ")", ".", "flatten", "(", ")", "if", "model", "!=", "'yx'", ":", "b", "[", "1", "]", "=", "(", "(", "x2var", "-", "sig22var", ")", "/", "(", "covar_x1x2", "-", "sig12var", ")", ")", ".", "flatten", "(", ")", "b", "[", "2", "]", "=", "(", "(", "b", "[", "0", "]", "*", "b", "[", "1", "]", "-", "1", "+", "numpy", ".", "sqrt", "(", "(", "1", "+", "b", "[", "0", "]", "**", "2", ")", "*", "(", "1", "+", "b", "[", "1", "]", "**", "2", ")", ")", ")", "/", "(", "b", "[", "0", "]", "+", "b", "[", "1", "]", ")", ")", ".", "flatten", "(", ")", "b", "[", "3", "]", "=", "0.5", "*", "(", "(", "b", "[", "1", "]", "-", "1", "/", "b", "[", "0", "]", ")", "+", "numpy", ".", "sign", "(", "covar_x1x2", ")", ".", "flatten", "(", ")", "*", "numpy", ".", "sqrt", "(", "4", "+", "(", "b", "[", "1", "]", "-", "1", "/", "b", "[", "0", "]", ")", "**", "2", ")", ")", "# compute intercepts for above 4 cases:", "a", "=", "x2av", ".", "flatten", "(", ")", "-", "b", "*", "x1av", ".", "flatten", "(", ")", "# set up variables to calculate standard deviations of slope and ", "# intercept", "xi", "=", "[", "]", "xi", ".", "append", "(", "(", "(", "x1", "-", "x1av", ")", "*", "(", "x2", "-", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "*", "x1", "-", "a", "[", "0", "]", 
".", "reshape", "(", "nsim", ",", "1", ")", ")", "+", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "*", "x1err", "**", "2", ")", "/", "(", "x1var", "-", "sig11var", ")", ")", "if", "model", "!=", "'yx'", ":", "xi", ".", "append", "(", "(", "(", "x2", "-", "x2av", ")", "*", "(", "x2", "-", "b", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "*", "x1", "-", "a", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", ")", "+", "x2err", "**", "2", ")", "/", "covar_x1x2", ")", "xi", ".", "append", "(", "(", "xi", "[", "0", "]", "*", "(", "1", "+", "b", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "**", "2", ")", "+", "xi", "[", "1", "]", "*", "(", "1", "+", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "**", "2", ")", ")", "/", "(", "(", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "+", "b", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", ")", "*", "numpy", ".", "sqrt", "(", "(", "1", "+", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "**", "2", ")", "*", "(", "1", "+", "b", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "**", "2", ")", ")", ")", ")", "xi", ".", "append", "(", "(", "xi", "[", "0", "]", "/", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "**", "2", "+", "xi", "[", "1", "]", ")", "*", "b", "[", "3", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "/", "numpy", ".", "sqrt", "(", "4", "+", "(", "b", "[", "1", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "-", "1", "/", "b", "[", "0", "]", ".", "reshape", "(", "nsim", ",", "1", ")", ")", "**", "2", ")", ")", "zeta", "=", "[", "]", "for", "i", "in", "xrange", "(", "modelint", ")", ":", "zeta", ".", "append", "(", "x2", "-", "b", "[", "i", "]", ".", "reshape", "(", "nsim", ",", "1", ")", "*", "x1", "-", "x1av", "*", "xi", "[", "i", "]", ")", "# calculate variance for all a and b", "bvar", "=", "numpy", ".", "zeros", "(", "(", "4", ",", "nsim", ")", ")", "avar", "=", "numpy", ".", "zeros", "(", "(", "4", ",", "nsim", ")", ")", "for", "i", "in", "xrange", "(", "modelint", ")", ":", "bvar", "[", "i", "]", "=", "xi", "[", "i", "]", ".", "var", "(", "axis", "=", "1", ",", "keepdims", "=", "False", ")", "/", "npts", "avar", "[", "i", "]", "=", "zeta", "[", "i", "]", ".", "var", "(", "axis", "=", "1", ",", "keepdims", "=", "False", ")", "/", "npts", "return", "a", ",", "b", ",", "avar", ",", "bvar", ",", "xi", ",", "zeta", "def", "_bess", "(", "npts", ",", "x1", ",", "x2", ",", "x1err", ",", "x2err", ",", "cerr", ")", ":", "\"\"\"\n Do the entire regression calculation for 4 slopes:\n OLS(Y|X), OLS(X|Y), bisector, orthogonal\n \"\"\"", "# calculate sigma's for datapoints using length of confidence", "# intervals", "sig11var", "=", "sum", "(", "x1err", "**", "2", ")", "/", "npts", "sig22var", "=", "sum", "(", "x2err", "**", "2", ")", "/", "npts", "sig12var", "=", "sum", "(", "cerr", ")", "/", "npts", "# calculate means and variances", "x1av", "=", "numpy", ".", "average", "(", "x1", ")", "x1var", "=", "numpy", ".", "std", "(", "x1", ")", "**", "2", "x2av", "=", "numpy", ".", "average", "(", "x2", ")", "x2var", "=", "numpy", ".", "std", "(", "x2", ")", "**", "2", "covar_x1x2", "=", "sum", "(", "(", "x1", "-", "x1av", ")", "*", "(", "x2", "-", "x2av", ")", ")", "/", "npts", "# compute the regression slopes for OLS(X2|X1), OLS(X1|X2), ", "# bisector and orthogonal", "b", "=", "numpy", ".", "zeros", "(", "4", ")", "b", "[", "0", "]", "=", "(", "covar_x1x2", "-", "sig12var", ")", "/", "(", "x1var", 
"-", "sig11var", ")", "b", "[", "1", "]", "=", "(", "x2var", "-", "sig22var", ")", "/", "(", "covar_x1x2", "-", "sig12var", ")", "b", "[", "2", "]", "=", "(", "b", "[", "0", "]", "*", "b", "[", "1", "]", "-", "1", "+", "numpy", ".", "sqrt", "(", "(", "1", "+", "b", "[", "0", "]", "**", "2", ")", "*", "(", "1", "+", "b", "[", "1", "]", "**", "2", ")", ")", ")", "/", "(", "b", "[", "0", "]", "+", "b", "[", "1", "]", ")", "b", "[", "3", "]", "=", "0.5", "*", "(", "(", "b", "[", "1", "]", "-", "1", "/", "b", "[", "0", "]", ")", "+", "numpy", ".", "sign", "(", "covar_x1x2", ")", "*", "numpy", ".", "sqrt", "(", "4", "+", "(", "b", "[", "1", "]", "-", "1", "/", "b", "[", "0", "]", ")", "**", "2", ")", ")", "# compute intercepts for above 4 cases:", "a", "=", "x2av", "-", "b", "*", "x1av", "# set up variables to calculate standard deviations of slope", "# and intercept", "xi", "=", "[", "]", "xi", ".", "append", "(", "(", "(", "x1", "-", "x1av", ")", "*", "(", "x2", "-", "b", "[", "0", "]", "*", "x1", "-", "a", "[", "0", "]", ")", "+", "b", "[", "0", "]", "*", "x1err", "**", "2", ")", "/", "(", "x1var", "-", "sig11var", ")", ")", "xi", ".", "append", "(", "(", "(", "x2", "-", "x2av", ")", "*", "(", "x2", "-", "b", "[", "1", "]", "*", "x1", "-", "a", "[", "1", "]", ")", "+", "x2err", "**", "2", ")", "/", "covar_x1x2", ")", "xi", ".", "append", "(", "(", "xi", "[", "0", "]", "*", "(", "1", "+", "b", "[", "1", "]", "**", "2", ")", "+", "xi", "[", "1", "]", "*", "(", "1", "+", "b", "[", "0", "]", "**", "2", ")", ")", "/", "(", "(", "b", "[", "0", "]", "+", "b", "[", "1", "]", ")", "*", "numpy", ".", "sqrt", "(", "(", "1", "+", "b", "[", "0", "]", "**", "2", ")", "*", "(", "1", "+", "b", "[", "1", "]", "**", "2", ")", ")", ")", ")", "xi", ".", "append", "(", "(", "xi", "[", "0", "]", "/", "b", "[", "0", "]", "**", "2", "+", "xi", "[", "1", "]", ")", "*", "b", "[", "3", "]", "/", "numpy", ".", "sqrt", "(", "4", "+", "(", "b", "[", "1", "]", "-", "1", "/", "b", "[", "0", "]", ")", "**", "2", ")", ")", "zeta", "=", "[", "]", "for", "i", "in", "xrange", "(", "4", ")", ":", "zeta", ".", "append", "(", "x2", "-", "b", "[", "i", "]", "*", "x1", "-", "x1av", "*", "xi", "[", "i", "]", ")", "# calculate variance for all a and b", "bvar", "=", "numpy", ".", "zeros", "(", "4", ")", "avar", "=", "numpy", ".", "zeros", "(", "4", ")", "for", "i", "in", "xrange", "(", "4", ")", ":", "bvar", "[", "i", "]", "=", "numpy", ".", "std", "(", "xi", "[", "i", "]", ")", "**", "2", "/", "npts", "avar", "[", "i", "]", "=", "numpy", ".", "std", "(", "zeta", "[", "i", "]", ")", "**", "2", "/", "npts", "return", "a", ",", "b", ",", "avar", ",", "bvar", ",", "xi", ",", "zeta", "def", "_bootspbec", "(", "npts", ",", "x", ",", "y", ",", "xerr", ",", "yerr", ",", "cerr", ")", ":", "\"\"\"\n Bootstrap samples\n \"\"\"", "j", "=", "numpy", ".", "random", ".", "randint", "(", "npts", ",", "size", "=", "npts", ")", "xboot", "=", "x", "[", "j", "]", "xerrboot", "=", "xerr", "[", "j", "]", "yboot", "=", "y", "[", "j", "]", "yerrboot", "=", "yerr", "[", "j", "]", "cerrboot", "=", "cerr", "[", "j", "]", "return", "xboot", ",", "yboot", ",", "xerrboot", ",", "yerrboot", ",", "cerrboot", "# ---- Main routine starts here ---- #", "# convert to numpy arrays just in case", "x1", "=", "numpy", ".", "array", "(", "x1", ")", "x2", "=", "numpy", ".", "array", "(", "x2", ")", "x1err", "=", "numpy", ".", "array", "(", "x1err", ")", "x2err", "=", "numpy", ".", "array", "(", "x2err", ")", "if", "logify", ":", "x1", ",", "x2", ",", "x1err", ",", "x2err", 
"=", "to_log", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", "cerr", "=", "numpy", ".", "array", "(", "cerr", ")", "models", "=", "[", "[", "'yx'", ",", "'xy'", ",", "'bi'", ",", "'orth'", "]", ",", "[", "'BCES(Y|X)'", ",", "'BCES(X|Y)'", ",", "'BCES Bisector'", ",", "'BCES Orthogonal'", "]", "]", "# which to return?", "j", "=", "models", "[", "0", "]", ".", "index", "(", "model", ")", "npts", "=", "len", "(", "x1", ")", "# are the errors defined?", "if", "len", "(", "x1err", ")", "==", "0", ":", "x1err", "=", "numpy", ".", "zeros", "(", "npts", ")", "if", "len", "(", "x2err", ")", "==", "0", ":", "x2err", "=", "numpy", ".", "zeros", "(", "npts", ")", "if", "len", "(", "cerr", ")", "==", "0", ":", "cerr", "=", "numpy", ".", "zeros", "(", "npts", ")", "if", "verbose", "==", "'debug'", ":", "print", "'x1 ='", ",", "x1", "print", "'x1err ='", ",", "x1err", "print", "'x2 ='", ",", "x2", "print", "'x2err ='", ",", "x2err", "print", "'cerr ='", ",", "cerr", "print", "'\\n ** Returning values for'", ",", "models", "[", "1", "]", "[", "j", "]", ",", "'**'", "if", "bootstrap", "is", "not", "False", ":", "print", "' with errors from %d bootstrap resamplings'", "%", "bootstrap", "print", "''", "# calculate nominal fits", "bessresults", "=", "_bess", "(", "npts", ",", "x1", ",", "x2", ",", "x1err", ",", "x2err", ",", "cerr", ")", "(", "a", ",", "b", ",", "avar", ",", "bvar", ",", "xi", ",", "zeta", ")", "=", "bessresults", "# covariance between normalization and slope", "if", "full_output", ":", "covar_ab", "=", "numpy", ".", "cov", "(", "xi", "[", "j", "]", ",", "zeta", "[", "j", "]", ")", "if", "bootstrap", "is", "not", "False", ":", "# make bootstrap simulated datasets, and compute averages and", "# standard deviations of regression coefficients", "asum", "=", "numpy", ".", "zeros", "(", "4", ")", "assum", "=", "numpy", ".", "zeros", "(", "4", ")", "bsum", "=", "numpy", ".", "zeros", "(", "4", ")", "bssum", "=", "numpy", ".", "zeros", "(", "4", ")", "sda", "=", "numpy", ".", "zeros", "(", "4", ")", "sdb", "=", "numpy", ".", "zeros", "(", "4", ")", "for", "i", "in", "xrange", "(", "bootstrap", ")", ":", "samples", "=", "_bootspbec", "(", "npts", ",", "x1", ",", "x2", ",", "x1err", ",", "x2err", ",", "cerr", ")", "(", "x1sim", ",", "x2sim", ",", "x1errsim", ",", "x2errsim", ",", "cerrsim", ")", "=", "samples", "besssim", "=", "_bess", "(", "npts", ",", "x1sim", ",", "x2sim", ",", "x1errsim", ",", "x2errsim", ",", "cerrsim", ")", "(", "asim", ",", "bsim", ",", "avarsim", ",", "bvarsim", ",", "xi", ",", "zeta", ")", "=", "besssim", "asum", "+=", "asim", "assum", "+=", "asim", "**", "2", "bsum", "+=", "bsim", "bssum", "+=", "bsim", "**", "2", "aavg", "=", "asum", "/", "bootstrap", "bavg", "=", "bsum", "/", "bootstrap", "for", "i", "in", "range", "(", "4", ")", ":", "sdtest", "=", "assum", "[", "i", "]", "-", "bootstrap", "*", "aavg", "[", "i", "]", "**", "2", "if", "sdtest", ">", "0", ":", "sda", "[", "i", "]", "=", "numpy", ".", "sqrt", "(", "sdtest", "/", "(", "bootstrap", "-", "1", ")", ")", "sdtest", "=", "bssum", "[", "i", "]", "-", "bootstrap", "*", "bavg", "[", "i", "]", "**", "2", "if", "sdtest", ">", "0", ":", "sdb", "[", "i", "]", "=", "numpy", ".", "sqrt", "(", "sdtest", "/", "(", "bootstrap", "-", "1", ")", ")", "if", "verbose", "in", "(", "'normal'", ",", "'debug'", ")", ":", "print", "'%s B err(B)'", "%", "(", "'Fit'", ".", "ljust", "(", "19", ")", ")", ",", "print", "' A err(A)'", "for", "i", "in", "range", "(", "4", ")", ":", "print", "'%s %9.2e +/- %8.2e %10.3e +/- %9.3e'", 
"%", "(", "models", "[", "1", "]", "[", "i", "]", ".", "ljust", "(", "16", ")", ",", "b", "[", "i", "]", ",", "numpy", ".", "sqrt", "(", "bvar", "[", "i", "]", ")", ",", "a", "[", "i", "]", ",", "numpy", ".", "sqrt", "(", "avar", "[", "i", "]", ")", ")", "if", "bootstrap", "is", "not", "False", ":", "print", "'%s %9.2e +/- %8.2e %10.3e +/- %9.3e'", "%", "(", "'bootstrap'", ".", "ljust", "(", "16", ")", ",", "bavg", "[", "i", "]", ",", "sdb", "[", "i", "]", ",", "aavg", "[", "i", "]", ",", "sda", "[", "i", "]", ")", "print", "''", "if", "verbose", "==", "'debug'", ":", "print", "'cov[%s] ='", "%", "models", "[", "model", "]", "print", "covar_ab", "if", "bootstrap", "is", "not", "False", ":", "if", "full_output", ":", "return", "(", "a", "[", "j", "]", ",", "sda", "[", "j", "]", ")", ",", "(", "b", "[", "j", "]", ",", "sdb", "[", "j", "]", ")", ",", "covar_ab", "else", ":", "return", "(", "a", "[", "j", "]", ",", "sda", "[", "j", "]", ")", ",", "(", "b", "[", "j", "]", ",", "sdb", "[", "j", "]", ")", "if", "full_output", ":", "out", "=", "(", "(", "a", "[", "j", "]", ",", "numpy", ".", "sqrt", "(", "avar", "[", "j", "]", ")", ")", ",", "(", "b", "[", "j", "]", ",", "numpy", ".", "sqrt", "(", "bvar", "[", "j", "]", ")", ")", ",", "covar_ab", ")", "else", ":", "out", "=", "(", "(", "a", "[", "j", "]", ",", "numpy", ".", "sqrt", "(", "avar", "[", "j", "]", ")", ")", ",", "(", "b", "[", "j", "]", ",", "numpy", ".", "sqrt", "(", "bvar", "[", "j", "]", ")", ")", ")", "return", "out" ]
Bivariate, Correlated Errors and intrinsic Scatter (BCES) translated from the FORTRAN code by Christina Bird and Matthew Bershady (Akritas & Bershady, 1996) Linear regression in the presence of heteroscedastic errors on both variables and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation model : {'yx', 'xy', 'bi', 'orth'} BCES model with which to calculate regression. See Notes below for details. bootstrap : False or int (default 5000) get the errors from bootstrap resampling instead of the analytical prescription? if bootstrap is an int, it is the number of bootstrap resamplings verbose : str (default 'normal') Verbose level. Options are {'quiet', 'normal', 'debug'} full_output : bool (default True) If True, return also the covariance between the normalization and slope of the regression. Returns ------- a : tuple of length 2 Best-fit normalization and its uncertainty (a, da) b : tuple of length 2 Best-fit slope and its uncertainty (b, db) Optional outputs ---------------- cov_ab : 2x2 array of floats covariance between a and b. Returned if full_output is set to True. Notes ----- If verbose is normal or debug, the results from all the BCES models will be printed (still, only the one selected in *model* will be returned). the *model* parameter: -'yx' stands for BCES(Y|X) -'xy' stands for BCES(X|Y) -'bi' stands for BCES Bisector -'orth' stands for BCES Orthogonal
[ "Bivariate", "Correlated", "Errors", "and", "intrinsic", "Scatter", "(", "BCES", ")", "translated", "from", "the", "FORTRAN", "code", "by", "Christina", "Bird", "and", "Matthew", "Bershady", "(", "Akritas", "&", "Bershady", "1996", ")" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L12-L309
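A minimal usage sketch for the bces routine recorded above. It assumes the function can be imported from the module listed in the record (bin/lib/Math_Libraries/lnr_script.py), that the interpreter is Python 2 (the module uses print statements and xrange), and that numpy is installed; the import path, variable names and data values are illustrative only.

from lnr_script import bces   # assumed import path
import numpy
# synthetic power-law data y = 2 * x**0.5 with 10 per cent measurement errors
x = numpy.array([1., 2., 4., 8., 16., 32.])
y = 2. * x ** 0.5
xerr = 0.1 * x
yerr = 0.1 * y
# logify=True fits log10(y) = a + b*log10(x); model='yx' selects BCES(Y|X)
(a, da), (b, db), cov_ab = bces(x, y, x1err=xerr, x2err=yerr, model='yx',
                                bootstrap=1000, verbose='quiet',
                                full_output=True)
# for this input b should come out close to 0.5 and 10**a close to 2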
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
scatter
def scatter(slope, zero, x1, x2, x1err=[], x2err=[]): """ Used mainly to measure scatter for the BCES best-fit """ n = len(x1) x2pred = zero + slope * x1 s = sum((x2 - x2pred) ** 2) / (n - 1) if len(x2err) == n: s_obs = sum((x2err / x2) ** 2) / n s0 = s - s_obs print numpy.sqrt(s), numpy.sqrt(s_obs), numpy.sqrt(s0) return numpy.sqrt(s0)
python
def scatter(slope, zero, x1, x2, x1err=[], x2err=[]): """ Used mainly to measure scatter for the BCES best-fit """ n = len(x1) x2pred = zero + slope * x1 s = sum((x2 - x2pred) ** 2) / (n - 1) if len(x2err) == n: s_obs = sum((x2err / x2) ** 2) / n s0 = s - s_obs print numpy.sqrt(s), numpy.sqrt(s_obs), numpy.sqrt(s0) return numpy.sqrt(s0)
[ "def", "scatter", "(", "slope", ",", "zero", ",", "x1", ",", "x2", ",", "x1err", "=", "[", "]", ",", "x2err", "=", "[", "]", ")", ":", "n", "=", "len", "(", "x1", ")", "x2pred", "=", "zero", "+", "slope", "*", "x1", "s", "=", "sum", "(", "(", "x2", "-", "x2pred", ")", "**", "2", ")", "/", "(", "n", "-", "1", ")", "if", "len", "(", "x2err", ")", "==", "n", ":", "s_obs", "=", "sum", "(", "(", "x2err", "/", "x2", ")", "**", "2", ")", "/", "n", "s0", "=", "s", "-", "s_obs", "print", "numpy", ".", "sqrt", "(", "s", ")", ",", "numpy", ".", "sqrt", "(", "s_obs", ")", ",", "numpy", ".", "sqrt", "(", "s0", ")", "return", "numpy", ".", "sqrt", "(", "s0", ")" ]
Used mainly to measure scatter for the BCES best-fit
[ "Used", "mainly", "to", "measure", "scatter", "for", "the", "BCES", "best", "-", "fit" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L311-L323
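An illustrative call to the scatter helper above, which subtracts the variance implied by the y errors from the residual variance about a given line. The slope and zero point below are hypothetical best-fit values in log space, and the routine prints diagnostics with a Python 2 print statement, so the sketch assumes a Python 2 interpreter.

import numpy
logx = numpy.array([0.0, 0.3, 0.6, 0.9, 1.2])
logy = 0.3 + 0.5 * logx + numpy.array([0.05, -0.04, 0.06, -0.05, 0.03])
logyerr = numpy.full(5, 0.01)
# returns sqrt(residual variance minus the variance implied by the y errors)
s_intr = scatter(0.5, 0.3, logx, logy, x2err=logyerr)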
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
kelly
def kelly(x1, x2, x1err=[], x2err=[], cerr=[], logify=True, miniter=5000, maxiter=1e5, metro=True, silent=True): """ Python wrapper for the linear regression MCMC of Kelly (2007). Requires pidly (http://astronomy.sussex.ac.uk/~anthonys/pidly/) and an IDL license. Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables """ import pidly n = len(x1) if len(x2) != n: raise ValueError('x1 and x2 must have same length') if len(x1err) == 0: x1err = numpy.zeros(n) if len(x2err) == 0: x2err = numpy.zeros(n) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) idl = pidly.IDL() idl('x1 = %s' %list(x1)) idl('x2 = %s' %list(x2)) cmd = 'linmix_err, x1, x2, fit' if len(x1err) == n: idl('x1err = %s' %list(x1err)) cmd += ', xsig=x1err' if len(x2err) == n: idl('x2err = %s' %list(x2err)) cmd += ', ysig=x2err' if len(cerr) == n: idl('cerr = %s' %list(cerr)) cmd += ', xycov=cerr' cmd += ', miniter=%d, maxiter=%d' %(miniter, maxiter) if metro: cmd += ', /metro' if silent: cmd += ', /silent' idl(cmd) alpha = idl.ev('fit.alpha') beta = idl.ev('fit.beta') sigma = numpy.sqrt(idl.ev('fit.sigsqr')) return alpha, beta, sigma
python
def kelly(x1, x2, x1err=[], x2err=[], cerr=[], logify=True, miniter=5000, maxiter=1e5, metro=True, silent=True): """ Python wrapper for the linear regression MCMC of Kelly (2007). Requires pidly (http://astronomy.sussex.ac.uk/~anthonys/pidly/) and an IDL license. Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables """ import pidly n = len(x1) if len(x2) != n: raise ValueError('x1 and x2 must have same length') if len(x1err) == 0: x1err = numpy.zeros(n) if len(x2err) == 0: x2err = numpy.zeros(n) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) idl = pidly.IDL() idl('x1 = %s' %list(x1)) idl('x2 = %s' %list(x2)) cmd = 'linmix_err, x1, x2, fit' if len(x1err) == n: idl('x1err = %s' %list(x1err)) cmd += ', xsig=x1err' if len(x2err) == n: idl('x2err = %s' %list(x2err)) cmd += ', ysig=x2err' if len(cerr) == n: idl('cerr = %s' %list(cerr)) cmd += ', xycov=cerr' cmd += ', miniter=%d, maxiter=%d' %(miniter, maxiter) if metro: cmd += ', /metro' if silent: cmd += ', /silent' idl(cmd) alpha = idl.ev('fit.alpha') beta = idl.ev('fit.beta') sigma = numpy.sqrt(idl.ev('fit.sigsqr')) return alpha, beta, sigma
[ "def", "kelly", "(", "x1", ",", "x2", ",", "x1err", "=", "[", "]", ",", "x2err", "=", "[", "]", ",", "cerr", "=", "[", "]", ",", "logify", "=", "True", ",", "miniter", "=", "5000", ",", "maxiter", "=", "1e5", ",", "metro", "=", "True", ",", "silent", "=", "True", ")", ":", "import", "pidly", "n", "=", "len", "(", "x1", ")", "if", "len", "(", "x2", ")", "!=", "n", ":", "raise", "ValueError", "(", "'x1 and x2 must have same length'", ")", "if", "len", "(", "x1err", ")", "==", "0", ":", "x1err", "=", "numpy", ".", "zeros", "(", "n", ")", "if", "len", "(", "x2err", ")", "==", "0", ":", "x2err", "=", "numpy", ".", "zeros", "(", "n", ")", "if", "logify", ":", "x1", ",", "x2", ",", "x1err", ",", "x2err", "=", "to_log", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", "idl", "=", "pidly", ".", "IDL", "(", ")", "idl", "(", "'x1 = %s'", "%", "list", "(", "x1", ")", ")", "idl", "(", "'x2 = %s'", "%", "list", "(", "x2", ")", ")", "cmd", "=", "'linmix_err, x1, x2, fit'", "if", "len", "(", "x1err", ")", "==", "n", ":", "idl", "(", "'x1err = %s'", "%", "list", "(", "x1err", ")", ")", "cmd", "+=", "', xsig=x1err'", "if", "len", "(", "x2err", ")", "==", "n", ":", "idl", "(", "'x2err = %s'", "%", "list", "(", "x2err", ")", ")", "cmd", "+=", "', ysig=x2err'", "if", "len", "(", "cerr", ")", "==", "n", ":", "idl", "(", "'cerr = %s'", "%", "list", "(", "cerr", ")", ")", "cmd", "+=", "', xycov=cerr'", "cmd", "+=", "', miniter=%d, maxiter=%d'", "%", "(", "miniter", ",", "maxiter", ")", "if", "metro", ":", "cmd", "+=", "', /metro'", "if", "silent", ":", "cmd", "+=", "', /silent'", "idl", "(", "cmd", ")", "alpha", "=", "idl", ".", "ev", "(", "'fit.alpha'", ")", "beta", "=", "idl", ".", "ev", "(", "'fit.beta'", ")", "sigma", "=", "numpy", ".", "sqrt", "(", "idl", ".", "ev", "(", "'fit.sigsqr'", ")", ")", "return", "alpha", ",", "beta", ",", "sigma" ]
Python wrapper for the linear regression MCMC of Kelly (2007). Requires pidly (http://astronomy.sussex.ac.uk/~anthonys/pidly/) and an IDL license. Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable cerr : array of floats (optional) Covariances of the uncertainties in the dependent and independent variables
[ "Python", "wrapper", "for", "the", "linear", "regression", "MCMC", "of", "Kelly", "(", "2007", ")", ".", "Requires", "pidly", "(", "http", ":", "//", "astronomy", ".", "sussex", ".", "ac", ".", "uk", "/", "~anthonys", "/", "pidly", "/", ")", "and", "an", "IDL", "license", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L325-L380
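A call sketch for the kelly wrapper above. Because it drives IDL's linmix_err through pidly, it only runs on a machine with an IDL licence, the linmix_err IDL routine and the pidly package installed, so this is illustrative usage rather than something testable here; the arrays are placeholders and the keyword values simply echo the defaults.

import numpy
x = numpy.array([1., 2., 4., 8.])
y = numpy.array([2.1, 2.9, 4.2, 5.6])
xerr, yerr = 0.1 * x, 0.1 * y
# alpha, beta, sigma hold the posterior draws returned by linmix_err for the
# intercept, slope and intrinsic scatter of the log10-log10 relation (logify=True)
alpha, beta, sigma = kelly(x, y, x1err=xerr, x2err=yerr,
                           miniter=5000, maxiter=int(1e5))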
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
mcmc
def mcmc(x1, x2, x1err=[], x2err=[], po=(1,1,0.5), logify=True, nsteps=5000, nwalkers=100, nburn=500, output='full'): """ Use emcee to find the best-fit linear relation or power law accounting for measurement uncertainties and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable po : tuple of 3 floats (optional) Initial guesses for zero point, slope, and intrinsic scatter. Results are not very sensitive to these values so they shouldn't matter a lot. logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation nsteps : int (default 5000) Number of steps each walker should take in the MCMC nwalkers : int (default 100) Number of MCMC walkers nburn : int (default 500) Number of samples to discard to give the MCMC enough time to converge. output : list of ints or 'full' (default 'full') If 'full', then return the full samples (except for burn-in section) for each parameter. Otherwise, each float corresponds to a percentile that will be returned for each parameter. Returns ------- See *output* argument above for return options. """ import emcee if len(x1err) == 0: x1err = numpy.ones(len(x1)) if len(x2err) == 0: x2err = numpy.ones(len(x1)) def lnlike(theta, x, y, xerr, yerr): a, b, s = theta model = a + b*x sigma = numpy.sqrt((b*xerr)**2 + yerr*2 + s**2) lglk = 2 * sum(numpy.log(sigma)) + \ sum(((y-model) / sigma) ** 2) + \ numpy.log(len(x)) * numpy.sqrt(2*numpy.pi) / 2 return -lglk def lnprior(theta): a, b, s = theta if s >= 0: return 0 return -numpy.inf def lnprob(theta, x, y, xerr, yerr): lp = lnprior(theta) return lp + lnlike(theta, x, y, xerr, yerr) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) start = numpy.array(po) ndim = len(start) pos = [start + 1e-4*numpy.random.randn(ndim) for i in range(nwalkers)] sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(x1,x2,x1err,x2err)) sampler.run_mcmc(pos, nsteps) samples = numpy.array([sampler.chain[:,nburn:,i].reshape(-1) \ for i in xrange(ndim)]) if logify: samples[2] *= numpy.log(10) if output == 'full': return samples else: try: values = [[numpy.percentile(s, o) for o in output] for s in samples] return values except TypeError: msg = 'ERROR: wrong value for argument output in mcmc().' msg += ' Must be "full" or list of ints.' print msg exit() return
python
def mcmc(x1, x2, x1err=[], x2err=[], po=(1,1,0.5), logify=True, nsteps=5000, nwalkers=100, nburn=500, output='full'): """ Use emcee to find the best-fit linear relation or power law accounting for measurement uncertainties and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable po : tuple of 3 floats (optional) Initial guesses for zero point, slope, and intrinsic scatter. Results are not very sensitive to these values so they shouldn't matter a lot. logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation nsteps : int (default 5000) Number of steps each walker should take in the MCMC nwalkers : int (default 100) Number of MCMC walkers nburn : int (default 500) Number of samples to discard to give the MCMC enough time to converge. output : list of ints or 'full' (default 'full') If 'full', then return the full samples (except for burn-in section) for each parameter. Otherwise, each float corresponds to a percentile that will be returned for each parameter. Returns ------- See *output* argument above for return options. """ import emcee if len(x1err) == 0: x1err = numpy.ones(len(x1)) if len(x2err) == 0: x2err = numpy.ones(len(x1)) def lnlike(theta, x, y, xerr, yerr): a, b, s = theta model = a + b*x sigma = numpy.sqrt((b*xerr)**2 + yerr*2 + s**2) lglk = 2 * sum(numpy.log(sigma)) + \ sum(((y-model) / sigma) ** 2) + \ numpy.log(len(x)) * numpy.sqrt(2*numpy.pi) / 2 return -lglk def lnprior(theta): a, b, s = theta if s >= 0: return 0 return -numpy.inf def lnprob(theta, x, y, xerr, yerr): lp = lnprior(theta) return lp + lnlike(theta, x, y, xerr, yerr) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) start = numpy.array(po) ndim = len(start) pos = [start + 1e-4*numpy.random.randn(ndim) for i in range(nwalkers)] sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(x1,x2,x1err,x2err)) sampler.run_mcmc(pos, nsteps) samples = numpy.array([sampler.chain[:,nburn:,i].reshape(-1) \ for i in xrange(ndim)]) if logify: samples[2] *= numpy.log(10) if output == 'full': return samples else: try: values = [[numpy.percentile(s, o) for o in output] for s in samples] return values except TypeError: msg = 'ERROR: wrong value for argument output in mcmc().' msg += ' Must be "full" or list of ints.' print msg exit() return
[ "def", "mcmc", "(", "x1", ",", "x2", ",", "x1err", "=", "[", "]", ",", "x2err", "=", "[", "]", ",", "po", "=", "(", "1", ",", "1", ",", "0.5", ")", ",", "logify", "=", "True", ",", "nsteps", "=", "5000", ",", "nwalkers", "=", "100", ",", "nburn", "=", "500", ",", "output", "=", "'full'", ")", ":", "import", "emcee", "if", "len", "(", "x1err", ")", "==", "0", ":", "x1err", "=", "numpy", ".", "ones", "(", "len", "(", "x1", ")", ")", "if", "len", "(", "x2err", ")", "==", "0", ":", "x2err", "=", "numpy", ".", "ones", "(", "len", "(", "x1", ")", ")", "def", "lnlike", "(", "theta", ",", "x", ",", "y", ",", "xerr", ",", "yerr", ")", ":", "a", ",", "b", ",", "s", "=", "theta", "model", "=", "a", "+", "b", "*", "x", "sigma", "=", "numpy", ".", "sqrt", "(", "(", "b", "*", "xerr", ")", "**", "2", "+", "yerr", "*", "2", "+", "s", "**", "2", ")", "lglk", "=", "2", "*", "sum", "(", "numpy", ".", "log", "(", "sigma", ")", ")", "+", "sum", "(", "(", "(", "y", "-", "model", ")", "/", "sigma", ")", "**", "2", ")", "+", "numpy", ".", "log", "(", "len", "(", "x", ")", ")", "*", "numpy", ".", "sqrt", "(", "2", "*", "numpy", ".", "pi", ")", "/", "2", "return", "-", "lglk", "def", "lnprior", "(", "theta", ")", ":", "a", ",", "b", ",", "s", "=", "theta", "if", "s", ">=", "0", ":", "return", "0", "return", "-", "numpy", ".", "inf", "def", "lnprob", "(", "theta", ",", "x", ",", "y", ",", "xerr", ",", "yerr", ")", ":", "lp", "=", "lnprior", "(", "theta", ")", "return", "lp", "+", "lnlike", "(", "theta", ",", "x", ",", "y", ",", "xerr", ",", "yerr", ")", "if", "logify", ":", "x1", ",", "x2", ",", "x1err", ",", "x2err", "=", "to_log", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", "start", "=", "numpy", ".", "array", "(", "po", ")", "ndim", "=", "len", "(", "start", ")", "pos", "=", "[", "start", "+", "1e-4", "*", "numpy", ".", "random", ".", "randn", "(", "ndim", ")", "for", "i", "in", "range", "(", "nwalkers", ")", "]", "sampler", "=", "emcee", ".", "EnsembleSampler", "(", "nwalkers", ",", "ndim", ",", "lnprob", ",", "args", "=", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", ")", "sampler", ".", "run_mcmc", "(", "pos", ",", "nsteps", ")", "samples", "=", "numpy", ".", "array", "(", "[", "sampler", ".", "chain", "[", ":", ",", "nburn", ":", ",", "i", "]", ".", "reshape", "(", "-", "1", ")", "for", "i", "in", "xrange", "(", "ndim", ")", "]", ")", "if", "logify", ":", "samples", "[", "2", "]", "*=", "numpy", ".", "log", "(", "10", ")", "if", "output", "==", "'full'", ":", "return", "samples", "else", ":", "try", ":", "values", "=", "[", "[", "numpy", ".", "percentile", "(", "s", ",", "o", ")", "for", "o", "in", "output", "]", "for", "s", "in", "samples", "]", "return", "values", "except", "TypeError", ":", "msg", "=", "'ERROR: wrong value for argument output in mcmc().'", "msg", "+=", "' Must be \"full\" or list of ints.'", "print", "msg", "exit", "(", ")", "return" ]
Use emcee to find the best-fit linear relation or power law accounting for measurement uncertainties and intrinsic scatter Parameters ---------- x1 : array of floats Independent variable, or observable x2 : array of floats Dependent variable x1err : array of floats (optional) Uncertainties on the independent variable x2err : array of floats (optional) Uncertainties on the dependent variable po : tuple of 3 floats (optional) Initial guesses for zero point, slope, and intrinsic scatter. Results are not very sensitive to these values so they shouldn't matter a lot. logify : bool (default True) Whether to take the log of the measurements in order to estimate the best-fit power law instead of linear relation nsteps : int (default 5000) Number of steps each walker should take in the MCMC nwalkers : int (default 100) Number of MCMC walkers nburn : int (default 500) Number of samples to discard to give the MCMC enough time to converge. output : list of ints or 'full' (default 'full') If 'full', then return the full samples (except for burn-in section) for each parameter. Otherwise, each float corresponds to a percentile that will be returned for each parameter. Returns ------- See *output* argument above for return options.
[ "Use", "emcee", "to", "find", "the", "best", "-", "fit", "linear", "relation", "or", "power", "law", "accounting", "for", "measurement", "uncertainties", "and", "intrinsic", "scatter" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L382-L468
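A usage sketch for the emcee-based mcmc routine above, assuming emcee and numpy are installed and a Python 2 interpreter (the routine relies on xrange and on a print statement in its error branch). The walker and step counts are kept deliberately small so the sketch finishes quickly; the data are synthetic.

import numpy
x = numpy.array([1., 2., 4., 8., 16., 32.])
y = 3. * x ** 0.6
xerr, yerr = 0.05 * x, 0.05 * y
# ask for the 16th/50th/84th percentiles of zero point, slope and intrinsic scatter
pcts = mcmc(x, y, x1err=xerr, x2err=yerr, po=(0.5, 0.6, 0.1),
            nsteps=1000, nwalkers=50, nburn=200, output=(16, 50, 84))
(a16, a50, a84), (b16, b50, b84), (s16, s50, s84) = pcts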
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
mle
def mle(x1, x2, x1err=[], x2err=[], cerr=[], s_int=True, po=(1,0,0.1), verbose=False, logify=True, full_output=False): """ Maximum Likelihood Estimation of best-fit parameters Parameters ---------- x1, x2 : float arrays the independent and dependent variables. x1err, x2err : float arrays (optional) measurement uncertainties on independent and dependent variables. Any of the two, or both, can be supplied. cerr : float array (same size as x1) covariance on the measurement errors s_int : boolean (default True) whether to include intrinsic scatter in the MLE. po : tuple of floats initial guess for free parameters. If s_int is True, then po must have 3 elements; otherwise it can have two (for the zero point and the slope) verbose : boolean (default False) verbose? logify : boolean (default True) whether to convert the values to log10's. This is to calculate the best-fit power law. Note that the result is given for the equation log(y)=a+b*log(x) -- i.e., the zero point must be converted to 10**a if logify=True full_output : boolean (default False) numpy.optimize.fmin's full_output argument Returns ------- a : float Maximum Likelihood Estimate of the zero point. Note that if logify=True, the power-law intercept is 10**a b : float Maximum Likelihood Estimate of the slope s : float (optional, if s_int=True) Maximum Likelihood Estimate of the intrinsic scatter """ from scipy import optimize n = len(x1) if len(x2) != n: raise ValueError('x1 and x2 must have same length') if len(x1err) == 0: x1err = numpy.ones(n) if len(x2err) == 0: x2err = numpy.ones(n) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) f = lambda a, b: a + b * x1 if s_int: w = lambda b, s: numpy.sqrt(b**2 * x1err**2 + x2err**2 + s**2) loglike = lambda p: 2 * sum(numpy.log(w(p[1],p[2]))) + \ sum(((x2 - f(p[0],p[1])) / w(p[1],p[2])) ** 2) + \ numpy.log(n * numpy.sqrt(2*numpy.pi)) / 2 else: w = lambda b: numpy.sqrt(b**2 * x1err**2 + x2err**2) loglike = lambda p: sum(numpy.log(w(p[1]))) + \ sum(((x2 - f(p[0],p[1])) / w(p[1])) ** 2) / 2 + \ numpy.log(n * numpy.sqrt(2*numpy.pi)) / 2 po = po[:2] out = optimize.fmin(loglike, po, disp=verbose, full_output=full_output) return out
python
def mle(x1, x2, x1err=[], x2err=[], cerr=[], s_int=True, po=(1,0,0.1), verbose=False, logify=True, full_output=False): """ Maximum Likelihood Estimation of best-fit parameters Parameters ---------- x1, x2 : float arrays the independent and dependent variables. x1err, x2err : float arrays (optional) measurement uncertainties on independent and dependent variables. Any of the two, or both, can be supplied. cerr : float array (same size as x1) covariance on the measurement errors s_int : boolean (default True) whether to include intrinsic scatter in the MLE. po : tuple of floats initial guess for free parameters. If s_int is True, then po must have 3 elements; otherwise it can have two (for the zero point and the slope) verbose : boolean (default False) verbose? logify : boolean (default True) whether to convert the values to log10's. This is to calculate the best-fit power law. Note that the result is given for the equation log(y)=a+b*log(x) -- i.e., the zero point must be converted to 10**a if logify=True full_output : boolean (default False) numpy.optimize.fmin's full_output argument Returns ------- a : float Maximum Likelihood Estimate of the zero point. Note that if logify=True, the power-law intercept is 10**a b : float Maximum Likelihood Estimate of the slope s : float (optional, if s_int=True) Maximum Likelihood Estimate of the intrinsic scatter """ from scipy import optimize n = len(x1) if len(x2) != n: raise ValueError('x1 and x2 must have same length') if len(x1err) == 0: x1err = numpy.ones(n) if len(x2err) == 0: x2err = numpy.ones(n) if logify: x1, x2, x1err, x2err = to_log(x1, x2, x1err, x2err) f = lambda a, b: a + b * x1 if s_int: w = lambda b, s: numpy.sqrt(b**2 * x1err**2 + x2err**2 + s**2) loglike = lambda p: 2 * sum(numpy.log(w(p[1],p[2]))) + \ sum(((x2 - f(p[0],p[1])) / w(p[1],p[2])) ** 2) + \ numpy.log(n * numpy.sqrt(2*numpy.pi)) / 2 else: w = lambda b: numpy.sqrt(b**2 * x1err**2 + x2err**2) loglike = lambda p: sum(numpy.log(w(p[1]))) + \ sum(((x2 - f(p[0],p[1])) / w(p[1])) ** 2) / 2 + \ numpy.log(n * numpy.sqrt(2*numpy.pi)) / 2 po = po[:2] out = optimize.fmin(loglike, po, disp=verbose, full_output=full_output) return out
[ "def", "mle", "(", "x1", ",", "x2", ",", "x1err", "=", "[", "]", ",", "x2err", "=", "[", "]", ",", "cerr", "=", "[", "]", ",", "s_int", "=", "True", ",", "po", "=", "(", "1", ",", "0", ",", "0.1", ")", ",", "verbose", "=", "False", ",", "logify", "=", "True", ",", "full_output", "=", "False", ")", ":", "from", "scipy", "import", "optimize", "n", "=", "len", "(", "x1", ")", "if", "len", "(", "x2", ")", "!=", "n", ":", "raise", "ValueError", "(", "'x1 and x2 must have same length'", ")", "if", "len", "(", "x1err", ")", "==", "0", ":", "x1err", "=", "numpy", ".", "ones", "(", "n", ")", "if", "len", "(", "x2err", ")", "==", "0", ":", "x2err", "=", "numpy", ".", "ones", "(", "n", ")", "if", "logify", ":", "x1", ",", "x2", ",", "x1err", ",", "x2err", "=", "to_log", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", "f", "=", "lambda", "a", ",", "b", ":", "a", "+", "b", "*", "x1", "if", "s_int", ":", "w", "=", "lambda", "b", ",", "s", ":", "numpy", ".", "sqrt", "(", "b", "**", "2", "*", "x1err", "**", "2", "+", "x2err", "**", "2", "+", "s", "**", "2", ")", "loglike", "=", "lambda", "p", ":", "2", "*", "sum", "(", "numpy", ".", "log", "(", "w", "(", "p", "[", "1", "]", ",", "p", "[", "2", "]", ")", ")", ")", "+", "sum", "(", "(", "(", "x2", "-", "f", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ")", ")", "/", "w", "(", "p", "[", "1", "]", ",", "p", "[", "2", "]", ")", ")", "**", "2", ")", "+", "numpy", ".", "log", "(", "n", "*", "numpy", ".", "sqrt", "(", "2", "*", "numpy", ".", "pi", ")", ")", "/", "2", "else", ":", "w", "=", "lambda", "b", ":", "numpy", ".", "sqrt", "(", "b", "**", "2", "*", "x1err", "**", "2", "+", "x2err", "**", "2", ")", "loglike", "=", "lambda", "p", ":", "sum", "(", "numpy", ".", "log", "(", "w", "(", "p", "[", "1", "]", ")", ")", ")", "+", "sum", "(", "(", "(", "x2", "-", "f", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ")", ")", "/", "w", "(", "p", "[", "1", "]", ")", ")", "**", "2", ")", "/", "2", "+", "numpy", ".", "log", "(", "n", "*", "numpy", ".", "sqrt", "(", "2", "*", "numpy", ".", "pi", ")", ")", "/", "2", "po", "=", "po", "[", ":", "2", "]", "out", "=", "optimize", ".", "fmin", "(", "loglike", ",", "po", ",", "disp", "=", "verbose", ",", "full_output", "=", "full_output", ")", "return", "out" ]
Maximum Likelihood Estimation of best-fit parameters Parameters ---------- x1, x2 : float arrays the independent and dependent variables. x1err, x2err : float arrays (optional) measurement uncertainties on independent and dependent variables. Any of the two, or both, can be supplied. cerr : float array (same size as x1) covariance on the measurement errors s_int : boolean (default True) whether to include intrinsic scatter in the MLE. po : tuple of floats initial guess for free parameters. If s_int is True, then po must have 3 elements; otherwise it can have two (for the zero point and the slope) verbose : boolean (default False) verbose? logify : boolean (default True) whether to convert the values to log10's. This is to calculate the best-fit power law. Note that the result is given for the equation log(y)=a+b*log(x) -- i.e., the zero point must be converted to 10**a if logify=True full_output : boolean (default False) numpy.optimize.fmin's full_output argument Returns ------- a : float Maximum Likelihood Estimate of the zero point. Note that if logify=True, the power-law intercept is 10**a b : float Maximum Likelihood Estimate of the slope s : float (optional, if s_int=True) Maximum Likelihood Estimate of the intrinsic scatter
[ "Maximum", "Likelihood", "Estimation", "of", "best", "-", "fit", "parameters" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L470-L535
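A usage sketch for the mle routine above, which minimises the negative log-likelihood with scipy.optimize.fmin. It assumes scipy and numpy are available and that the function is imported from the same Python 2 module as the other fitters; the starting guesses in po are arbitrary but must have three elements when s_int=True.

import numpy
x = numpy.array([1., 2., 4., 8., 16.])
y = 2. * x ** 0.5
xerr, yerr = 0.1 * x, 0.1 * y
# with the defaults (s_int=True, full_output=False) fmin returns the best-fit
# (zero point, slope, intrinsic scatter) of log10(y) = a + b*log10(x)
a_fit, b_fit, s_fit = mle(x, y, x1err=xerr, x2err=yerr, po=(0.3, 0.5, 0.1),
                          verbose=False)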
Vital-Fernandez/dazer
bin/lib/Math_Libraries/lnr_script.py
to_log
def to_log(x1, x2, x1err, x2err): """ Take linear measurements and uncertainties and transform to log values. """ logx1 = numpy.log10(numpy.array(x1)) logx2 = numpy.log10(numpy.array(x2)) x1err = numpy.log10(numpy.array(x1)+numpy.array(x1err)) - logx1 x2err = numpy.log10(numpy.array(x2)+numpy.array(x2err)) - logx2 return logx1, logx2, x1err, x2err
python
def to_log(x1, x2, x1err, x2err): """ Take linear measurements and uncertainties and transform to log values. """ logx1 = numpy.log10(numpy.array(x1)) logx2 = numpy.log10(numpy.array(x2)) x1err = numpy.log10(numpy.array(x1)+numpy.array(x1err)) - logx1 x2err = numpy.log10(numpy.array(x2)+numpy.array(x2err)) - logx2 return logx1, logx2, x1err, x2err
[ "def", "to_log", "(", "x1", ",", "x2", ",", "x1err", ",", "x2err", ")", ":", "logx1", "=", "numpy", ".", "log10", "(", "numpy", ".", "array", "(", "x1", ")", ")", "logx2", "=", "numpy", ".", "log10", "(", "numpy", ".", "array", "(", "x2", ")", ")", "x1err", "=", "numpy", ".", "log10", "(", "numpy", ".", "array", "(", "x1", ")", "+", "numpy", ".", "array", "(", "x1err", ")", ")", "-", "logx1", "x2err", "=", "numpy", ".", "log10", "(", "numpy", ".", "array", "(", "x2", ")", "+", "numpy", ".", "array", "(", "x2err", ")", ")", "-", "logx2", "return", "logx1", ",", "logx2", ",", "x1err", ",", "x2err" ]
Take linear measurements and uncertainties and transform to log values.
[ "Take", "linear", "measurements", "and", "uncertainties", "and", "transform", "to", "log", "values", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Math_Libraries/lnr_script.py#L537-L546
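A worked example of the to_log conversion above. The returned uncertainties are one-sided: each is log10(value + error) - log10(value), i.e. the upper error bar expressed in dex, which is how the logify=True branches of the other routines in this module obtain their log-space errors.

logx, logy, logxerr, logyerr = to_log([10.0, 100.0], [5.0, 50.0],
                                      [1.0, 10.0], [0.5, 5.0])
# logx == [1.0, 2.0]; every input error is 10 per cent, so each returned
# uncertainty equals log10(1.1), roughly 0.0414 dex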
novopl/peltak
src/peltak/core/fs.py
wrap_paths
def wrap_paths(paths): # type: (list[str]) -> str """ Put quotes around all paths and join them with space in-between. """ if isinstance(paths, string_types): raise ValueError( "paths cannot be a string. " "Use array with one element instead." ) return ' '.join('"' + path + '"' for path in paths)
python
def wrap_paths(paths): # type: (list[str]) -> str """ Put quotes around all paths and join them with space in-between. """ if isinstance(paths, string_types): raise ValueError( "paths cannot be a string. " "Use array with one element instead." ) return ' '.join('"' + path + '"' for path in paths)
[ "def", "wrap_paths", "(", "paths", ")", ":", "# type: (list[str]) -> str", "if", "isinstance", "(", "paths", ",", "string_types", ")", ":", "raise", "ValueError", "(", "\"paths cannot be a string. \"", "\"Use array with one element instead.\"", ")", "return", "' '", ".", "join", "(", "'\"'", "+", "path", "+", "'\"'", "for", "path", "in", "paths", ")" ]
Put quotes around all paths and join them with space in-between.
[ "Put", "quotes", "around", "all", "paths", "and", "join", "them", "with", "space", "in", "-", "between", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/fs.py#L32-L40
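A small illustration of wrap_paths above, presumably so that paths containing spaces survive when the joined string is later handed to a shell command; the paths are made up. Passing a bare string raises ValueError, which guards against silently iterating over its characters.

quoted = wrap_paths(['src/peltak', 'docs/my file.rst'])
# quoted == '"src/peltak" "docs/my file.rst"'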
novopl/peltak
src/peltak/core/fs.py
filtered_walk
def filtered_walk(path, include=None, exclude=None): # type: (str, List[str], List[str]) -> Generator[str] """ Walk recursively starting at *path* excluding files matching *exclude* Args: path (str): A starting path. This has to be an existing directory. include (list[str]): A white list of glob patterns. If given, only files that match those globs will be yielded (filtered by exclude). exclude (list[str]): A list of glob string patterns to test against. If the file/path matches any of those patters, it will be filtered out. Returns: Generator[str]: A generator yielding all the files that do not match any pattern in ``exclude``. """ exclude = exclude or [] if not isdir(path): raise ValueError("Cannot walk files, only directories") files = os.listdir(path) for name in files: filename = normpath(join(path, name)) # If excluded, completely skip it. Will not recurse into directories if search_globs(filename, exclude): continue # If we have a whitelist and the pattern matches, yield it. If the # pattern didn't match and it's a dir, it will still be recursively # processed. if include is None or match_globs(filename, include): yield filename if isdir(filename): for p in filtered_walk(filename, include, exclude): yield p
python
def filtered_walk(path, include=None, exclude=None): # type: (str, List[str], List[str]) -> Generator[str] """ Walk recursively starting at *path* excluding files matching *exclude* Args: path (str): A starting path. This has to be an existing directory. include (list[str]): A white list of glob patterns. If given, only files that match those globs will be yielded (filtered by exclude). exclude (list[str]): A list of glob string patterns to test against. If the file/path matches any of those patters, it will be filtered out. Returns: Generator[str]: A generator yielding all the files that do not match any pattern in ``exclude``. """ exclude = exclude or [] if not isdir(path): raise ValueError("Cannot walk files, only directories") files = os.listdir(path) for name in files: filename = normpath(join(path, name)) # If excluded, completely skip it. Will not recurse into directories if search_globs(filename, exclude): continue # If we have a whitelist and the pattern matches, yield it. If the # pattern didn't match and it's a dir, it will still be recursively # processed. if include is None or match_globs(filename, include): yield filename if isdir(filename): for p in filtered_walk(filename, include, exclude): yield p
[ "def", "filtered_walk", "(", "path", ",", "include", "=", "None", ",", "exclude", "=", "None", ")", ":", "# type: (str, List[str], List[str]) -> Generator[str]", "exclude", "=", "exclude", "or", "[", "]", "if", "not", "isdir", "(", "path", ")", ":", "raise", "ValueError", "(", "\"Cannot walk files, only directories\"", ")", "files", "=", "os", ".", "listdir", "(", "path", ")", "for", "name", "in", "files", ":", "filename", "=", "normpath", "(", "join", "(", "path", ",", "name", ")", ")", "# If excluded, completely skip it. Will not recurse into directories", "if", "search_globs", "(", "filename", ",", "exclude", ")", ":", "continue", "# If we have a whitelist and the pattern matches, yield it. If the", "# pattern didn't match and it's a dir, it will still be recursively", "# processed.", "if", "include", "is", "None", "or", "match_globs", "(", "filename", ",", "include", ")", ":", "yield", "filename", "if", "isdir", "(", "filename", ")", ":", "for", "p", "in", "filtered_walk", "(", "filename", ",", "include", ",", "exclude", ")", ":", "yield", "p" ]
Walk recursively starting at *path* excluding files matching *exclude* Args: path (str): A starting path. This has to be an existing directory. include (list[str]): A white list of glob patterns. If given, only files that match those globs will be yielded (filtered by exclude). exclude (list[str]): A list of glob string patterns to test against. If the file/path matches any of those patters, it will be filtered out. Returns: Generator[str]: A generator yielding all the files that do not match any pattern in ``exclude``.
[ "Walk", "recursively", "starting", "at", "*", "path", "*", "excluding", "files", "matching", "*", "exclude", "*" ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/fs.py#L43-L82
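A usage sketch for filtered_walk above. The directory name and glob patterns are invented for illustration; note that *path* must be an existing directory, that include only filters what is yielded (non-matching directories are still recursed into), and that a pattern starting with '/' is matched from the top of the walked path.

# yield python sources under src/, skipping byte-code caches and the top-level tests dir
for filename in filtered_walk('src', include=['*.py'],
                              exclude=['*__pycache__*', '/src/tests']):
    print(filename)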
novopl/peltak
src/peltak/core/fs.py
match_globs
def match_globs(path, patterns): # type: (str, List[str]) -> bool """ Test whether the given *path* matches any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patters, it will return True. Returns: bool: **True** if the *path* matches any pattern in *patterns*. """ for pattern in (p for p in patterns if p): if pattern.startswith('/'): regex = fnmatch.translate(pattern[1:]) temp_path = path[1:] if path.startswith('/') else path m = re.search(regex, temp_path) if m and m.start() == 0: return True elif fnmatch.fnmatch(path, pattern): return True return False
python
def match_globs(path, patterns): # type: (str, List[str]) -> bool """ Test whether the given *path* matches any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patters, it will return True. Returns: bool: **True** if the *path* matches any pattern in *patterns*. """ for pattern in (p for p in patterns if p): if pattern.startswith('/'): regex = fnmatch.translate(pattern[1:]) temp_path = path[1:] if path.startswith('/') else path m = re.search(regex, temp_path) if m and m.start() == 0: return True elif fnmatch.fnmatch(path, pattern): return True return False
[ "def", "match_globs", "(", "path", ",", "patterns", ")", ":", "# type: (str, List[str]) -> bool", "for", "pattern", "in", "(", "p", "for", "p", "in", "patterns", "if", "p", ")", ":", "if", "pattern", ".", "startswith", "(", "'/'", ")", ":", "regex", "=", "fnmatch", ".", "translate", "(", "pattern", "[", "1", ":", "]", ")", "temp_path", "=", "path", "[", "1", ":", "]", "if", "path", ".", "startswith", "(", "'/'", ")", "else", "path", "m", "=", "re", ".", "search", "(", "regex", ",", "temp_path", ")", "if", "m", "and", "m", ".", "start", "(", ")", "==", "0", ":", "return", "True", "elif", "fnmatch", ".", "fnmatch", "(", "path", ",", "pattern", ")", ":", "return", "True", "return", "False" ]
Test whether the given *path* matches any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patters, it will return True. Returns: bool: **True** if the *path* matches any pattern in *patterns*.
[ "Test", "whether", "the", "given", "*", "path", "*", "matches", "any", "patterns", "in", "*", "patterns", "*" ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/fs.py#L85-L113
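A few illustrative calls to match_globs above, showing its two branches: ordinary patterns go through fnmatch against the whole path, while a leading '/' anchors the pattern so it must match from the first character. The paths are made up.

match_globs('src/peltak/core/fs.py', ['*.py'])    # True  - fnmatch over the full path
match_globs('src/peltak/core/fs.py', ['/src/*'])  # True  - anchored pattern matches from the start
match_globs('lib/src/fs.py', ['/src/*'])          # False - anchored pattern may not start mid-path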
novopl/peltak
src/peltak/core/fs.py
search_globs
def search_globs(path, patterns): # type: (str, List[str]) -> bool """ Test whether the given *path* contains any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patters, it will return True. Returns: bool: **True** if the ``path`` matches any pattern in *patterns*. """ for pattern in (p for p in patterns if p): if pattern.startswith('/'): # If pattern starts with root it means it match from root only regex = fnmatch.translate(pattern[1:]) regex = regex.replace('\\Z', '') temp_path = path[1:] if path.startswith('/') else path m = re.search(regex, temp_path) if m and m.start() == 0: return True else: regex = fnmatch.translate(pattern) regex = regex.replace('\\Z', '') if re.search(regex, path): return True return False
python
def search_globs(path, patterns): # type: (str, List[str]) -> bool """ Test whether the given *path* contains any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patters, it will return True. Returns: bool: **True** if the ``path`` matches any pattern in *patterns*. """ for pattern in (p for p in patterns if p): if pattern.startswith('/'): # If pattern starts with root it means it match from root only regex = fnmatch.translate(pattern[1:]) regex = regex.replace('\\Z', '') temp_path = path[1:] if path.startswith('/') else path m = re.search(regex, temp_path) if m and m.start() == 0: return True else: regex = fnmatch.translate(pattern) regex = regex.replace('\\Z', '') if re.search(regex, path): return True return False
[ "def", "search_globs", "(", "path", ",", "patterns", ")", ":", "# type: (str, List[str]) -> bool", "for", "pattern", "in", "(", "p", "for", "p", "in", "patterns", "if", "p", ")", ":", "if", "pattern", ".", "startswith", "(", "'/'", ")", ":", "# If pattern starts with root it means it match from root only", "regex", "=", "fnmatch", ".", "translate", "(", "pattern", "[", "1", ":", "]", ")", "regex", "=", "regex", ".", "replace", "(", "'\\\\Z'", ",", "''", ")", "temp_path", "=", "path", "[", "1", ":", "]", "if", "path", ".", "startswith", "(", "'/'", ")", "else", "path", "m", "=", "re", ".", "search", "(", "regex", ",", "temp_path", ")", "if", "m", "and", "m", ".", "start", "(", ")", "==", "0", ":", "return", "True", "else", ":", "regex", "=", "fnmatch", ".", "translate", "(", "pattern", ")", "regex", "=", "regex", ".", "replace", "(", "'\\\\Z'", ",", "''", ")", "if", "re", ".", "search", "(", "regex", ",", "path", ")", ":", "return", "True", "return", "False" ]
Test whether the given *path* contains any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patterns, it will return True. Returns: bool: **True** if the ``path`` matches any pattern in *patterns*.
[ "Test", "whether", "the", "given", "*", "path", "*", "contains", "any", "patterns", "in", "*", "patterns", "*" ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/fs.py#L116-L149
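A sketch of the search_globs record above; unlike match_globs, a non-anchored pattern only needs to occur somewhere in the path. The paths and patterns are hypothetical:

from peltak.core.fs import search_globs

print(search_globs('/build/.tox/py37/log.txt', ['*.tox*']))   # True
print(search_globs('/build/.tox/py37/log.txt', ['/docs/*']))  # False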
novopl/peltak
src/peltak/core/fs.py
write_file
def write_file(path, content, mode='w'): # type: (Text, Union[Text,bytes], Text) -> None """ --pretend aware file writing. You can always write files manually but you should always handle the --pretend case. Args: path (str): content (str): mode (str): """ from peltak.core import context from peltak.core import log if context.get('pretend', False): log.info("Would overwrite <34>{path}<32> with:\n<90>{content}", path=path, content=content) else: with open(path, mode) as fp: fp.write(content)
python
def write_file(path, content, mode='w'): # type: (Text, Union[Text,bytes], Text) -> None """ --pretend aware file writing. You can always write files manually but you should always handle the --pretend case. Args: path (str): content (str): mode (str): """ from peltak.core import context from peltak.core import log if context.get('pretend', False): log.info("Would overwrite <34>{path}<32> with:\n<90>{content}", path=path, content=content) else: with open(path, mode) as fp: fp.write(content)
[ "def", "write_file", "(", "path", ",", "content", ",", "mode", "=", "'w'", ")", ":", "# type: (Text, Union[Text,bytes], Text) -> None", "from", "peltak", ".", "core", "import", "context", "from", "peltak", ".", "core", "import", "log", "if", "context", ".", "get", "(", "'pretend'", ",", "False", ")", ":", "log", ".", "info", "(", "\"Would overwrite <34>{path}<32> with:\\n<90>{content}\"", ",", "path", "=", "path", ",", "content", "=", "content", ")", "else", ":", "with", "open", "(", "path", ",", "mode", ")", "as", "fp", ":", "fp", ".", "write", "(", "content", ")" ]
--pretend aware file writing. You can always write files manually but you should always handle the --pretend case. Args: path (str): content (str): mode (str):
[ "--", "pretend", "aware", "file", "writing", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/fs.py#L152-L173
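A minimal sketch of the write_file record above; the path and content are made up, and whether anything is actually written depends on the 'pretend' flag held in peltak.core.context:

from peltak.core.fs import write_file

# Writes the file normally, or only logs the would-be content when the global
# --pretend flag is set.
write_file('build/version.txt', '1.2.3\n')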
novopl/peltak
src/peltak/commands/lint.py
lint_cli
def lint_cli(ctx, exclude, skip_untracked, commit_only): # type: (click.Context, List[str], bool, bool) -> None """ Run pep8 and pylint on all project files. You can configure the linting paths using the lint.paths config variable. This should be a list of paths that will be linted. If a path to a directory is given, all files in that directory and it's subdirectories will be used. The pep8 and pylint config paths are by default stored in ops/tools/pep8.ini and ops/tools/pylint.ini. You can customise those paths in your config with lint.pep8_cfg and lint.pylint_cfg variables. **Config Example**:: \b lint: pylint_cfg: 'ops/tools/pylint.ini' pep8_cfg: 'ops/tools/pep8.ini' paths: - 'src/mypkg' **Examples**:: \b $ peltak lint # Run linter in default mode, skip untracked $ peltak lint --commit # Lint only files staged for commit $ peltak lint --all # Lint all files, including untracked. $ peltak lint --pretend # Print the list of files to lint $ peltak lint -e "*.tox*" # Don't lint files inside .tox directory """ if ctx.invoked_subcommand: return from peltak.logic import lint lint.lint(exclude, skip_untracked, commit_only)
python
def lint_cli(ctx, exclude, skip_untracked, commit_only): # type: (click.Context, List[str], bool, bool) -> None """ Run pep8 and pylint on all project files. You can configure the linting paths using the lint.paths config variable. This should be a list of paths that will be linted. If a path to a directory is given, all files in that directory and it's subdirectories will be used. The pep8 and pylint config paths are by default stored in ops/tools/pep8.ini and ops/tools/pylint.ini. You can customise those paths in your config with lint.pep8_cfg and lint.pylint_cfg variables. **Config Example**:: \b lint: pylint_cfg: 'ops/tools/pylint.ini' pep8_cfg: 'ops/tools/pep8.ini' paths: - 'src/mypkg' **Examples**:: \b $ peltak lint # Run linter in default mode, skip untracked $ peltak lint --commit # Lint only files staged for commit $ peltak lint --all # Lint all files, including untracked. $ peltak lint --pretend # Print the list of files to lint $ peltak lint -e "*.tox*" # Don't lint files inside .tox directory """ if ctx.invoked_subcommand: return from peltak.logic import lint lint.lint(exclude, skip_untracked, commit_only)
[ "def", "lint_cli", "(", "ctx", ",", "exclude", ",", "skip_untracked", ",", "commit_only", ")", ":", "# type: (click.Context, List[str], bool, bool) -> None", "if", "ctx", ".", "invoked_subcommand", ":", "return", "from", "peltak", ".", "logic", "import", "lint", "lint", ".", "lint", "(", "exclude", ",", "skip_untracked", ",", "commit_only", ")" ]
Run pep8 and pylint on all project files. You can configure the linting paths using the lint.paths config variable. This should be a list of paths that will be linted. If a path to a directory is given, all files in that directory and it's subdirectories will be used. The pep8 and pylint config paths are by default stored in ops/tools/pep8.ini and ops/tools/pylint.ini. You can customise those paths in your config with lint.pep8_cfg and lint.pylint_cfg variables. **Config Example**:: \b lint: pylint_cfg: 'ops/tools/pylint.ini' pep8_cfg: 'ops/tools/pep8.ini' paths: - 'src/mypkg' **Examples**:: \b $ peltak lint # Run linter in default mode, skip untracked $ peltak lint --commit # Lint only files staged for commit $ peltak lint --all # Lint all files, including untracked. $ peltak lint --pretend # Print the list of files to lint $ peltak lint -e "*.tox*" # Don't lint files inside .tox directory
[ "Run", "pep8", "and", "pylint", "on", "all", "project", "files", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/commands/lint.py#L54-L90
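The lint_cli record above is a thin click wrapper; a hedged sketch of calling the underlying implementation directly, mirroring the wrapper's positional call (the argument values are illustrative):

from peltak.logic import lint

# exclude patterns, skip_untracked, commit_only -- same order as in lint_cli.
lint.lint(['*.tox*'], True, True)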
cons3rt/pycons3rt
pycons3rt/bash.py
run_command
def run_command(command, timeout_sec=3600.0, output=True): """Runs a command using the subprocess module :param command: List containing the command and all args :param timeout_sec (float) seconds to wait before killing the command. :param output (bool) True collects output, False ignores output :return: Dict containing the command output and return code :raises CommandError """ log = logging.getLogger(mod_logger + '.run_command') if not isinstance(command, list): msg = 'command arg must be a list' log.error(msg) raise CommandError(msg) if output: subproc_stdout = subprocess.PIPE subproc_stderr = subprocess.STDOUT else: subproc_stdout = None subproc_stderr = None command = map(str, command) command_str = ' '.join(command) timer = None log.debug('Running command: {c}'.format(c=command_str)) output_collector = '' try: log.debug('Opening subprocess...') subproc = subprocess.Popen( command, bufsize=1, stdin=open(os.devnull), stdout=subproc_stdout, stderr=subproc_stderr ) log.debug('Opened subprocess wih PID: {p}'.format(p=subproc.pid)) log.debug('Setting up process kill timer for PID {p} at {s} sec...'.format(p=subproc.pid, s=timeout_sec)) kill_proc = process_killer timer = Timer(timeout_sec, kill_proc, [subproc]) timer.start() if output: log.debug('Collecting and logging output...') with subproc.stdout: for line in iter(subproc.stdout.readline, b''): output_collector += line.rstrip() + '\n' print(">>> " + line.rstrip()) log.debug('Waiting for process completion...') subproc.wait() log.debug('Collecting the exit code...') code = subproc.poll() except ValueError: _, ex, trace = sys.exc_info() msg = 'Bad command supplied: {c}\n{e}'.format( c=command_str, e=str(ex) ) log.error(msg) raise CommandError, msg, trace except (OSError, IOError): _, ex, trace = sys.exc_info() msg = 'There was a problem running command: {c}\n{e}'.format( c=command_str, e=str(ex)) log.error(msg) raise CommandError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Command returned a non-zero exit code: {c}, return code: {cde}\n{e}'.format( c=command_str, cde=ex.returncode, e=ex) log.error(msg) raise CommandError, msg, trace finally: if timer is not None: log.debug('Cancelling the timer...') timer.cancel() else: log.debug('No need to cancel the timer.') # Collect exit code and output for return output = output_collector.strip() try: code = int(code) except ValueError: _, ex, trace = sys.exc_info() msg = 'Return code {c} could not be parsed into an int\n{e}'.format( c=code, e=str(ex)) log.error(msg) raise CommandError, msg, trace else: log.debug('Command executed and returned code: {c} with output:\n{o}'.format(c=code, o=output)) output = { 'output': output, 'code': code } return output
python
def run_command(command, timeout_sec=3600.0, output=True): """Runs a command using the subprocess module :param command: List containing the command and all args :param timeout_sec (float) seconds to wait before killing the command. :param output (bool) True collects output, False ignores output :return: Dict containing the command output and return code :raises CommandError """ log = logging.getLogger(mod_logger + '.run_command') if not isinstance(command, list): msg = 'command arg must be a list' log.error(msg) raise CommandError(msg) if output: subproc_stdout = subprocess.PIPE subproc_stderr = subprocess.STDOUT else: subproc_stdout = None subproc_stderr = None command = map(str, command) command_str = ' '.join(command) timer = None log.debug('Running command: {c}'.format(c=command_str)) output_collector = '' try: log.debug('Opening subprocess...') subproc = subprocess.Popen( command, bufsize=1, stdin=open(os.devnull), stdout=subproc_stdout, stderr=subproc_stderr ) log.debug('Opened subprocess wih PID: {p}'.format(p=subproc.pid)) log.debug('Setting up process kill timer for PID {p} at {s} sec...'.format(p=subproc.pid, s=timeout_sec)) kill_proc = process_killer timer = Timer(timeout_sec, kill_proc, [subproc]) timer.start() if output: log.debug('Collecting and logging output...') with subproc.stdout: for line in iter(subproc.stdout.readline, b''): output_collector += line.rstrip() + '\n' print(">>> " + line.rstrip()) log.debug('Waiting for process completion...') subproc.wait() log.debug('Collecting the exit code...') code = subproc.poll() except ValueError: _, ex, trace = sys.exc_info() msg = 'Bad command supplied: {c}\n{e}'.format( c=command_str, e=str(ex) ) log.error(msg) raise CommandError, msg, trace except (OSError, IOError): _, ex, trace = sys.exc_info() msg = 'There was a problem running command: {c}\n{e}'.format( c=command_str, e=str(ex)) log.error(msg) raise CommandError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Command returned a non-zero exit code: {c}, return code: {cde}\n{e}'.format( c=command_str, cde=ex.returncode, e=ex) log.error(msg) raise CommandError, msg, trace finally: if timer is not None: log.debug('Cancelling the timer...') timer.cancel() else: log.debug('No need to cancel the timer.') # Collect exit code and output for return output = output_collector.strip() try: code = int(code) except ValueError: _, ex, trace = sys.exc_info() msg = 'Return code {c} could not be parsed into an int\n{e}'.format( c=code, e=str(ex)) log.error(msg) raise CommandError, msg, trace else: log.debug('Command executed and returned code: {c} with output:\n{o}'.format(c=code, o=output)) output = { 'output': output, 'code': code } return output
[ "def", "run_command", "(", "command", ",", "timeout_sec", "=", "3600.0", ",", "output", "=", "True", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.run_command'", ")", "if", "not", "isinstance", "(", "command", ",", "list", ")", ":", "msg", "=", "'command arg must be a list'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "output", ":", "subproc_stdout", "=", "subprocess", ".", "PIPE", "subproc_stderr", "=", "subprocess", ".", "STDOUT", "else", ":", "subproc_stdout", "=", "None", "subproc_stderr", "=", "None", "command", "=", "map", "(", "str", ",", "command", ")", "command_str", "=", "' '", ".", "join", "(", "command", ")", "timer", "=", "None", "log", ".", "debug", "(", "'Running command: {c}'", ".", "format", "(", "c", "=", "command_str", ")", ")", "output_collector", "=", "''", "try", ":", "log", ".", "debug", "(", "'Opening subprocess...'", ")", "subproc", "=", "subprocess", ".", "Popen", "(", "command", ",", "bufsize", "=", "1", ",", "stdin", "=", "open", "(", "os", ".", "devnull", ")", ",", "stdout", "=", "subproc_stdout", ",", "stderr", "=", "subproc_stderr", ")", "log", ".", "debug", "(", "'Opened subprocess wih PID: {p}'", ".", "format", "(", "p", "=", "subproc", ".", "pid", ")", ")", "log", ".", "debug", "(", "'Setting up process kill timer for PID {p} at {s} sec...'", ".", "format", "(", "p", "=", "subproc", ".", "pid", ",", "s", "=", "timeout_sec", ")", ")", "kill_proc", "=", "process_killer", "timer", "=", "Timer", "(", "timeout_sec", ",", "kill_proc", ",", "[", "subproc", "]", ")", "timer", ".", "start", "(", ")", "if", "output", ":", "log", ".", "debug", "(", "'Collecting and logging output...'", ")", "with", "subproc", ".", "stdout", ":", "for", "line", "in", "iter", "(", "subproc", ".", "stdout", ".", "readline", ",", "b''", ")", ":", "output_collector", "+=", "line", ".", "rstrip", "(", ")", "+", "'\\n'", "print", "(", "\">>> \"", "+", "line", ".", "rstrip", "(", ")", ")", "log", ".", "debug", "(", "'Waiting for process completion...'", ")", "subproc", ".", "wait", "(", ")", "log", ".", "debug", "(", "'Collecting the exit code...'", ")", "code", "=", "subproc", ".", "poll", "(", ")", "except", "ValueError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Bad command supplied: {c}\\n{e}'", ".", "format", "(", "c", "=", "command_str", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "except", "(", "OSError", ",", "IOError", ")", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem running command: {c}\\n{e}'", ".", "format", "(", "c", "=", "command_str", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "except", "subprocess", ".", "CalledProcessError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Command returned a non-zero exit code: {c}, return code: {cde}\\n{e}'", ".", "format", "(", "c", "=", "command_str", ",", "cde", "=", "ex", ".", "returncode", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "finally", ":", "if", "timer", "is", "not", "None", ":", "log", ".", "debug", "(", "'Cancelling the timer...'", ")", "timer", ".", "cancel", "(", ")", "else", ":", "log", ".", "debug", "(", "'No need to cancel the timer.'", ")", "# Collect 
exit code and output for return", "output", "=", "output_collector", ".", "strip", "(", ")", "try", ":", "code", "=", "int", "(", "code", ")", "except", "ValueError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Return code {c} could not be parsed into an int\\n{e}'", ".", "format", "(", "c", "=", "code", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "else", ":", "log", ".", "debug", "(", "'Command executed and returned code: {c} with output:\\n{o}'", ".", "format", "(", "c", "=", "code", ",", "o", "=", "output", ")", ")", "output", "=", "{", "'output'", ":", "output", ",", "'code'", ":", "code", "}", "return", "output" ]
Runs a command using the subprocess module :param command: List containing the command and all args :param timeout_sec (float) seconds to wait before killing the command. :param output (bool) True collects output, False ignores output :return: Dict containing the command output and return code :raises CommandError
[ "Runs", "a", "command", "using", "the", "subprocess", "module" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L67-L158
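A usage sketch for the run_command record above. The command is an arbitrary example, and it assumes CommandError is importable from the same module (the record's code targets Python 2):

from pycons3rt.bash import run_command, CommandError

try:
    result = run_command(['ls', '-la', '/tmp'], timeout_sec=30.0)
except CommandError as exc:
    print('Command failed: {e}'.format(e=str(exc)))
else:
    print('exit code: {c}'.format(c=result['code']))
    print(result['output'])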
cons3rt/pycons3rt
pycons3rt/bash.py
get_ip_addresses
def get_ip_addresses(): """Gets the ip addresses from ifconfig :return: (dict) of devices and aliases with the IPv4 address """ log = logging.getLogger(mod_logger + '.get_ip_addresses') command = ['/sbin/ifconfig'] try: result = run_command(command) except CommandError: raise ifconfig = result['output'].strip() # Scan the ifconfig output for IPv4 addresses devices = {} parts = ifconfig.split() device = None for part in parts: if device is None: if 'eth' in part or 'eno' in part: device = part else: test = part.split(':', 1) if len(test) == 2: if test[0] == 'addr': ip_address = test[1] log.info('Found IP address %s on device %s', ip_address, device) devices[device] = ip_address device = None return devices
python
def get_ip_addresses(): """Gets the ip addresses from ifconfig :return: (dict) of devices and aliases with the IPv4 address """ log = logging.getLogger(mod_logger + '.get_ip_addresses') command = ['/sbin/ifconfig'] try: result = run_command(command) except CommandError: raise ifconfig = result['output'].strip() # Scan the ifconfig output for IPv4 addresses devices = {} parts = ifconfig.split() device = None for part in parts: if device is None: if 'eth' in part or 'eno' in part: device = part else: test = part.split(':', 1) if len(test) == 2: if test[0] == 'addr': ip_address = test[1] log.info('Found IP address %s on device %s', ip_address, device) devices[device] = ip_address device = None return devices
[ "def", "get_ip_addresses", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_ip_addresses'", ")", "command", "=", "[", "'/sbin/ifconfig'", "]", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "ifconfig", "=", "result", "[", "'output'", "]", ".", "strip", "(", ")", "# Scan the ifconfig output for IPv4 addresses", "devices", "=", "{", "}", "parts", "=", "ifconfig", ".", "split", "(", ")", "device", "=", "None", "for", "part", "in", "parts", ":", "if", "device", "is", "None", ":", "if", "'eth'", "in", "part", "or", "'eno'", "in", "part", ":", "device", "=", "part", "else", ":", "test", "=", "part", ".", "split", "(", "':'", ",", "1", ")", "if", "len", "(", "test", ")", "==", "2", ":", "if", "test", "[", "0", "]", "==", "'addr'", ":", "ip_address", "=", "test", "[", "1", "]", "log", ".", "info", "(", "'Found IP address %s on device %s'", ",", "ip_address", ",", "device", ")", "devices", "[", "device", "]", "=", "ip_address", "device", "=", "None", "return", "devices" ]
Gets the ip addresses from ifconfig :return: (dict) of devices and aliases with the IPv4 address
[ "Gets", "the", "ip", "addresses", "from", "ifconfig" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L235-L267
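A brief sketch of the get_ip_addresses record above; it simply iterates the returned device-to-address mapping:

from pycons3rt.bash import get_ip_addresses

for device, ip_address in get_ip_addresses().items():
    print('{d}: {i}'.format(d=device, i=ip_address))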
cons3rt/pycons3rt
pycons3rt/bash.py
get_mac_address
def get_mac_address(device_index=0): """Returns the Mac Address given a device index :param device_index: (int) Device index :return: (str) Mac address or None """ log = logging.getLogger(mod_logger + '.get_mac_address') command = ['ip', 'addr', 'show', 'eth{d}'.format(d=device_index)] log.info('Attempting to find a mac address at device index: {d}'.format(d=device_index)) try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() log.error('There was a problem running command, unable to determine mac address: {c}\n{e}'.format( c=command, e=str(ex))) return ipaddr = result['output'].split() get_next = False mac_address = None for part in ipaddr: if get_next: mac_address = part log.info('Found mac address: {m}'.format(m=mac_address)) break if 'link' in part: get_next = True if not mac_address: log.info('mac address not found for device: {d}'.format(d=device_index)) return mac_address
python
def get_mac_address(device_index=0): """Returns the Mac Address given a device index :param device_index: (int) Device index :return: (str) Mac address or None """ log = logging.getLogger(mod_logger + '.get_mac_address') command = ['ip', 'addr', 'show', 'eth{d}'.format(d=device_index)] log.info('Attempting to find a mac address at device index: {d}'.format(d=device_index)) try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() log.error('There was a problem running command, unable to determine mac address: {c}\n{e}'.format( c=command, e=str(ex))) return ipaddr = result['output'].split() get_next = False mac_address = None for part in ipaddr: if get_next: mac_address = part log.info('Found mac address: {m}'.format(m=mac_address)) break if 'link' in part: get_next = True if not mac_address: log.info('mac address not found for device: {d}'.format(d=device_index)) return mac_address
[ "def", "get_mac_address", "(", "device_index", "=", "0", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_mac_address'", ")", "command", "=", "[", "'ip'", ",", "'addr'", ",", "'show'", ",", "'eth{d}'", ".", "format", "(", "d", "=", "device_index", ")", "]", "log", ".", "info", "(", "'Attempting to find a mac address at device index: {d}'", ".", "format", "(", "d", "=", "device_index", ")", ")", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "error", "(", "'There was a problem running command, unable to determine mac address: {c}\\n{e}'", ".", "format", "(", "c", "=", "command", ",", "e", "=", "str", "(", "ex", ")", ")", ")", "return", "ipaddr", "=", "result", "[", "'output'", "]", ".", "split", "(", ")", "get_next", "=", "False", "mac_address", "=", "None", "for", "part", "in", "ipaddr", ":", "if", "get_next", ":", "mac_address", "=", "part", "log", ".", "info", "(", "'Found mac address: {m}'", ".", "format", "(", "m", "=", "mac_address", ")", ")", "break", "if", "'link'", "in", "part", ":", "get_next", "=", "True", "if", "not", "mac_address", ":", "log", ".", "info", "(", "'mac address not found for device: {d}'", ".", "format", "(", "d", "=", "device_index", ")", ")", "return", "mac_address" ]
Returns the Mac Address given a device index :param device_index: (int) Device index :return: (str) Mac address or None
[ "Returns", "the", "Mac", "Address", "given", "a", "device", "index" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L270-L298
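A hedged example for the get_mac_address record above; device index 0 maps to eth0 in the record's command:

from pycons3rt.bash import get_mac_address

mac = get_mac_address(device_index=0)
if mac:
    print('eth0 mac address: {m}'.format(m=mac))
else:
    print('no mac address found for eth0')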
cons3rt/pycons3rt
pycons3rt/bash.py
chmod
def chmod(path, mode, recursive=False): """Emulates bash chmod command This method sets the file permissions to the specified mode. :param path: (str) Full path to the file or directory :param mode: (str) Mode to be set (e.g. 0755) :param recursive: (bool) Set True to make a recursive call :return: int exit code of the chmod command :raises CommandError """ log = logging.getLogger(mod_logger + '.chmod') # Validate args if not isinstance(path, basestring): msg = 'path argument is not a string' log.error(msg) raise CommandError(msg) if not isinstance(mode, basestring): msg = 'mode argument is not a string' log.error(msg) raise CommandError(msg) # Ensure the item exists if not os.path.exists(path): msg = 'Item not found: {p}'.format(p=path) log.error(msg) raise CommandError(msg) # Create the chmod command command = ['chmod'] # Make it recursive if specified if recursive: command.append('-R') command.append(mode) command.append(path) try: result = run_command(command) except CommandError: raise log.info('chmod command exited with code: {c}'.format(c=result['code'])) return result['code']
python
def chmod(path, mode, recursive=False): """Emulates bash chmod command This method sets the file permissions to the specified mode. :param path: (str) Full path to the file or directory :param mode: (str) Mode to be set (e.g. 0755) :param recursive: (bool) Set True to make a recursive call :return: int exit code of the chmod command :raises CommandError """ log = logging.getLogger(mod_logger + '.chmod') # Validate args if not isinstance(path, basestring): msg = 'path argument is not a string' log.error(msg) raise CommandError(msg) if not isinstance(mode, basestring): msg = 'mode argument is not a string' log.error(msg) raise CommandError(msg) # Ensure the item exists if not os.path.exists(path): msg = 'Item not found: {p}'.format(p=path) log.error(msg) raise CommandError(msg) # Create the chmod command command = ['chmod'] # Make it recursive if specified if recursive: command.append('-R') command.append(mode) command.append(path) try: result = run_command(command) except CommandError: raise log.info('chmod command exited with code: {c}'.format(c=result['code'])) return result['code']
[ "def", "chmod", "(", "path", ",", "mode", ",", "recursive", "=", "False", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.chmod'", ")", "# Validate args", "if", "not", "isinstance", "(", "path", ",", "basestring", ")", ":", "msg", "=", "'path argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "mode", ",", "basestring", ")", ":", "msg", "=", "'mode argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the item exists", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "msg", "=", "'Item not found: {p}'", ".", "format", "(", "p", "=", "path", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Create the chmod command", "command", "=", "[", "'chmod'", "]", "# Make it recursive if specified", "if", "recursive", ":", "command", ".", "append", "(", "'-R'", ")", "command", ".", "append", "(", "mode", ")", "command", ".", "append", "(", "path", ")", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'chmod command exited with code: {c}'", ".", "format", "(", "c", "=", "result", "[", "'code'", "]", ")", ")", "return", "result", "[", "'code'", "]" ]
Emulates bash chmod command This method sets the file permissions to the specified mode. :param path: (str) Full path to the file or directory :param mode: (str) Mode to be set (e.g. 0755) :param recursive: (bool) Set True to make a recursive call :return: int exit code of the chmod command :raises CommandError
[ "Emulates", "bash", "chmod", "command" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L301-L342
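A short sketch of the chmod record above; note the mode must be passed as a string. The path is hypothetical, and CommandError is assumed importable from the same module:

from pycons3rt.bash import chmod, CommandError

try:
    code = chmod('/opt/app/scripts', '0755', recursive=True)
    print('chmod exited with code: {c}'.format(c=code))
except CommandError as exc:
    print('chmod failed: {e}'.format(e=str(exc)))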
cons3rt/pycons3rt
pycons3rt/bash.py
mkdir_p
def mkdir_p(path): """Emulates 'mkdir -p' in bash :param path: (str) Path to create :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.mkdir_p') if not isinstance(path, basestring): msg = 'path argument is not a string' log.error(msg) raise CommandError(msg) log.info('Attempting to create directory: %s', path) try: os.makedirs(path) except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(path): pass else: msg = 'Unable to create directory: {p}'.format(p=path) log.error(msg) raise CommandError(msg)
python
def mkdir_p(path): """Emulates 'mkdir -p' in bash :param path: (str) Path to create :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.mkdir_p') if not isinstance(path, basestring): msg = 'path argument is not a string' log.error(msg) raise CommandError(msg) log.info('Attempting to create directory: %s', path) try: os.makedirs(path) except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(path): pass else: msg = 'Unable to create directory: {p}'.format(p=path) log.error(msg) raise CommandError(msg)
[ "def", "mkdir_p", "(", "path", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.mkdir_p'", ")", "if", "not", "isinstance", "(", "path", ",", "basestring", ")", ":", "msg", "=", "'path argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Attempting to create directory: %s'", ",", "path", ")", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EEXIST", "and", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "pass", "else", ":", "msg", "=", "'Unable to create directory: {p}'", ".", "format", "(", "p", "=", "path", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")" ]
Emulates 'mkdir -p' in bash :param path: (str) Path to create :return: None :raises CommandError
[ "Emulates", "mkdir", "-", "p", "in", "bash" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L345-L366
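A minimal sketch of the mkdir_p record above; an already-existing directory is not treated as an error:

from pycons3rt.bash import mkdir_p

mkdir_p('/opt/app/logs')  # hypothetical path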
cons3rt/pycons3rt
pycons3rt/bash.py
source
def source(script): """Emulates 'source' command in bash :param script: (str) Full path to the script to source :return: Updated environment :raises CommandError """ log = logging.getLogger(mod_logger + '.source') if not isinstance(script, basestring): msg = 'script argument must be a string' log.error(msg) raise CommandError(msg) log.info('Attempting to source script: %s', script) try: pipe = subprocess.Popen(". %s; env" % script, stdout=subprocess.PIPE, shell=True) data = pipe.communicate()[0] except ValueError: _, ex, trace = sys.exc_info() msg = 'Invalid argument:\n{e}'.format(e=str(ex)) log.error(msg) raise CommandError, msg, trace except OSError: _, ex, trace = sys.exc_info() msg = 'File not found: {s}\n{e}'.format(s=script, e=str(ex)) raise CommandError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Script {s} returned a non-zero exit code: {c}\n{e}'.format( s=script, e=str(ex), c=ex.returncode) log.error(msg) raise CommandError, msg, trace env = {} log.debug('Adding environment variables from data: {d}'.format(d=data)) for line in data.splitlines(): entry = line.split("=", 1) if len(entry) != 2: log.warn('This property is not in prop=value format, and will be skipped: {p}'.format(p=line)) continue try: env[entry[0]] = entry[1] except IndexError: _, ex, trace = sys.exc_info() log.warn('IndexError: There was a problem setting environment variables from line: {p}\n{e}'.format( p=line, e=str(ex))) continue else: log.debug('Added environment variable {p}={v}'.format(p=entry[0], v=entry[1])) os.environ.update(env) return env
python
def source(script): """Emulates 'source' command in bash :param script: (str) Full path to the script to source :return: Updated environment :raises CommandError """ log = logging.getLogger(mod_logger + '.source') if not isinstance(script, basestring): msg = 'script argument must be a string' log.error(msg) raise CommandError(msg) log.info('Attempting to source script: %s', script) try: pipe = subprocess.Popen(". %s; env" % script, stdout=subprocess.PIPE, shell=True) data = pipe.communicate()[0] except ValueError: _, ex, trace = sys.exc_info() msg = 'Invalid argument:\n{e}'.format(e=str(ex)) log.error(msg) raise CommandError, msg, trace except OSError: _, ex, trace = sys.exc_info() msg = 'File not found: {s}\n{e}'.format(s=script, e=str(ex)) raise CommandError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Script {s} returned a non-zero exit code: {c}\n{e}'.format( s=script, e=str(ex), c=ex.returncode) log.error(msg) raise CommandError, msg, trace env = {} log.debug('Adding environment variables from data: {d}'.format(d=data)) for line in data.splitlines(): entry = line.split("=", 1) if len(entry) != 2: log.warn('This property is not in prop=value format, and will be skipped: {p}'.format(p=line)) continue try: env[entry[0]] = entry[1] except IndexError: _, ex, trace = sys.exc_info() log.warn('IndexError: There was a problem setting environment variables from line: {p}\n{e}'.format( p=line, e=str(ex))) continue else: log.debug('Added environment variable {p}={v}'.format(p=entry[0], v=entry[1])) os.environ.update(env) return env
[ "def", "source", "(", "script", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.source'", ")", "if", "not", "isinstance", "(", "script", ",", "basestring", ")", ":", "msg", "=", "'script argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Attempting to source script: %s'", ",", "script", ")", "try", ":", "pipe", "=", "subprocess", ".", "Popen", "(", "\". %s; env\"", "%", "script", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ")", "data", "=", "pipe", ".", "communicate", "(", ")", "[", "0", "]", "except", "ValueError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Invalid argument:\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "except", "OSError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'File not found: {s}\\n{e}'", ".", "format", "(", "s", "=", "script", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "CommandError", ",", "msg", ",", "trace", "except", "subprocess", ".", "CalledProcessError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Script {s} returned a non-zero exit code: {c}\\n{e}'", ".", "format", "(", "s", "=", "script", ",", "e", "=", "str", "(", "ex", ")", ",", "c", "=", "ex", ".", "returncode", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "env", "=", "{", "}", "log", ".", "debug", "(", "'Adding environment variables from data: {d}'", ".", "format", "(", "d", "=", "data", ")", ")", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "entry", "=", "line", ".", "split", "(", "\"=\"", ",", "1", ")", "if", "len", "(", "entry", ")", "!=", "2", ":", "log", ".", "warn", "(", "'This property is not in prop=value format, and will be skipped: {p}'", ".", "format", "(", "p", "=", "line", ")", ")", "continue", "try", ":", "env", "[", "entry", "[", "0", "]", "]", "=", "entry", "[", "1", "]", "except", "IndexError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "warn", "(", "'IndexError: There was a problem setting environment variables from line: {p}\\n{e}'", ".", "format", "(", "p", "=", "line", ",", "e", "=", "str", "(", "ex", ")", ")", ")", "continue", "else", ":", "log", ".", "debug", "(", "'Added environment variable {p}={v}'", ".", "format", "(", "p", "=", "entry", "[", "0", "]", ",", "v", "=", "entry", "[", "1", "]", ")", ")", "os", ".", "environ", ".", "update", "(", "env", ")", "return", "env" ]
Emulates 'source' command in bash :param script: (str) Full path to the script to source :return: Updated environment :raises CommandError
[ "Emulates", "source", "command", "in", "bash" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L369-L417
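A sketch of the source record above; the script path is hypothetical. Variables exported by the script end up both in the returned dict and in os.environ:

from pycons3rt.bash import source

env = source('/etc/profile.d/java.sh')
print(env.get('JAVA_HOME'))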
cons3rt/pycons3rt
pycons3rt/bash.py
yum_update
def yum_update(downloadonly=False, dest_dir='/tmp'): """Run a yum update on this system This public method runs the yum -y update command to update packages from yum. If downloadonly is set to true, the yum updates will be downloaded to the specified dest_dir. :param dest_dir: (str) Full path to the download directory :param downloadonly: Boolean :return: int exit code from the yum command :raises CommandError """ log = logging.getLogger(mod_logger + '.yum_update') # Type checks on the args if not isinstance(dest_dir, basestring): msg = 'dest_dir argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(downloadonly, bool): msg = 'downloadonly argument must be a bool' log.error(msg) raise CommandError(msg) # If downloadonly was True, download packages to dest_dir if downloadonly: # Create the destination directory if it does not exist log.info('Creating directory: %s', dest_dir) try: mkdir_p(dest_dir) except OSError: _, ex, trace = sys.exc_info() msg = 'Unable to create destination directory: {d}'.format( d=dest_dir) log.error(msg) raise CommandError, msg, trace # Build command string with downloadonly options specified command = ['yum', '-y', 'update', '--downloadonly', '--downloaddir={d}'.format(d=dest_dir)] log.info('Downloading updates from yum to %s...', dest_dir) else: # Build command string to update directly command = ['yum', '-y', 'update'] log.info('Installing yum updates from RHN...') # Run the command try: result = run_command(command) except CommandError: raise log.info('Yum update completed and exit with code: {c}'.format( c=result['code'])) return result['code']
python
def yum_update(downloadonly=False, dest_dir='/tmp'): """Run a yum update on this system This public method runs the yum -y update command to update packages from yum. If downloadonly is set to true, the yum updates will be downloaded to the specified dest_dir. :param dest_dir: (str) Full path to the download directory :param downloadonly: Boolean :return: int exit code from the yum command :raises CommandError """ log = logging.getLogger(mod_logger + '.yum_update') # Type checks on the args if not isinstance(dest_dir, basestring): msg = 'dest_dir argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(downloadonly, bool): msg = 'downloadonly argument must be a bool' log.error(msg) raise CommandError(msg) # If downloadonly was True, download packages to dest_dir if downloadonly: # Create the destination directory if it does not exist log.info('Creating directory: %s', dest_dir) try: mkdir_p(dest_dir) except OSError: _, ex, trace = sys.exc_info() msg = 'Unable to create destination directory: {d}'.format( d=dest_dir) log.error(msg) raise CommandError, msg, trace # Build command string with downloadonly options specified command = ['yum', '-y', 'update', '--downloadonly', '--downloaddir={d}'.format(d=dest_dir)] log.info('Downloading updates from yum to %s...', dest_dir) else: # Build command string to update directly command = ['yum', '-y', 'update'] log.info('Installing yum updates from RHN...') # Run the command try: result = run_command(command) except CommandError: raise log.info('Yum update completed and exit with code: {c}'.format( c=result['code'])) return result['code']
[ "def", "yum_update", "(", "downloadonly", "=", "False", ",", "dest_dir", "=", "'/tmp'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.yum_update'", ")", "# Type checks on the args", "if", "not", "isinstance", "(", "dest_dir", ",", "basestring", ")", ":", "msg", "=", "'dest_dir argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "downloadonly", ",", "bool", ")", ":", "msg", "=", "'downloadonly argument must be a bool'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# If downloadonly was True, download packages to dest_dir", "if", "downloadonly", ":", "# Create the destination directory if it does not exist", "log", ".", "info", "(", "'Creating directory: %s'", ",", "dest_dir", ")", "try", ":", "mkdir_p", "(", "dest_dir", ")", "except", "OSError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to create destination directory: {d}'", ".", "format", "(", "d", "=", "dest_dir", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "# Build command string with downloadonly options specified", "command", "=", "[", "'yum'", ",", "'-y'", ",", "'update'", ",", "'--downloadonly'", ",", "'--downloaddir={d}'", ".", "format", "(", "d", "=", "dest_dir", ")", "]", "log", ".", "info", "(", "'Downloading updates from yum to %s...'", ",", "dest_dir", ")", "else", ":", "# Build command string to update directly", "command", "=", "[", "'yum'", ",", "'-y'", ",", "'update'", "]", "log", ".", "info", "(", "'Installing yum updates from RHN...'", ")", "# Run the command", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'Yum update completed and exit with code: {c}'", ".", "format", "(", "c", "=", "result", "[", "'code'", "]", ")", ")", "return", "result", "[", "'code'", "]" ]
Run a yum update on this system This public method runs the yum -y update command to update packages from yum. If downloadonly is set to true, the yum updates will be downloaded to the specified dest_dir. :param dest_dir: (str) Full path to the download directory :param downloadonly: Boolean :return: int exit code from the yum command :raises CommandError
[ "Run", "a", "yum", "update", "on", "this", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L420-L473
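A hedged example for the yum_update record above, using the download-only mode with a made-up staging directory:

from pycons3rt.bash import yum_update

exit_code = yum_update(downloadonly=True, dest_dir='/tmp/yum-updates')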
cons3rt/pycons3rt
pycons3rt/bash.py
yum_install
def yum_install(packages, downloadonly=False, dest_dir='/tmp'): """Installs (or downloads) a list of packages from yum This public method installs a list of packages from yum or downloads the packages to the specified destination directory using the yum-downloadonly yum plugin. :param downloadonly: Boolean, set to only download the package and not install it :param packages: List of package names (str) to download param :param dest_dir: (str) Full path to the download directory :return: int exit code from the yum command :raises CommandError """ log = logging.getLogger(mod_logger + '.yum_install') # Type checks on the args if not isinstance(dest_dir, basestring): msg = 'dest_dir argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(packages, list): msg = 'packages argument must be a list' log.error(msg) raise CommandError(msg) if not isinstance(downloadonly, bool): msg = 'downloadonly argument must be a bool' log.error(msg) raise CommandError(msg) if not packages: msg = 'Empty list of packages provided' log.error(msg) raise CommandError(msg) for package in packages: # Ensure the package is specified as a string if not isinstance(package, basestring): msg = 'One of the packages was not specified as a string' log.error(msg) raise CommandError(msg) # Build the yum install command string command = ['yum', '-y', 'install'] + packages # If downloadonly was True, download packages to dest_dir if downloadonly: log.info('yum downloadonly was specified, adding additional options...') # Append downloadonly args to the command command += ['--downloadonly', '--downloaddir={d}'.format(d=dest_dir)] # Create the destination directory if it does not exist log.info('Creating directory: %s', dest_dir) try: mkdir_p(dest_dir) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to create destination directory: {d}'.format(d=dest_dir) log.error(msg) raise CommandError, msg, trace log.info('Downloading packages from yum to %s...', dest_dir) else: log.info('Installing yum packages from RHN...') # Run the yum install command try: result = run_command(command) except CommandError: raise log.info('Yum update completed and exit with code: {c}'.format( c=result['code'])) return result['code']
python
def yum_install(packages, downloadonly=False, dest_dir='/tmp'): """Installs (or downloads) a list of packages from yum This public method installs a list of packages from yum or downloads the packages to the specified destination directory using the yum-downloadonly yum plugin. :param downloadonly: Boolean, set to only download the package and not install it :param packages: List of package names (str) to download param :param dest_dir: (str) Full path to the download directory :return: int exit code from the yum command :raises CommandError """ log = logging.getLogger(mod_logger + '.yum_install') # Type checks on the args if not isinstance(dest_dir, basestring): msg = 'dest_dir argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(packages, list): msg = 'packages argument must be a list' log.error(msg) raise CommandError(msg) if not isinstance(downloadonly, bool): msg = 'downloadonly argument must be a bool' log.error(msg) raise CommandError(msg) if not packages: msg = 'Empty list of packages provided' log.error(msg) raise CommandError(msg) for package in packages: # Ensure the package is specified as a string if not isinstance(package, basestring): msg = 'One of the packages was not specified as a string' log.error(msg) raise CommandError(msg) # Build the yum install command string command = ['yum', '-y', 'install'] + packages # If downloadonly was True, download packages to dest_dir if downloadonly: log.info('yum downloadonly was specified, adding additional options...') # Append downloadonly args to the command command += ['--downloadonly', '--downloaddir={d}'.format(d=dest_dir)] # Create the destination directory if it does not exist log.info('Creating directory: %s', dest_dir) try: mkdir_p(dest_dir) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to create destination directory: {d}'.format(d=dest_dir) log.error(msg) raise CommandError, msg, trace log.info('Downloading packages from yum to %s...', dest_dir) else: log.info('Installing yum packages from RHN...') # Run the yum install command try: result = run_command(command) except CommandError: raise log.info('Yum update completed and exit with code: {c}'.format( c=result['code'])) return result['code']
[ "def", "yum_install", "(", "packages", ",", "downloadonly", "=", "False", ",", "dest_dir", "=", "'/tmp'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.yum_install'", ")", "# Type checks on the args", "if", "not", "isinstance", "(", "dest_dir", ",", "basestring", ")", ":", "msg", "=", "'dest_dir argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "packages", ",", "list", ")", ":", "msg", "=", "'packages argument must be a list'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "downloadonly", ",", "bool", ")", ":", "msg", "=", "'downloadonly argument must be a bool'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "packages", ":", "msg", "=", "'Empty list of packages provided'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "for", "package", "in", "packages", ":", "# Ensure the package is specified as a string", "if", "not", "isinstance", "(", "package", ",", "basestring", ")", ":", "msg", "=", "'One of the packages was not specified as a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Build the yum install command string", "command", "=", "[", "'yum'", ",", "'-y'", ",", "'install'", "]", "+", "packages", "# If downloadonly was True, download packages to dest_dir", "if", "downloadonly", ":", "log", ".", "info", "(", "'yum downloadonly was specified, adding additional options...'", ")", "# Append downloadonly args to the command", "command", "+=", "[", "'--downloadonly'", ",", "'--downloaddir={d}'", ".", "format", "(", "d", "=", "dest_dir", ")", "]", "# Create the destination directory if it does not exist", "log", ".", "info", "(", "'Creating directory: %s'", ",", "dest_dir", ")", "try", ":", "mkdir_p", "(", "dest_dir", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to create destination directory: {d}'", ".", "format", "(", "d", "=", "dest_dir", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Downloading packages from yum to %s...'", ",", "dest_dir", ")", "else", ":", "log", ".", "info", "(", "'Installing yum packages from RHN...'", ")", "# Run the yum install command", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'Yum update completed and exit with code: {c}'", ".", "format", "(", "c", "=", "result", "[", "'code'", "]", ")", ")", "return", "result", "[", "'code'", "]" ]
Installs (or downloads) a list of packages from yum This public method installs a list of packages from yum or downloads the packages to the specified destination directory using the yum-downloadonly yum plugin. :param downloadonly: Boolean, set to only download the package and not install it :param packages: List of package names (str) to download param :param dest_dir: (str) Full path to the download directory :return: int exit code from the yum command :raises CommandError
[ "Installs", "(", "or", "downloads", ")", "a", "list", "of", "packages", "from", "yum" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L476-L546
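A short sketch of the yum_install record above; the package names are illustrative:

from pycons3rt.bash import yum_install

exit_code = yum_install(['httpd', 'mod_ssl'])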
cons3rt/pycons3rt
pycons3rt/bash.py
rpm_install
def rpm_install(install_dir): """This method installs all RPM files in a specific dir :param install_dir: (str) Full path to the directory :return int exit code form the rpm command :raises CommandError """ log = logging.getLogger(mod_logger + '.rpm_install') # Type checks on the args if not isinstance(install_dir, basestring): msg = 'install_dir argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the install_dir directory exists if not os.path.isdir(install_dir): msg = 'Directory not found: {f}'.format(f=install_dir) log.error(msg) raise CommandError(msg) # Create the command command = ['rpm', '-iv', '--force', '{d}/*.rpm'.format(d=install_dir)] # Run the rpm command try: result = run_command(command) except CommandError: raise log.info('RPM completed and exit with code: {c}'.format( c=result['code'])) return result['code']
python
def rpm_install(install_dir): """This method installs all RPM files in a specific dir :param install_dir: (str) Full path to the directory :return int exit code form the rpm command :raises CommandError """ log = logging.getLogger(mod_logger + '.rpm_install') # Type checks on the args if not isinstance(install_dir, basestring): msg = 'install_dir argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the install_dir directory exists if not os.path.isdir(install_dir): msg = 'Directory not found: {f}'.format(f=install_dir) log.error(msg) raise CommandError(msg) # Create the command command = ['rpm', '-iv', '--force', '{d}/*.rpm'.format(d=install_dir)] # Run the rpm command try: result = run_command(command) except CommandError: raise log.info('RPM completed and exit with code: {c}'.format( c=result['code'])) return result['code']
[ "def", "rpm_install", "(", "install_dir", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.rpm_install'", ")", "# Type checks on the args", "if", "not", "isinstance", "(", "install_dir", ",", "basestring", ")", ":", "msg", "=", "'install_dir argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the install_dir directory exists", "if", "not", "os", ".", "path", ".", "isdir", "(", "install_dir", ")", ":", "msg", "=", "'Directory not found: {f}'", ".", "format", "(", "f", "=", "install_dir", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Create the command", "command", "=", "[", "'rpm'", ",", "'-iv'", ",", "'--force'", ",", "'{d}/*.rpm'", ".", "format", "(", "d", "=", "install_dir", ")", "]", "# Run the rpm command", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'RPM completed and exit with code: {c}'", ".", "format", "(", "c", "=", "result", "[", "'code'", "]", ")", ")", "return", "result", "[", "'code'", "]" ]
This method installs all RPM files in a specific dir :param install_dir: (str) Full path to the directory :return int exit code from the rpm command :raises CommandError
[ "This", "method", "installs", "all", "RPM", "files", "in", "a", "specific", "dir" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L549-L580
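A minimal sketch of the rpm_install record above, pointing at the same hypothetical staging directory used in the yum_update example:

from pycons3rt.bash import rpm_install

exit_code = rpm_install('/tmp/yum-updates')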
cons3rt/pycons3rt
pycons3rt/bash.py
sed
def sed(file_path, pattern, replace_str, g=0): """Python impl of the bash sed command This method emulates the functionality of a bash sed command. :param file_path: (str) Full path to the file to be edited :param pattern: (str) Search pattern to replace as a regex :param replace_str: (str) String to replace the pattern :param g: (int) Whether to globally replace (0) or replace 1 instance (equivalent to the 'g' option in bash sed :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.sed') # Type checks on the args if not isinstance(file_path, basestring): msg = 'file_path argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(pattern, basestring): msg = 'pattern argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(replace_str, basestring): msg = 'replace_str argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the file_path file exists if not os.path.isfile(file_path): msg = 'File not found: {f}'.format(f=file_path) log.error(msg) raise CommandError(msg) # Search for a matching pattern and replace matching patterns log.info('Updating file: %s...', file_path) for line in fileinput.input(file_path, inplace=True): if re.search(pattern, line): log.info('Updating line: %s', line) new_line = re.sub(pattern, replace_str, line, count=g) log.info('Replacing with line: %s', new_line) sys.stdout.write(new_line) else: sys.stdout.write(line)
python
def sed(file_path, pattern, replace_str, g=0): """Python impl of the bash sed command This method emulates the functionality of a bash sed command. :param file_path: (str) Full path to the file to be edited :param pattern: (str) Search pattern to replace as a regex :param replace_str: (str) String to replace the pattern :param g: (int) Whether to globally replace (0) or replace 1 instance (equivalent to the 'g' option in bash sed :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.sed') # Type checks on the args if not isinstance(file_path, basestring): msg = 'file_path argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(pattern, basestring): msg = 'pattern argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(replace_str, basestring): msg = 'replace_str argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the file_path file exists if not os.path.isfile(file_path): msg = 'File not found: {f}'.format(f=file_path) log.error(msg) raise CommandError(msg) # Search for a matching pattern and replace matching patterns log.info('Updating file: %s...', file_path) for line in fileinput.input(file_path, inplace=True): if re.search(pattern, line): log.info('Updating line: %s', line) new_line = re.sub(pattern, replace_str, line, count=g) log.info('Replacing with line: %s', new_line) sys.stdout.write(new_line) else: sys.stdout.write(line)
[ "def", "sed", "(", "file_path", ",", "pattern", ",", "replace_str", ",", "g", "=", "0", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.sed'", ")", "# Type checks on the args", "if", "not", "isinstance", "(", "file_path", ",", "basestring", ")", ":", "msg", "=", "'file_path argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "pattern", ",", "basestring", ")", ":", "msg", "=", "'pattern argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "replace_str", ",", "basestring", ")", ":", "msg", "=", "'replace_str argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the file_path file exists", "if", "not", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "msg", "=", "'File not found: {f}'", ".", "format", "(", "f", "=", "file_path", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Search for a matching pattern and replace matching patterns", "log", ".", "info", "(", "'Updating file: %s...'", ",", "file_path", ")", "for", "line", "in", "fileinput", ".", "input", "(", "file_path", ",", "inplace", "=", "True", ")", ":", "if", "re", ".", "search", "(", "pattern", ",", "line", ")", ":", "log", ".", "info", "(", "'Updating line: %s'", ",", "line", ")", "new_line", "=", "re", ".", "sub", "(", "pattern", ",", "replace_str", ",", "line", ",", "count", "=", "g", ")", "log", ".", "info", "(", "'Replacing with line: %s'", ",", "new_line", ")", "sys", ".", "stdout", ".", "write", "(", "new_line", ")", "else", ":", "sys", ".", "stdout", ".", "write", "(", "line", ")" ]
Python impl of the bash sed command This method emulates the functionality of a bash sed command. :param file_path: (str) Full path to the file to be edited :param pattern: (str) Search pattern to replace as a regex :param replace_str: (str) String to replace the pattern :param g: (int) Whether to globally replace (0) or replace 1 instance (equivalent to the 'g' option in bash sed :return: None :raises CommandError
[ "Python", "impl", "of", "the", "bash", "sed", "command" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L583-L627
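A short, hedged example of calling the sed() helper documented above; the file path and pattern are hypothetical. Note that g is handed straight to re.sub as count, so g=0 replaces every match on each matching line:

from pycons3rt.bash import sed

# Point a hypothetical config file at a new server; g=0 replaces all matches per line
sed('/etc/myapp/myapp.conf', r'^server=.*', 'server=10.0.0.10', g=0)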
cons3rt/pycons3rt
pycons3rt/bash.py
zip_dir
def zip_dir(dir_path, zip_file): """Creates a zip file of a directory tree This method creates a zip archive using the directory tree dir_path and adds to zip_file output. :param dir_path: (str) Full path to directory to be zipped :param zip_file: (str) Full path to the output zip file :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.zip_dir') # Validate args if not isinstance(dir_path, basestring): msg = 'dir_path argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(zip_file, basestring): msg = 'zip_file argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the dir_path file exists if not os.path.isdir(dir_path): msg = 'Directory not found: {f}'.format(f=dir_path) log.error(msg) raise CommandError(msg) try: with contextlib.closing(zipfile.ZipFile(zip_file, 'w', allowZip64=True)) as zip_w: for root, dirs, files in os.walk(dir_path): for f in files: log.debug('Adding file to zip: %s', f) strip = len(dir_path) - len(os.path.split(dir_path)[-1]) file_name = os.path.join(root, f) archive_name = os.path.join(root[strip:], f) zip_w.write(file_name, archive_name) except Exception: _, ex, trace = sys.exc_info() msg = 'Unable to create zip file: {f}\n{e}'.format( f=zip_file, e=str(ex)) log.error(msg) raise CommandError, msg, trace log.info('Successfully created zip file: %s', zip_file)
python
def zip_dir(dir_path, zip_file): """Creates a zip file of a directory tree This method creates a zip archive using the directory tree dir_path and adds to zip_file output. :param dir_path: (str) Full path to directory to be zipped :param zip_file: (str) Full path to the output zip file :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.zip_dir') # Validate args if not isinstance(dir_path, basestring): msg = 'dir_path argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(zip_file, basestring): msg = 'zip_file argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the dir_path file exists if not os.path.isdir(dir_path): msg = 'Directory not found: {f}'.format(f=dir_path) log.error(msg) raise CommandError(msg) try: with contextlib.closing(zipfile.ZipFile(zip_file, 'w', allowZip64=True)) as zip_w: for root, dirs, files in os.walk(dir_path): for f in files: log.debug('Adding file to zip: %s', f) strip = len(dir_path) - len(os.path.split(dir_path)[-1]) file_name = os.path.join(root, f) archive_name = os.path.join(root[strip:], f) zip_w.write(file_name, archive_name) except Exception: _, ex, trace = sys.exc_info() msg = 'Unable to create zip file: {f}\n{e}'.format( f=zip_file, e=str(ex)) log.error(msg) raise CommandError, msg, trace log.info('Successfully created zip file: %s', zip_file)
[ "def", "zip_dir", "(", "dir_path", ",", "zip_file", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.zip_dir'", ")", "# Validate args", "if", "not", "isinstance", "(", "dir_path", ",", "basestring", ")", ":", "msg", "=", "'dir_path argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "zip_file", ",", "basestring", ")", ":", "msg", "=", "'zip_file argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the dir_path file exists", "if", "not", "os", ".", "path", ".", "isdir", "(", "dir_path", ")", ":", "msg", "=", "'Directory not found: {f}'", ".", "format", "(", "f", "=", "dir_path", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "try", ":", "with", "contextlib", ".", "closing", "(", "zipfile", ".", "ZipFile", "(", "zip_file", ",", "'w'", ",", "allowZip64", "=", "True", ")", ")", "as", "zip_w", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "dir_path", ")", ":", "for", "f", "in", "files", ":", "log", ".", "debug", "(", "'Adding file to zip: %s'", ",", "f", ")", "strip", "=", "len", "(", "dir_path", ")", "-", "len", "(", "os", ".", "path", ".", "split", "(", "dir_path", ")", "[", "-", "1", "]", ")", "file_name", "=", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", "archive_name", "=", "os", ".", "path", ".", "join", "(", "root", "[", "strip", ":", "]", ",", "f", ")", "zip_w", ".", "write", "(", "file_name", ",", "archive_name", ")", "except", "Exception", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to create zip file: {f}\\n{e}'", ".", "format", "(", "f", "=", "zip_file", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Successfully created zip file: %s'", ",", "zip_file", ")" ]
Creates a zip file of a directory tree This method creates a zip archive using the directory tree dir_path and adds to zip_file output. :param dir_path: (str) Full path to directory to be zipped :param zip_file: (str) Full path to the output zip file :return: None :raises CommandError
[ "Creates", "a", "zip", "file", "of", "a", "directory", "tree" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L630-L674
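A usage sketch for zip_dir() as documented above; both paths are hypothetical:

from pycons3rt.bash import zip_dir

# Archive a hypothetical directory tree into a zip file
zip_dir('/opt/myapp', '/tmp/myapp-backup.zip')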
cons3rt/pycons3rt
pycons3rt/bash.py
get_ip
def get_ip(interface=0): """This method return the IP address :param interface: (int) Interface number (e.g. 0 for eth0) :return: (str) IP address or None """ log = logging.getLogger(mod_logger + '.get_ip') log.info('Getting the IP address for this system...') ip_address = None try: log.info('Attempting to get IP address by hostname...') ip_address = socket.gethostbyname(socket.gethostname()) except socket.error: log.info('Unable to get IP address for this system using hostname, ' 'using a bash command...') command = 'ip addr show eth%s | grep inet | grep -v inet6 | ' \ 'awk \'{ print $2 }\' | cut -d/ -f1 ' \ '>> /root/ip' % interface try: log.info('Running command: %s', command) subprocess.check_call(command, shell=True) except(OSError, subprocess.CalledProcessError): _, ex, trace = sys.exc_info() msg = 'Unable to get the IP address of this system\n{e}'.format( e=str(ex)) log.error(msg) raise CommandError, msg, trace else: ip_file = '/root/ip' log.info('Command executed successfully, pulling IP address from ' 'file: %s', ip_file) if os.path.isfile(ip_file): with open(ip_file, 'r') as f: for line in f: ip_address = line.strip() log.info('Found IP address from file: %s', ip_address) else: msg = 'File not found: {f}'.format(f=ip_file) log.error(msg) raise CommandError(msg) log.info('Returning IP address: %s', ip_address) return ip_address
python
def get_ip(interface=0): """This method return the IP address :param interface: (int) Interface number (e.g. 0 for eth0) :return: (str) IP address or None """ log = logging.getLogger(mod_logger + '.get_ip') log.info('Getting the IP address for this system...') ip_address = None try: log.info('Attempting to get IP address by hostname...') ip_address = socket.gethostbyname(socket.gethostname()) except socket.error: log.info('Unable to get IP address for this system using hostname, ' 'using a bash command...') command = 'ip addr show eth%s | grep inet | grep -v inet6 | ' \ 'awk \'{ print $2 }\' | cut -d/ -f1 ' \ '>> /root/ip' % interface try: log.info('Running command: %s', command) subprocess.check_call(command, shell=True) except(OSError, subprocess.CalledProcessError): _, ex, trace = sys.exc_info() msg = 'Unable to get the IP address of this system\n{e}'.format( e=str(ex)) log.error(msg) raise CommandError, msg, trace else: ip_file = '/root/ip' log.info('Command executed successfully, pulling IP address from ' 'file: %s', ip_file) if os.path.isfile(ip_file): with open(ip_file, 'r') as f: for line in f: ip_address = line.strip() log.info('Found IP address from file: %s', ip_address) else: msg = 'File not found: {f}'.format(f=ip_file) log.error(msg) raise CommandError(msg) log.info('Returning IP address: %s', ip_address) return ip_address
[ "def", "get_ip", "(", "interface", "=", "0", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_ip'", ")", "log", ".", "info", "(", "'Getting the IP address for this system...'", ")", "ip_address", "=", "None", "try", ":", "log", ".", "info", "(", "'Attempting to get IP address by hostname...'", ")", "ip_address", "=", "socket", ".", "gethostbyname", "(", "socket", ".", "gethostname", "(", ")", ")", "except", "socket", ".", "error", ":", "log", ".", "info", "(", "'Unable to get IP address for this system using hostname, '", "'using a bash command...'", ")", "command", "=", "'ip addr show eth%s | grep inet | grep -v inet6 | '", "'awk \\'{ print $2 }\\' | cut -d/ -f1 '", "'>> /root/ip'", "%", "interface", "try", ":", "log", ".", "info", "(", "'Running command: %s'", ",", "command", ")", "subprocess", ".", "check_call", "(", "command", ",", "shell", "=", "True", ")", "except", "(", "OSError", ",", "subprocess", ".", "CalledProcessError", ")", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to get the IP address of this system\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "else", ":", "ip_file", "=", "'/root/ip'", "log", ".", "info", "(", "'Command executed successfully, pulling IP address from '", "'file: %s'", ",", "ip_file", ")", "if", "os", ".", "path", ".", "isfile", "(", "ip_file", ")", ":", "with", "open", "(", "ip_file", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "ip_address", "=", "line", ".", "strip", "(", ")", "log", ".", "info", "(", "'Found IP address from file: %s'", ",", "ip_address", ")", "else", ":", "msg", "=", "'File not found: {f}'", ".", "format", "(", "f", "=", "ip_file", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Returning IP address: %s'", ",", "ip_address", ")", "return", "ip_address" ]
This method returns the IP address :param interface: (int) Interface number (e.g. 0 for eth0) :return: (str) IP address or None
[ "This", "method", "returns", "the", "IP", "address" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L677-L719
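A minimal sketch of get_ip(); as the record above shows, the fallback path shells out to an ip/awk pipeline and writes to /root/ip, so non-default lookups may require root:

from pycons3rt.bash import get_ip

# Look up the IP for eth0 (interface numbers map to ethN in the fallback command)
ip_address = get_ip(interface=0)
print('Determined IP address: {i}'.format(i=ip_address))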
cons3rt/pycons3rt
pycons3rt/bash.py
update_hosts_file
def update_hosts_file(ip, entry): """Updates the /etc/hosts file for the specified ip This method updates the /etc/hosts file for the specified IP address with the specified entry. :param ip: (str) IP address to be added or updated :param entry: (str) Hosts file entry to be added :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.update_hosts_file') # Validate args if not isinstance(ip, basestring): msg = 'ip argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(entry, basestring): msg = 'entry argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the file_path file exists hosts_file = '/etc/hosts' if not os.path.isfile(hosts_file): msg = 'File not found: {f}'.format(f=hosts_file) log.error(msg) raise CommandError(msg) # Updating /etc/hosts file log.info('Updating hosts file: {f} with IP {i} and entry: {e}'.format(f=hosts_file, i=ip, e=entry)) full_entry = ip + ' ' + entry.strip() + '\n' updated = False for line in fileinput.input(hosts_file, inplace=True): if re.search(ip, line): if line.split()[0] == ip: log.info('Found IP {i} in line: {li}, updating...'.format(i=ip, li=line)) log.info('Replacing with new line: {n}'.format(n=full_entry)) sys.stdout.write(full_entry) updated = True else: log.debug('Found ip {i} in line {li} but not an exact match, adding line back to hosts file {f}...'. format(i=ip, li=line, f=hosts_file)) sys.stdout.write(line) else: log.debug('IP address {i} not found in line, adding line back to hosts file {f}: {li}'.format( i=ip, li=line, f=hosts_file)) sys.stdout.write(line) # Append the entry if the hosts file was not updated if updated is False: with open(hosts_file, 'a') as f: log.info('Appending hosts file entry to {f}: {e}'.format(f=hosts_file, e=full_entry)) f.write(full_entry)
python
def update_hosts_file(ip, entry): """Updates the /etc/hosts file for the specified ip This method updates the /etc/hosts file for the specified IP address with the specified entry. :param ip: (str) IP address to be added or updated :param entry: (str) Hosts file entry to be added :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.update_hosts_file') # Validate args if not isinstance(ip, basestring): msg = 'ip argument must be a string' log.error(msg) raise CommandError(msg) if not isinstance(entry, basestring): msg = 'entry argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the file_path file exists hosts_file = '/etc/hosts' if not os.path.isfile(hosts_file): msg = 'File not found: {f}'.format(f=hosts_file) log.error(msg) raise CommandError(msg) # Updating /etc/hosts file log.info('Updating hosts file: {f} with IP {i} and entry: {e}'.format(f=hosts_file, i=ip, e=entry)) full_entry = ip + ' ' + entry.strip() + '\n' updated = False for line in fileinput.input(hosts_file, inplace=True): if re.search(ip, line): if line.split()[0] == ip: log.info('Found IP {i} in line: {li}, updating...'.format(i=ip, li=line)) log.info('Replacing with new line: {n}'.format(n=full_entry)) sys.stdout.write(full_entry) updated = True else: log.debug('Found ip {i} in line {li} but not an exact match, adding line back to hosts file {f}...'. format(i=ip, li=line, f=hosts_file)) sys.stdout.write(line) else: log.debug('IP address {i} not found in line, adding line back to hosts file {f}: {li}'.format( i=ip, li=line, f=hosts_file)) sys.stdout.write(line) # Append the entry if the hosts file was not updated if updated is False: with open(hosts_file, 'a') as f: log.info('Appending hosts file entry to {f}: {e}'.format(f=hosts_file, e=full_entry)) f.write(full_entry)
[ "def", "update_hosts_file", "(", "ip", ",", "entry", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.update_hosts_file'", ")", "# Validate args", "if", "not", "isinstance", "(", "ip", ",", "basestring", ")", ":", "msg", "=", "'ip argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "if", "not", "isinstance", "(", "entry", ",", "basestring", ")", ":", "msg", "=", "'entry argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the file_path file exists", "hosts_file", "=", "'/etc/hosts'", "if", "not", "os", ".", "path", ".", "isfile", "(", "hosts_file", ")", ":", "msg", "=", "'File not found: {f}'", ".", "format", "(", "f", "=", "hosts_file", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Updating /etc/hosts file", "log", ".", "info", "(", "'Updating hosts file: {f} with IP {i} and entry: {e}'", ".", "format", "(", "f", "=", "hosts_file", ",", "i", "=", "ip", ",", "e", "=", "entry", ")", ")", "full_entry", "=", "ip", "+", "' '", "+", "entry", ".", "strip", "(", ")", "+", "'\\n'", "updated", "=", "False", "for", "line", "in", "fileinput", ".", "input", "(", "hosts_file", ",", "inplace", "=", "True", ")", ":", "if", "re", ".", "search", "(", "ip", ",", "line", ")", ":", "if", "line", ".", "split", "(", ")", "[", "0", "]", "==", "ip", ":", "log", ".", "info", "(", "'Found IP {i} in line: {li}, updating...'", ".", "format", "(", "i", "=", "ip", ",", "li", "=", "line", ")", ")", "log", ".", "info", "(", "'Replacing with new line: {n}'", ".", "format", "(", "n", "=", "full_entry", ")", ")", "sys", ".", "stdout", ".", "write", "(", "full_entry", ")", "updated", "=", "True", "else", ":", "log", ".", "debug", "(", "'Found ip {i} in line {li} but not an exact match, adding line back to hosts file {f}...'", ".", "format", "(", "i", "=", "ip", ",", "li", "=", "line", ",", "f", "=", "hosts_file", ")", ")", "sys", ".", "stdout", ".", "write", "(", "line", ")", "else", ":", "log", ".", "debug", "(", "'IP address {i} not found in line, adding line back to hosts file {f}: {li}'", ".", "format", "(", "i", "=", "ip", ",", "li", "=", "line", ",", "f", "=", "hosts_file", ")", ")", "sys", ".", "stdout", ".", "write", "(", "line", ")", "# Append the entry if the hosts file was not updated", "if", "updated", "is", "False", ":", "with", "open", "(", "hosts_file", ",", "'a'", ")", "as", "f", ":", "log", ".", "info", "(", "'Appending hosts file entry to {f}: {e}'", ".", "format", "(", "f", "=", "hosts_file", ",", "e", "=", "full_entry", ")", ")", "f", ".", "write", "(", "full_entry", ")" ]
Updates the /etc/hosts file for the specified ip This method updates the /etc/hosts file for the specified IP address with the specified entry. :param ip: (str) IP address to be added or updated :param entry: (str) Hosts file entry to be added :return: None :raises CommandError
[ "Updates", "the", "/", "etc", "/", "hosts", "file", "for", "the", "specified", "ip" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L722-L776
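A hedged example of update_hosts_file(); the IP and hostnames are hypothetical and the call needs write access to /etc/hosts:

from pycons3rt.bash import update_hosts_file

# Add or update a hosts entry for a hypothetical internal node (run as root)
update_hosts_file('10.1.2.3', 'node1.internal.example.com node1')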
cons3rt/pycons3rt
pycons3rt/bash.py
set_hostname
def set_hostname(new_hostname, pretty_hostname=None): """Sets this hosts hostname This method updates /etc/sysconfig/network and calls the hostname command to set a hostname on a Linux system. :param new_hostname: (str) New hostname :param pretty_hostname: (str) new pretty hostname, set to the same as new_hostname if not provided :return (int) exit code of the hostname command :raises CommandError """ log = logging.getLogger(mod_logger + '.set_hostname') # Ensure the hostname is a str if not isinstance(new_hostname, basestring): msg = 'new_hostname argument must be a string' raise CommandError(msg) # Update the network config file network_file = '/etc/sysconfig/network' if os.path.isfile(network_file): log.info('Updating {f} with the new hostname: {h}...'.format(f=network_file, h=new_hostname)) try: sed(network_file, '^HOSTNAME=.*', 'HOSTNAME=' + new_hostname) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update [{f}], produced output:\n{e}'.format(f=network_file, e=str(ex)) raise CommandError, msg, trace else: log.info('Network file not found, will not be updated: {f}'.format(f=network_file)) # Update the hostname if is_systemd(): hostname_file = '/etc/hostname' pretty_hostname_file = '/etc/machine-info' log.info('This is systemd, updating files: {h} and {p}'.format(h=hostname_file, p=pretty_hostname_file)) # Update the hostname file log.info('Updating hostname file: {h}...'.format(h=hostname_file)) if os.path.isfile(hostname_file): os.remove(hostname_file) with open(hostname_file, 'w') as f: f.write(new_hostname) log.info('Updating pretty hostname file: {p}'.format(p=pretty_hostname_file)) # Use the same thing if pretty hostname is not provided if pretty_hostname is None: log.info('Pretty hostname not provided, using: {p}'.format(p=pretty_hostname)) pretty_hostname = new_hostname # Update the pretty hostname file if os.path.isfile(pretty_hostname_file): os.remove(pretty_hostname_file) with open(pretty_hostname_file, 'w') as f: f.write('PRETTY_HOSTNAME={p}'.format(p=pretty_hostname)) return 0 else: command = ['/bin/hostname', new_hostname] # Run the hostname command log.info('Running hostname command to set the hostname: [{c}]'.format(c=' '.join(command))) try: result = run_command(command) except CommandError: raise log.info('Hostname command completed with code: {c} and output:\n{o}'.format( c=result['code'], o=result['output'])) return result['code']
python
def set_hostname(new_hostname, pretty_hostname=None): """Sets this hosts hostname This method updates /etc/sysconfig/network and calls the hostname command to set a hostname on a Linux system. :param new_hostname: (str) New hostname :param pretty_hostname: (str) new pretty hostname, set to the same as new_hostname if not provided :return (int) exit code of the hostname command :raises CommandError """ log = logging.getLogger(mod_logger + '.set_hostname') # Ensure the hostname is a str if not isinstance(new_hostname, basestring): msg = 'new_hostname argument must be a string' raise CommandError(msg) # Update the network config file network_file = '/etc/sysconfig/network' if os.path.isfile(network_file): log.info('Updating {f} with the new hostname: {h}...'.format(f=network_file, h=new_hostname)) try: sed(network_file, '^HOSTNAME=.*', 'HOSTNAME=' + new_hostname) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update [{f}], produced output:\n{e}'.format(f=network_file, e=str(ex)) raise CommandError, msg, trace else: log.info('Network file not found, will not be updated: {f}'.format(f=network_file)) # Update the hostname if is_systemd(): hostname_file = '/etc/hostname' pretty_hostname_file = '/etc/machine-info' log.info('This is systemd, updating files: {h} and {p}'.format(h=hostname_file, p=pretty_hostname_file)) # Update the hostname file log.info('Updating hostname file: {h}...'.format(h=hostname_file)) if os.path.isfile(hostname_file): os.remove(hostname_file) with open(hostname_file, 'w') as f: f.write(new_hostname) log.info('Updating pretty hostname file: {p}'.format(p=pretty_hostname_file)) # Use the same thing if pretty hostname is not provided if pretty_hostname is None: log.info('Pretty hostname not provided, using: {p}'.format(p=pretty_hostname)) pretty_hostname = new_hostname # Update the pretty hostname file if os.path.isfile(pretty_hostname_file): os.remove(pretty_hostname_file) with open(pretty_hostname_file, 'w') as f: f.write('PRETTY_HOSTNAME={p}'.format(p=pretty_hostname)) return 0 else: command = ['/bin/hostname', new_hostname] # Run the hostname command log.info('Running hostname command to set the hostname: [{c}]'.format(c=' '.join(command))) try: result = run_command(command) except CommandError: raise log.info('Hostname command completed with code: {c} and output:\n{o}'.format( c=result['code'], o=result['output'])) return result['code']
[ "def", "set_hostname", "(", "new_hostname", ",", "pretty_hostname", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.set_hostname'", ")", "# Ensure the hostname is a str", "if", "not", "isinstance", "(", "new_hostname", ",", "basestring", ")", ":", "msg", "=", "'new_hostname argument must be a string'", "raise", "CommandError", "(", "msg", ")", "# Update the network config file", "network_file", "=", "'/etc/sysconfig/network'", "if", "os", ".", "path", ".", "isfile", "(", "network_file", ")", ":", "log", ".", "info", "(", "'Updating {f} with the new hostname: {h}...'", ".", "format", "(", "f", "=", "network_file", ",", "h", "=", "new_hostname", ")", ")", "try", ":", "sed", "(", "network_file", ",", "'^HOSTNAME=.*'", ",", "'HOSTNAME='", "+", "new_hostname", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to update [{f}], produced output:\\n{e}'", ".", "format", "(", "f", "=", "network_file", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "CommandError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Network file not found, will not be updated: {f}'", ".", "format", "(", "f", "=", "network_file", ")", ")", "# Update the hostname", "if", "is_systemd", "(", ")", ":", "hostname_file", "=", "'/etc/hostname'", "pretty_hostname_file", "=", "'/etc/machine-info'", "log", ".", "info", "(", "'This is systemd, updating files: {h} and {p}'", ".", "format", "(", "h", "=", "hostname_file", ",", "p", "=", "pretty_hostname_file", ")", ")", "# Update the hostname file", "log", ".", "info", "(", "'Updating hostname file: {h}...'", ".", "format", "(", "h", "=", "hostname_file", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "hostname_file", ")", ":", "os", ".", "remove", "(", "hostname_file", ")", "with", "open", "(", "hostname_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "new_hostname", ")", "log", ".", "info", "(", "'Updating pretty hostname file: {p}'", ".", "format", "(", "p", "=", "pretty_hostname_file", ")", ")", "# Use the same thing if pretty hostname is not provided", "if", "pretty_hostname", "is", "None", ":", "log", ".", "info", "(", "'Pretty hostname not provided, using: {p}'", ".", "format", "(", "p", "=", "pretty_hostname", ")", ")", "pretty_hostname", "=", "new_hostname", "# Update the pretty hostname file", "if", "os", ".", "path", ".", "isfile", "(", "pretty_hostname_file", ")", ":", "os", ".", "remove", "(", "pretty_hostname_file", ")", "with", "open", "(", "pretty_hostname_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'PRETTY_HOSTNAME={p}'", ".", "format", "(", "p", "=", "pretty_hostname", ")", ")", "return", "0", "else", ":", "command", "=", "[", "'/bin/hostname'", ",", "new_hostname", "]", "# Run the hostname command", "log", ".", "info", "(", "'Running hostname command to set the hostname: [{c}]'", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "command", ")", ")", ")", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'Hostname command completed with code: {c} and output:\\n{o}'", ".", "format", "(", "c", "=", "result", "[", "'code'", "]", ",", "o", "=", "result", "[", "'output'", "]", ")", ")", "return", "result", "[", "'code'", "]" ]
Sets this hosts hostname This method updates /etc/sysconfig/network and calls the hostname command to set a hostname on a Linux system. :param new_hostname: (str) New hostname :param pretty_hostname: (str) new pretty hostname, set to the same as new_hostname if not provided :return (int) exit code of the hostname command :raises CommandError
[ "Sets", "this", "hosts", "hostname" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L779-L847
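A usage sketch for set_hostname() with hypothetical names; it assumes root on a RHEL-style or systemd host, matching the files the function edits:

from pycons3rt.bash import set_hostname

# Set the hostname and, on systemd hosts, the pretty hostname as well
result = set_hostname('app01.example.com', pretty_hostname='App Server 01')
print('set_hostname returned: {r}'.format(r=result))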
cons3rt/pycons3rt
pycons3rt/bash.py
set_ntp_server
def set_ntp_server(server): """Sets the NTP server on Linux :param server: (str) NTP server IP or hostname :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.set_ntp_server') # Ensure the hostname is a str if not isinstance(server, basestring): msg = 'server argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the ntp.conf file exists ntp_conf = '/etc/ntp.conf' if not os.path.isfile(ntp_conf): msg = 'File not found: {f}'.format(f=ntp_conf) log.error(msg) raise CommandError(msg) log.info('Clearing out existing server entries from %s...', ntp_conf) try: sed(ntp_conf, '^server.*', '', g=0) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update file: {f}\n{e}'.format(f=ntp_conf, e=str(ex)) log.error(msg) raise CommandError, msg, trace out_str = 'server ' + server log.info('Appending server: %s', out_str) with open(ntp_conf, 'a') as f: f.write(out_str) log.info('Successfully updated file: {f}'.format(f=ntp_conf))
python
def set_ntp_server(server): """Sets the NTP server on Linux :param server: (str) NTP server IP or hostname :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.set_ntp_server') # Ensure the hostname is a str if not isinstance(server, basestring): msg = 'server argument must be a string' log.error(msg) raise CommandError(msg) # Ensure the ntp.conf file exists ntp_conf = '/etc/ntp.conf' if not os.path.isfile(ntp_conf): msg = 'File not found: {f}'.format(f=ntp_conf) log.error(msg) raise CommandError(msg) log.info('Clearing out existing server entries from %s...', ntp_conf) try: sed(ntp_conf, '^server.*', '', g=0) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update file: {f}\n{e}'.format(f=ntp_conf, e=str(ex)) log.error(msg) raise CommandError, msg, trace out_str = 'server ' + server log.info('Appending server: %s', out_str) with open(ntp_conf, 'a') as f: f.write(out_str) log.info('Successfully updated file: {f}'.format(f=ntp_conf))
[ "def", "set_ntp_server", "(", "server", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.set_ntp_server'", ")", "# Ensure the hostname is a str", "if", "not", "isinstance", "(", "server", ",", "basestring", ")", ":", "msg", "=", "'server argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "# Ensure the ntp.conf file exists", "ntp_conf", "=", "'/etc/ntp.conf'", "if", "not", "os", ".", "path", ".", "isfile", "(", "ntp_conf", ")", ":", "msg", "=", "'File not found: {f}'", ".", "format", "(", "f", "=", "ntp_conf", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Clearing out existing server entries from %s...'", ",", "ntp_conf", ")", "try", ":", "sed", "(", "ntp_conf", ",", "'^server.*'", ",", "''", ",", "g", "=", "0", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to update file: {f}\\n{e}'", ".", "format", "(", "f", "=", "ntp_conf", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "out_str", "=", "'server '", "+", "server", "log", ".", "info", "(", "'Appending server: %s'", ",", "out_str", ")", "with", "open", "(", "ntp_conf", ",", "'a'", ")", "as", "f", ":", "f", ".", "write", "(", "out_str", ")", "log", ".", "info", "(", "'Successfully updated file: {f}'", ".", "format", "(", "f", "=", "ntp_conf", ")", ")" ]
Sets the NTP server on Linux :param server: (str) NTP server IP or hostname :return: None :raises CommandError
[ "Sets", "the", "NTP", "server", "on", "Linux" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L850-L882
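A short example of set_ntp_server(); the server address is hypothetical and /etc/ntp.conf must already exist, as the record above enforces:

from pycons3rt.bash import set_ntp_server

# Point NTP at a hypothetical internal time source (root required to edit /etc/ntp.conf)
set_ntp_server('10.0.0.123')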
cons3rt/pycons3rt
pycons3rt/bash.py
copy_ifcfg_file
def copy_ifcfg_file(source_interface, dest_interface): """Copies an existing ifcfg network script to another :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises TypeError, OSError """ log = logging.getLogger(mod_logger + '.copy_ifcfg_file') # Validate args if not isinstance(source_interface, basestring): msg = 'source_interface argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(dest_interface, basestring): msg = 'dest_interface argument must be a string' log.error(msg) raise TypeError(msg) network_script = '/etc/sysconfig/network-scripts/ifcfg-eth' source_file = network_script + source_interface dest_file = network_script + dest_interface command = ['cp', '-f', source_file, dest_file] try: result = run_command(command) code = result['code'] except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to copy the ifcfg file from interface {s} to interface {d}\n{e}'.format( s=source_interface, d=dest_interface, e=str(ex)) raise OSError, msg, trace log.info('Copy command exited with code: {c}'.format(c=code)) if code != 0: msg = 'There was a problem copying file {s} file to {d}'.format(s=source, d=dest_file) log.error(msg) raise OSError(msg) # Updating the destination network script DEVICE property try: sed(file_path=dest_file, pattern='^DEVICE=.*', replace_str='DEVICE="eth{i}"'.format(i=dest_interface)) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update DEVICE in file: {d}\n{e}'.format( d=dest_file, e=str(ex)) log.error(msg) raise CommandError, msg, trace log.info('Successfully created file: {d}'.format(d=dest_file)) log.info('Restarting networking in 10 seconds to ensure the changes take effect...') time.sleep(10) retry_time = 10 max_retries = 10 for i in range(1, max_retries+2): if i > max_retries: msg = 'Unable to successfully start the networking service after {m} attempts'.format(m=max_retries) log.error(msg) raise OSError(msg) log.info('Attempting to restart the networking service, attempt #{i} of {m}'.format(i=i, m=max_retries)) try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() log.warn('Attempted unsuccessfully to restart networking on attempt #{i} of {m}, trying again in {t} ' 'seconds\n{e}'.format(i=i, m=max_retries, t=retry_time, e=str(ex))) time.sleep(retry_time) else: log.info('Successfully restarted networking') break log.info('Successfully configured interface: {d}'.format(d=dest_interface))
python
def copy_ifcfg_file(source_interface, dest_interface): """Copies an existing ifcfg network script to another :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises TypeError, OSError """ log = logging.getLogger(mod_logger + '.copy_ifcfg_file') # Validate args if not isinstance(source_interface, basestring): msg = 'source_interface argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(dest_interface, basestring): msg = 'dest_interface argument must be a string' log.error(msg) raise TypeError(msg) network_script = '/etc/sysconfig/network-scripts/ifcfg-eth' source_file = network_script + source_interface dest_file = network_script + dest_interface command = ['cp', '-f', source_file, dest_file] try: result = run_command(command) code = result['code'] except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to copy the ifcfg file from interface {s} to interface {d}\n{e}'.format( s=source_interface, d=dest_interface, e=str(ex)) raise OSError, msg, trace log.info('Copy command exited with code: {c}'.format(c=code)) if code != 0: msg = 'There was a problem copying file {s} file to {d}'.format(s=source, d=dest_file) log.error(msg) raise OSError(msg) # Updating the destination network script DEVICE property try: sed(file_path=dest_file, pattern='^DEVICE=.*', replace_str='DEVICE="eth{i}"'.format(i=dest_interface)) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to update DEVICE in file: {d}\n{e}'.format( d=dest_file, e=str(ex)) log.error(msg) raise CommandError, msg, trace log.info('Successfully created file: {d}'.format(d=dest_file)) log.info('Restarting networking in 10 seconds to ensure the changes take effect...') time.sleep(10) retry_time = 10 max_retries = 10 for i in range(1, max_retries+2): if i > max_retries: msg = 'Unable to successfully start the networking service after {m} attempts'.format(m=max_retries) log.error(msg) raise OSError(msg) log.info('Attempting to restart the networking service, attempt #{i} of {m}'.format(i=i, m=max_retries)) try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() log.warn('Attempted unsuccessfully to restart networking on attempt #{i} of {m}, trying again in {t} ' 'seconds\n{e}'.format(i=i, m=max_retries, t=retry_time, e=str(ex))) time.sleep(retry_time) else: log.info('Successfully restarted networking') break log.info('Successfully configured interface: {d}'.format(d=dest_interface))
[ "def", "copy_ifcfg_file", "(", "source_interface", ",", "dest_interface", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.copy_ifcfg_file'", ")", "# Validate args", "if", "not", "isinstance", "(", "source_interface", ",", "basestring", ")", ":", "msg", "=", "'source_interface argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "dest_interface", ",", "basestring", ")", ":", "msg", "=", "'dest_interface argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "network_script", "=", "'/etc/sysconfig/network-scripts/ifcfg-eth'", "source_file", "=", "network_script", "+", "source_interface", "dest_file", "=", "network_script", "+", "dest_interface", "command", "=", "[", "'cp'", ",", "'-f'", ",", "source_file", ",", "dest_file", "]", "try", ":", "result", "=", "run_command", "(", "command", ")", "code", "=", "result", "[", "'code'", "]", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to copy the ifcfg file from interface {s} to interface {d}\\n{e}'", ".", "format", "(", "s", "=", "source_interface", ",", "d", "=", "dest_interface", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "OSError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Copy command exited with code: {c}'", ".", "format", "(", "c", "=", "code", ")", ")", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem copying file {s} file to {d}'", ".", "format", "(", "s", "=", "source", ",", "d", "=", "dest_file", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "# Updating the destination network script DEVICE property", "try", ":", "sed", "(", "file_path", "=", "dest_file", ",", "pattern", "=", "'^DEVICE=.*'", ",", "replace_str", "=", "'DEVICE=\"eth{i}\"'", ".", "format", "(", "i", "=", "dest_interface", ")", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to update DEVICE in file: {d}\\n{e}'", ".", "format", "(", "d", "=", "dest_file", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Successfully created file: {d}'", ".", "format", "(", "d", "=", "dest_file", ")", ")", "log", ".", "info", "(", "'Restarting networking in 10 seconds to ensure the changes take effect...'", ")", "time", ".", "sleep", "(", "10", ")", "retry_time", "=", "10", "max_retries", "=", "10", "for", "i", "in", "range", "(", "1", ",", "max_retries", "+", "2", ")", ":", "if", "i", ">", "max_retries", ":", "msg", "=", "'Unable to successfully start the networking service after {m} attempts'", ".", "format", "(", "m", "=", "max_retries", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "log", ".", "info", "(", "'Attempting to restart the networking service, attempt #{i} of {m}'", ".", "format", "(", "i", "=", "i", ",", "m", "=", "max_retries", ")", ")", "try", ":", "service_network_restart", "(", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "warn", "(", "'Attempted unsuccessfully to restart networking on attempt #{i} of {m}, trying again in {t} '", "'seconds\\n{e}'", ".", "format", "(", "i", "=", "i", ",", "m", "=", "max_retries", ",", "t", "=", "retry_time", ",", "e", "=", "str", "(", "ex", ")", 
")", ")", "time", ".", "sleep", "(", "retry_time", ")", "else", ":", "log", ".", "info", "(", "'Successfully restarted networking'", ")", "break", "log", ".", "info", "(", "'Successfully configured interface: {d}'", ".", "format", "(", "d", "=", "dest_interface", ")", ")" ]
Copies an existing ifcfg network script to another :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises TypeError, OSError
[ "Copies", "an", "existing", "ifcfg", "network", "script", "to", "another" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L885-L955
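A hedged sketch of copy_ifcfg_file(); both interface arguments must be strings per the type checks above, the values are hypothetical, and the call restarts networking on a RHEL-style host:

from pycons3rt.bash import copy_ifcfg_file

# Clone the eth0 network script to an eth0:0 alias (run as root; networking restarts)
copy_ifcfg_file('0', '0:0')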
cons3rt/pycons3rt
pycons3rt/bash.py
remove_ifcfg_file
def remove_ifcfg_file(device_index='0'): """Removes the ifcfg file at the specified device index and restarts the network service :param device_index: (int) Device Index :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.remove_ifcfg_file') if not isinstance(device_index, basestring): msg = 'device_index argument must be a string' log.error(msg) raise CommandError(msg) network_script = '/etc/sysconfig/network-scripts/ifcfg-eth{d}'.format(d=device_index) if not os.path.isfile(network_script): log.info('File does not exist, nothing will be removed: {n}'.format(n=network_script)) return # Remove the network config script log.info('Attempting to remove file: {n}'.format(n=network_script)) try: os.remove(network_script) except(IOError, OSError): _, ex, trace = sys.exc_info() msg = 'There was a problem removing network script file: {n}\n{e}'.format(n=network_script, e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully removed file: {n}'.format(n=network_script)) # Restart the network service log.info('Restarting the network service...') try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem restarting the network service\n{e}'.format(e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully restarted the network service')
python
def remove_ifcfg_file(device_index='0'): """Removes the ifcfg file at the specified device index and restarts the network service :param device_index: (int) Device Index :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.remove_ifcfg_file') if not isinstance(device_index, basestring): msg = 'device_index argument must be a string' log.error(msg) raise CommandError(msg) network_script = '/etc/sysconfig/network-scripts/ifcfg-eth{d}'.format(d=device_index) if not os.path.isfile(network_script): log.info('File does not exist, nothing will be removed: {n}'.format(n=network_script)) return # Remove the network config script log.info('Attempting to remove file: {n}'.format(n=network_script)) try: os.remove(network_script) except(IOError, OSError): _, ex, trace = sys.exc_info() msg = 'There was a problem removing network script file: {n}\n{e}'.format(n=network_script, e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully removed file: {n}'.format(n=network_script)) # Restart the network service log.info('Restarting the network service...') try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem restarting the network service\n{e}'.format(e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully restarted the network service')
[ "def", "remove_ifcfg_file", "(", "device_index", "=", "'0'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.remove_ifcfg_file'", ")", "if", "not", "isinstance", "(", "device_index", ",", "basestring", ")", ":", "msg", "=", "'device_index argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "network_script", "=", "'/etc/sysconfig/network-scripts/ifcfg-eth{d}'", ".", "format", "(", "d", "=", "device_index", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "network_script", ")", ":", "log", ".", "info", "(", "'File does not exist, nothing will be removed: {n}'", ".", "format", "(", "n", "=", "network_script", ")", ")", "return", "# Remove the network config script", "log", ".", "info", "(", "'Attempting to remove file: {n}'", ".", "format", "(", "n", "=", "network_script", ")", ")", "try", ":", "os", ".", "remove", "(", "network_script", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem removing network script file: {n}\\n{e}'", ".", "format", "(", "n", "=", "network_script", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully removed file: {n}'", ".", "format", "(", "n", "=", "network_script", ")", ")", "# Restart the network service", "log", ".", "info", "(", "'Restarting the network service...'", ")", "try", ":", "service_network_restart", "(", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem restarting the network service\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully restarted the network service'", ")" ]
Removes the ifcfg file at the specified device index and restarts the network service :param device_index: (int) Device Index :return: None :raises CommandError
[ "Removes", "the", "ifcfg", "file", "at", "the", "specified", "device", "index", "and", "restarts", "the", "network", "service" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L958-L998
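A minimal example of remove_ifcfg_file(); the device index is hypothetical and, as above, the network service is restarted afterwards:

from pycons3rt.bash import remove_ifcfg_file

# Drop the ifcfg-eth1 script if present, then restart networking (root required)
remove_ifcfg_file(device_index='1')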
cons3rt/pycons3rt
pycons3rt/bash.py
add_nat_rule
def add_nat_rule(port, source_interface, dest_interface): """Adds a NAT rule to iptables :param port: String or int port number :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises: TypeError, OSError """ log = logging.getLogger(mod_logger + '.add_nat_rule') # Validate args if not isinstance(source_interface, basestring): msg = 'source_interface argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(dest_interface, basestring): msg = 'dest_interface argument must be a string' log.error(msg) raise TypeError(msg) ip_addresses = ip_addr() destination_ip = ip_addresses['eth{i}'.format(i=dest_interface)] log.info('Using destination IP address: {d}'.format(d=destination_ip)) command = ['iptables', '-t', 'nat', '-A', 'PREROUTING', '-i', 'eth{s}'.format(s=source_interface), '-p', 'tcp', '--dport', str(port), '-j', 'DNAT', '--to', '{d}:{p}'.format(p=port, d=destination_ip)] log.info('Running command: {c}'.format(c=command)) try: subprocess.check_call(command) except OSError: _, ex, trace = sys.exc_info() msg = 'There was a problem running command: {c}\n{e}'.format(c=command, e=str(ex)) log.error(msg) raise OSError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Command returned a non-zero exit code: {c}\n{e}'.format(c=command, e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully ran command: {c}'.format(c=command)) # Save the iptables with the new NAT rule try: save_iptables() except OSError: _, ex, trace = sys.exc_info() msg = 'OSError: There was a problem saving iptables rules\n{e}'.format(e=str(ex)) raise OSError, msg, trace log.info('Successfully saved iptables rules with the NAT rule')
python
def add_nat_rule(port, source_interface, dest_interface): """Adds a NAT rule to iptables :param port: String or int port number :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises: TypeError, OSError """ log = logging.getLogger(mod_logger + '.add_nat_rule') # Validate args if not isinstance(source_interface, basestring): msg = 'source_interface argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(dest_interface, basestring): msg = 'dest_interface argument must be a string' log.error(msg) raise TypeError(msg) ip_addresses = ip_addr() destination_ip = ip_addresses['eth{i}'.format(i=dest_interface)] log.info('Using destination IP address: {d}'.format(d=destination_ip)) command = ['iptables', '-t', 'nat', '-A', 'PREROUTING', '-i', 'eth{s}'.format(s=source_interface), '-p', 'tcp', '--dport', str(port), '-j', 'DNAT', '--to', '{d}:{p}'.format(p=port, d=destination_ip)] log.info('Running command: {c}'.format(c=command)) try: subprocess.check_call(command) except OSError: _, ex, trace = sys.exc_info() msg = 'There was a problem running command: {c}\n{e}'.format(c=command, e=str(ex)) log.error(msg) raise OSError, msg, trace except subprocess.CalledProcessError: _, ex, trace = sys.exc_info() msg = 'Command returned a non-zero exit code: {c}\n{e}'.format(c=command, e=str(ex)) log.error(msg) raise OSError, msg, trace else: log.info('Successfully ran command: {c}'.format(c=command)) # Save the iptables with the new NAT rule try: save_iptables() except OSError: _, ex, trace = sys.exc_info() msg = 'OSError: There was a problem saving iptables rules\n{e}'.format(e=str(ex)) raise OSError, msg, trace log.info('Successfully saved iptables rules with the NAT rule')
[ "def", "add_nat_rule", "(", "port", ",", "source_interface", ",", "dest_interface", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.add_nat_rule'", ")", "# Validate args", "if", "not", "isinstance", "(", "source_interface", ",", "basestring", ")", ":", "msg", "=", "'source_interface argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "dest_interface", ",", "basestring", ")", ":", "msg", "=", "'dest_interface argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "ip_addresses", "=", "ip_addr", "(", ")", "destination_ip", "=", "ip_addresses", "[", "'eth{i}'", ".", "format", "(", "i", "=", "dest_interface", ")", "]", "log", ".", "info", "(", "'Using destination IP address: {d}'", ".", "format", "(", "d", "=", "destination_ip", ")", ")", "command", "=", "[", "'iptables'", ",", "'-t'", ",", "'nat'", ",", "'-A'", ",", "'PREROUTING'", ",", "'-i'", ",", "'eth{s}'", ".", "format", "(", "s", "=", "source_interface", ")", ",", "'-p'", ",", "'tcp'", ",", "'--dport'", ",", "str", "(", "port", ")", ",", "'-j'", ",", "'DNAT'", ",", "'--to'", ",", "'{d}:{p}'", ".", "format", "(", "p", "=", "port", ",", "d", "=", "destination_ip", ")", "]", "log", ".", "info", "(", "'Running command: {c}'", ".", "format", "(", "c", "=", "command", ")", ")", "try", ":", "subprocess", ".", "check_call", "(", "command", ")", "except", "OSError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem running command: {c}\\n{e}'", ".", "format", "(", "c", "=", "command", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", ",", "msg", ",", "trace", "except", "subprocess", ".", "CalledProcessError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Command returned a non-zero exit code: {c}\\n{e}'", ".", "format", "(", "c", "=", "command", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully ran command: {c}'", ".", "format", "(", "c", "=", "command", ")", ")", "# Save the iptables with the new NAT rule", "try", ":", "save_iptables", "(", ")", "except", "OSError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'OSError: There was a problem saving iptables rules\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "raise", "OSError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Successfully saved iptables rules with the NAT rule'", ")" ]
Adds a NAT rule to iptables :param port: String or int port number :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises: TypeError, OSError
[ "Adds", "a", "NAT", "rule", "to", "iptables" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1001-L1052
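A usage sketch for add_nat_rule(); the port and interface values are hypothetical, and the call assumes root plus iptables on the host since the rule is also saved afterwards:

from pycons3rt.bash import add_nat_rule

# Forward TCP 8080 arriving on eth0 to the address bound to eth1 (root required)
add_nat_rule(8080, '0', '1')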
cons3rt/pycons3rt
pycons3rt/bash.py
service_network_restart
def service_network_restart(): """Restarts the network service on linux :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.service_network_restart') command = ['service', 'network', 'restart'] time.sleep(5) try: result = run_command(command) time.sleep(5) code = result['code'] except CommandError: raise log.info('Network restart produced output:\n{o}'.format(o=result['output'])) if code != 0: msg = 'Network services did not restart cleanly, exited with code: {c}'.format(c=code) log.error(msg) raise CommandError(msg) else: log.info('Successfully restarted networking!')
python
def service_network_restart(): """Restarts the network service on linux :return: None :raises CommandError """ log = logging.getLogger(mod_logger + '.service_network_restart') command = ['service', 'network', 'restart'] time.sleep(5) try: result = run_command(command) time.sleep(5) code = result['code'] except CommandError: raise log.info('Network restart produced output:\n{o}'.format(o=result['output'])) if code != 0: msg = 'Network services did not restart cleanly, exited with code: {c}'.format(c=code) log.error(msg) raise CommandError(msg) else: log.info('Successfully restarted networking!')
[ "def", "service_network_restart", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.service_network_restart'", ")", "command", "=", "[", "'service'", ",", "'network'", ",", "'restart'", "]", "time", ".", "sleep", "(", "5", ")", "try", ":", "result", "=", "run_command", "(", "command", ")", "time", ".", "sleep", "(", "5", ")", "code", "=", "result", "[", "'code'", "]", "except", "CommandError", ":", "raise", "log", ".", "info", "(", "'Network restart produced output:\\n{o}'", ".", "format", "(", "o", "=", "result", "[", "'output'", "]", ")", ")", "if", "code", "!=", "0", ":", "msg", "=", "'Network services did not restart cleanly, exited with code: {c}'", ".", "format", "(", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "else", ":", "log", ".", "info", "(", "'Successfully restarted networking!'", ")" ]
Restarts the network service on linux :return: None :raises CommandError
[ "Restarts", "the", "network", "service", "on", "linux", ":", "return", ":", "None", ":", "raises", "CommandError" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1055-L1076
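A brief hedged sketch for service_network_restart, which takes no arguments and raises the library's CommandError on a non-zero exit code; the import path is assumed and a broad except keeps the snippet self-contained.

    from pycons3rt.bash import service_network_restart  # import path assumed

    try:
        # Sleeps roughly 5 seconds before and after running `service network restart`
        service_network_restart()
    except Exception as ex:
        # The library raises its own CommandError here; Exception avoids guessing its import path
        print('Network restart failed: {e}'.format(e=str(ex)))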
cons3rt/pycons3rt
pycons3rt/bash.py
save_iptables
def save_iptables(rules_file='/etc/sysconfig/iptables'): """Saves iptables rules to the provided rules file :return: None :raises OSError """ log = logging.getLogger(mod_logger + '.save_iptables') # Run iptables-save to get the output command = ['iptables-save'] log.debug('Running command: iptables-save') try: iptables_out = run_command(command, timeout_sec=20) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running iptables command: {c}\n{e}'.format(c=' '.join(command), e=str(ex)) raise OSError, msg, trace # Error if iptables-save did not exit clean if int(iptables_out['code']) != 0: raise OSError('Command [{g}] exited with code [{c}] and output:\n{o}'.format( g=' '.join(command), c=iptables_out['code'], o=iptables_out['output'])) # Back up the existing rules file if it exists if os.path.isfile(rules_file): time_now = datetime.now().strftime('%Y%m%d-%H%M%S') backup_file = '{f}.{d}'.format(f=rules_file, d=time_now) log.debug('Creating backup file: {f}'.format(f=backup_file)) shutil.copy2(rules_file, backup_file) # Save the output to the rules file log.debug('Creating file: {f}'.format(f=rules_file)) with open(rules_file, 'w') as f: f.write(iptables_out['output'])
python
def save_iptables(rules_file='/etc/sysconfig/iptables'): """Saves iptables rules to the provided rules file :return: None :raises OSError """ log = logging.getLogger(mod_logger + '.save_iptables') # Run iptables-save to get the output command = ['iptables-save'] log.debug('Running command: iptables-save') try: iptables_out = run_command(command, timeout_sec=20) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running iptables command: {c}\n{e}'.format(c=' '.join(command), e=str(ex)) raise OSError, msg, trace # Error if iptables-save did not exit clean if int(iptables_out['code']) != 0: raise OSError('Command [{g}] exited with code [{c}] and output:\n{o}'.format( g=' '.join(command), c=iptables_out['code'], o=iptables_out['output'])) # Back up the existing rules file if it exists if os.path.isfile(rules_file): time_now = datetime.now().strftime('%Y%m%d-%H%M%S') backup_file = '{f}.{d}'.format(f=rules_file, d=time_now) log.debug('Creating backup file: {f}'.format(f=backup_file)) shutil.copy2(rules_file, backup_file) # Save the output to the rules file log.debug('Creating file: {f}'.format(f=rules_file)) with open(rules_file, 'w') as f: f.write(iptables_out['output'])
[ "def", "save_iptables", "(", "rules_file", "=", "'/etc/sysconfig/iptables'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.save_iptables'", ")", "# Run iptables-save to get the output", "command", "=", "[", "'iptables-save'", "]", "log", ".", "debug", "(", "'Running command: iptables-save'", ")", "try", ":", "iptables_out", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "20", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem running iptables command: {c}\\n{e}'", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "command", ")", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "OSError", ",", "msg", ",", "trace", "# Error if iptables-save did not exit clean", "if", "int", "(", "iptables_out", "[", "'code'", "]", ")", "!=", "0", ":", "raise", "OSError", "(", "'Command [{g}] exited with code [{c}] and output:\\n{o}'", ".", "format", "(", "g", "=", "' '", ".", "join", "(", "command", ")", ",", "c", "=", "iptables_out", "[", "'code'", "]", ",", "o", "=", "iptables_out", "[", "'output'", "]", ")", ")", "# Back up the existing rules file if it exists", "if", "os", ".", "path", ".", "isfile", "(", "rules_file", ")", ":", "time_now", "=", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y%m%d-%H%M%S'", ")", "backup_file", "=", "'{f}.{d}'", ".", "format", "(", "f", "=", "rules_file", ",", "d", "=", "time_now", ")", "log", ".", "debug", "(", "'Creating backup file: {f}'", ".", "format", "(", "f", "=", "backup_file", ")", ")", "shutil", ".", "copy2", "(", "rules_file", ",", "backup_file", ")", "# Save the output to the rules file", "log", ".", "debug", "(", "'Creating file: {f}'", ".", "format", "(", "f", "=", "rules_file", ")", ")", "with", "open", "(", "rules_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "iptables_out", "[", "'output'", "]", ")" ]
Saves iptables rules to the provided rules file :return: None :raises OSError
[ "Saves", "iptables", "rules", "to", "the", "provided", "rules", "file" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1079-L1112
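A minimal sketch for save_iptables, assuming root privileges and the pycons3rt.bash import path; the function runs iptables-save, backs up an existing rules file with a timestamp suffix, and then overwrites the default /etc/sysconfig/iptables.

    from pycons3rt.bash import save_iptables  # import path assumed

    try:
        # Persist the current in-memory iptables rules to the default location
        save_iptables(rules_file='/etc/sysconfig/iptables')
    except OSError as ex:
        print('Saving iptables rules failed: {e}'.format(e=str(ex)))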
cons3rt/pycons3rt
pycons3rt/bash.py
get_remote_host_environment_variable
def get_remote_host_environment_variable(host, environment_variable): """Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param environment_variable: (str) variable to query :return: (str) value of the environment variable :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.get_remote_host_environment_variable') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(environment_variable, basestring): msg = 'environment_variable argument must be a string' log.error(msg) raise TypeError(msg) log.info('Checking host {h} for environment variable: {v}...'.format(h=host, v=environment_variable)) command = ['ssh', '{h}'.format(h=host), 'echo ${v}'.format(v=environment_variable)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] except CommandError: raise if code != 0: msg = 'There was a problem checking the remote host {h} over SSH, return code: {c}'.format( h=host, c=code) log.error(msg) raise CommandError(msg) else: value = result['output'].strip() log.info('Environment variable {e} on host {h} value is: {v}'.format( e=environment_variable, h=host, v=value)) return value
python
def get_remote_host_environment_variable(host, environment_variable): """Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param environment_variable: (str) variable to query :return: (str) value of the environment variable :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.get_remote_host_environment_variable') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(environment_variable, basestring): msg = 'environment_variable argument must be a string' log.error(msg) raise TypeError(msg) log.info('Checking host {h} for environment variable: {v}...'.format(h=host, v=environment_variable)) command = ['ssh', '{h}'.format(h=host), 'echo ${v}'.format(v=environment_variable)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] except CommandError: raise if code != 0: msg = 'There was a problem checking the remote host {h} over SSH, return code: {c}'.format( h=host, c=code) log.error(msg) raise CommandError(msg) else: value = result['output'].strip() log.info('Environment variable {e} on host {h} value is: {v}'.format( e=environment_variable, h=host, v=value)) return value
[ "def", "get_remote_host_environment_variable", "(", "host", ",", "environment_variable", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_remote_host_environment_variable'", ")", "if", "not", "isinstance", "(", "host", ",", "basestring", ")", ":", "msg", "=", "'host argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "environment_variable", ",", "basestring", ")", ":", "msg", "=", "'environment_variable argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "log", ".", "info", "(", "'Checking host {h} for environment variable: {v}...'", ".", "format", "(", "h", "=", "host", ",", "v", "=", "environment_variable", ")", ")", "command", "=", "[", "'ssh'", ",", "'{h}'", ".", "format", "(", "h", "=", "host", ")", ",", "'echo ${v}'", ".", "format", "(", "v", "=", "environment_variable", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "5.0", ")", "code", "=", "result", "[", "'code'", "]", "except", "CommandError", ":", "raise", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem checking the remote host {h} over SSH, return code: {c}'", ".", "format", "(", "h", "=", "host", ",", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "else", ":", "value", "=", "result", "[", "'output'", "]", ".", "strip", "(", ")", "log", ".", "info", "(", "'Environment variable {e} on host {h} value is: {v}'", ".", "format", "(", "e", "=", "environment_variable", ",", "h", "=", "host", ",", "v", "=", "value", ")", ")", "return", "value" ]
Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param environment_variable: (str) variable to query :return: (str) value of the environment variable :raises: TypeError, CommandError
[ "Retrieves", "the", "value", "of", "an", "environment", "variable", "of", "a", "remote", "host", "over", "SSH" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1115-L1149
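A hedged sketch for get_remote_host_environment_variable, which shells out to `ssh <host> echo $VAR` and therefore presumes passwordless SSH to the target; the host name below is hypothetical and the import path is assumed.

    from pycons3rt.bash import get_remote_host_environment_variable  # import path assumed

    try:
        java_home = get_remote_host_environment_variable('worker01', 'JAVA_HOME')  # hypothetical host
        print('JAVA_HOME on worker01: {v}'.format(v=java_home))
    except Exception as ex:
        # TypeError for non-string args, or the library's CommandError if the SSH call fails
        print('Remote query failed: {e}'.format(e=str(ex)))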
cons3rt/pycons3rt
pycons3rt/bash.py
set_remote_host_environment_variable
def set_remote_host_environment_variable(host, variable_name, variable_value, env_file='/etc/bashrc'): """Sets an environment variable on the remote host in the specified environment file :param host: (str) host to set environment variable on :param variable_name: (str) name of the variable :param variable_value: (str) value of the variable :param env_file: (str) full path to the environment file to set :return: None :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.set_remote_host_environment_variable') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(variable_name, basestring): msg = 'variable_name argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(variable_value, basestring): msg = 'variable_value argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(env_file, basestring): msg = 'env_file argument must be a string' log.error(msg) raise TypeError(msg) log.info('Creating the environment file if it does not exist...') command = ['ssh', host, 'touch {f}'.format(f=env_file)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem creating environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output) log.error(msg) raise CommandError(msg) log.info('Creating ensuring the environment file is executable...') command = ['ssh', host, 'chmod +x {f}'.format(f=env_file)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem setting permissions on environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output) log.error(msg) raise CommandError(msg) log.info('Adding environment variable {v} with value {n} to file {f}...'.format( v=variable_name, n=variable_value, f=env_file)) command = ['ssh', host, 'echo "export {v}=\\"{n}\\"" >> {f}'.format(f=env_file, v=variable_name, n=variable_value)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem adding variable {v} to environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output, v=variable_name) log.error(msg) raise CommandError(msg) else: log.info('Environment variable {v} set to {n} on host {h}'.format(v=variable_name, n=variable_value, h=host))
python
def set_remote_host_environment_variable(host, variable_name, variable_value, env_file='/etc/bashrc'): """Sets an environment variable on the remote host in the specified environment file :param host: (str) host to set environment variable on :param variable_name: (str) name of the variable :param variable_value: (str) value of the variable :param env_file: (str) full path to the environment file to set :return: None :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.set_remote_host_environment_variable') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(variable_name, basestring): msg = 'variable_name argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(variable_value, basestring): msg = 'variable_value argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(env_file, basestring): msg = 'env_file argument must be a string' log.error(msg) raise TypeError(msg) log.info('Creating the environment file if it does not exist...') command = ['ssh', host, 'touch {f}'.format(f=env_file)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem creating environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output) log.error(msg) raise CommandError(msg) log.info('Creating ensuring the environment file is executable...') command = ['ssh', host, 'chmod +x {f}'.format(f=env_file)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem setting permissions on environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output) log.error(msg) raise CommandError(msg) log.info('Adding environment variable {v} with value {n} to file {f}...'.format( v=variable_name, n=variable_value, f=env_file)) command = ['ssh', host, 'echo "export {v}=\\"{n}\\"" >> {f}'.format(f=env_file, v=variable_name, n=variable_value)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code != 0: msg = 'There was a problem adding variable {v} to environment file {f} on remote host {h} over SSH, ' \ 'exit code {c} and output:\n{o}'.format(h=host, c=code, f=env_file, o=output, v=variable_name) log.error(msg) raise CommandError(msg) else: log.info('Environment variable {v} set to {n} on host {h}'.format(v=variable_name, n=variable_value, h=host))
[ "def", "set_remote_host_environment_variable", "(", "host", ",", "variable_name", ",", "variable_value", ",", "env_file", "=", "'/etc/bashrc'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.set_remote_host_environment_variable'", ")", "if", "not", "isinstance", "(", "host", ",", "basestring", ")", ":", "msg", "=", "'host argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "variable_name", ",", "basestring", ")", ":", "msg", "=", "'variable_name argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "variable_value", ",", "basestring", ")", ":", "msg", "=", "'variable_value argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "env_file", ",", "basestring", ")", ":", "msg", "=", "'env_file argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "log", ".", "info", "(", "'Creating the environment file if it does not exist...'", ")", "command", "=", "[", "'ssh'", ",", "host", ",", "'touch {f}'", ".", "format", "(", "f", "=", "env_file", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "5.0", ")", "code", "=", "result", "[", "'code'", "]", "output", "=", "result", "[", "'output'", "]", "except", "CommandError", ":", "raise", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem creating environment file {f} on remote host {h} over SSH, '", "'exit code {c} and output:\\n{o}'", ".", "format", "(", "h", "=", "host", ",", "c", "=", "code", ",", "f", "=", "env_file", ",", "o", "=", "output", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Creating ensuring the environment file is executable...'", ")", "command", "=", "[", "'ssh'", ",", "host", ",", "'chmod +x {f}'", ".", "format", "(", "f", "=", "env_file", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "5.0", ")", "code", "=", "result", "[", "'code'", "]", "output", "=", "result", "[", "'output'", "]", "except", "CommandError", ":", "raise", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem setting permissions on environment file {f} on remote host {h} over SSH, '", "'exit code {c} and output:\\n{o}'", ".", "format", "(", "h", "=", "host", ",", "c", "=", "code", ",", "f", "=", "env_file", ",", "o", "=", "output", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "log", ".", "info", "(", "'Adding environment variable {v} with value {n} to file {f}...'", ".", "format", "(", "v", "=", "variable_name", ",", "n", "=", "variable_value", ",", "f", "=", "env_file", ")", ")", "command", "=", "[", "'ssh'", ",", "host", ",", "'echo \"export {v}=\\\\\"{n}\\\\\"\" >> {f}'", ".", "format", "(", "f", "=", "env_file", ",", "v", "=", "variable_name", ",", "n", "=", "variable_value", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "5.0", ")", "code", "=", "result", "[", "'code'", "]", "output", "=", "result", "[", "'output'", "]", "except", "CommandError", ":", "raise", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem adding variable {v} to environment file {f} on remote host {h} over SSH, '", "'exit code {c} and output:\\n{o}'", ".", "format", "(", "h", "=", "host", 
",", "c", "=", "code", ",", "f", "=", "env_file", ",", "o", "=", "output", ",", "v", "=", "variable_name", ")", "log", ".", "error", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")", "else", ":", "log", ".", "info", "(", "'Environment variable {v} set to {n} on host {h}'", ".", "format", "(", "v", "=", "variable_name", ",", "n", "=", "variable_value", ",", "h", "=", "host", ")", ")" ]
Sets an environment variable on the remote host in the specified environment file :param host: (str) host to set environment variable on :param variable_name: (str) name of the variable :param variable_value: (str) value of the variable :param env_file: (str) full path to the environment file to set :return: None :raises: TypeError, CommandError
[ "Sets", "an", "environment", "variable", "on", "the", "remote", "host", "in", "the", "specified", "environment", "file" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1152-L1223
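A sketch for set_remote_host_environment_variable, which issues three SSH calls (touch, chmod +x, then an appended export line on the remote env_file); host and values below are hypothetical, the import path is assumed, and passwordless SSH is presumed.

    from pycons3rt.bash import set_remote_host_environment_variable  # import path assumed

    try:
        set_remote_host_environment_variable(
            host='worker01',                      # hypothetical host
            variable_name='DEPLOYMENT_HOME',      # hypothetical variable
            variable_value='/opt/deployment',
            env_file='/etc/bashrc')               # default env file used by the function
    except Exception as ex:
        # TypeError for bad args, or the library's CommandError if any SSH step fails
        print('Could not set the remote variable: {e}'.format(e=str(ex)))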
cons3rt/pycons3rt
pycons3rt/bash.py
run_remote_command
def run_remote_command(host, command, timeout_sec=5.0): """Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param command: (str) command :param timeout_sec (float) seconds to wait before killing the command. :return: (str) command output :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.run_remote_command') if not isinstance(host, basestring): msg = 'host argument must be a string' raise TypeError(msg) if not isinstance(command, basestring): msg = 'command argument must be a string' raise TypeError(msg) log.debug('Running remote command on host: {h}: {c}...'.format(h=host, c=command)) command = ['ssh', '{h}'.format(h=host), '{c}'.format(c=command)] try: result = run_command(command, timeout_sec=timeout_sec) code = result['code'] except CommandError: raise if code != 0: msg = 'There was a problem running command [{m}] on host {h} over SSH, return code: {c}, and ' \ 'produced output:\n{o}'.format(h=host, c=code, m=' '.join(command), o=result['output']) raise CommandError(msg) else: output_text = result['output'].strip() log.debug('Running command [{m}] host {h} over SSH produced output: {o}'.format( m=command, h=host, o=output_text)) output = { 'output': output_text, 'code': code } return output
python
def run_remote_command(host, command, timeout_sec=5.0): """Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param command: (str) command :param timeout_sec (float) seconds to wait before killing the command. :return: (str) command output :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.run_remote_command') if not isinstance(host, basestring): msg = 'host argument must be a string' raise TypeError(msg) if not isinstance(command, basestring): msg = 'command argument must be a string' raise TypeError(msg) log.debug('Running remote command on host: {h}: {c}...'.format(h=host, c=command)) command = ['ssh', '{h}'.format(h=host), '{c}'.format(c=command)] try: result = run_command(command, timeout_sec=timeout_sec) code = result['code'] except CommandError: raise if code != 0: msg = 'There was a problem running command [{m}] on host {h} over SSH, return code: {c}, and ' \ 'produced output:\n{o}'.format(h=host, c=code, m=' '.join(command), o=result['output']) raise CommandError(msg) else: output_text = result['output'].strip() log.debug('Running command [{m}] host {h} over SSH produced output: {o}'.format( m=command, h=host, o=output_text)) output = { 'output': output_text, 'code': code } return output
[ "def", "run_remote_command", "(", "host", ",", "command", ",", "timeout_sec", "=", "5.0", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.run_remote_command'", ")", "if", "not", "isinstance", "(", "host", ",", "basestring", ")", ":", "msg", "=", "'host argument must be a string'", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "command", ",", "basestring", ")", ":", "msg", "=", "'command argument must be a string'", "raise", "TypeError", "(", "msg", ")", "log", ".", "debug", "(", "'Running remote command on host: {h}: {c}...'", ".", "format", "(", "h", "=", "host", ",", "c", "=", "command", ")", ")", "command", "=", "[", "'ssh'", ",", "'{h}'", ".", "format", "(", "h", "=", "host", ")", ",", "'{c}'", ".", "format", "(", "c", "=", "command", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "timeout_sec", ")", "code", "=", "result", "[", "'code'", "]", "except", "CommandError", ":", "raise", "if", "code", "!=", "0", ":", "msg", "=", "'There was a problem running command [{m}] on host {h} over SSH, return code: {c}, and '", "'produced output:\\n{o}'", ".", "format", "(", "h", "=", "host", ",", "c", "=", "code", ",", "m", "=", "' '", ".", "join", "(", "command", ")", ",", "o", "=", "result", "[", "'output'", "]", ")", "raise", "CommandError", "(", "msg", ")", "else", ":", "output_text", "=", "result", "[", "'output'", "]", ".", "strip", "(", ")", "log", ".", "debug", "(", "'Running command [{m}] host {h} over SSH produced output: {o}'", ".", "format", "(", "m", "=", "command", ",", "h", "=", "host", ",", "o", "=", "output_text", ")", ")", "output", "=", "{", "'output'", ":", "output_text", ",", "'code'", ":", "code", "}", "return", "output" ]
Retrieves the value of an environment variable of a remote host over SSH :param host: (str) host to query :param command: (str) command :param timeout_sec (float) seconds to wait before killing the command. :return: (str) command output :raises: TypeError, CommandError
[ "Retrieves", "the", "value", "of", "an", "environment", "variable", "of", "a", "remote", "host", "over", "SSH" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1226-L1262
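Note that run_remote_command's docstring above is copied from the environment-variable helper; the function actually runs an arbitrary command over SSH and returns a dict with 'output' and 'code' keys. A hedged sketch (host hypothetical, import path assumed):

    from pycons3rt.bash import run_remote_command  # import path assumed

    try:
        result = run_remote_command('worker01', 'uname -r', timeout_sec=10.0)
        # result is a dict: stripped command output under 'output', exit code under 'code'
        print('Kernel on worker01: {o} (exit code {c})'.format(o=result['output'], c=result['code']))
    except Exception as ex:
        # TypeError for non-string args, or the library's CommandError on a non-zero exit
        print('Remote command failed: {e}'.format(e=str(ex)))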
cons3rt/pycons3rt
pycons3rt/bash.py
check_remote_host_marker_file
def check_remote_host_marker_file(host, file_path): """Queries a remote host over SSH to check for existence of a marker file :param host: (str) host to query :param file_path: (str) path to the marker file :return: (bool) True if the marker file exists :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.check_remote_host_marker_file') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(file_path, basestring): msg = 'file_path argument must be a string' log.error(msg) raise TypeError(msg) log.debug('Checking host {h} for marker file: {f}...'.format(h=host, f=file_path)) command = ['ssh', '{h}'.format(h=host), 'if [ -f {f} ] ; then exit 0 ; else exit 1 ; fi'.format(f=file_path)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code == 0: log.debug('Marker file <{f}> was found on host {h}'.format(f=file_path, h=host)) return True elif code == 1 and output == '': log.debug('Marker file <{f}> was not found on host {h}'.format(f=file_path, h=host)) return False else: msg = 'There was a problem checking the remote host {h} over SSH for marker file {f}, ' \ 'command returned code {c} and produced output: {o}'.format( h=host, f=file_path, c=code, o=output) log.debug(msg) raise CommandError(msg)
python
def check_remote_host_marker_file(host, file_path): """Queries a remote host over SSH to check for existence of a marker file :param host: (str) host to query :param file_path: (str) path to the marker file :return: (bool) True if the marker file exists :raises: TypeError, CommandError """ log = logging.getLogger(mod_logger + '.check_remote_host_marker_file') if not isinstance(host, basestring): msg = 'host argument must be a string' log.error(msg) raise TypeError(msg) if not isinstance(file_path, basestring): msg = 'file_path argument must be a string' log.error(msg) raise TypeError(msg) log.debug('Checking host {h} for marker file: {f}...'.format(h=host, f=file_path)) command = ['ssh', '{h}'.format(h=host), 'if [ -f {f} ] ; then exit 0 ; else exit 1 ; fi'.format(f=file_path)] try: result = run_command(command, timeout_sec=5.0) code = result['code'] output = result['output'] except CommandError: raise if code == 0: log.debug('Marker file <{f}> was found on host {h}'.format(f=file_path, h=host)) return True elif code == 1 and output == '': log.debug('Marker file <{f}> was not found on host {h}'.format(f=file_path, h=host)) return False else: msg = 'There was a problem checking the remote host {h} over SSH for marker file {f}, ' \ 'command returned code {c} and produced output: {o}'.format( h=host, f=file_path, c=code, o=output) log.debug(msg) raise CommandError(msg)
[ "def", "check_remote_host_marker_file", "(", "host", ",", "file_path", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.check_remote_host_marker_file'", ")", "if", "not", "isinstance", "(", "host", ",", "basestring", ")", ":", "msg", "=", "'host argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "if", "not", "isinstance", "(", "file_path", ",", "basestring", ")", ":", "msg", "=", "'file_path argument must be a string'", "log", ".", "error", "(", "msg", ")", "raise", "TypeError", "(", "msg", ")", "log", ".", "debug", "(", "'Checking host {h} for marker file: {f}...'", ".", "format", "(", "h", "=", "host", ",", "f", "=", "file_path", ")", ")", "command", "=", "[", "'ssh'", ",", "'{h}'", ".", "format", "(", "h", "=", "host", ")", ",", "'if [ -f {f} ] ; then exit 0 ; else exit 1 ; fi'", ".", "format", "(", "f", "=", "file_path", ")", "]", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "5.0", ")", "code", "=", "result", "[", "'code'", "]", "output", "=", "result", "[", "'output'", "]", "except", "CommandError", ":", "raise", "if", "code", "==", "0", ":", "log", ".", "debug", "(", "'Marker file <{f}> was found on host {h}'", ".", "format", "(", "f", "=", "file_path", ",", "h", "=", "host", ")", ")", "return", "True", "elif", "code", "==", "1", "and", "output", "==", "''", ":", "log", ".", "debug", "(", "'Marker file <{f}> was not found on host {h}'", ".", "format", "(", "f", "=", "file_path", ",", "h", "=", "host", ")", ")", "return", "False", "else", ":", "msg", "=", "'There was a problem checking the remote host {h} over SSH for marker file {f}, '", "'command returned code {c} and produced output: {o}'", ".", "format", "(", "h", "=", "host", ",", "f", "=", "file_path", ",", "c", "=", "code", ",", "o", "=", "output", ")", "log", ".", "debug", "(", "msg", ")", "raise", "CommandError", "(", "msg", ")" ]
Queries a remote host over SSH to check for existence of a marker file :param host: (str) host to query :param file_path: (str) path to the marker file :return: (bool) True if the marker file exists :raises: TypeError, CommandError
[ "Queries", "a", "remote", "host", "over", "SSH", "to", "check", "for", "existence", "of", "a", "marker", "file" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1265-L1302
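A sketch for check_remote_host_marker_file, which returns True/False from a remote `[ -f path ]` test and only raises when the SSH call itself misbehaves; the host and path are hypothetical and the import path is assumed.

    from pycons3rt.bash import check_remote_host_marker_file  # import path assumed

    try:
        if check_remote_host_marker_file('worker01', '/opt/app/.provisioned'):  # hypothetical marker
            print('worker01 is already provisioned, skipping setup')
        else:
            print('worker01 still needs provisioning')
    except Exception as ex:
        # TypeError for bad args, or the library's CommandError on an unexpected SSH result
        print('Unable to check the marker file: {e}'.format(e=str(ex)))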
cons3rt/pycons3rt
pycons3rt/bash.py
restore_iptables
def restore_iptables(firewall_rules): """Restores and saves firewall rules from the firewall_rules file :param firewall_rules: (str) Full path to the firewall rules file :return: None :raises OSError """ log = logging.getLogger(mod_logger + '.restore_iptables') log.info('Restoring firewall rules from file: {f}'.format(f=firewall_rules)) # Ensure the firewall rules file exists if not os.path.isfile(firewall_rules): msg = 'Unable to restore iptables, file not found: {f}'.format(f=firewall_rules) log.error(msg) raise OSError(msg) # Restore the firewall rules log.info('Restoring iptables from file: {f}'.format(f=firewall_rules)) command = ['/sbin/iptables-restore', firewall_rules] try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to restore firewall rules from file: {f}\n{e}'.format(f=firewall_rules, e=str(ex)) log.error(msg) raise OSError(msg) log.info('Restoring iptables produced output:\n{o}'.format(o=result['output'])) # Save iptables log.info('Saving iptables...') command = ['/etc/init.d/iptables', 'save'] try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to save firewall rules\n{e}'.format(e=str(ex)) log.error(msg) raise OSError(msg) log.info('Saving iptables produced output:\n{o}'.format(o=result['output']))
python
def restore_iptables(firewall_rules): """Restores and saves firewall rules from the firewall_rules file :param firewall_rules: (str) Full path to the firewall rules file :return: None :raises OSError """ log = logging.getLogger(mod_logger + '.restore_iptables') log.info('Restoring firewall rules from file: {f}'.format(f=firewall_rules)) # Ensure the firewall rules file exists if not os.path.isfile(firewall_rules): msg = 'Unable to restore iptables, file not found: {f}'.format(f=firewall_rules) log.error(msg) raise OSError(msg) # Restore the firewall rules log.info('Restoring iptables from file: {f}'.format(f=firewall_rules)) command = ['/sbin/iptables-restore', firewall_rules] try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to restore firewall rules from file: {f}\n{e}'.format(f=firewall_rules, e=str(ex)) log.error(msg) raise OSError(msg) log.info('Restoring iptables produced output:\n{o}'.format(o=result['output'])) # Save iptables log.info('Saving iptables...') command = ['/etc/init.d/iptables', 'save'] try: result = run_command(command) except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to save firewall rules\n{e}'.format(e=str(ex)) log.error(msg) raise OSError(msg) log.info('Saving iptables produced output:\n{o}'.format(o=result['output']))
[ "def", "restore_iptables", "(", "firewall_rules", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.restore_iptables'", ")", "log", ".", "info", "(", "'Restoring firewall rules from file: {f}'", ".", "format", "(", "f", "=", "firewall_rules", ")", ")", "# Ensure the firewall rules file exists", "if", "not", "os", ".", "path", ".", "isfile", "(", "firewall_rules", ")", ":", "msg", "=", "'Unable to restore iptables, file not found: {f}'", ".", "format", "(", "f", "=", "firewall_rules", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "# Restore the firewall rules", "log", ".", "info", "(", "'Restoring iptables from file: {f}'", ".", "format", "(", "f", "=", "firewall_rules", ")", ")", "command", "=", "[", "'/sbin/iptables-restore'", ",", "firewall_rules", "]", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to restore firewall rules from file: {f}\\n{e}'", ".", "format", "(", "f", "=", "firewall_rules", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "log", ".", "info", "(", "'Restoring iptables produced output:\\n{o}'", ".", "format", "(", "o", "=", "result", "[", "'output'", "]", ")", ")", "# Save iptables", "log", ".", "info", "(", "'Saving iptables...'", ")", "command", "=", "[", "'/etc/init.d/iptables'", ",", "'save'", "]", "try", ":", "result", "=", "run_command", "(", "command", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to save firewall rules\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "log", ".", "info", "(", "'Saving iptables produced output:\\n{o}'", ".", "format", "(", "o", "=", "result", "[", "'output'", "]", ")", ")" ]
Restores and saves firewall rules from the firewall_rules file :param firewall_rules: (str) Full path to the firewall rules file :return: None :raises OSError
[ "Restores", "and", "saves", "firewall", "rules", "from", "the", "firewall_rules", "file" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1339-L1377
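A small sketch for restore_iptables, which runs iptables-restore against a saved rules file and then the sysv `service iptables save`; the rules path is hypothetical, root privileges are required, and the import path is assumed.

    from pycons3rt.bash import restore_iptables  # import path assumed

    try:
        restore_iptables('/opt/backups/iptables.rules')  # hypothetical rules file
    except OSError as ex:
        print('Restoring iptables rules failed: {e}'.format(e=str(ex)))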
cons3rt/pycons3rt
pycons3rt/bash.py
remove_default_gateway
def remove_default_gateway(): """Removes Default Gateway configuration from /etc/sysconfig/network and restarts networking :return: None :raises: OSError """ log = logging.getLogger(mod_logger + '.remove_default_gateway') # Ensure the network script exists network_script = '/etc/sysconfig/network' if not os.path.isfile(network_script): log.info('Network script not found, nothing to do: {f}'.format(f=network_script)) return log.debug('Found network script: {f}'.format(f=network_script)) # Remove settings for GATEWAY and GATEWAYDEV log.info('Attempting to remove any default gateway configurations...') for line in fileinput.input(network_script, inplace=True): if re.search('^GATEWAY=.*', line): log.info('Removing GATEWAY line: {li}'.format(li=line)) elif re.search('^GATEWAYDEV=.*', line): log.info('Removing GATEWAYDEV line: {li}'.format(li=line)) else: log.debug('Keeping line: {li}'.format(li=line)) sys.stdout.write(line) # Restart networking for the changes to take effect log.info('Restarting the network service...') try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() raise OSError('{n}: Attempted unsuccessfully to restart networking\n{e}'.format( n=ex.__class__.__name__, e=str(ex))) else: log.info('Successfully restarted networking')
python
def remove_default_gateway(): """Removes Default Gateway configuration from /etc/sysconfig/network and restarts networking :return: None :raises: OSError """ log = logging.getLogger(mod_logger + '.remove_default_gateway') # Ensure the network script exists network_script = '/etc/sysconfig/network' if not os.path.isfile(network_script): log.info('Network script not found, nothing to do: {f}'.format(f=network_script)) return log.debug('Found network script: {f}'.format(f=network_script)) # Remove settings for GATEWAY and GATEWAYDEV log.info('Attempting to remove any default gateway configurations...') for line in fileinput.input(network_script, inplace=True): if re.search('^GATEWAY=.*', line): log.info('Removing GATEWAY line: {li}'.format(li=line)) elif re.search('^GATEWAYDEV=.*', line): log.info('Removing GATEWAYDEV line: {li}'.format(li=line)) else: log.debug('Keeping line: {li}'.format(li=line)) sys.stdout.write(line) # Restart networking for the changes to take effect log.info('Restarting the network service...') try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() raise OSError('{n}: Attempted unsuccessfully to restart networking\n{e}'.format( n=ex.__class__.__name__, e=str(ex))) else: log.info('Successfully restarted networking')
[ "def", "remove_default_gateway", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.remove_default_gateway'", ")", "# Ensure the network script exists", "network_script", "=", "'/etc/sysconfig/network'", "if", "not", "os", ".", "path", ".", "isfile", "(", "network_script", ")", ":", "log", ".", "info", "(", "'Network script not found, nothing to do: {f}'", ".", "format", "(", "f", "=", "network_script", ")", ")", "return", "log", ".", "debug", "(", "'Found network script: {f}'", ".", "format", "(", "f", "=", "network_script", ")", ")", "# Remove settings for GATEWAY and GATEWAYDEV", "log", ".", "info", "(", "'Attempting to remove any default gateway configurations...'", ")", "for", "line", "in", "fileinput", ".", "input", "(", "network_script", ",", "inplace", "=", "True", ")", ":", "if", "re", ".", "search", "(", "'^GATEWAY=.*'", ",", "line", ")", ":", "log", ".", "info", "(", "'Removing GATEWAY line: {li}'", ".", "format", "(", "li", "=", "line", ")", ")", "elif", "re", ".", "search", "(", "'^GATEWAYDEV=.*'", ",", "line", ")", ":", "log", ".", "info", "(", "'Removing GATEWAYDEV line: {li}'", ".", "format", "(", "li", "=", "line", ")", ")", "else", ":", "log", ".", "debug", "(", "'Keeping line: {li}'", ".", "format", "(", "li", "=", "line", ")", ")", "sys", ".", "stdout", ".", "write", "(", "line", ")", "# Restart networking for the changes to take effect", "log", ".", "info", "(", "'Restarting the network service...'", ")", "try", ":", "service_network_restart", "(", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "raise", "OSError", "(", "'{n}: Attempted unsuccessfully to restart networking\\n{e}'", ".", "format", "(", "n", "=", "ex", ".", "__class__", ".", "__name__", ",", "e", "=", "str", "(", "ex", ")", ")", ")", "else", ":", "log", ".", "info", "(", "'Successfully restarted networking'", ")" ]
Removes Default Gateway configuration from /etc/sysconfig/network and restarts networking :return: None :raises: OSError
[ "Removes", "Default", "Gateway", "configuration", "from", "/", "etc", "/", "sysconfig", "/", "network", "and", "restarts", "networking", ":", "return", ":", "None", ":", "raises", ":", "OSError" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1380-L1416
cons3rt/pycons3rt
pycons3rt/bash.py
is_systemd
def is_systemd(): """Determines whether this system uses systemd :return: (bool) True if this distro has systemd """ os_family = platform.system() if os_family != 'Linux': raise OSError('This method is only supported on Linux, found OS: {o}'.format(o=os_family)) linux_distro, linux_version, distro_name = platform.linux_distribution() # Determine when to use systemd systemd = False if 'ubuntu' in linux_distro.lower() and '16' in linux_version: systemd = True elif 'red' in linux_distro.lower() and '7' in linux_version: systemd = True elif 'cent' in linux_distro.lower() and '7' in linux_version: systemd = True return systemd
python
def is_systemd(): """Determines whether this system uses systemd :return: (bool) True if this distro has systemd """ os_family = platform.system() if os_family != 'Linux': raise OSError('This method is only supported on Linux, found OS: {o}'.format(o=os_family)) linux_distro, linux_version, distro_name = platform.linux_distribution() # Determine when to use systemd systemd = False if 'ubuntu' in linux_distro.lower() and '16' in linux_version: systemd = True elif 'red' in linux_distro.lower() and '7' in linux_version: systemd = True elif 'cent' in linux_distro.lower() and '7' in linux_version: systemd = True return systemd
[ "def", "is_systemd", "(", ")", ":", "os_family", "=", "platform", ".", "system", "(", ")", "if", "os_family", "!=", "'Linux'", ":", "raise", "OSError", "(", "'This method is only supported on Linux, found OS: {o}'", ".", "format", "(", "o", "=", "os_family", ")", ")", "linux_distro", ",", "linux_version", ",", "distro_name", "=", "platform", ".", "linux_distribution", "(", ")", "# Determine when to use systemd", "systemd", "=", "False", "if", "'ubuntu'", "in", "linux_distro", ".", "lower", "(", ")", "and", "'16'", "in", "linux_version", ":", "systemd", "=", "True", "elif", "'red'", "in", "linux_distro", ".", "lower", "(", ")", "and", "'7'", "in", "linux_version", ":", "systemd", "=", "True", "elif", "'cent'", "in", "linux_distro", ".", "lower", "(", ")", "and", "'7'", "in", "linux_version", ":", "systemd", "=", "True", "return", "systemd" ]
Determines whether this system uses systemd :return: (bool) True if this distro has systemd
[ "Determines", "whether", "this", "system", "uses", "systemd" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1419-L1437
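is_systemd keys off platform.linux_distribution(), so it only treats Ubuntu 16, RHEL 7, and CentOS 7 as systemd hosts. A short branching sketch (import path assumed):

    from pycons3rt.bash import is_systemd  # import path assumed

    try:
        init_style = 'systemd' if is_systemd() else 'sysv'
        print('This host appears to use {s} service management'.format(s=init_style))
    except OSError as ex:
        # Raised when the platform is not Linux
        print('Cannot determine the init system: {e}'.format(e=str(ex)))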
cons3rt/pycons3rt
pycons3rt/bash.py
manage_service
def manage_service(service_name, service_action='status', systemd=None, output=True): """Use to run Linux sysv or systemd service commands :param service_name (str) name of the service to start :param service_action (str) action to perform on the service :param systemd (bool) True if the command should use systemd :param output (bool) True to print output :return: None :raises: OSError """ log = logging.getLogger(mod_logger + '.manage_service') # Ensure the service name is a string if not isinstance(service_name, basestring): raise OSError('service_name arg must be a string, found: {t}'.format(t=service_name.__class__.__name__)) # Ensure the service name is a string if not isinstance(service_action, basestring): raise OSError('service_action arg must be a string, found: {t}'.format(t=service_name.__class__.__name__)) # Ensure the service action is valid valid_actions = ['start', 'stop', 'reload', 'restart', 'status', 'enable', 'disable'] service_action = service_action.lower().strip() if service_action not in valid_actions: raise OSError('Invalid service action requested [{a}], valid actions are: [{v}]'.format( a=service_action, v=','.join(valid_actions) )) log.info('Attempting to [{a}] service: {s}'.format(a=service_action, s=service_name)) # If systemd was not provided, attempt to determine which method to use if not systemd: log.debug('Systemd not provided, attempting to determine which method to use...') systemd = is_systemd() # Create commands depending on the method command_list = [] if systemd: if not service_name.endswith('.service'): service_name = '{s}.service'.format(s=service_name) log.info('Attempting to manage service with systemd: {s}'.format(s=service_name)) command_list.append(['/usr/bin/systemctl', service_action, service_name]) else: log.info('Attempting to manage service with sysv: {s}'.format(s=service_name)) # Determine the commands to run if service_action == 'enable': command_list.append(['/sbin/chkconfig', '--add', service_name]) command_list.append(['/sbin/chkconfig', service_name, 'on']) elif service_action == 'disable': command_list.append(['/sbin/chkconfig', service_name, 'off']) else: command_list.append(['/sbin/service', service_name, service_action]) # Run the commands in the command list post_command_wait_time_sec = 3 for command in command_list: log.info('Attempting to run command: [{c}]'.format(c=' '.join(command))) try: result = run_command(command, timeout_sec=30, output=output) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running a service management command\n{e}'.format(e=str(ex)) raise OSError, msg, trace log.info('Command exited with code: {c}'.format(c=str(result['code']))) if result['code'] != 0: msg = 'Command exited with a non-zero code: [{c}], and produced output:\n{o}'.format( c=str(result['code']), o=result['output']) raise OSError(msg) else: log.info('Command returned successfully with output:\n{o}'.format(o=result['output'])) log.info('Waiting {t} sec...'.format(t=str(post_command_wait_time_sec))) time.sleep(post_command_wait_time_sec)
python
def manage_service(service_name, service_action='status', systemd=None, output=True): """Use to run Linux sysv or systemd service commands :param service_name (str) name of the service to start :param service_action (str) action to perform on the service :param systemd (bool) True if the command should use systemd :param output (bool) True to print output :return: None :raises: OSError """ log = logging.getLogger(mod_logger + '.manage_service') # Ensure the service name is a string if not isinstance(service_name, basestring): raise OSError('service_name arg must be a string, found: {t}'.format(t=service_name.__class__.__name__)) # Ensure the service name is a string if not isinstance(service_action, basestring): raise OSError('service_action arg must be a string, found: {t}'.format(t=service_name.__class__.__name__)) # Ensure the service action is valid valid_actions = ['start', 'stop', 'reload', 'restart', 'status', 'enable', 'disable'] service_action = service_action.lower().strip() if service_action not in valid_actions: raise OSError('Invalid service action requested [{a}], valid actions are: [{v}]'.format( a=service_action, v=','.join(valid_actions) )) log.info('Attempting to [{a}] service: {s}'.format(a=service_action, s=service_name)) # If systemd was not provided, attempt to determine which method to use if not systemd: log.debug('Systemd not provided, attempting to determine which method to use...') systemd = is_systemd() # Create commands depending on the method command_list = [] if systemd: if not service_name.endswith('.service'): service_name = '{s}.service'.format(s=service_name) log.info('Attempting to manage service with systemd: {s}'.format(s=service_name)) command_list.append(['/usr/bin/systemctl', service_action, service_name]) else: log.info('Attempting to manage service with sysv: {s}'.format(s=service_name)) # Determine the commands to run if service_action == 'enable': command_list.append(['/sbin/chkconfig', '--add', service_name]) command_list.append(['/sbin/chkconfig', service_name, 'on']) elif service_action == 'disable': command_list.append(['/sbin/chkconfig', service_name, 'off']) else: command_list.append(['/sbin/service', service_name, service_action]) # Run the commands in the command list post_command_wait_time_sec = 3 for command in command_list: log.info('Attempting to run command: [{c}]'.format(c=' '.join(command))) try: result = run_command(command, timeout_sec=30, output=output) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running a service management command\n{e}'.format(e=str(ex)) raise OSError, msg, trace log.info('Command exited with code: {c}'.format(c=str(result['code']))) if result['code'] != 0: msg = 'Command exited with a non-zero code: [{c}], and produced output:\n{o}'.format( c=str(result['code']), o=result['output']) raise OSError(msg) else: log.info('Command returned successfully with output:\n{o}'.format(o=result['output'])) log.info('Waiting {t} sec...'.format(t=str(post_command_wait_time_sec))) time.sleep(post_command_wait_time_sec)
[ "def", "manage_service", "(", "service_name", ",", "service_action", "=", "'status'", ",", "systemd", "=", "None", ",", "output", "=", "True", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.manage_service'", ")", "# Ensure the service name is a string", "if", "not", "isinstance", "(", "service_name", ",", "basestring", ")", ":", "raise", "OSError", "(", "'service_name arg must be a string, found: {t}'", ".", "format", "(", "t", "=", "service_name", ".", "__class__", ".", "__name__", ")", ")", "# Ensure the service name is a string", "if", "not", "isinstance", "(", "service_action", ",", "basestring", ")", ":", "raise", "OSError", "(", "'service_action arg must be a string, found: {t}'", ".", "format", "(", "t", "=", "service_name", ".", "__class__", ".", "__name__", ")", ")", "# Ensure the service action is valid", "valid_actions", "=", "[", "'start'", ",", "'stop'", ",", "'reload'", ",", "'restart'", ",", "'status'", ",", "'enable'", ",", "'disable'", "]", "service_action", "=", "service_action", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "service_action", "not", "in", "valid_actions", ":", "raise", "OSError", "(", "'Invalid service action requested [{a}], valid actions are: [{v}]'", ".", "format", "(", "a", "=", "service_action", ",", "v", "=", "','", ".", "join", "(", "valid_actions", ")", ")", ")", "log", ".", "info", "(", "'Attempting to [{a}] service: {s}'", ".", "format", "(", "a", "=", "service_action", ",", "s", "=", "service_name", ")", ")", "# If systemd was not provided, attempt to determine which method to use", "if", "not", "systemd", ":", "log", ".", "debug", "(", "'Systemd not provided, attempting to determine which method to use...'", ")", "systemd", "=", "is_systemd", "(", ")", "# Create commands depending on the method", "command_list", "=", "[", "]", "if", "systemd", ":", "if", "not", "service_name", ".", "endswith", "(", "'.service'", ")", ":", "service_name", "=", "'{s}.service'", ".", "format", "(", "s", "=", "service_name", ")", "log", ".", "info", "(", "'Attempting to manage service with systemd: {s}'", ".", "format", "(", "s", "=", "service_name", ")", ")", "command_list", ".", "append", "(", "[", "'/usr/bin/systemctl'", ",", "service_action", ",", "service_name", "]", ")", "else", ":", "log", ".", "info", "(", "'Attempting to manage service with sysv: {s}'", ".", "format", "(", "s", "=", "service_name", ")", ")", "# Determine the commands to run", "if", "service_action", "==", "'enable'", ":", "command_list", ".", "append", "(", "[", "'/sbin/chkconfig'", ",", "'--add'", ",", "service_name", "]", ")", "command_list", ".", "append", "(", "[", "'/sbin/chkconfig'", ",", "service_name", ",", "'on'", "]", ")", "elif", "service_action", "==", "'disable'", ":", "command_list", ".", "append", "(", "[", "'/sbin/chkconfig'", ",", "service_name", ",", "'off'", "]", ")", "else", ":", "command_list", ".", "append", "(", "[", "'/sbin/service'", ",", "service_name", ",", "service_action", "]", ")", "# Run the commands in the command list", "post_command_wait_time_sec", "=", "3", "for", "command", "in", "command_list", ":", "log", ".", "info", "(", "'Attempting to run command: [{c}]'", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "command", ")", ")", ")", "try", ":", "result", "=", "run_command", "(", "command", ",", "timeout_sec", "=", "30", ",", "output", "=", "output", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem running a service 
management command\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "raise", "OSError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Command exited with code: {c}'", ".", "format", "(", "c", "=", "str", "(", "result", "[", "'code'", "]", ")", ")", ")", "if", "result", "[", "'code'", "]", "!=", "0", ":", "msg", "=", "'Command exited with a non-zero code: [{c}], and produced output:\\n{o}'", ".", "format", "(", "c", "=", "str", "(", "result", "[", "'code'", "]", ")", ",", "o", "=", "result", "[", "'output'", "]", ")", "raise", "OSError", "(", "msg", ")", "else", ":", "log", ".", "info", "(", "'Command returned successfully with output:\\n{o}'", ".", "format", "(", "o", "=", "result", "[", "'output'", "]", ")", ")", "log", ".", "info", "(", "'Waiting {t} sec...'", ".", "format", "(", "t", "=", "str", "(", "post_command_wait_time_sec", ")", ")", ")", "time", ".", "sleep", "(", "post_command_wait_time_sec", ")" ]
Use to run Linux sysv or systemd service commands :param service_name (str) name of the service to start :param service_action (str) action to perform on the service :param systemd (bool) True if the command should use systemd :param output (bool) True to print output :return: None :raises: OSError
[ "Use", "to", "run", "Linux", "sysv", "or", "systemd", "service", "commands" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1440-L1511
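A sketch for manage_service, which validates the action against its whitelist and then dispatches to systemctl or chkconfig/service based on is_systemd(); the service name below is only an example and the import path is assumed.

    from pycons3rt.bash import manage_service  # import path assumed

    try:
        manage_service('httpd', service_action='enable')   # chkconfig --add/on or systemctl enable
        manage_service('httpd', service_action='restart')  # sleeps a few seconds after each command
    except OSError as ex:
        print('Service management failed: {e}'.format(e=str(ex)))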
cons3rt/pycons3rt
pycons3rt/bash.py
system_reboot
def system_reboot(wait_time_sec=20): """Reboots the system after a specified wait time. Must be run as root :param wait_time_sec: (int) number of sec to wait before performing the reboot :return: None :raises: SystemRebootError, SystemRebootTimeoutError """ log = logging.getLogger(mod_logger + '.system_reboot') try: wait_time_sec = int(wait_time_sec) except ValueError: raise CommandError('wait_time_sec must be an int, or a string convertible to an int') log.info('Waiting {t} seconds before reboot...'.format(t=str(wait_time_sec))) time.sleep(wait_time_sec) command = ['shutdown', '-r', 'now'] log.info('Shutting down with command: [{c}]'.format(c=' '.join(command))) time.sleep(2) log.info('Shutting down...') try: result = run_command(command=command, timeout_sec=60) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running shutdown command: [{c}]\n{e}'.format(c=' '.join(command), e=str(ex)) raise SystemRebootError, msg, trace if result['code'] != 0: msg = 'Shutdown command exited with a non-zero code: [{c}], and produced output:\n{o}'.format( c=str(result['code']), o=result['output']) raise SystemRebootError(msg) log.info('Waiting 60 seconds to ensure the reboot completes...') time.sleep(60) msg = 'Reboot has not completed after 60 seconds' log.error(msg) raise SystemRebootTimeoutError(msg)
python
def system_reboot(wait_time_sec=20): """Reboots the system after a specified wait time. Must be run as root :param wait_time_sec: (int) number of sec to wait before performing the reboot :return: None :raises: SystemRebootError, SystemRebootTimeoutError """ log = logging.getLogger(mod_logger + '.system_reboot') try: wait_time_sec = int(wait_time_sec) except ValueError: raise CommandError('wait_time_sec must be an int, or a string convertible to an int') log.info('Waiting {t} seconds before reboot...'.format(t=str(wait_time_sec))) time.sleep(wait_time_sec) command = ['shutdown', '-r', 'now'] log.info('Shutting down with command: [{c}]'.format(c=' '.join(command))) time.sleep(2) log.info('Shutting down...') try: result = run_command(command=command, timeout_sec=60) except CommandError: _, ex, trace = sys.exc_info() msg = 'There was a problem running shutdown command: [{c}]\n{e}'.format(c=' '.join(command), e=str(ex)) raise SystemRebootError, msg, trace if result['code'] != 0: msg = 'Shutdown command exited with a non-zero code: [{c}], and produced output:\n{o}'.format( c=str(result['code']), o=result['output']) raise SystemRebootError(msg) log.info('Waiting 60 seconds to ensure the reboot completes...') time.sleep(60) msg = 'Reboot has not completed after 60 seconds' log.error(msg) raise SystemRebootTimeoutError(msg)
[ "def", "system_reboot", "(", "wait_time_sec", "=", "20", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.system_reboot'", ")", "try", ":", "wait_time_sec", "=", "int", "(", "wait_time_sec", ")", "except", "ValueError", ":", "raise", "CommandError", "(", "'wait_time_sec must be an int, or a string convertible to an int'", ")", "log", ".", "info", "(", "'Waiting {t} seconds before reboot...'", ".", "format", "(", "t", "=", "str", "(", "wait_time_sec", ")", ")", ")", "time", ".", "sleep", "(", "wait_time_sec", ")", "command", "=", "[", "'shutdown'", ",", "'-r'", ",", "'now'", "]", "log", ".", "info", "(", "'Shutting down with command: [{c}]'", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "command", ")", ")", ")", "time", ".", "sleep", "(", "2", ")", "log", ".", "info", "(", "'Shutting down...'", ")", "try", ":", "result", "=", "run_command", "(", "command", "=", "command", ",", "timeout_sec", "=", "60", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem running shutdown command: [{c}]\\n{e}'", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "command", ")", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "SystemRebootError", ",", "msg", ",", "trace", "if", "result", "[", "'code'", "]", "!=", "0", ":", "msg", "=", "'Shutdown command exited with a non-zero code: [{c}], and produced output:\\n{o}'", ".", "format", "(", "c", "=", "str", "(", "result", "[", "'code'", "]", ")", ",", "o", "=", "result", "[", "'output'", "]", ")", "raise", "SystemRebootError", "(", "msg", ")", "log", ".", "info", "(", "'Waiting 60 seconds to ensure the reboot completes...'", ")", "time", ".", "sleep", "(", "60", ")", "msg", "=", "'Reboot has not completed after 60 seconds'", "log", ".", "error", "(", "msg", ")", "raise", "SystemRebootTimeoutError", "(", "msg", ")" ]
Reboots the system after a specified wait time. Must be run as root :param wait_time_sec: (int) number of sec to wait before performing the reboot :return: None :raises: SystemRebootError, SystemRebootTimeoutError
[ "Reboots", "the", "system", "after", "a", "specified", "wait", "time", ".", "Must", "be", "run", "as", "root" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1514-L1548
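Note: system_reboot above uses Python 2's three-argument raise statement (raise SystemRebootError, msg, trace), so it only parses under Python 2. A minimal Python 3 sketch of the same "re-raise with the original traceback" step, assuming the same exception classes are available, would be:

    import sys

    def reraise_as(exc_cls, msg):
        # Python 3 equivalent of the Python 2 idiom `raise exc_cls, msg, trace`:
        # re-raise the exception currently being handled as exc_cls(msg),
        # keeping its traceback and chaining the original cause.
        _, ex, trace = sys.exc_info()
        raise exc_cls(msg).with_traceback(trace) from ex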
cons3rt/pycons3rt
pycons3rt/bash.py
main
def main(): """Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None """ mkdir_p('/tmp/test/test') source('/root/.bash_profile') yum_install(['httpd', 'git']) yum_install(['httpd', 'git'], dest_dir='/tmp/test/test', downloadonly=True) sed('/Users/yennaco/Downloads/homer_testing/network', '^HOSTNAME.*', 'HOSTNAME=foo.joe') test_script = '/Users/yennaco/Downloads/homer/script.sh' results = run_command([test_script], timeout_sec=1000) print('Script {s} produced exit code [{c}] and output:\n{o}'.format( s=test_script, c=results['code'], o=results['output']))
python
def main(): """Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None """ mkdir_p('/tmp/test/test') source('/root/.bash_profile') yum_install(['httpd', 'git']) yum_install(['httpd', 'git'], dest_dir='/tmp/test/test', downloadonly=True) sed('/Users/yennaco/Downloads/homer_testing/network', '^HOSTNAME.*', 'HOSTNAME=foo.joe') test_script = '/Users/yennaco/Downloads/homer/script.sh' results = run_command([test_script], timeout_sec=1000) print('Script {s} produced exit code [{c}] and output:\n{o}'.format( s=test_script, c=results['code'], o=results['output']))
[ "def", "main", "(", ")", ":", "mkdir_p", "(", "'/tmp/test/test'", ")", "source", "(", "'/root/.bash_profile'", ")", "yum_install", "(", "[", "'httpd'", ",", "'git'", "]", ")", "yum_install", "(", "[", "'httpd'", ",", "'git'", "]", ",", "dest_dir", "=", "'/tmp/test/test'", ",", "downloadonly", "=", "True", ")", "sed", "(", "'/Users/yennaco/Downloads/homer_testing/network'", ",", "'^HOSTNAME.*'", ",", "'HOSTNAME=foo.joe'", ")", "test_script", "=", "'/Users/yennaco/Downloads/homer/script.sh'", "results", "=", "run_command", "(", "[", "test_script", "]", ",", "timeout_sec", "=", "1000", ")", "print", "(", "'Script {s} produced exit code [{c}] and output:\\n{o}'", ".", "format", "(", "s", "=", "test_script", ",", "c", "=", "results", "[", "'code'", "]", ",", "o", "=", "results", "[", "'output'", "]", ")", ")" ]
Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None
[ "Sample", "usage", "for", "this", "python", "module" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/bash.py#L1551-L1567
kajala/django-jutil
jutil/logs.py
log_event
def log_event(name: str, request: Request=None, data=None, ip=None): """ Logs consistent event for easy parsing/analysis. :param name: Name of the event. Will be logged as EVENT_XXX with XXX in capitals. :param request: Django REST framework Request (optional) :param data: Even data (optional) :param ip: Even IP (optional) """ log_data = {} if not ip and request: ip = get_real_ip(request) if ip: log_data['ip'] = ip if data: log_data['data'] = data msg = 'EVENT_{}: {}'.format(name.upper(), json.dumps(log_data)) logger.info(msg)
python
def log_event(name: str, request: Request=None, data=None, ip=None): """ Logs consistent event for easy parsing/analysis. :param name: Name of the event. Will be logged as EVENT_XXX with XXX in capitals. :param request: Django REST framework Request (optional) :param data: Even data (optional) :param ip: Even IP (optional) """ log_data = {} if not ip and request: ip = get_real_ip(request) if ip: log_data['ip'] = ip if data: log_data['data'] = data msg = 'EVENT_{}: {}'.format(name.upper(), json.dumps(log_data)) logger.info(msg)
[ "def", "log_event", "(", "name", ":", "str", ",", "request", ":", "Request", "=", "None", ",", "data", "=", "None", ",", "ip", "=", "None", ")", ":", "log_data", "=", "{", "}", "if", "not", "ip", "and", "request", ":", "ip", "=", "get_real_ip", "(", "request", ")", "if", "ip", ":", "log_data", "[", "'ip'", "]", "=", "ip", "if", "data", ":", "log_data", "[", "'data'", "]", "=", "data", "msg", "=", "'EVENT_{}: {}'", ".", "format", "(", "name", ".", "upper", "(", ")", ",", "json", ".", "dumps", "(", "log_data", ")", ")", "logger", ".", "info", "(", "msg", ")" ]
Logs consistent event for easy parsing/analysis. :param name: Name of the event. Will be logged as EVENT_XXX with XXX in capitals. :param request: Django REST framework Request (optional) :param data: Event data (optional) :param ip: Event IP (optional)
[ "Logs", "consistent", "event", "for", "easy", "parsing", "/", "analysis", ".", ":", "param", "name", ":", "Name", "of", "the", "event", ".", "Will", "be", "logged", "as", "EVENT_XXX", "with", "XXX", "in", "capitals", ".", ":", "param", "request", ":", "Django", "REST", "framework", "Request", "(", "optional", ")", ":", "param", "data", ":", "Event", "data", "(", "optional", ")", ":", "param", "ip", ":", "Event", "IP", "(", "optional", ")" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/logs.py#L10-L27
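A short usage sketch for log_event; the view context, request object and payload shown here are hypothetical, and get_real_ip must be able to extract a client IP from the request:

    # Inside a Django REST framework view (request is a rest_framework Request):
    log_event('user_login', request=request, data={'user_id': 42})
    # -> emits an INFO line of the form: EVENT_USER_LOGIN: {"ip": "...", "data": {"user_id": 42}}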
jaredLunde/vital-tools
vital/security/__init__.py
aes_b64_encrypt
def aes_b64_encrypt(value, secret, block_size=AES.block_size): """ AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ # iv = randstr(block_size * 2, rng=random) iv = randstr(block_size * 2) cipher = AES.new(secret[:32], AES.MODE_CFB, iv[:block_size].encode()) return iv + b64encode(cipher.encrypt( uniorbytes(value, bytes))).decode('utf-8')
python
def aes_b64_encrypt(value, secret, block_size=AES.block_size): """ AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ # iv = randstr(block_size * 2, rng=random) iv = randstr(block_size * 2) cipher = AES.new(secret[:32], AES.MODE_CFB, iv[:block_size].encode()) return iv + b64encode(cipher.encrypt( uniorbytes(value, bytes))).decode('utf-8')
[ "def", "aes_b64_encrypt", "(", "value", ",", "secret", ",", "block_size", "=", "AES", ".", "block_size", ")", ":", "# iv = randstr(block_size * 2, rng=random)", "iv", "=", "randstr", "(", "block_size", "*", "2", ")", "cipher", "=", "AES", ".", "new", "(", "secret", "[", ":", "32", "]", ",", "AES", ".", "MODE_CFB", ",", "iv", "[", ":", "block_size", "]", ".", "encode", "(", ")", ")", "return", "iv", "+", "b64encode", "(", "cipher", ".", "encrypt", "(", "uniorbytes", "(", "value", ",", "bytes", ")", ")", ")", ".", "decode", "(", "'utf-8'", ")" ]
AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..
[ "AES", "encrypt", "@value", "with", "@secret", "using", "the", "|CFB|", "mode", "of", "AES", "with", "a", "cryptographically", "secure", "initialization", "vector", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L31-L52
jaredLunde/vital-tools
vital/security/__init__.py
aes_b64_decrypt
def aes_b64_decrypt(value, secret, block_size=AES.block_size): """ AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ if value is not None: iv = value[:block_size] cipher = AES.new(secret[:32], AES.MODE_CFB, iv) return cipher.decrypt(b64decode( uniorbytes(value[block_size * 2:], bytes))).decode('utf-8')
python
def aes_b64_decrypt(value, secret, block_size=AES.block_size): """ AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ if value is not None: iv = value[:block_size] cipher = AES.new(secret[:32], AES.MODE_CFB, iv) return cipher.decrypt(b64decode( uniorbytes(value[block_size * 2:], bytes))).decode('utf-8')
[ "def", "aes_b64_decrypt", "(", "value", ",", "secret", ",", "block_size", "=", "AES", ".", "block_size", ")", ":", "if", "value", "is", "not", "None", ":", "iv", "=", "value", "[", ":", "block_size", "]", "cipher", "=", "AES", ".", "new", "(", "secret", "[", ":", "32", "]", ",", "AES", ".", "MODE_CFB", ",", "iv", ")", "return", "cipher", ".", "decrypt", "(", "b64decode", "(", "uniorbytes", "(", "value", "[", "block_size", "*", "2", ":", "]", ",", "bytes", ")", ")", ")", ".", "decode", "(", "'utf-8'", ")" ]
AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..
[ "AES", "decrypt", "@value", "with", "@secret", "using", "the", "|CFB|", "mode", "of", "AES", "with", "a", "cryptographically", "secure", "initialization", "vector", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L55-L76
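The two helpers above are designed to round-trip; a small sanity sketch, assuming the Python 2 / PyCrypto environment the module targets and a secret of at least 32 characters (both functions slice secret[:32]):

    secret = 'k' * 32                                # hypothetical 32-character key material
    token = aes_b64_encrypt('Hello, world', secret)  # random IV prefix + base64 ciphertext
    assert aes_b64_decrypt(token, secret) == 'Hello, world'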
jaredLunde/vital-tools
vital/security/__init__.py
aes_encrypt
def aes_encrypt(value, secret, block_size=AES.block_size): """ AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#bytes) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..f """ iv = os.urandom(block_size * 2) cipher = AES.new(secret[:32], AES.MODE_CFB, iv[:block_size]) return b'%s%s' % (iv, cipher.encrypt(value))
python
def aes_encrypt(value, secret, block_size=AES.block_size): """ AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#bytes) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..f """ iv = os.urandom(block_size * 2) cipher = AES.new(secret[:32], AES.MODE_CFB, iv[:block_size]) return b'%s%s' % (iv, cipher.encrypt(value))
[ "def", "aes_encrypt", "(", "value", ",", "secret", ",", "block_size", "=", "AES", ".", "block_size", ")", ":", "iv", "=", "os", ".", "urandom", "(", "block_size", "*", "2", ")", "cipher", "=", "AES", ".", "new", "(", "secret", "[", ":", "32", "]", ",", "AES", ".", "MODE_CFB", ",", "iv", "[", ":", "block_size", "]", ")", "return", "b'%s%s'", "%", "(", "iv", ",", "cipher", ".", "encrypt", "(", "value", ")", ")" ]
AES encrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#bytes) AES encrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..f
[ "AES", "encrypt", "@value", "with", "@secret", "using", "the", "|CFB|", "mode", "of", "AES", "with", "a", "cryptographically", "secure", "initialization", "vector", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L79-L98
jaredLunde/vital-tools
vital/security/__init__.py
aes_decrypt
def aes_decrypt(value, secret, block_size=AES.block_size): """ AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ if value is not None: cipher = AES.new(secret[:32], AES.MODE_CFB, value[:block_size]) return cipher.decrypt(uniorbytes(value[block_size * 2:], bytes))
python
def aes_decrypt(value, secret, block_size=AES.block_size): """ AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' .. """ if value is not None: cipher = AES.new(secret[:32], AES.MODE_CFB, value[:block_size]) return cipher.decrypt(uniorbytes(value[block_size * 2:], bytes))
[ "def", "aes_decrypt", "(", "value", ",", "secret", ",", "block_size", "=", "AES", ".", "block_size", ")", ":", "if", "value", "is", "not", "None", ":", "cipher", "=", "AES", ".", "new", "(", "secret", "[", ":", "32", "]", ",", "AES", ".", "MODE_CFB", ",", "value", "[", ":", "block_size", "]", ")", "return", "cipher", ".", "decrypt", "(", "uniorbytes", "(", "value", "[", "block_size", "*", "2", ":", "]", ",", "bytes", ")", ")" ]
AES decrypt @value with @secret using the |CFB| mode of AES with a cryptographically secure initialization vector. -> (#str) AES decrypted @value .. from vital.security import aes_encrypt, aes_decrypt aes_encrypt("Hello, world", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=' aes_decrypt( "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=", "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW") # -> 'Hello, world' ..
[ "AES", "decrypt", "@value", "with", "@secret", "using", "the", "|CFB|", "mode", "of", "AES", "with", "a", "cryptographically", "secure", "initialization", "vector", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L101-L120
jaredLunde/vital-tools
vital/security/__init__.py
aes_pad
def aes_pad(s, block_size=32, padding='{'): """ Adds padding to get the correct block sizes for AES encryption @s: #str being AES encrypted or decrypted @block_size: the AES block size @padding: character to pad with -> padded #str .. from vital.security import aes_pad aes_pad("swing") # -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{' .. """ return s + (block_size - len(s) % block_size) * padding
python
def aes_pad(s, block_size=32, padding='{'): """ Adds padding to get the correct block sizes for AES encryption @s: #str being AES encrypted or decrypted @block_size: the AES block size @padding: character to pad with -> padded #str .. from vital.security import aes_pad aes_pad("swing") # -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{' .. """ return s + (block_size - len(s) % block_size) * padding
[ "def", "aes_pad", "(", "s", ",", "block_size", "=", "32", ",", "padding", "=", "'{'", ")", ":", "return", "s", "+", "(", "block_size", "-", "len", "(", "s", ")", "%", "block_size", ")", "*", "padding" ]
Adds padding to get the correct block sizes for AES encryption @s: #str being AES encrypted or decrypted @block_size: the AES block size @padding: character to pad with -> padded #str .. from vital.security import aes_pad aes_pad("swing") # -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{' ..
[ "Adds", "padding", "to", "get", "the", "correct", "block", "sizes", "for", "AES", "encryption" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L123-L138
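No unpad helper appears in this excerpt; under the assumption that plaintexts never end with the pad character, stripping the padding is a one-liner:

    padded = aes_pad('swing')             # 'swing' followed by 27 '{' characters (block_size=32)
    assert padded.rstrip('{') == 'swing'  # only safe when the original text cannot end in '{'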
jaredLunde/vital-tools
vital/security/__init__.py
lscmp
def lscmp(a, b): """ Compares two strings in a cryptographically safe way: Runtime is not affected by length of common prefix, so this is helpful against timing attacks. .. from vital.security import lscmp lscmp("ringo", "starr") # -> False lscmp("ringo", "ringo") # -> True .. """ l = len return not sum(0 if x == y else 1 for x, y in zip(a, b)) and l(a) == l(b)
python
def lscmp(a, b): """ Compares two strings in a cryptographically safe way: Runtime is not affected by length of common prefix, so this is helpful against timing attacks. .. from vital.security import lscmp lscmp("ringo", "starr") # -> False lscmp("ringo", "ringo") # -> True .. """ l = len return not sum(0 if x == y else 1 for x, y in zip(a, b)) and l(a) == l(b)
[ "def", "lscmp", "(", "a", ",", "b", ")", ":", "l", "=", "len", "return", "not", "sum", "(", "0", "if", "x", "==", "y", "else", "1", "for", "x", ",", "y", "in", "zip", "(", "a", ",", "b", ")", ")", "and", "l", "(", "a", ")", "==", "l", "(", "b", ")" ]
Compares two strings in a cryptographically safe way: Runtime is not affected by length of common prefix, so this is helpful against timing attacks. .. from vital.security import lscmp lscmp("ringo", "starr") # -> False lscmp("ringo", "ringo") # -> True ..
[ "Compares", "two", "strings", "in", "a", "cryptographically", "safe", "way", ":", "Runtime", "is", "not", "affected", "by", "length", "of", "common", "prefix", "so", "this", "is", "helpful", "against", "timing", "attacks", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L159-L173
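For reference, the Python standard library ships a constant-time comparison that serves the same purpose as lscmp (hmac.compare_digest, available since Python 2.7.7 / 3.3):

    import hmac

    def lscmp_stdlib(a, b):
        # Constant-time equality check; pass two ASCII str or two bytes objects.
        return hmac.compare_digest(a, b)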
jaredLunde/vital-tools
vital/security/__init__.py
cookie
def cookie(data, key_salt='', secret=None, digestmod=None): """ Encodes or decodes a signed cookie. @data: cookie data @key_salt: HMAC key signing salt @secret: HMAC signing secret key @digestmod: hashing algorithm to sign with, recommended >=sha256 -> HMAC signed or unsigned cookie data .. from vital.security import cookie cookie("Hello, world.", "saltyDog", secret="alBVlwe") # -> '!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i' cookie( "!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i", "saltyDog", secret="alBVlwe") # -> 'Hello, world.' .. """ digestmod = digestmod or sha256 if not data: return None try: # Decode signed cookie assert cookie_is_encoded(data) datab = uniorbytes(data, bytes) sig, msg = datab.split(uniorbytes('?', bytes), 1) key = ("{}{}").format(secret, key_salt) sig_check = hmac.new( key=uniorbytes(key, bytes), msg=msg, digestmod=digestmod).digest() sig_check = uniorbytes(b64encode(sig_check), bytes) if lscmp(sig[1:], sig_check): return json.loads(uniorbytes(b64decode(msg))) return None except: # Encode and sign a json-able object. Return a string. key = ("{}{}").format(secret, key_salt) msg = b64encode(uniorbytes(json.dumps(data), bytes)) sig = hmac.new( key=uniorbytes(key, bytes), msg=msg, digestmod=digestmod).digest() sig = uniorbytes(b64encode(sig), bytes) return uniorbytes('!'.encode() + sig + '?'.encode() + msg)
python
def cookie(data, key_salt='', secret=None, digestmod=None): """ Encodes or decodes a signed cookie. @data: cookie data @key_salt: HMAC key signing salt @secret: HMAC signing secret key @digestmod: hashing algorithm to sign with, recommended >=sha256 -> HMAC signed or unsigned cookie data .. from vital.security import cookie cookie("Hello, world.", "saltyDog", secret="alBVlwe") # -> '!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i' cookie( "!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i", "saltyDog", secret="alBVlwe") # -> 'Hello, world.' .. """ digestmod = digestmod or sha256 if not data: return None try: # Decode signed cookie assert cookie_is_encoded(data) datab = uniorbytes(data, bytes) sig, msg = datab.split(uniorbytes('?', bytes), 1) key = ("{}{}").format(secret, key_salt) sig_check = hmac.new( key=uniorbytes(key, bytes), msg=msg, digestmod=digestmod).digest() sig_check = uniorbytes(b64encode(sig_check), bytes) if lscmp(sig[1:], sig_check): return json.loads(uniorbytes(b64decode(msg))) return None except: # Encode and sign a json-able object. Return a string. key = ("{}{}").format(secret, key_salt) msg = b64encode(uniorbytes(json.dumps(data), bytes)) sig = hmac.new( key=uniorbytes(key, bytes), msg=msg, digestmod=digestmod).digest() sig = uniorbytes(b64encode(sig), bytes) return uniorbytes('!'.encode() + sig + '?'.encode() + msg)
[ "def", "cookie", "(", "data", ",", "key_salt", "=", "''", ",", "secret", "=", "None", ",", "digestmod", "=", "None", ")", ":", "digestmod", "=", "digestmod", "or", "sha256", "if", "not", "data", ":", "return", "None", "try", ":", "# Decode signed cookie", "assert", "cookie_is_encoded", "(", "data", ")", "datab", "=", "uniorbytes", "(", "data", ",", "bytes", ")", "sig", ",", "msg", "=", "datab", ".", "split", "(", "uniorbytes", "(", "'?'", ",", "bytes", ")", ",", "1", ")", "key", "=", "(", "\"{}{}\"", ")", ".", "format", "(", "secret", ",", "key_salt", ")", "sig_check", "=", "hmac", ".", "new", "(", "key", "=", "uniorbytes", "(", "key", ",", "bytes", ")", ",", "msg", "=", "msg", ",", "digestmod", "=", "digestmod", ")", ".", "digest", "(", ")", "sig_check", "=", "uniorbytes", "(", "b64encode", "(", "sig_check", ")", ",", "bytes", ")", "if", "lscmp", "(", "sig", "[", "1", ":", "]", ",", "sig_check", ")", ":", "return", "json", ".", "loads", "(", "uniorbytes", "(", "b64decode", "(", "msg", ")", ")", ")", "return", "None", "except", ":", "# Encode and sign a json-able object. Return a string.", "key", "=", "(", "\"{}{}\"", ")", ".", "format", "(", "secret", ",", "key_salt", ")", "msg", "=", "b64encode", "(", "uniorbytes", "(", "json", ".", "dumps", "(", "data", ")", ",", "bytes", ")", ")", "sig", "=", "hmac", ".", "new", "(", "key", "=", "uniorbytes", "(", "key", ",", "bytes", ")", ",", "msg", "=", "msg", ",", "digestmod", "=", "digestmod", ")", ".", "digest", "(", ")", "sig", "=", "uniorbytes", "(", "b64encode", "(", "sig", ")", ",", "bytes", ")", "return", "uniorbytes", "(", "'!'", ".", "encode", "(", ")", "+", "sig", "+", "'?'", ".", "encode", "(", ")", "+", "msg", ")" ]
Encodes or decodes a signed cookie. @data: cookie data @key_salt: HMAC key signing salt @secret: HMAC signing secret key @digestmod: hashing algorithm to sign with, recommended >=sha256 -> HMAC signed or unsigned cookie data .. from vital.security import cookie cookie("Hello, world.", "saltyDog", secret="alBVlwe") # -> '!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i' cookie( "!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i", "saltyDog", secret="alBVlwe") # -> 'Hello, world.' ..
[ "Encodes", "or", "decodes", "a", "signed", "cookie", ".", "@data", ":", "cookie", "data", "@key_salt", ":", "HMAC", "key", "signing", "salt", "@secret", ":", "HMAC", "signing", "secret", "key", "@digestmod", ":", "hashing", "algorithm", "to", "sign", "with", "recommended", ">", "=", "sha256" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L179-L222
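cookie() accepts any JSON-serialisable object, not just strings; a round-trip sketch reusing the salt and secret from the docstring example above:

    token = cookie({'uid': 42}, 'saltyDog', secret='alBVlwe')
    # token has the shape '!<base64 HMAC>?<base64 JSON payload>'
    assert cookie(token, 'saltyDog', secret='alBVlwe') == {'uid': 42}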
jaredLunde/vital-tools
vital/security/__init__.py
strkey
def strkey(val, chaffify=1, keyspace=string.ascii_letters + string.digits): """ Converts integers to a sequence of strings, and reverse. This is not intended to obfuscate numbers in any kind of cryptographically secure way, in fact it's the opposite. It's for predictable, reversable, obfuscation. It can also be used to transform a random bit integer to a string of the same bit length. @val: #int or #str @chaffify: #int multiple to avoid 0=a, 1=b, 2=c, ... obfuscates the ordering @keyspace: #str allowed output chars -> #str if @val is #int, #int if @val is #str .. from vital.security import strkey strkey(0, chaffify=1) # -> b strkey(0, chaffify=4) # -> e strkey(90000000000050500502200302035023) # -> 'f3yMpJQUazIZHp1UO7k' strkey('f3yMpJQUazIZHp1UO7k') # -> 90000000000050500502200302035023 strkey(2000000, chaffify=200000000000) # -> 'DIaqtyo2sC' .. """ chaffify = chaffify or 1 keylen = len(keyspace) try: # INT TO STRING if val < 0: raise ValueError("Input value must be greater than -1.") # chaffify the value val = val * chaffify if val == 0: return keyspace[0] # output the new string value out = [] out_add = out.append while val > 0: val, digit = divmod(val, keylen) out_add(keyspace[digit]) return "".join(out)[::-1] except TypeError: # STRING TO INT out = 0 val = str(val) find = str.find for c in val: out = out * keylen + find(keyspace, c) # dechaffify the value out = out // chaffify return int(out)
python
def strkey(val, chaffify=1, keyspace=string.ascii_letters + string.digits): """ Converts integers to a sequence of strings, and reverse. This is not intended to obfuscate numbers in any kind of cryptographically secure way, in fact it's the opposite. It's for predictable, reversable, obfuscation. It can also be used to transform a random bit integer to a string of the same bit length. @val: #int or #str @chaffify: #int multiple to avoid 0=a, 1=b, 2=c, ... obfuscates the ordering @keyspace: #str allowed output chars -> #str if @val is #int, #int if @val is #str .. from vital.security import strkey strkey(0, chaffify=1) # -> b strkey(0, chaffify=4) # -> e strkey(90000000000050500502200302035023) # -> 'f3yMpJQUazIZHp1UO7k' strkey('f3yMpJQUazIZHp1UO7k') # -> 90000000000050500502200302035023 strkey(2000000, chaffify=200000000000) # -> 'DIaqtyo2sC' .. """ chaffify = chaffify or 1 keylen = len(keyspace) try: # INT TO STRING if val < 0: raise ValueError("Input value must be greater than -1.") # chaffify the value val = val * chaffify if val == 0: return keyspace[0] # output the new string value out = [] out_add = out.append while val > 0: val, digit = divmod(val, keylen) out_add(keyspace[digit]) return "".join(out)[::-1] except TypeError: # STRING TO INT out = 0 val = str(val) find = str.find for c in val: out = out * keylen + find(keyspace, c) # dechaffify the value out = out // chaffify return int(out)
[ "def", "strkey", "(", "val", ",", "chaffify", "=", "1", ",", "keyspace", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", ":", "chaffify", "=", "chaffify", "or", "1", "keylen", "=", "len", "(", "keyspace", ")", "try", ":", "# INT TO STRING", "if", "val", "<", "0", ":", "raise", "ValueError", "(", "\"Input value must be greater than -1.\"", ")", "# chaffify the value", "val", "=", "val", "*", "chaffify", "if", "val", "==", "0", ":", "return", "keyspace", "[", "0", "]", "# output the new string value", "out", "=", "[", "]", "out_add", "=", "out", ".", "append", "while", "val", ">", "0", ":", "val", ",", "digit", "=", "divmod", "(", "val", ",", "keylen", ")", "out_add", "(", "keyspace", "[", "digit", "]", ")", "return", "\"\"", ".", "join", "(", "out", ")", "[", ":", ":", "-", "1", "]", "except", "TypeError", ":", "# STRING TO INT", "out", "=", "0", "val", "=", "str", "(", "val", ")", "find", "=", "str", ".", "find", "for", "c", "in", "val", ":", "out", "=", "out", "*", "keylen", "+", "find", "(", "keyspace", ",", "c", ")", "# dechaffify the value", "out", "=", "out", "//", "chaffify", "return", "int", "(", "out", ")" ]
Converts integers to a sequence of strings, and reverse. This is not intended to obfuscate numbers in any kind of cryptographically secure way, in fact it's the opposite. It's for predictable, reversable, obfuscation. It can also be used to transform a random bit integer to a string of the same bit length. @val: #int or #str @chaffify: #int multiple to avoid 0=a, 1=b, 2=c, ... obfuscates the ordering @keyspace: #str allowed output chars -> #str if @val is #int, #int if @val is #str .. from vital.security import strkey strkey(0, chaffify=1) # -> b strkey(0, chaffify=4) # -> e strkey(90000000000050500502200302035023) # -> 'f3yMpJQUazIZHp1UO7k' strkey('f3yMpJQUazIZHp1UO7k') # -> 90000000000050500502200302035023 strkey(2000000, chaffify=200000000000) # -> 'DIaqtyo2sC' ..
[ "Converts", "integers", "to", "a", "sequence", "of", "strings", "and", "reverse", ".", "This", "is", "not", "intended", "to", "obfuscate", "numbers", "in", "any", "kind", "of", "cryptographically", "secure", "way", "in", "fact", "it", "s", "the", "opposite", ".", "It", "s", "for", "predictable", "reversable", "obfuscation", ".", "It", "can", "also", "be", "used", "to", "transform", "a", "random", "bit", "integer", "to", "a", "string", "of", "the", "same", "bit", "length", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L241-L302
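With the default 62-character keyspace and chaffify=1, strkey reduces to plain base-62 conversion; a worked example:

    # 125 = 2*62 + 1  ->  digits [2, 1]  ->  keyspace[2], keyspace[1]  ->  'cb'
    assert strkey(125, chaffify=1) == 'cb'
    assert strkey('cb', chaffify=1) == 125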
jaredLunde/vital-tools
vital/security/__init__.py
chars_in
def chars_in(bits, keyspace): """ .. log2(keyspace^x_chars) = bits log(keyspace^x_chars) = log(2) * bits exp(log(keyspace^x_chars)) = exp(log(2) * bits) x_chars = log(exp(log(2) * bits)) / log(keyspace) .. -> (#int) number of characters in @bits of entropy given the @keyspace """ keyspace = len(keyspace) if keyspace < 2: raise ValueError("Keyspace size must be >1") bits_per_cycle = 512 if bits > bits_per_cycle: chars = 0 bits_processed = 0 cycles = ceil(bits / bits_per_cycle) for _ in range(int(cycles)): if bits_processed + bits_per_cycle > bits: bits_per_cycle = bits - bits_processed chars += calc_chars_in(bits_per_cycle, keyspace) bits_processed += bits_per_cycle else: chars = calc_chars_in(bits, keyspace) return abs(chars)
python
def chars_in(bits, keyspace): """ .. log2(keyspace^x_chars) = bits log(keyspace^x_chars) = log(2) * bits exp(log(keyspace^x_chars)) = exp(log(2) * bits) x_chars = log(exp(log(2) * bits)) / log(keyspace) .. -> (#int) number of characters in @bits of entropy given the @keyspace """ keyspace = len(keyspace) if keyspace < 2: raise ValueError("Keyspace size must be >1") bits_per_cycle = 512 if bits > bits_per_cycle: chars = 0 bits_processed = 0 cycles = ceil(bits / bits_per_cycle) for _ in range(int(cycles)): if bits_processed + bits_per_cycle > bits: bits_per_cycle = bits - bits_processed chars += calc_chars_in(bits_per_cycle, keyspace) bits_processed += bits_per_cycle else: chars = calc_chars_in(bits, keyspace) return abs(chars)
[ "def", "chars_in", "(", "bits", ",", "keyspace", ")", ":", "keyspace", "=", "len", "(", "keyspace", ")", "if", "keyspace", "<", "2", ":", "raise", "ValueError", "(", "\"Keyspace size must be >1\"", ")", "bits_per_cycle", "=", "512", "if", "bits", ">", "bits_per_cycle", ":", "chars", "=", "0", "bits_processed", "=", "0", "cycles", "=", "ceil", "(", "bits", "/", "bits_per_cycle", ")", "for", "_", "in", "range", "(", "int", "(", "cycles", ")", ")", ":", "if", "bits_processed", "+", "bits_per_cycle", ">", "bits", ":", "bits_per_cycle", "=", "bits", "-", "bits_processed", "chars", "+=", "calc_chars_in", "(", "bits_per_cycle", ",", "keyspace", ")", "bits_processed", "+=", "bits_per_cycle", "else", ":", "chars", "=", "calc_chars_in", "(", "bits", ",", "keyspace", ")", "return", "abs", "(", "chars", ")" ]
.. log2(keyspace^x_chars) = bits log(keyspace^x_chars) = log(2) * bits exp(log(keyspace^x_chars)) = exp(log(2) * bits) x_chars = log(exp(log(2) * bits)) / log(keyspace) .. -> (#int) number of characters in @bits of entropy given the @keyspace
[ "..", "log2", "(", "keyspace^x_chars", ")", "=", "bits", "log", "(", "keyspace^x_chars", ")", "=", "log", "(", "2", ")", "*", "bits", "exp", "(", "log", "(", "keyspace^x_chars", "))", "=", "exp", "(", "log", "(", "2", ")", "*", "bits", ")", "x_chars", "=", "log", "(", "exp", "(", "log", "(", "2", ")", "*", "bits", "))", "/", "log", "(", "keyspace", ")", "..", "-", ">", "(", "#int", ")", "number", "of", "characters", "in" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L327-L351
jaredLunde/vital-tools
vital/security/__init__.py
bits_in
def bits_in(length, keyspace): """ |log2(keyspace^length) = bits| -> (#float) number of bits of entropy in @length of characters for a given a @keyspace """ keyspace = len(keyspace) length_per_cycle = 64 if length > length_per_cycle: bits = 0 length_processed = 0 cycles = ceil(length / length_per_cycle) for _ in range(int(cycles)): if length_processed + length_per_cycle > length: length_per_cycle = length - length_processed bits += calc_bits_in(length_per_cycle, keyspace) length_processed += length_per_cycle else: bits = calc_bits_in(length, keyspace) return float(abs(bits))
python
def bits_in(length, keyspace): """ |log2(keyspace^length) = bits| -> (#float) number of bits of entropy in @length of characters for a given a @keyspace """ keyspace = len(keyspace) length_per_cycle = 64 if length > length_per_cycle: bits = 0 length_processed = 0 cycles = ceil(length / length_per_cycle) for _ in range(int(cycles)): if length_processed + length_per_cycle > length: length_per_cycle = length - length_processed bits += calc_bits_in(length_per_cycle, keyspace) length_processed += length_per_cycle else: bits = calc_bits_in(length, keyspace) return float(abs(bits))
[ "def", "bits_in", "(", "length", ",", "keyspace", ")", ":", "keyspace", "=", "len", "(", "keyspace", ")", "length_per_cycle", "=", "64", "if", "length", ">", "length_per_cycle", ":", "bits", "=", "0", "length_processed", "=", "0", "cycles", "=", "ceil", "(", "length", "/", "length_per_cycle", ")", "for", "_", "in", "range", "(", "int", "(", "cycles", ")", ")", ":", "if", "length_processed", "+", "length_per_cycle", ">", "length", ":", "length_per_cycle", "=", "length", "-", "length_processed", "bits", "+=", "calc_bits_in", "(", "length_per_cycle", ",", "keyspace", ")", "length_processed", "+=", "length_per_cycle", "else", ":", "bits", "=", "calc_bits_in", "(", "length", ",", "keyspace", ")", "return", "float", "(", "abs", "(", "bits", ")", ")" ]
|log2(keyspace^length) = bits| -> (#float) number of bits of entropy in @length of characters for a given @keyspace
[ "|log2", "(", "keyspace^length", ")", "=", "bits|", "-", ">", "(", "#float", ")", "number", "of", "bits", "of", "entropy", "in" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L360-L378
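The relationships stated in the chars_in and bits_in docstrings above reduce to chars = bits / log2(len(keyspace)) and bits = length * log2(len(keyspace)); a quick numeric check for a 62-symbol keyspace:

    from math import log2

    print(128 / log2(62))   # ~21.5 characters are needed for 128 bits of entropy
    print(22 * log2(62))    # ~131 bits of entropy in a 22-character key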
jaredLunde/vital-tools
vital/security/__init__.py
iter_random_chars
def iter_random_chars(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None): """ Yields a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars .. from vital.security import iter_rand for char in iter_rand(512): do_something_with(char) """ if bits < 8: raise ValueError('Bits cannot be <8') else: chars = chars_in(bits, keyspace) rng = rng or random.SystemRandom() for char in range(int(ceil(chars))): yield rng.choice(keyspace)
python
def iter_random_chars(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None): """ Yields a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars .. from vital.security import iter_rand for char in iter_rand(512): do_something_with(char) """ if bits < 8: raise ValueError('Bits cannot be <8') else: chars = chars_in(bits, keyspace) rng = rng or random.SystemRandom() for char in range(int(ceil(chars))): yield rng.choice(keyspace)
[ "def", "iter_random_chars", "(", "bits", ",", "keyspace", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "+", "'#/.'", ",", "rng", "=", "None", ")", ":", "if", "bits", "<", "8", ":", "raise", "ValueError", "(", "'Bits cannot be <8'", ")", "else", ":", "chars", "=", "chars_in", "(", "bits", ",", "keyspace", ")", "rng", "=", "rng", "or", "random", ".", "SystemRandom", "(", ")", "for", "char", "in", "range", "(", "int", "(", "ceil", "(", "chars", ")", ")", ")", ":", "yield", "rng", ".", "choice", "(", "keyspace", ")" ]
Yields a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars .. from vital.security import iter_rand for char in iter_rand(512): do_something_with(char)
[ "Yields", "a", "cryptographically", "secure", "random", "key", "of", "desired", "@bits", "of", "entropy", "within", "@keyspace", "using", ":", "class", ":", "random", ".", "SystemRandom" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L381-L402
jaredLunde/vital-tools
vital/security/__init__.py
randkey
def randkey(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None): """ Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> (#str) random key .. from vital.security import randkey randkey(24) # -> '9qaX' randkey(48) # -> 'iPJ5YWs9' randkey(64) # - > 'C..VJ.KLdxg' randkey(64, keyspace="abc", rng=random) # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab' .. """ return "".join(char for char in iter_random_chars(bits, keyspace, rng))
python
def randkey(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None): """ Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> (#str) random key .. from vital.security import randkey randkey(24) # -> '9qaX' randkey(48) # -> 'iPJ5YWs9' randkey(64) # - > 'C..VJ.KLdxg' randkey(64, keyspace="abc", rng=random) # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab' .. """ return "".join(char for char in iter_random_chars(bits, keyspace, rng))
[ "def", "randkey", "(", "bits", ",", "keyspace", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "+", "'#/.'", ",", "rng", "=", "None", ")", ":", "return", "\"\"", ".", "join", "(", "char", "for", "char", "in", "iter_random_chars", "(", "bits", ",", "keyspace", ",", "rng", ")", ")" ]
Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> (#str) random key .. from vital.security import randkey randkey(24) # -> '9qaX' randkey(48) # -> 'iPJ5YWs9' randkey(64) # - > 'C..VJ.KLdxg' randkey(64, keyspace="abc", rng=random) # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab' ..
[ "Returns", "a", "cryptographically", "secure", "random", "key", "of", "desired", "@bits", "of", "entropy", "within", "@keyspace", "using", ":", "class", ":", "random", ".", "SystemRandom" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L405-L430
jaredLunde/vital-tools
vital/security/__init__.py
randstr
def randstr(size, keyspace=string.ascii_letters + string.digits, rng=None): """ Returns a cryptographically secure random string of desired @size (in character length) within @keyspace using :class:random.SystemRandom @size: (#int) number of random characters to generate @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> #str random key .. from vital.security import randkey randstr(4) # -> '9qaX' .. """ rng = rng or random.SystemRandom() return "".join(rng.choice(keyspace) for char in range(int(ceil(size))))
python
def randstr(size, keyspace=string.ascii_letters + string.digits, rng=None): """ Returns a cryptographically secure random string of desired @size (in character length) within @keyspace using :class:random.SystemRandom @size: (#int) number of random characters to generate @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> #str random key .. from vital.security import randkey randstr(4) # -> '9qaX' .. """ rng = rng or random.SystemRandom() return "".join(rng.choice(keyspace) for char in range(int(ceil(size))))
[ "def", "randstr", "(", "size", ",", "keyspace", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", ",", "rng", "=", "None", ")", ":", "rng", "=", "rng", "or", "random", ".", "SystemRandom", "(", ")", "return", "\"\"", ".", "join", "(", "rng", ".", "choice", "(", "keyspace", ")", "for", "char", "in", "range", "(", "int", "(", "ceil", "(", "size", ")", ")", ")", ")" ]
Returns a cryptographically secure random string of desired @size (in character length) within @keyspace using :class:random.SystemRandom @size: (#int) number of random characters to generate @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> #str random key .. from vital.security import randkey randstr(4) # -> '9qaX' ..
[ "Returns", "a", "cryptographically", "secure", "random", "string", "of", "desired", "@size", "(", "in", "character", "length", ")", "within", "@keyspace", "using", ":", "class", ":", "random", ".", "SystemRandom" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/security/__init__.py#L433-L452
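The difference between the two generators above is what their first argument measures; a short illustration with the default keyspaces:

    key = randkey(128)   # sized by entropy: enough characters for at least 128 bits
    s = randstr(16)      # sized by length: exactly 16 characters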
jaredLunde/vital-tools
setup.py
parse_requirements
def parse_requirements(filename): """ load requirements from a pip requirements file """ lineiter = (line.strip() for line in open(filename)) return (line for line in lineiter if line and not line.startswith("#"))
python
def parse_requirements(filename): """ load requirements from a pip requirements file """ lineiter = (line.strip() for line in open(filename)) return (line for line in lineiter if line and not line.startswith("#"))
[ "def", "parse_requirements", "(", "filename", ")", ":", "lineiter", "=", "(", "line", ".", "strip", "(", ")", "for", "line", "in", "open", "(", "filename", ")", ")", "return", "(", "line", "for", "line", "in", "lineiter", "if", "line", "and", "not", "line", ".", "startswith", "(", "\"#\"", ")", ")" ]
load requirements from a pip requirements file
[ "load", "requirements", "from", "a", "pip", "requirements", "file" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/setup.py#L24-L27
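Typical setup.py usage for the helper above, assuming the project keeps a conventional requirements.txt:

    install_requires = list(parse_requirements('requirements.txt'))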
Vital-Fernandez/dazer
bin/lib/Plotting_Libraries/sigfig.py
round_sig
def round_sig(x, n, scien_notation = False): if x < 0: x = x * -1 symbol = '-' else: symbol = '' '''round floating point x to n significant figures''' if type(n) is not types.IntType: raise TypeError, "n must be an integer" try: x = float(x) except: raise TypeError, "x must be a floating point object" form = "%0." + str(n-1) + "e" st = form % x num,expo = epat.findall(st)[0] expo = int(expo) fs = string.split(num,'.') if len(fs) < 2: fs = [fs[0],""] if expo == 0: # print 'One', num return symbol + num elif expo > 0: if len(fs[1]) < expo: fs[1] += "0"*(expo-len(fs[1])) st = fs[0]+fs[1][0:expo] if len(fs[1][expo:]) > 0: st += '.'+fs[1][expo:] # print 'Two', st return symbol + st else: expo = -expo if fs[0][0] == '-': fs[0] = fs[0][1:] sign = "-" else: sign = "" if scien_notation: coso = float(sign+"0."+"0"*(expo-1)+fs[0]+fs[1]) scient_format = '{:.' + str(n) + 'e}' StrScientific = scient_format.format(coso) # print 'three', StrScientific return symbol + StrScientific else: formated = sign+"0."+"0"*(expo-1)+fs[0]+fs[1] if len(formated) > (n+5): formated = '0.' + '0' * n # print 'cuantro', formated return symbol + formated
python
def round_sig(x, n, scien_notation = False): if x < 0: x = x * -1 symbol = '-' else: symbol = '' '''round floating point x to n significant figures''' if type(n) is not types.IntType: raise TypeError, "n must be an integer" try: x = float(x) except: raise TypeError, "x must be a floating point object" form = "%0." + str(n-1) + "e" st = form % x num,expo = epat.findall(st)[0] expo = int(expo) fs = string.split(num,'.') if len(fs) < 2: fs = [fs[0],""] if expo == 0: # print 'One', num return symbol + num elif expo > 0: if len(fs[1]) < expo: fs[1] += "0"*(expo-len(fs[1])) st = fs[0]+fs[1][0:expo] if len(fs[1][expo:]) > 0: st += '.'+fs[1][expo:] # print 'Two', st return symbol + st else: expo = -expo if fs[0][0] == '-': fs[0] = fs[0][1:] sign = "-" else: sign = "" if scien_notation: coso = float(sign+"0."+"0"*(expo-1)+fs[0]+fs[1]) scient_format = '{:.' + str(n) + 'e}' StrScientific = scient_format.format(coso) # print 'three', StrScientific return symbol + StrScientific else: formated = sign+"0."+"0"*(expo-1)+fs[0]+fs[1] if len(formated) > (n+5): formated = '0.' + '0' * n # print 'cuantro', formated return symbol + formated
[ "def", "round_sig", "(", "x", ",", "n", ",", "scien_notation", "=", "False", ")", ":", "if", "x", "<", "0", ":", "x", "=", "x", "*", "-", "1", "symbol", "=", "'-'", "else", ":", "symbol", "=", "''", "if", "type", "(", "n", ")", "is", "not", "types", ".", "IntType", ":", "raise", "TypeError", ",", "\"n must be an integer\"", "try", ":", "x", "=", "float", "(", "x", ")", "except", ":", "raise", "TypeError", ",", "\"x must be a floating point object\"", "form", "=", "\"%0.\"", "+", "str", "(", "n", "-", "1", ")", "+", "\"e\"", "st", "=", "form", "%", "x", "num", ",", "expo", "=", "epat", ".", "findall", "(", "st", ")", "[", "0", "]", "expo", "=", "int", "(", "expo", ")", "fs", "=", "string", ".", "split", "(", "num", ",", "'.'", ")", "if", "len", "(", "fs", ")", "<", "2", ":", "fs", "=", "[", "fs", "[", "0", "]", ",", "\"\"", "]", "if", "expo", "==", "0", ":", "# print 'One', num", "return", "symbol", "+", "num", "elif", "expo", ">", "0", ":", "if", "len", "(", "fs", "[", "1", "]", ")", "<", "expo", ":", "fs", "[", "1", "]", "+=", "\"0\"", "*", "(", "expo", "-", "len", "(", "fs", "[", "1", "]", ")", ")", "st", "=", "fs", "[", "0", "]", "+", "fs", "[", "1", "]", "[", "0", ":", "expo", "]", "if", "len", "(", "fs", "[", "1", "]", "[", "expo", ":", "]", ")", ">", "0", ":", "st", "+=", "'.'", "+", "fs", "[", "1", "]", "[", "expo", ":", "]", "# print 'Two', st", "return", "symbol", "+", "st", "else", ":", "expo", "=", "-", "expo", "if", "fs", "[", "0", "]", "[", "0", "]", "==", "'-'", ":", "fs", "[", "0", "]", "=", "fs", "[", "0", "]", "[", "1", ":", "]", "sign", "=", "\"-\"", "else", ":", "sign", "=", "\"\"", "if", "scien_notation", ":", "coso", "=", "float", "(", "sign", "+", "\"0.\"", "+", "\"0\"", "*", "(", "expo", "-", "1", ")", "+", "fs", "[", "0", "]", "+", "fs", "[", "1", "]", ")", "scient_format", "=", "'{:.'", "+", "str", "(", "n", ")", "+", "'e}'", "StrScientific", "=", "scient_format", ".", "format", "(", "coso", ")", "# print 'three', StrScientific", "return", "symbol", "+", "StrScientific", "else", ":", "formated", "=", "sign", "+", "\"0.\"", "+", "\"0\"", "*", "(", "expo", "-", "1", ")", "+", "fs", "[", "0", "]", "+", "fs", "[", "1", "]", "if", "len", "(", "formated", ")", ">", "(", "n", "+", "5", ")", ":", "formated", "=", "'0.'", "+", "'0'", "*", "n", "# print 'cuantro', formated", "return", "symbol", "+", "formated" ]
round floating point x to n significant figures
[ "round", "floating", "point", "x", "to", "n", "significant", "figures" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Plotting_Libraries/sigfig.py#L7-L60
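round_sig (and the rest of sigfig.py) is Python 2 code: it relies on types.IntType, string.split and the raise-with-comma statement. For plain significant-figure rounding without the zero-padding special cases, a Python 3 sketch can be much shorter, though it is not a drop-in replacement:

    def round_sig3(x, n):
        # n significant figures via the general format spec; unlike round_sig above,
        # this switches to scientific notation for very large or very small values.
        return f'{x:.{n}g}'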
Vital-Fernandez/dazer
bin/lib/Plotting_Libraries/sigfig.py
round_sig_error
def round_sig_error(x, ex, n, paren=False): '''Find ex rounded to n sig-figs and make the floating point x match the number of decimals. If [paren], the string is returned as quantity(error) format''' stex = round_sig(ex,n) if stex.find('.') < 0: extra_zeros = len(stex) - n sigfigs = len(str(int(x))) - extra_zeros stx = round_sig(x,sigfigs) else: num_after_dec = len(string.split(stex,'.')[1]) stx = ("%%.%df" % num_after_dec) % (x) if paren: if stex.find('.') >= 0: stex = stex[stex.find('.')+1:] return "%s(%s)" % (stx,stex) return stx,stex
python
def round_sig_error(x, ex, n, paren=False): '''Find ex rounded to n sig-figs and make the floating point x match the number of decimals. If [paren], the string is returned as quantity(error) format''' stex = round_sig(ex,n) if stex.find('.') < 0: extra_zeros = len(stex) - n sigfigs = len(str(int(x))) - extra_zeros stx = round_sig(x,sigfigs) else: num_after_dec = len(string.split(stex,'.')[1]) stx = ("%%.%df" % num_after_dec) % (x) if paren: if stex.find('.') >= 0: stex = stex[stex.find('.')+1:] return "%s(%s)" % (stx,stex) return stx,stex
[ "def", "round_sig_error", "(", "x", ",", "ex", ",", "n", ",", "paren", "=", "False", ")", ":", "stex", "=", "round_sig", "(", "ex", ",", "n", ")", "if", "stex", ".", "find", "(", "'.'", ")", "<", "0", ":", "extra_zeros", "=", "len", "(", "stex", ")", "-", "n", "sigfigs", "=", "len", "(", "str", "(", "int", "(", "x", ")", ")", ")", "-", "extra_zeros", "stx", "=", "round_sig", "(", "x", ",", "sigfigs", ")", "else", ":", "num_after_dec", "=", "len", "(", "string", ".", "split", "(", "stex", ",", "'.'", ")", "[", "1", "]", ")", "stx", "=", "(", "\"%%.%df\"", "%", "num_after_dec", ")", "%", "(", "x", ")", "if", "paren", ":", "if", "stex", ".", "find", "(", "'.'", ")", ">=", "0", ":", "stex", "=", "stex", "[", "stex", ".", "find", "(", "'.'", ")", "+", "1", ":", "]", "return", "\"%s(%s)\"", "%", "(", "stx", ",", "stex", ")", "return", "stx", ",", "stex" ]
Find ex rounded to n sig-figs and make the floating point x match the number of decimals. If [paren], the string is returned as quantity(error) format
[ "Find", "ex", "rounded", "to", "n", "sig", "-", "figs", "and", "make", "the", "floating", "point", "x", "match", "the", "number", "of", "decimals", ".", "If", "[", "paren", "]", "the", "string", "is", "returned", "as", "quantity", "(", "error", ")", "format" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Plotting_Libraries/sigfig.py#L62-L78
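A worked example of the decimal alignment round_sig_error describes, run under the Python 2 environment the module targets, should give:

    round_sig_error(3.14159, 0.01234, 2)              # -> ('3.142', '0.012')
    round_sig_error(3.14159, 0.01234, 2, paren=True)  # -> '3.142(012)'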
Vital-Fernandez/dazer
bin/lib/Plotting_Libraries/sigfig.py
format_table
def format_table(cols, errors, n, labels=None, headers=None, latex=False): '''Format a table such that the errors have n significant figures. [cols] and [errors] should be a list of 1D arrays that correspond to data and errors in columns. [n] is the number of significant figures to keep in the errors. [labels] is an optional column of strings that will be in the first column. [headers] is an optional list of column headers. If [latex] is true, myformat the table so that it can be included in a LaTeX table ''' if len(cols) != len(errors): raise ValueError, "Error: cols and errors must have same length" ncols = len(cols) nrows = len(cols[0]) if headers is not None: if labels is not None: if len(headers) == ncols: headers = [""] + headers elif len(headers) == ncols+1: pass else: raise ValueError, "length of headers should be %d" % (ncols+1) else: if len(headers) != ncols: raise ValueError, "length of headers should be %d" % (ncols) if labels is not None: if len(labels) != nrows: raise ValueError, "length of labels should be %d" % (nrows) strcols = [] for col,error in zip(cols,errors): strcols.append([]) strcols.append([]) for i in range(nrows): val,err = round_sig_error(col[i], error[i], n) strcols[-2].append(val) strcols[-1].append(err) lengths = [max([len(item) for item in strcol]) for strcol in strcols] myformat = "" if labels is not None: myformat += "%%%ds " % (max(map(len, labels))) if latex: myformat += "& " for length in lengths: myformat += "%%%ds " % (length) if latex: myformat += "& " if latex: myformat = myformat[:-2] + " \\\\" output = [] if headers: if labels: hs = [headers[0]] for head in headers[1:]: hs.append(head) hs.append('+/-') else: hs = [] for head in headers: hs.append(head) hs.append('+/-') output.append(myformat % tuple(hs)) for i in range(nrows): if labels is not None: output.append(myformat % tuple([labels[i]] + [strcol[i] for strcol in strcols])) else: output.append(myformat % tuple([strcol[i] for strcol in strcols])) return output
python
def format_table(cols, errors, n, labels=None, headers=None, latex=False): '''Format a table such that the errors have n significant figures. [cols] and [errors] should be a list of 1D arrays that correspond to data and errors in columns. [n] is the number of significant figures to keep in the errors. [labels] is an optional column of strings that will be in the first column. [headers] is an optional list of column headers. If [latex] is true, myformat the table so that it can be included in a LaTeX table ''' if len(cols) != len(errors): raise ValueError, "Error: cols and errors must have same length" ncols = len(cols) nrows = len(cols[0]) if headers is not None: if labels is not None: if len(headers) == ncols: headers = [""] + headers elif len(headers) == ncols+1: pass else: raise ValueError, "length of headers should be %d" % (ncols+1) else: if len(headers) != ncols: raise ValueError, "length of headers should be %d" % (ncols) if labels is not None: if len(labels) != nrows: raise ValueError, "length of labels should be %d" % (nrows) strcols = [] for col,error in zip(cols,errors): strcols.append([]) strcols.append([]) for i in range(nrows): val,err = round_sig_error(col[i], error[i], n) strcols[-2].append(val) strcols[-1].append(err) lengths = [max([len(item) for item in strcol]) for strcol in strcols] myformat = "" if labels is not None: myformat += "%%%ds " % (max(map(len, labels))) if latex: myformat += "& " for length in lengths: myformat += "%%%ds " % (length) if latex: myformat += "& " if latex: myformat = myformat[:-2] + " \\\\" output = [] if headers: if labels: hs = [headers[0]] for head in headers[1:]: hs.append(head) hs.append('+/-') else: hs = [] for head in headers: hs.append(head) hs.append('+/-') output.append(myformat % tuple(hs)) for i in range(nrows): if labels is not None: output.append(myformat % tuple([labels[i]] + [strcol[i] for strcol in strcols])) else: output.append(myformat % tuple([strcol[i] for strcol in strcols])) return output
[ "def", "format_table", "(", "cols", ",", "errors", ",", "n", ",", "labels", "=", "None", ",", "headers", "=", "None", ",", "latex", "=", "False", ")", ":", "if", "len", "(", "cols", ")", "!=", "len", "(", "errors", ")", ":", "raise", "ValueError", ",", "\"Error: cols and errors must have same length\"", "ncols", "=", "len", "(", "cols", ")", "nrows", "=", "len", "(", "cols", "[", "0", "]", ")", "if", "headers", "is", "not", "None", ":", "if", "labels", "is", "not", "None", ":", "if", "len", "(", "headers", ")", "==", "ncols", ":", "headers", "=", "[", "\"\"", "]", "+", "headers", "elif", "len", "(", "headers", ")", "==", "ncols", "+", "1", ":", "pass", "else", ":", "raise", "ValueError", ",", "\"length of headers should be %d\"", "%", "(", "ncols", "+", "1", ")", "else", ":", "if", "len", "(", "headers", ")", "!=", "ncols", ":", "raise", "ValueError", ",", "\"length of headers should be %d\"", "%", "(", "ncols", ")", "if", "labels", "is", "not", "None", ":", "if", "len", "(", "labels", ")", "!=", "nrows", ":", "raise", "ValueError", ",", "\"length of labels should be %d\"", "%", "(", "nrows", ")", "strcols", "=", "[", "]", "for", "col", ",", "error", "in", "zip", "(", "cols", ",", "errors", ")", ":", "strcols", ".", "append", "(", "[", "]", ")", "strcols", ".", "append", "(", "[", "]", ")", "for", "i", "in", "range", "(", "nrows", ")", ":", "val", ",", "err", "=", "round_sig_error", "(", "col", "[", "i", "]", ",", "error", "[", "i", "]", ",", "n", ")", "strcols", "[", "-", "2", "]", ".", "append", "(", "val", ")", "strcols", "[", "-", "1", "]", ".", "append", "(", "err", ")", "lengths", "=", "[", "max", "(", "[", "len", "(", "item", ")", "for", "item", "in", "strcol", "]", ")", "for", "strcol", "in", "strcols", "]", "myformat", "=", "\"\"", "if", "labels", "is", "not", "None", ":", "myformat", "+=", "\"%%%ds \"", "%", "(", "max", "(", "map", "(", "len", ",", "labels", ")", ")", ")", "if", "latex", ":", "myformat", "+=", "\"& \"", "for", "length", "in", "lengths", ":", "myformat", "+=", "\"%%%ds \"", "%", "(", "length", ")", "if", "latex", ":", "myformat", "+=", "\"& \"", "if", "latex", ":", "myformat", "=", "myformat", "[", ":", "-", "2", "]", "+", "\" \\\\\\\\\"", "output", "=", "[", "]", "if", "headers", ":", "if", "labels", ":", "hs", "=", "[", "headers", "[", "0", "]", "]", "for", "head", "in", "headers", "[", "1", ":", "]", ":", "hs", ".", "append", "(", "head", ")", "hs", ".", "append", "(", "'+/-'", ")", "else", ":", "hs", "=", "[", "]", "for", "head", "in", "headers", ":", "hs", ".", "append", "(", "head", ")", "hs", ".", "append", "(", "'+/-'", ")", "output", ".", "append", "(", "myformat", "%", "tuple", "(", "hs", ")", ")", "for", "i", "in", "range", "(", "nrows", ")", ":", "if", "labels", "is", "not", "None", ":", "output", ".", "append", "(", "myformat", "%", "tuple", "(", "[", "labels", "[", "i", "]", "]", "+", "[", "strcol", "[", "i", "]", "for", "strcol", "in", "strcols", "]", ")", ")", "else", ":", "output", ".", "append", "(", "myformat", "%", "tuple", "(", "[", "strcol", "[", "i", "]", "for", "strcol", "in", "strcols", "]", ")", ")", "return", "output" ]
Format a table such that the errors have n significant figures. [cols] and [errors] should be a list of 1D arrays that correspond to data and errors in columns. [n] is the number of significant figures to keep in the errors. [labels] is an optional column of strings that will be in the first column. [headers] is an optional list of column headers. If [latex] is true, myformat the table so that it can be included in a LaTeX table
[ "Format", "a", "table", "such", "that", "the", "errors", "have", "n", "significant", "figures", ".", "[", "cols", "]", "and", "[", "errors", "]", "should", "be", "a", "list", "of", "1D", "arrays", "that", "correspond", "to", "data", "and", "errors", "in", "columns", ".", "[", "n", "]", "is", "the", "number", "of", "significant", "figures", "to", "keep", "in", "the", "errors", ".", "[", "labels", "]", "is", "an", "optional", "column", "of", "strings", "that", "will", "be", "in", "the", "first", "column", ".", "[", "headers", "]", "is", "an", "optional", "list", "of", "column", "headers", ".", "If", "[", "latex", "]", "is", "true", "myformat", "the", "table", "so", "that", "it", "can", "be", "included", "in", "a", "LaTeX", "table" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Plotting_Libraries/sigfig.py#L80-L149
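A hedged usage sketch for format_table: the `sigfig` module name is inferred from the record's file path and the sample numbers are invented, so treat both as assumptions rather than documented behaviour.

from sigfig import format_table  # module name assumed from bin/lib/Plotting_Libraries/sigfig.py

# Two data columns with their error columns; errors are kept to 2 significant figures.
cols = [[1.2345, 10.567], [0.9876, 2.3456]]
errs = [[0.0213, 0.401], [0.0132, 0.0761]]
labels = ["line A", "line B"]   # optional first column of row names
headers = ["flux", "ratio"]     # one header per data column; "+/-" headers are added automatically

for row in format_table(cols, errs, n=2, labels=labels, headers=headers, latex=False):
    print(row)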
Vital-Fernandez/dazer
bin/lib/Plotting_Libraries/sigfig.py
round_sig_error2
def round_sig_error2(x, ex1, ex2, n): '''Find min(ex1,ex2) rounded to n sig-figs and make the floating point x and max(ex,ex2) match the number of decimals.''' minerr = min(ex1,ex2) minstex = round_sig(minerr,n) if minstex.find('.') < 0: extra_zeros = len(minstex) - n sigfigs = len(str(int(x))) - extra_zeros stx = round_sig(x,sigfigs) maxstex = round_sig(max(ex1,ex2),sigfigs) else: num_after_dec = len(string.split(minstex,'.')[1]) stx = ("%%.%df" % num_after_dec) % (x) maxstex = ("%%.%df" % num_after_dec) % (max(ex1,ex2)) if ex1 < ex2: return stx,minstex,maxstex else: return stx,maxstex,minstex
python
def round_sig_error2(x, ex1, ex2, n): '''Find min(ex1,ex2) rounded to n sig-figs and make the floating point x and max(ex,ex2) match the number of decimals.''' minerr = min(ex1,ex2) minstex = round_sig(minerr,n) if minstex.find('.') < 0: extra_zeros = len(minstex) - n sigfigs = len(str(int(x))) - extra_zeros stx = round_sig(x,sigfigs) maxstex = round_sig(max(ex1,ex2),sigfigs) else: num_after_dec = len(string.split(minstex,'.')[1]) stx = ("%%.%df" % num_after_dec) % (x) maxstex = ("%%.%df" % num_after_dec) % (max(ex1,ex2)) if ex1 < ex2: return stx,minstex,maxstex else: return stx,maxstex,minstex
[ "def", "round_sig_error2", "(", "x", ",", "ex1", ",", "ex2", ",", "n", ")", ":", "minerr", "=", "min", "(", "ex1", ",", "ex2", ")", "minstex", "=", "round_sig", "(", "minerr", ",", "n", ")", "if", "minstex", ".", "find", "(", "'.'", ")", "<", "0", ":", "extra_zeros", "=", "len", "(", "minstex", ")", "-", "n", "sigfigs", "=", "len", "(", "str", "(", "int", "(", "x", ")", ")", ")", "-", "extra_zeros", "stx", "=", "round_sig", "(", "x", ",", "sigfigs", ")", "maxstex", "=", "round_sig", "(", "max", "(", "ex1", ",", "ex2", ")", ",", "sigfigs", ")", "else", ":", "num_after_dec", "=", "len", "(", "string", ".", "split", "(", "minstex", ",", "'.'", ")", "[", "1", "]", ")", "stx", "=", "(", "\"%%.%df\"", "%", "num_after_dec", ")", "%", "(", "x", ")", "maxstex", "=", "(", "\"%%.%df\"", "%", "num_after_dec", ")", "%", "(", "max", "(", "ex1", ",", "ex2", ")", ")", "if", "ex1", "<", "ex2", ":", "return", "stx", ",", "minstex", ",", "maxstex", "else", ":", "return", "stx", ",", "maxstex", ",", "minstex" ]
Find min(ex1,ex2) rounded to n sig-figs and make the floating point x and max(ex,ex2) match the number of decimals.
[ "Find", "min", "(", "ex1", "ex2", ")", "rounded", "to", "n", "sig", "-", "figs", "and", "make", "the", "floating", "point", "x", "and", "max", "(", "ex", "ex2", ")", "match", "the", "number", "of", "decimals", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Plotting_Libraries/sigfig.py#L151-L168
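A minimal call sketch for round_sig_error2, under the same assumed import path; the exact output strings depend on round_sig, so the values in the comment are only the expected shape.

from sigfig import round_sig_error2  # assumed import location

# The strings come back in the same (x, ex1, ex2) order they were passed in;
# the smaller error fixes the precision, and the value and the larger error
# are padded to match. Roughly ('3.142', '0.023', '0.112') for the call below.
stx, sex1, sex2 = round_sig_error2(3.14159, 0.0234, 0.112, 2)
print("%s +%s/-%s" % (stx, sex2, sex1))  # treating ex2 as the upper error is the caller's choice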
dacker-team/pyzure
pyzure/send/send_multi_threads.py
send_to_azure_multi_threads
def send_to_azure_multi_threads(instance, data, nb_threads=4, replace=True, types=None, primary_key=(), sub_commit=False): """ data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] } """ # Time initialization start = datetime.datetime.now() # Extract info table_name = data["table_name"] columns_name = data["columns_name"] rows = data["rows"] total_len_data = len(rows) # Create table if needed if not existing_test(instance, table_name) or (types is not None) or (primary_key != ()): create.create_table(instance, data, primary_key, types) # Clean table if needed if replace: cleaning_function(instance, table_name) # Define batch size batch_size = int(total_len_data / nb_threads) + 1 if total_len_data < nb_threads: batch_size = 1 # Get table info table_info = get_table_info(instance, table_name) # Split data in batches of batch_size length split_data = [] # global threads_state # threads_state = {} for i in range(nb_threads): batch = create_a_batch(rows, batch_size, i) split_data.append( { "data": { "table_name": table_name, "columns_name": columns_name, "rows": batch }, "instance": instance, "thread_number": i, "nb_threads": nb_threads, "sub_commit": sub_commit, "table_info": table_info, } ) write_in_file("threads_state_%s" % str(i), str({ "iteration": 0, "total": len(batch) })) with concurrent.futures.ProcessPoolExecutor() as executor: r = list(executor.map(send_to_azure_from_one_thread, split_data)) print() for num_thread in range(nb_threads): insert_query = "INSERT INTO %s SELECT * FROM %s" % (table_name, table_name + "_" + str(num_thread)) print(insert_query) execute_query(instance, insert_query) for num_thread in range(nb_threads): sub_table = table_name + "_" + str(num_thread) print(C.HEADER + "DROP TABLE %s..." % sub_table + C.ENDC) execute_query(instance, "DROP TABLE %s" % sub_table) print(C.HEADER + "DROP TABLE %s...OK" % sub_table + C.ENDC) total_length_data = 0 for element in split_data: total_length_data = total_length_data + len(element["data"]["rows"]) for i in range(len(r)): print("Thread %s : %s seconds" % (str(i), str(r[i]))) print("Total rows: %s" % str(total_length_data)) print(C.BOLD + "Total time in seconds : %s" % str((datetime.datetime.now() - start).seconds) + C.ENDC) return 0
python
def send_to_azure_multi_threads(instance, data, nb_threads=4, replace=True, types=None, primary_key=(), sub_commit=False): """ data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] } """ # Time initialization start = datetime.datetime.now() # Extract info table_name = data["table_name"] columns_name = data["columns_name"] rows = data["rows"] total_len_data = len(rows) # Create table if needed if not existing_test(instance, table_name) or (types is not None) or (primary_key != ()): create.create_table(instance, data, primary_key, types) # Clean table if needed if replace: cleaning_function(instance, table_name) # Define batch size batch_size = int(total_len_data / nb_threads) + 1 if total_len_data < nb_threads: batch_size = 1 # Get table info table_info = get_table_info(instance, table_name) # Split data in batches of batch_size length split_data = [] # global threads_state # threads_state = {} for i in range(nb_threads): batch = create_a_batch(rows, batch_size, i) split_data.append( { "data": { "table_name": table_name, "columns_name": columns_name, "rows": batch }, "instance": instance, "thread_number": i, "nb_threads": nb_threads, "sub_commit": sub_commit, "table_info": table_info, } ) write_in_file("threads_state_%s" % str(i), str({ "iteration": 0, "total": len(batch) })) with concurrent.futures.ProcessPoolExecutor() as executor: r = list(executor.map(send_to_azure_from_one_thread, split_data)) print() for num_thread in range(nb_threads): insert_query = "INSERT INTO %s SELECT * FROM %s" % (table_name, table_name + "_" + str(num_thread)) print(insert_query) execute_query(instance, insert_query) for num_thread in range(nb_threads): sub_table = table_name + "_" + str(num_thread) print(C.HEADER + "DROP TABLE %s..." % sub_table + C.ENDC) execute_query(instance, "DROP TABLE %s" % sub_table) print(C.HEADER + "DROP TABLE %s...OK" % sub_table + C.ENDC) total_length_data = 0 for element in split_data: total_length_data = total_length_data + len(element["data"]["rows"]) for i in range(len(r)): print("Thread %s : %s seconds" % (str(i), str(r[i]))) print("Total rows: %s" % str(total_length_data)) print(C.BOLD + "Total time in seconds : %s" % str((datetime.datetime.now() - start).seconds) + C.ENDC) return 0
[ "def", "send_to_azure_multi_threads", "(", "instance", ",", "data", ",", "nb_threads", "=", "4", ",", "replace", "=", "True", ",", "types", "=", "None", ",", "primary_key", "=", "(", ")", ",", "sub_commit", "=", "False", ")", ":", "# Time initialization", "start", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "# Extract info", "table_name", "=", "data", "[", "\"table_name\"", "]", "columns_name", "=", "data", "[", "\"columns_name\"", "]", "rows", "=", "data", "[", "\"rows\"", "]", "total_len_data", "=", "len", "(", "rows", ")", "# Create table if needed", "if", "not", "existing_test", "(", "instance", ",", "table_name", ")", "or", "(", "types", "is", "not", "None", ")", "or", "(", "primary_key", "!=", "(", ")", ")", ":", "create", ".", "create_table", "(", "instance", ",", "data", ",", "primary_key", ",", "types", ")", "# Clean table if needed", "if", "replace", ":", "cleaning_function", "(", "instance", ",", "table_name", ")", "# Define batch size", "batch_size", "=", "int", "(", "total_len_data", "/", "nb_threads", ")", "+", "1", "if", "total_len_data", "<", "nb_threads", ":", "batch_size", "=", "1", "# Get table info", "table_info", "=", "get_table_info", "(", "instance", ",", "table_name", ")", "# Split data in batches of batch_size length", "split_data", "=", "[", "]", "# global threads_state", "# threads_state = {}", "for", "i", "in", "range", "(", "nb_threads", ")", ":", "batch", "=", "create_a_batch", "(", "rows", ",", "batch_size", ",", "i", ")", "split_data", ".", "append", "(", "{", "\"data\"", ":", "{", "\"table_name\"", ":", "table_name", ",", "\"columns_name\"", ":", "columns_name", ",", "\"rows\"", ":", "batch", "}", ",", "\"instance\"", ":", "instance", ",", "\"thread_number\"", ":", "i", ",", "\"nb_threads\"", ":", "nb_threads", ",", "\"sub_commit\"", ":", "sub_commit", ",", "\"table_info\"", ":", "table_info", ",", "}", ")", "write_in_file", "(", "\"threads_state_%s\"", "%", "str", "(", "i", ")", ",", "str", "(", "{", "\"iteration\"", ":", "0", ",", "\"total\"", ":", "len", "(", "batch", ")", "}", ")", ")", "with", "concurrent", ".", "futures", ".", "ProcessPoolExecutor", "(", ")", "as", "executor", ":", "r", "=", "list", "(", "executor", ".", "map", "(", "send_to_azure_from_one_thread", ",", "split_data", ")", ")", "print", "(", ")", "for", "num_thread", "in", "range", "(", "nb_threads", ")", ":", "insert_query", "=", "\"INSERT INTO %s SELECT * FROM %s\"", "%", "(", "table_name", ",", "table_name", "+", "\"_\"", "+", "str", "(", "num_thread", ")", ")", "print", "(", "insert_query", ")", "execute_query", "(", "instance", ",", "insert_query", ")", "for", "num_thread", "in", "range", "(", "nb_threads", ")", ":", "sub_table", "=", "table_name", "+", "\"_\"", "+", "str", "(", "num_thread", ")", "print", "(", "C", ".", "HEADER", "+", "\"DROP TABLE %s...\"", "%", "sub_table", "+", "C", ".", "ENDC", ")", "execute_query", "(", "instance", ",", "\"DROP TABLE %s\"", "%", "sub_table", ")", "print", "(", "C", ".", "HEADER", "+", "\"DROP TABLE %s...OK\"", "%", "sub_table", "+", "C", ".", "ENDC", ")", "total_length_data", "=", "0", "for", "element", "in", "split_data", ":", "total_length_data", "=", "total_length_data", "+", "len", "(", "element", "[", "\"data\"", "]", "[", "\"rows\"", "]", ")", "for", "i", "in", "range", "(", "len", "(", "r", ")", ")", ":", "print", "(", "\"Thread %s : %s seconds\"", "%", "(", "str", "(", "i", ")", ",", "str", "(", "r", "[", "i", "]", ")", ")", ")", "print", "(", "\"Total rows: %s\"", "%", "str", "(", "total_length_data", ")", ")", 
"print", "(", "C", ".", "BOLD", "+", "\"Total time in seconds : %s\"", "%", "str", "(", "(", "datetime", ".", "datetime", ".", "now", "(", ")", "-", "start", ")", ".", "seconds", ")", "+", "C", ".", "ENDC", ")", "return", "0" ]
data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] }
[ "data", "=", "{", "table_name", ":", "name_of_the_azure_schema", "+", ".", "+", "name_of_the_azure_table", "#Must", "already", "exist", "columns_name", ":", "[", "first_column_name", "second_column_name", "...", "last_column_name", "]", "rows", ":", "[[", "first_raw_value", "second_raw_value", "...", "last_raw_value", "]", "...", "]", "}" ]
train
https://github.com/dacker-team/pyzure/blob/1e6d202f91ca0f080635adc470d9d18585056d53/pyzure/send/send_multi_threads.py#L26-L113
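A hypothetical invocation of send_to_azure_multi_threads; the import path follows the record's file path, while "MY_AZURE_DB" is a placeholder for whatever credential alias your pyzure configuration expects.

from pyzure.send.send_multi_threads import send_to_azure_multi_threads

data = {
    "table_name": "dbo.events",                        # 'schema.table', as the docstring requires
    "columns_name": ["id", "name"],
    "rows": [[1, "alpha"], [2, "beta"], [3, "gamma"]],
}

# Two worker processes, drop-and-replace semantics, commit after every sub-batch.
send_to_azure_multi_threads("MY_AZURE_DB", data, nb_threads=2, replace=True, sub_commit=True)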
dacker-team/pyzure
pyzure/send/send_multi_threads.py
send_to_azure
def send_to_azure(instance, data, thread_number, sub_commit, table_info, nb_threads): """ data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] } """ rows = data["rows"] if not rows: return 0 columns_name = data["columns_name"] table_name = data["table_name"] + "_" + str(thread_number) print(C.HEADER + "Create table %s..." % table_name + C.ENDC) create_table_from_info(instance, table_info, table_name) print(C.OKGREEN + "Create table %s...OK" % table_name + C.ENDC) small_batch_size = int(2099 / len(columns_name)) cnxn = connect(instance) cursor = cnxn.cursor() # Initialize counters boolean = True total_rows = len(rows) question_mark_pattern = "(%s)" % ",".join(["?" for i in range(len(rows[0]))]) counter = 0 while boolean: temp_row = [] question_mark_list = [] for i in range(small_batch_size): if rows: temp_row.append(rows.pop()) question_mark_list.append(question_mark_pattern) else: boolean = False continue counter = counter + len(temp_row) # percent = round(float(counter * 100) / total_rows) threads_state = eval(read_file("threads_state_%s" % str(thread_number))) threads_state["iteration"] = counter write_in_file("threads_state_%s" % str(thread_number), str(threads_state)) # print(threads_state) if sub_commit: suffix = "rows sent" # print("Thread %s : %s %% rows sent" % (str(thread_number), str(percent))) else: suffix = "rows prepared to be sent" print_progress_bar_multi_threads(nb_threads, suffix=suffix) # print("Thread %s : %s %% rows prepared to be sent" % (str(thread_number), str(percent))) data_values_str = ','.join(question_mark_list) columns_name_str = ", ".join(columns_name) inserting_request = '''INSERT INTO %s (%s) VALUES %s ;''' % (table_name, columns_name_str, data_values_str) final_data = [y for x in temp_row for y in x] if final_data: cursor.execute(inserting_request, final_data) if sub_commit: commit_function(cnxn) if not sub_commit: commit_function(cnxn) cursor.close() cnxn.close() return 0
python
def send_to_azure(instance, data, thread_number, sub_commit, table_info, nb_threads): """ data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] } """ rows = data["rows"] if not rows: return 0 columns_name = data["columns_name"] table_name = data["table_name"] + "_" + str(thread_number) print(C.HEADER + "Create table %s..." % table_name + C.ENDC) create_table_from_info(instance, table_info, table_name) print(C.OKGREEN + "Create table %s...OK" % table_name + C.ENDC) small_batch_size = int(2099 / len(columns_name)) cnxn = connect(instance) cursor = cnxn.cursor() # Initialize counters boolean = True total_rows = len(rows) question_mark_pattern = "(%s)" % ",".join(["?" for i in range(len(rows[0]))]) counter = 0 while boolean: temp_row = [] question_mark_list = [] for i in range(small_batch_size): if rows: temp_row.append(rows.pop()) question_mark_list.append(question_mark_pattern) else: boolean = False continue counter = counter + len(temp_row) # percent = round(float(counter * 100) / total_rows) threads_state = eval(read_file("threads_state_%s" % str(thread_number))) threads_state["iteration"] = counter write_in_file("threads_state_%s" % str(thread_number), str(threads_state)) # print(threads_state) if sub_commit: suffix = "rows sent" # print("Thread %s : %s %% rows sent" % (str(thread_number), str(percent))) else: suffix = "rows prepared to be sent" print_progress_bar_multi_threads(nb_threads, suffix=suffix) # print("Thread %s : %s %% rows prepared to be sent" % (str(thread_number), str(percent))) data_values_str = ','.join(question_mark_list) columns_name_str = ", ".join(columns_name) inserting_request = '''INSERT INTO %s (%s) VALUES %s ;''' % (table_name, columns_name_str, data_values_str) final_data = [y for x in temp_row for y in x] if final_data: cursor.execute(inserting_request, final_data) if sub_commit: commit_function(cnxn) if not sub_commit: commit_function(cnxn) cursor.close() cnxn.close() return 0
[ "def", "send_to_azure", "(", "instance", ",", "data", ",", "thread_number", ",", "sub_commit", ",", "table_info", ",", "nb_threads", ")", ":", "rows", "=", "data", "[", "\"rows\"", "]", "if", "not", "rows", ":", "return", "0", "columns_name", "=", "data", "[", "\"columns_name\"", "]", "table_name", "=", "data", "[", "\"table_name\"", "]", "+", "\"_\"", "+", "str", "(", "thread_number", ")", "print", "(", "C", ".", "HEADER", "+", "\"Create table %s...\"", "%", "table_name", "+", "C", ".", "ENDC", ")", "create_table_from_info", "(", "instance", ",", "table_info", ",", "table_name", ")", "print", "(", "C", ".", "OKGREEN", "+", "\"Create table %s...OK\"", "%", "table_name", "+", "C", ".", "ENDC", ")", "small_batch_size", "=", "int", "(", "2099", "/", "len", "(", "columns_name", ")", ")", "cnxn", "=", "connect", "(", "instance", ")", "cursor", "=", "cnxn", ".", "cursor", "(", ")", "# Initialize counters", "boolean", "=", "True", "total_rows", "=", "len", "(", "rows", ")", "question_mark_pattern", "=", "\"(%s)\"", "%", "\",\"", ".", "join", "(", "[", "\"?\"", "for", "i", "in", "range", "(", "len", "(", "rows", "[", "0", "]", ")", ")", "]", ")", "counter", "=", "0", "while", "boolean", ":", "temp_row", "=", "[", "]", "question_mark_list", "=", "[", "]", "for", "i", "in", "range", "(", "small_batch_size", ")", ":", "if", "rows", ":", "temp_row", ".", "append", "(", "rows", ".", "pop", "(", ")", ")", "question_mark_list", ".", "append", "(", "question_mark_pattern", ")", "else", ":", "boolean", "=", "False", "continue", "counter", "=", "counter", "+", "len", "(", "temp_row", ")", "# percent = round(float(counter * 100) / total_rows)", "threads_state", "=", "eval", "(", "read_file", "(", "\"threads_state_%s\"", "%", "str", "(", "thread_number", ")", ")", ")", "threads_state", "[", "\"iteration\"", "]", "=", "counter", "write_in_file", "(", "\"threads_state_%s\"", "%", "str", "(", "thread_number", ")", ",", "str", "(", "threads_state", ")", ")", "# print(threads_state)", "if", "sub_commit", ":", "suffix", "=", "\"rows sent\"", "# print(\"Thread %s : %s %% rows sent\" % (str(thread_number), str(percent)))", "else", ":", "suffix", "=", "\"rows prepared to be sent\"", "print_progress_bar_multi_threads", "(", "nb_threads", ",", "suffix", "=", "suffix", ")", "# print(\"Thread %s : %s %% rows prepared to be sent\" % (str(thread_number), str(percent)))", "data_values_str", "=", "','", ".", "join", "(", "question_mark_list", ")", "columns_name_str", "=", "\", \"", ".", "join", "(", "columns_name", ")", "inserting_request", "=", "'''INSERT INTO %s (%s) VALUES %s ;'''", "%", "(", "table_name", ",", "columns_name_str", ",", "data_values_str", ")", "final_data", "=", "[", "y", "for", "x", "in", "temp_row", "for", "y", "in", "x", "]", "if", "final_data", ":", "cursor", ".", "execute", "(", "inserting_request", ",", "final_data", ")", "if", "sub_commit", ":", "commit_function", "(", "cnxn", ")", "if", "not", "sub_commit", ":", "commit_function", "(", "cnxn", ")", "cursor", ".", "close", "(", ")", "cnxn", ".", "close", "(", ")", "return", "0" ]
data = { "table_name" : 'name_of_the_azure_schema' + '.' + 'name_of_the_azure_table' #Must already exist, "columns_name" : [first_column_name,second_column_name,...,last_column_name], "rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...] }
[ "data", "=", "{", "table_name", ":", "name_of_the_azure_schema", "+", ".", "+", "name_of_the_azure_table", "#Must", "already", "exist", "columns_name", ":", "[", "first_column_name", "second_column_name", "...", "last_column_name", "]", "rows", ":", "[[", "first_raw_value", "second_raw_value", "...", "last_raw_value", "]", "...", "]", "}" ]
train
https://github.com/dacker-team/pyzure/blob/1e6d202f91ca0f080635adc470d9d18585056d53/pyzure/send/send_multi_threads.py#L116-L182
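The 2099 constant behind small_batch_size reads like a guard against SQL Server's 2100-parameter-per-request limit; that interpretation is mine, not stated in the record. The arithmetic it implies:

# Rows per parameterised INSERT for a 6-column table (the 2100-parameter reading is assumed).
columns_name = ["id", "name", "amount", "created_at", "updated_at", "source"]
small_batch_size = int(2099 / len(columns_name))              # 2099 / 6 -> 349 rows per INSERT
parameters_per_insert = small_batch_size * len(columns_name)  # 349 * 6 = 2094, below the 2100 cap
print(small_batch_size, parameters_per_insert)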
paydunya/paydunya-python
paydunya/invoice.py
Invoice.create
def create(self, items=[], taxes=[], custom_data=[]): """Adds the items to the invoice Format of 'items': [ InvoiceItem( name="VIP Ticket", quantity= 2, unit_price= "3500", total_price= "7000", description= "VIP Tickets for the Party" } ,... ] """ self.add_items(items) self.add_taxes(taxes) self.add_custom_data(custom_data) return self._process('checkout-invoice/create', self._prepare_data)
python
def create(self, items=[], taxes=[], custom_data=[]): """Adds the items to the invoice Format of 'items': [ InvoiceItem( name="VIP Ticket", quantity= 2, unit_price= "3500", total_price= "7000", description= "VIP Tickets for the Party" } ,... ] """ self.add_items(items) self.add_taxes(taxes) self.add_custom_data(custom_data) return self._process('checkout-invoice/create', self._prepare_data)
[ "def", "create", "(", "self", ",", "items", "=", "[", "]", ",", "taxes", "=", "[", "]", ",", "custom_data", "=", "[", "]", ")", ":", "self", ".", "add_items", "(", "items", ")", "self", ".", "add_taxes", "(", "taxes", ")", "self", ".", "add_custom_data", "(", "custom_data", ")", "return", "self", ".", "_process", "(", "'checkout-invoice/create'", ",", "self", ".", "_prepare_data", ")" ]
Adds the items to the invoice Format of 'items': [ InvoiceItem( name="VIP Ticket", quantity= 2, unit_price= "3500", total_price= "7000", description= "VIP Tickets for the Party" } ,... ]
[ "Adds", "the", "items", "to", "the", "invoice" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L31-L49
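A hedged end-to-end sketch for Invoice.create: the records confirm the create signature and the InvoiceItem field names, but the import locations and the Store-based construction of the invoice are assumptions about the rest of the paydunya package.

from paydunya import Store                      # assumed to be exported at package level
from paydunya.invoice import Invoice, InvoiceItem

store = Store(name="Magasin le Choco")          # assumed constructor signature
invoice = Invoice(store)                        # assumed constructor signature

items = [
    InvoiceItem(name="VIP Ticket", quantity=2, unit_price="3500",
                total_price="7000", description="VIP Tickets for the Party"),
]
taxes = [("VAT", 5000), ("Other TAX", 700)]     # ("tax_name", "tax_amount") pairs, per add_taxes

response = invoice.create(items=items, taxes=taxes)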
paydunya/paydunya-python
paydunya/invoice.py
Invoice.confirm
def confirm(self, token=None): """Returns the status of the invoice STATUSES: pending, completed, cancelled """ _token = token if token else self._response.get("token") return self._process('checkout-invoice/confirm/' + str(_token))
python
def confirm(self, token=None): """Returns the status of the invoice STATUSES: pending, completed, cancelled """ _token = token if token else self._response.get("token") return self._process('checkout-invoice/confirm/' + str(_token))
[ "def", "confirm", "(", "self", ",", "token", "=", "None", ")", ":", "_token", "=", "token", "if", "token", "else", "self", ".", "_response", ".", "get", "(", "\"token\"", ")", "return", "self", ".", "_process", "(", "'checkout-invoice/confirm/'", "+", "str", "(", "_token", ")", ")" ]
Returns the status of the invoice STATUSES: pending, completed, cancelled
[ "Returns", "the", "status", "of", "the", "invoice" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L51-L57
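Continuing the sketch above, confirm can rely on the token stored from the last create response or take one explicitly; the shape of the returned payload is not shown in these records.

# Uses the token from the invoice's own last response.
status = invoice.confirm()
# Or check an invoice created elsewhere; "test_9xyz" is a made-up token.
status = invoice.confirm(token="test_9xyz")
# Possible statuses, per the docstring: pending, completed, cancelled.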
paydunya/paydunya-python
paydunya/invoice.py
Invoice.add_taxes
def add_taxes(self, taxes): """Appends the data to the 'taxes' key in the request object 'taxes' should be in format: [("tax_name", "tax_amount")] For example: [("Other TAX", 700), ("VAT", 5000)] """ # fixme: how to resolve duplicate tax names _idx = len(self.taxes) # current index to prevent overwriting for idx, tax in enumerate(taxes): tax_key = "tax_" + str(idx + _idx) self.taxes[tax_key] = {"name": tax[0], "amount": tax[1]}
python
def add_taxes(self, taxes): """Appends the data to the 'taxes' key in the request object 'taxes' should be in format: [("tax_name", "tax_amount")] For example: [("Other TAX", 700), ("VAT", 5000)] """ # fixme: how to resolve duplicate tax names _idx = len(self.taxes) # current index to prevent overwriting for idx, tax in enumerate(taxes): tax_key = "tax_" + str(idx + _idx) self.taxes[tax_key] = {"name": tax[0], "amount": tax[1]}
[ "def", "add_taxes", "(", "self", ",", "taxes", ")", ":", "# fixme: how to resolve duplicate tax names", "_idx", "=", "len", "(", "self", ".", "taxes", ")", "# current index to prevent overwriting", "for", "idx", ",", "tax", "in", "enumerate", "(", "taxes", ")", ":", "tax_key", "=", "\"tax_\"", "+", "str", "(", "idx", "+", "_idx", ")", "self", ".", "taxes", "[", "tax_key", "]", "=", "{", "\"name\"", ":", "tax", "[", "0", "]", ",", "\"amount\"", ":", "tax", "[", "1", "]", "}" ]
Appends the data to the 'taxes' key in the request object 'taxes' should be in format: [("tax_name", "tax_amount")] For example: [("Other TAX", 700), ("VAT", 5000)]
[ "Appends", "the", "data", "to", "the", "taxes", "key", "in", "the", "request", "object" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L59-L70
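Continuing the same sketch: taxes are (name, amount) pairs stored under sequential tax_<index> keys, so repeated calls append rather than overwrite.

invoice.add_taxes([("VAT", 5000)])        # stored as {"tax_0": {"name": "VAT", "amount": 5000}}
invoice.add_taxes([("Other TAX", 700)])   # appended as "tax_1"; "tax_0" is kept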
paydunya/paydunya-python
paydunya/invoice.py
Invoice.add_item
def add_item(self, item): """Updates the list of items in the current transaction""" _idx = len(self.items) self.items.update({"item_" + str(_idx + 1): item})
python
def add_item(self, item): """Updates the list of items in the current transaction""" _idx = len(self.items) self.items.update({"item_" + str(_idx + 1): item})
[ "def", "add_item", "(", "self", ",", "item", ")", ":", "_idx", "=", "len", "(", "self", ".", "items", ")", "self", ".", "items", ".", "update", "(", "{", "\"item_\"", "+", "str", "(", "_idx", "+", "1", ")", ":", "item", "}", ")" ]
Updates the list of items in the current transaction
[ "Updates", "the", "list", "of", "items", "in", "the", "current", "transaction" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L79-L82
paydunya/paydunya-python
paydunya/invoice.py
Invoice._prepare_data
def _prepare_data(self): """Formats the data in the current transaction for processing""" total_amount = self.total_amount or self.calculate_total_amt() self._data = { "invoice": { "items": self.__encode_items(self.items), "taxes": self.taxes, "total_amount": total_amount, "description": self.description, "channels": self.channels }, "store": self.store.info, "custom_data": self.custom_data, "actions": { "cancel_url": self.cancel_url, "return_url": self.return_url, "callback_url": self.callback_url } } return self._data
python
def _prepare_data(self): """Formats the data in the current transaction for processing""" total_amount = self.total_amount or self.calculate_total_amt() self._data = { "invoice": { "items": self.__encode_items(self.items), "taxes": self.taxes, "total_amount": total_amount, "description": self.description, "channels": self.channels }, "store": self.store.info, "custom_data": self.custom_data, "actions": { "cancel_url": self.cancel_url, "return_url": self.return_url, "callback_url": self.callback_url } } return self._data
[ "def", "_prepare_data", "(", "self", ")", ":", "total_amount", "=", "self", ".", "total_amount", "or", "self", ".", "calculate_total_amt", "(", ")", "self", ".", "_data", "=", "{", "\"invoice\"", ":", "{", "\"items\"", ":", "self", ".", "__encode_items", "(", "self", ".", "items", ")", ",", "\"taxes\"", ":", "self", ".", "taxes", ",", "\"total_amount\"", ":", "total_amount", ",", "\"description\"", ":", "self", ".", "description", ",", "\"channels\"", ":", "self", ".", "channels", "}", ",", "\"store\"", ":", "self", ".", "store", ".", "info", ",", "\"custom_data\"", ":", "self", ".", "custom_data", ",", "\"actions\"", ":", "{", "\"cancel_url\"", ":", "self", ".", "cancel_url", ",", "\"return_url\"", ":", "self", ".", "return_url", ",", "\"callback_url\"", ":", "self", ".", "callback_url", "}", "}", "return", "self", ".", "_data" ]
Formats the data in the current transaction for processing
[ "Formats", "the", "data", "in", "the", "current", "transaction", "for", "processing" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L97-L116
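For orientation, the request body that _prepare_data assembles has roughly this shape; every value below is an illustrative placeholder, and the encoding applied to items by the private __encode_items helper is not shown in these records.

payload = {
    "invoice": {
        "items": "<whatever __encode_items returns>",    # helper not shown in these records
        "taxes": {"tax_0": {"name": "VAT", "amount": 5000}},
        "total_amount": 7000,                            # explicit total, else calculate_total_amt()
        "description": "VIP Tickets for the Party",
        "channels": ["card", "mobile_money"],            # illustrative; allowed values not documented here
    },
    "store": {"name": "Magasin le Choco"},               # whatever store.info returns
    "custom_data": {},
    "actions": {
        "cancel_url": "https://example.com/cancel",      # placeholder URLs
        "return_url": "https://example.com/return",
        "callback_url": "https://example.com/callback",
    },
}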