Dataset schema (one entry per column: name, type, observed range):

partition: stringclasses, 3 values
func_name: stringlengths, 1 to 134
docstring: stringlengths, 1 to 46.9k
path: stringlengths, 4 to 223
original_string: stringlengths, 75 to 104k
code: stringlengths, 75 to 104k
docstring_tokens: listlengths, 1 to 1.97k
repo: stringlengths, 7 to 55
language: stringclasses, 1 value
url: stringlengths, 87 to 315
code_tokens: listlengths, 19 to 28.4k
sha: stringlengths, 40 to 40
partition: test
func_name: GuppiRaw.read_next_data_block_int8
docstring: Read the next block of data and its header. Returns (header, data_x, data_y): header (dict) is a dictionary of header metadata; data_x and data_y (np.array) are int8 arrays holding the X and Y polarization samples.
path: blimpy/guppi.py
code:
def read_next_data_block_int8(self):
    """ Read the next block of data and its header

    Returns: (header, data_x, data_y)
        header (dict): dictionary of header metadata
        data_x, data_y (np.array): int8 arrays of the X and Y polarization samples
    """
    header, data_idx = self.read_header()
    self.file_obj.seek(data_idx)

    # Read data and reshape
    n_chan = int(header['OBSNCHAN'])
    n_pol = int(header['NPOL'])
    n_bit = int(header['NBITS'])
    n_samples = int(int(header['BLOCSIZE']) / (n_chan * n_pol * (n_bit / 8)))

    d = np.fromfile(self.file_obj, count=header['BLOCSIZE'], dtype='int8')

    # Handle 2-bit and 4-bit data
    if n_bit != 8:
        d = unpack(d, n_bit)

    d = d.reshape((n_chan, n_samples, n_pol))   # Real, imag

    # Reallocate the scratch buffers only when the block shape changes
    if self._d_x.shape != d[..., 0:2].shape:
        self._d_x = np.ascontiguousarray(np.zeros(d[..., 0:2].shape, dtype='int8'))
        self._d_y = np.ascontiguousarray(np.zeros(d[..., 2:4].shape, dtype='int8'))

    self._d_x[:] = d[..., 0:2]
    self._d_y[:] = d[..., 2:4]

    return header, self._d_x, self._d_y
[ "Read", "the", "next", "block", "of", "data", "and", "its", "header" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L208-L239
[ "def", "read_next_data_block_int8", "(", "self", ")", ":", "header", ",", "data_idx", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx", ")", "# Read data and reshape", "n_chan", "=", "int", "(", "header", "[", "'OBSNCHAN'", "]", ")", "n_pol", "=", "int", "(", "header", "[", "'NPOL'", "]", ")", "n_bit", "=", "int", "(", "header", "[", "'NBITS'", "]", ")", "n_samples", "=", "int", "(", "int", "(", "header", "[", "'BLOCSIZE'", "]", ")", "/", "(", "n_chan", "*", "n_pol", "*", "(", "n_bit", "/", "8", ")", ")", ")", "d", "=", "np", ".", "fromfile", "(", "self", ".", "file_obj", ",", "count", "=", "header", "[", "'BLOCSIZE'", "]", ",", "dtype", "=", "'int8'", ")", "# Handle 2-bit and 4-bit data", "if", "n_bit", "!=", "8", ":", "d", "=", "unpack", "(", "d", ",", "n_bit", ")", "d", "=", "d", ".", "reshape", "(", "(", "n_chan", ",", "n_samples", ",", "n_pol", ")", ")", "# Real, imag", "if", "self", ".", "_d_x", ".", "shape", "!=", "d", "[", "...", ",", "0", ":", "2", "]", ".", "shape", ":", "self", ".", "_d_x", "=", "np", ".", "ascontiguousarray", "(", "np", ".", "zeros", "(", "d", "[", "...", ",", "0", ":", "2", "]", ".", "shape", ",", "dtype", "=", "'int8'", ")", ")", "self", ".", "_d_y", "=", "np", ".", "ascontiguousarray", "(", "np", ".", "zeros", "(", "d", "[", "...", ",", "2", ":", "4", "]", ".", "shape", ",", "dtype", "=", "'int8'", ")", ")", "self", ".", "_d_x", "[", ":", "]", "=", "d", "[", "...", ",", "0", ":", "2", "]", "self", ".", "_d_y", "[", ":", "]", "=", "d", "[", "...", ",", "2", ":", "4", "]", "return", "header", ",", "self", ".", "_d_x", ",", "self", ".", "_d_y" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
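A minimal usage sketch for this reader, assuming blimpy is installed and exposes GuppiRaw at the package level; the file name is hypothetical:

from blimpy import GuppiRaw

r = GuppiRaw('guppi_example.0000.raw')            # hypothetical file name
header, d_x, d_y = r.read_next_data_block_int8()
# X and Y polarizations come back as int8 (real, imag) pairs
print(d_x.shape, d_x.dtype)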
partition: test
func_name: GuppiRaw.read_next_data_block_int8_2x
docstring: Read the next two blocks of data and their header. Returns (header, data_x, data_y): header (dict) is a dictionary of header metadata; data_x and data_y (np.array) are int8 arrays holding the X and Y polarization samples of both blocks concatenated in time.
path: blimpy/guppi.py
code:
def read_next_data_block_int8_2x(self):
    """ Read the next two blocks of data and their header

    Returns: (header, data_x, data_y)
        header (dict): dictionary of header metadata
        data_x, data_y (np.array): int8 arrays of the X and Y polarization samples,
            with the two blocks concatenated along the time axis
    """
    header, data_idx = self.read_header()
    self.file_obj.seek(data_idx)

    # Read data and reshape
    n_chan = int(header['OBSNCHAN'])
    n_pol = int(header['NPOL'])
    n_bit = int(header['NBITS'])
    n_samples = int(int(header['BLOCSIZE']) / (n_chan * n_pol * (n_bit / 8)))

    d = np.fromfile(self.file_obj, count=header['BLOCSIZE'], dtype='int8')

    # Read the following block as well
    header, data_idx = self.read_header()
    self.file_obj.seek(data_idx)
    d2 = np.fromfile(self.file_obj, count=header['BLOCSIZE'], dtype='int8')

    # Handle 2-bit and 4-bit data
    if n_bit != 8:
        d = unpack(d, n_bit)

    d = d.reshape((n_chan, n_samples, n_pol))   # Real, imag
    d2 = d2.reshape((n_chan, n_samples, n_pol))
    d = np.concatenate((d, d2), axis=1)

    # Reallocate the scratch buffers only when the concatenated shape changes
    if self._d_x.shape != d[..., 0:2].shape:
        self._d_x = np.ascontiguousarray(np.zeros(d[..., 0:2].shape, dtype='int8'))
        self._d_y = np.ascontiguousarray(np.zeros(d[..., 2:4].shape, dtype='int8'))

    self._d_x[:] = d[..., 0:2]
    self._d_y[:] = d[..., 2:4]

    return header, self._d_x, self._d_y
[ "Read", "the", "next", "block", "of", "data", "and", "its", "header" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L241-L279
[ "def", "read_next_data_block_int8_2x", "(", "self", ")", ":", "header", ",", "data_idx", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx", ")", "# Read data and reshape", "n_chan", "=", "int", "(", "header", "[", "'OBSNCHAN'", "]", ")", "n_pol", "=", "int", "(", "header", "[", "'NPOL'", "]", ")", "n_bit", "=", "int", "(", "header", "[", "'NBITS'", "]", ")", "n_samples", "=", "int", "(", "int", "(", "header", "[", "'BLOCSIZE'", "]", ")", "/", "(", "n_chan", "*", "n_pol", "*", "(", "n_bit", "/", "8", ")", ")", ")", "d", "=", "np", ".", "fromfile", "(", "self", ".", "file_obj", ",", "count", "=", "header", "[", "'BLOCSIZE'", "]", ",", "dtype", "=", "'int8'", ")", "header", ",", "data_idx", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx", ")", "d2", "=", "np", ".", "fromfile", "(", "self", ".", "file_obj", ",", "count", "=", "header", "[", "'BLOCSIZE'", "]", ",", "dtype", "=", "'int8'", ")", "# Handle 2-bit and 4-bit data", "if", "n_bit", "!=", "8", ":", "d", "=", "unpack", "(", "d", ",", "n_bit", ")", "d", "=", "d", ".", "reshape", "(", "(", "n_chan", ",", "n_samples", ",", "n_pol", ")", ")", "# Real, imag", "d2", "=", "d2", ".", "reshape", "(", "(", "n_chan", ",", "n_samples", ",", "n_pol", ")", ")", "d", "=", "np", ".", "concatenate", "(", "(", "d", ",", "d2", ")", ",", "axis", "=", "1", ")", "print", "(", "d", ".", "shape", ")", "if", "self", ".", "_d_x", ".", "shape", "!=", "(", "n_chan", ",", "n_samples", "*", "2", ",", "n_pol", ")", ":", "self", ".", "_d_x", "=", "np", ".", "ascontiguousarray", "(", "np", ".", "zeros", "(", "d", "[", "...", ",", "0", ":", "2", "]", ".", "shape", ",", "dtype", "=", "'int8'", ")", ")", "self", ".", "_d_y", "=", "np", ".", "ascontiguousarray", "(", "np", ".", "zeros", "(", "d", "[", "...", ",", "2", ":", "4", "]", ".", "shape", ",", "dtype", "=", "'int8'", ")", ")", "self", ".", "_d_x", "[", ":", "]", "=", "d", "[", "...", ",", "0", ":", "2", "]", "self", ".", "_d_y", "[", ":", "]", "=", "d", "[", "...", ",", "2", ":", "4", "]", "return", "header", ",", "self", ".", "_d_x", ",", "self", ".", "_d_y" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
partition: test
func_name: GuppiRaw.read_next_data_block
docstring: Read the next block of data and its header. Returns (header, data): header (dict) is a dictionary of header metadata; data (np.array) is a Numpy array of data, converted to complex64.
path: blimpy/guppi.py
code:
def read_next_data_block(self):
    """ Read the next block of data and its header

    Returns: (header, data)
        header (dict): dictionary of header metadata
        data (np.array): Numpy array of data, converted to complex64.
    """
    header, data_idx = self.read_header()
    self.file_obj.seek(data_idx)

    # Read data and reshape
    n_chan = int(header['OBSNCHAN'])
    n_pol = int(header['NPOL'])
    n_bit = int(header['NBITS'])
    n_samples = int(int(header['BLOCSIZE']) / (n_chan * n_pol * (n_bit / 8)))

    d = np.ascontiguousarray(np.fromfile(self.file_obj, count=header['BLOCSIZE'], dtype='int8'))

    # Handle 2-bit and 4-bit data
    if n_bit != 8:
        d = unpack(d, n_bit)

    dshape = self.read_next_data_block_shape()
    d = d.reshape(dshape)   # Real, imag

    # Reallocate the float32 scratch buffer only when the block shape changes
    if self._d.shape != d.shape:
        self._d = np.zeros(d.shape, dtype='float32')

    self._d[:] = d

    # Adjacent float32 (real, imag) pairs are reinterpreted as complex64
    return header, self._d[:].view('complex64')
[ "Read", "the", "next", "block", "of", "data", "and", "its", "header" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L281-L313
[ "def", "read_next_data_block", "(", "self", ")", ":", "header", ",", "data_idx", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx", ")", "# Read data and reshape", "n_chan", "=", "int", "(", "header", "[", "'OBSNCHAN'", "]", ")", "n_pol", "=", "int", "(", "header", "[", "'NPOL'", "]", ")", "n_bit", "=", "int", "(", "header", "[", "'NBITS'", "]", ")", "n_samples", "=", "int", "(", "int", "(", "header", "[", "'BLOCSIZE'", "]", ")", "/", "(", "n_chan", "*", "n_pol", "*", "(", "n_bit", "/", "8", ")", ")", ")", "d", "=", "np", ".", "ascontiguousarray", "(", "np", ".", "fromfile", "(", "self", ".", "file_obj", ",", "count", "=", "header", "[", "'BLOCSIZE'", "]", ",", "dtype", "=", "'int8'", ")", ")", "# Handle 2-bit and 4-bit data", "if", "n_bit", "!=", "8", ":", "d", "=", "unpack", "(", "d", ",", "n_bit", ")", "dshape", "=", "self", ".", "read_next_data_block_shape", "(", ")", "d", "=", "d", ".", "reshape", "(", "dshape", ")", "# Real, imag", "if", "self", ".", "_d", ".", "shape", "!=", "d", ".", "shape", ":", "self", ".", "_d", "=", "np", ".", "zeros", "(", "d", ".", "shape", ",", "dtype", "=", "'float32'", ")", "self", ".", "_d", "[", ":", "]", "=", "d", "return", "header", ",", "self", ".", "_d", "[", ":", "]", ".", "view", "(", "'complex64'", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
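In contrast to the int8 readers, this method hands back a complex64 view, so downstream math works directly on complex voltages. A sketch with a hypothetical file name:

from blimpy import GuppiRaw
import numpy as np

r = GuppiRaw('guppi_example.0000.raw')    # hypothetical file name
header, data = r.read_next_data_block()   # complex64 view of the float32 buffer
power = np.abs(data) ** 2                 # per-sample power from complex voltages
print(data.dtype, power.mean())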
partition: test
func_name: GuppiRaw.find_n_data_blocks
docstring: Seek through the file to find how many data blocks there are in the file. Returns: n_blocks (int): number of data blocks in the file.
path: blimpy/guppi.py
code:
def find_n_data_blocks(self):
    """ Seek through the file to find how many data blocks there are in the file

    Returns: n_blocks (int): number of data blocks in the file
    """
    self.file_obj.seek(0)
    header0, data_idx0 = self.read_header()
    self.file_obj.seek(data_idx0)

    block_size = int(header0['BLOCSIZE'])
    n_bits = int(header0['NBITS'])

    # Skip over the first data block, then count header/block pairs until EOF
    self.file_obj.seek(int(header0['BLOCSIZE']), 1)
    n_blocks = 1
    end_found = False
    while not end_found:
        try:
            header, data_idx = self.read_header()
            self.file_obj.seek(data_idx)
            self.file_obj.seek(header['BLOCSIZE'], 1)
            n_blocks += 1
        except EndOfFileError:
            end_found = True
            break

    # Rewind so subsequent reads start from the beginning of the file
    self.file_obj.seek(0)

    return n_blocks
[ "Seek", "through", "the", "file", "to", "find", "how", "many", "data", "blocks", "there", "are", "in", "the", "file" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L315-L341
[ "def", "find_n_data_blocks", "(", "self", ")", ":", "self", ".", "file_obj", ".", "seek", "(", "0", ")", "header0", ",", "data_idx0", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx0", ")", "block_size", "=", "int", "(", "header0", "[", "'BLOCSIZE'", "]", ")", "n_bits", "=", "int", "(", "header0", "[", "'NBITS'", "]", ")", "self", ".", "file_obj", ".", "seek", "(", "int", "(", "header0", "[", "'BLOCSIZE'", "]", ")", ",", "1", ")", "n_blocks", "=", "1", "end_found", "=", "False", "while", "not", "end_found", ":", "try", ":", "header", ",", "data_idx", "=", "self", ".", "read_header", "(", ")", "self", ".", "file_obj", ".", "seek", "(", "data_idx", ")", "self", ".", "file_obj", ".", "seek", "(", "header", "[", "'BLOCSIZE'", "]", ",", "1", ")", "n_blocks", "+=", "1", "except", "EndOfFileError", ":", "end_found", "=", "True", "break", "self", ".", "file_obj", ".", "seek", "(", "0", ")", "return", "n_blocks" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
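Because the method rewinds the file object before returning, it pairs naturally with the block readers for walking a whole file; a sketch, with a hypothetical file name:

from blimpy import GuppiRaw

r = GuppiRaw('guppi_example.0000.raw')    # hypothetical file name
n_blocks = r.find_n_data_blocks()
for i in range(n_blocks):
    header, data = r.read_next_data_block()
    print(i, data.shape)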
partition: test
func_name: GuppiRaw.print_stats
docstring: Compute some basic stats on the next block of data.
path: blimpy/guppi.py
code:
def print_stats(self):
    """ Compute some basic stats on the next block of data """
    header, data = self.read_next_data_block()

    # View the complex64 samples as interleaved float32 components
    data = data.view('float32')

    print("AVG: %2.3f" % data.mean())
    print("STD: %2.3f" % data.std())
    print("MAX: %2.3f" % data.max())
    print("MIN: %2.3f" % data.min())
[ "Compute", "some", "basic", "stats", "on", "the", "next", "block", "of", "data" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L347-L358
[ "def", "print_stats", "(", "self", ")", ":", "header", ",", "data", "=", "self", ".", "read_next_data_block", "(", ")", "data", "=", "data", ".", "view", "(", "'float32'", ")", "print", "(", "\"AVG: %2.3f\"", "%", "data", ".", "mean", "(", ")", ")", "print", "(", "\"STD: %2.3f\"", "%", "data", ".", "std", "(", ")", ")", "print", "(", "\"MAX: %2.3f\"", "%", "data", ".", "max", "(", ")", ")", "print", "(", "\"MIN: %2.3f\"", "%", "data", ".", "min", "(", ")", ")", "import", "pylab", "as", "plt" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
partition: test
func_name: GuppiRaw.plot_histogram
docstring: Plot a histogram of data values.
path: blimpy/guppi.py
code:
def plot_histogram(self, filename=None):
    """ Plot a histogram of data values """
    header, data = self.read_next_data_block()
    data = data.view('float32')

    plt.figure("Histogram")
    plt.hist(data.flatten(), 65, facecolor='#cc0000')
    if filename:
        plt.savefig(filename)
    plt.show()
[ "Plot", "a", "histogram", "of", "data", "values" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L360-L369
[ "def", "plot_histogram", "(", "self", ",", "filename", "=", "None", ")", ":", "header", ",", "data", "=", "self", ".", "read_next_data_block", "(", ")", "data", "=", "data", ".", "view", "(", "'float32'", ")", "plt", ".", "figure", "(", "\"Histogram\"", ")", "plt", ".", "hist", "(", "data", ".", "flatten", "(", ")", ",", "65", ",", "facecolor", "=", "'#cc0000'", ")", "if", "filename", ":", "plt", ".", "savefig", "(", "filename", ")", "plt", ".", "show", "(", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
partition: test
func_name: GuppiRaw.plot_spectrum
docstring: Do a (slow) numpy FFT and take power of data.
path: blimpy/guppi.py
code:
def plot_spectrum(self, filename=None, plot_db=True):
    """ Do a (slow) numpy FFT and take power of data """
    header, data = self.read_next_data_block()

    print("Computing FFT...")
    d_xx_fft = np.abs(np.fft.fft(data[..., 0]))
    d_xx_fft = d_xx_fft.flatten()

    # Rebin to max number of points (integer decimation factor)
    dec_fac_x = 1
    if d_xx_fft.shape[0] > MAX_PLT_POINTS:
        dec_fac_x = d_xx_fft.shape[0] // MAX_PLT_POINTS

    d_xx_fft = rebin(d_xx_fft, dec_fac_x)

    print("Plotting...")
    if plot_db:
        plt.plot(10 * np.log10(d_xx_fft))
        plt.ylabel("Power [dB]")
    else:
        plt.plot(d_xx_fft)
        plt.ylabel("Power")
    plt.xlabel("Channel")
    plt.title(self.filename)

    if filename:
        plt.savefig(filename)
    plt.show()
[ "Do", "a", "(", "slow", ")", "numpy", "FFT", "and", "take", "power", "of", "data" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L371-L397
[ "def", "plot_spectrum", "(", "self", ",", "filename", "=", "None", ",", "plot_db", "=", "True", ")", ":", "header", ",", "data", "=", "self", ".", "read_next_data_block", "(", ")", "print", "(", "\"Computing FFT...\"", ")", "d_xx_fft", "=", "np", ".", "abs", "(", "np", ".", "fft", ".", "fft", "(", "data", "[", "...", ",", "0", "]", ")", ")", "d_xx_fft", "=", "d_xx_fft", ".", "flatten", "(", ")", "# Rebin to max number of points", "dec_fac_x", "=", "1", "if", "d_xx_fft", ".", "shape", "[", "0", "]", ">", "MAX_PLT_POINTS", ":", "dec_fac_x", "=", "d_xx_fft", ".", "shape", "[", "0", "]", "/", "MAX_PLT_POINTS", "d_xx_fft", "=", "rebin", "(", "d_xx_fft", ",", "dec_fac_x", ")", "print", "(", "\"Plotting...\"", ")", "if", "plot_db", ":", "plt", ".", "plot", "(", "10", "*", "np", ".", "log10", "(", "d_xx_fft", ")", ")", "plt", ".", "ylabel", "(", "\"Power [dB]\"", ")", "else", ":", "plt", ".", "plot", "(", "d_xx_fft", ")", "plt", ".", "ylabel", "(", "\"Power\"", ")", "plt", ".", "xlabel", "(", "\"Channel\"", ")", "plt", ".", "title", "(", "self", ".", "filename", ")", "if", "filename", ":", "plt", ".", "savefig", "(", "filename", ")", "plt", ".", "show", "(", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
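The decimation step only fires when the spectrum exceeds MAX_PLT_POINTS. A self-contained sketch of the same rebin-by-averaging idea; the rebin below is a simplified stand-in for blimpy's utility, and the 65536-point cap is an assumed value:

import numpy as np

MAX_PLT_POINTS = 65536   # assumed cap on plotted points

def rebin(x, dec_fac):
    # Simplified stand-in: average every dec_fac consecutive points
    n = (x.shape[0] // dec_fac) * dec_fac
    return x[:n].reshape(-1, dec_fac).mean(axis=1)

spec = np.random.rand(2 ** 20)
if spec.shape[0] > MAX_PLT_POINTS:
    spec = rebin(spec, spec.shape[0] // MAX_PLT_POINTS)
print(spec.shape)   # reduced to at most ~MAX_PLT_POINTS points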
partition: test
func_name: GuppiRaw.generate_filterbank_header
docstring: Generate a blimpy header dictionary.
path: blimpy/guppi.py
code:
def generate_filterbank_header(self, nchans=1):
    """ Generate a blimpy header dictionary """
    gp_head = self.read_first_header()
    fb_head = {}

    telescope_str = gp_head.get("TELESCOP", "unknown")
    if telescope_str in ('GBT', 'GREENBANK'):
        fb_head["telescope_id"] = 6
    elif telescope_str in ('PKS', 'PARKES'):
        fb_head["telescope_id"] = 7
    else:
        fb_head["telescope_id"] = 0

    # Using .get() method allows us to fill in default values if not present
    fb_head["source_name"] = gp_head.get("SRC_NAME", "unknown")
    fb_head["az_start"] = gp_head.get("AZ", 0)
    fb_head["za_start"] = gp_head.get("ZA", 0)
    fb_head["src_raj"] = Angle(str(gp_head.get("RA", 0.0)) + "hr")
    fb_head["src_dej"] = Angle(str(gp_head.get("DEC", 0.0)) + "deg")

    fb_head["rawdatafile"] = self.filename

    # hardcoded
    fb_head["machine_id"] = 20
    fb_head["data_type"] = 1   # blio datatype
    fb_head["barycentric"] = 0
    fb_head["pulsarcentric"] = 0
    fb_head["nbits"] = 32

    # TODO - compute these values. Need to figure out the correct calcs
    fb_head["tstart"] = 0.0
    fb_head["tsamp"] = 1.0
    fb_head["fch1"] = 0.0
    fb_head["foff"] = 187.5 / nchans   # Needs to be updated based on output specs

    fb_head["nchans"] = nchans
    fb_head["nifs"] = 1
    fb_head["nbeams"] = 1

    return fb_head
[ "Generate", "a", "blimpy", "header", "dictionary" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/guppi.py#L399-L439
[ "def", "generate_filterbank_header", "(", "self", ",", "nchans", "=", "1", ",", ")", ":", "gp_head", "=", "self", ".", "read_first_header", "(", ")", "fb_head", "=", "{", "}", "telescope_str", "=", "gp_head", ".", "get", "(", "\"TELESCOP\"", ",", "\"unknown\"", ")", "if", "telescope_str", "in", "(", "'GBT'", ",", "'GREENBANK'", ")", ":", "fb_head", "[", "\"telescope_id\"", "]", "=", "6", "elif", "telescope_str", "in", "(", "'PKS'", ",", "'PARKES'", ")", ":", "fb_head", "[", "\"telescop_id\"", "]", "=", "7", "else", ":", "fb_head", "[", "\"telescop_id\"", "]", "=", "0", "# Using .get() method allows us to fill in default values if not present", "fb_head", "[", "\"source_name\"", "]", "=", "gp_head", ".", "get", "(", "\"SRC_NAME\"", ",", "\"unknown\"", ")", "fb_head", "[", "\"az_start\"", "]", "=", "gp_head", ".", "get", "(", "\"AZ\"", ",", "0", ")", "fb_head", "[", "\"za_start\"", "]", "=", "gp_head", ".", "get", "(", "\"ZA\"", ",", "0", ")", "fb_head", "[", "\"src_raj\"", "]", "=", "Angle", "(", "str", "(", "gp_head", ".", "get", "(", "\"RA\"", ",", "0.0", ")", ")", "+", "\"hr\"", ")", "fb_head", "[", "\"src_dej\"", "]", "=", "Angle", "(", "str", "(", "gp_head", ".", "get", "(", "\"DEC\"", ",", "0.0", ")", ")", "+", "\"deg\"", ")", "fb_head", "[", "\"rawdatafile\"", "]", "=", "self", ".", "filename", "# hardcoded", "fb_head", "[", "\"machine_id\"", "]", "=", "20", "fb_head", "[", "\"data_type\"", "]", "=", "1", "# blio datatype", "fb_head", "[", "\"barycentric\"", "]", "=", "0", "fb_head", "[", "\"pulsarcentric\"", "]", "=", "0", "fb_head", "[", "\"nbits\"", "]", "=", "32", "# TODO - compute these values. Need to figure out the correct calcs", "fb_head", "[", "\"tstart\"", "]", "=", "0.0", "fb_head", "[", "\"tsamp\"", "]", "=", "1.0", "fb_head", "[", "\"fch1\"", "]", "=", "0.0", "fb_head", "[", "\"foff\"", "]", "=", "187.5", "/", "nchans", "# Need to be updated based on output specs", "fb_head", "[", "\"nchans\"", "]", "=", "nchans", "fb_head", "[", "\"nifs\"", "]", "=", "1", "fb_head", "[", "\"nbeams\"", "]", "=", "1", "return", "fb_head" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
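A usage sketch; the channel count is arbitrary and the file name is hypothetical:

from blimpy import GuppiRaw

r = GuppiRaw('guppi_example.0000.raw')    # hypothetical file name
fb_head = r.generate_filterbank_header(nchans=1024)
print(fb_head['telescope_id'], fb_head['nchans'], fb_head['foff'])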
partition: test
func_name: find_header_size
docstring: Find the header size of a filterbank file.
path: blimpy/match_fils.py
code:
def find_header_size(filename):
    ''' Find the header size of a filterbank file '''
    # Open the datafile and read a region larger than the header
    with open(filename, 'rb') as filfile:
        round1 = filfile.read(1000)

    # The file is binary, so search for the bytes literal b'HEADER_END';
    # the header size is the offset just past that marker
    headersize = round1.find(b'HEADER_END') + len(b'HEADER_END')
    return headersize
[ "Script", "to", "find", "the", "header", "size", "of", "a", "filterbank", "file" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/match_fils.py#L33-L44
[ "def", "find_header_size", "(", "filename", ")", ":", "# open datafile", "filfile", "=", "open", "(", "filename", ",", "'rb'", ")", "# go to the start of the file", "filfile", ".", "seek", "(", "0", ")", "#read some region larger than the header.", "round1", "=", "filfile", ".", "read", "(", "1000", ")", "headersize", "=", "round1", ".", "find", "(", "'HEADER_END'", ")", "+", "len", "(", "'HEADER_END'", ")", "return", "headersize" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
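A quick self-contained check of the marker search on a synthetic header, assuming find_header_size above is in scope; the byte layout is simplified (real filterbank headers length-prefix their keywords), so this only exercises the offset arithmetic:

import os
import tempfile

fake = b'HEADER_STARTtelescope_id...HEADER_END' + b'\x00' * 100
with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(fake)
    name = f.name

print(find_header_size(name))   # offset just past b'HEADER_END'
os.remove(name)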
partition: test
func_name: cmd_tool
docstring: Command line tool to make an md5sum comparison of two .fil files.
path: blimpy/match_fils.py
code:
def cmd_tool(args=None):
    """ Command line tool to make an md5sum comparison of two .fil files. """
    if 'bl' in local_host:
        header_loc = '/usr/local/sigproc/bin/header'   # Current location of header command in GBT.
    else:
        raise IOError('Script only able to run in BL systems.')

    p = OptionParser()
    p.set_usage('matchfils <FIL_FILE1> <FIL_FILE2>')
    opts, args = p.parse_args(sys.argv[1:])

    file1 = args[0]
    file2 = args[1]

    # Create batch script
    make_batch_script()

    # First checksum
    headersize1 = find_header_size(file1)
    file_size1 = os.path.getsize(file1)

    # Strip header from file, and calculate the md5sum of the rest.
    command = ['./tail_sum.sh', file1, str(file_size1 - headersize1)]
    print('[matchfils] ' + ' '.join(command))

    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    check_sum1 = out.split()[0]

    print('[matchfils] Checksum is:', check_sum1)
    if err:
        raise RuntimeError('There is an error.')

    out, err = reset_outs()
    command = [header_loc, file1]
    print('[matchfils] Header information:')

    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    header1 = out
    print(header1)

    # Second checksum
    out, err = reset_outs()
    headersize2 = find_header_size(file2)
    file_size2 = os.path.getsize(file2)

    # Strip header from file, and calculate the md5sum of the rest.
    command = ['./tail_sum.sh', file2, str(file_size2 - headersize2)]
    print('[matchfils] ' + ' '.join(command))

    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    check_sum2 = out.split()[0]

    print('[matchfils] Checksum is:', check_sum2)
    if err:
        raise RuntimeError('There is an error.')

    out, err = reset_outs()
    command = [header_loc, file2]
    print('[matchfils] Header information:')

    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    header2 = out
    print(header2)

    # Check the checksums
    if check_sum1 != check_sum2:
        print('[matchfils] Booo! Checksum does not match between files.')
    else:
        print('[matchfils] Hooray! Checksum matches between files.')

    # Remove batch script
    os.remove('tail_sum.sh')
[ "Command", "line", "tool", "to", "make", "a", "md5sum", "comparison", "of", "two", ".", "fil", "files", "." ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/match_fils.py#L46-L142
[ "def", "cmd_tool", "(", "args", "=", "None", ")", ":", "if", "'bl'", "in", "local_host", ":", "header_loc", "=", "'/usr/local/sigproc/bin/header'", "#Current location of header command in GBT.", "else", ":", "raise", "IOError", "(", "'Script only able to run in BL systems.'", ")", "p", "=", "OptionParser", "(", ")", "p", ".", "set_usage", "(", "'matchfils <FIL_FILE1> <FIL_FILE2>'", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "file1", "=", "args", "[", "0", "]", "file2", "=", "args", "[", "1", "]", "#------------------------------------", "#Create batch script", "make_batch_script", "(", ")", "#------------------------------------", "#First checksum", "headersize1", "=", "find_header_size", "(", "file1", ")", "file_size1", "=", "os", ".", "path", ".", "getsize", "(", "file1", ")", "#Strip header from file, and calculate the md5sum of the rest.", "#command=['tail','-c',str(file_size1-headersize1),file1,'|','md5sum']", "command", "=", "[", "'./tail_sum.sh'", ",", "file1", ",", "str", "(", "file_size1", "-", "headersize1", ")", "]", "print", "(", "'[matchfils] '", "+", "' '", ".", "join", "(", "command", ")", ")", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "err", ")", "=", "proc", ".", "communicate", "(", ")", "check_sum1", "=", "out", ".", "split", "(", ")", "[", "0", "]", "print", "(", "'[matchfils] Checksum is:'", ",", "check_sum1", ")", "if", "err", ":", "raise", "Error", "(", "'There is an error.'", ")", "#---", "out", ",", "err", "=", "reset_outs", "(", ")", "command", "=", "[", "header_loc", ",", "file1", "]", "print", "(", "'[matchfils] Header information:'", ")", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "err", ")", "=", "proc", ".", "communicate", "(", ")", "header1", "=", "out", "print", "(", "header1", ")", "#------------------------------------", "#Second checksum", "out", ",", "err", "=", "reset_outs", "(", ")", "headersize2", "=", "find_header_size", "(", "file2", ")", "file_size2", "=", "os", ".", "path", ".", "getsize", "(", "file2", ")", "#Strip header from file, and calculate the md5sum of the rest.", "command", "=", "[", "'./tail_sum.sh'", ",", "file2", ",", "str", "(", "file_size2", "-", "headersize2", ")", "]", "print", "(", "'[matchfils] '", "+", "' '", ".", "join", "(", "command", ")", ")", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "err", ")", "=", "proc", ".", "communicate", "(", ")", "check_sum2", "=", "out", ".", "split", "(", ")", "[", "0", "]", "print", "(", "'[matchfils] Checksum is:'", ",", "check_sum2", ")", "if", "err", ":", "raise", "Error", "(", "'There is an error.'", ")", "#---", "out", ",", "err", "=", "reset_outs", "(", ")", "command", "=", "[", "header_loc", ",", "file2", "]", "print", "(", "'[matchfils] Header information:'", ")", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "err", ")", "=", "proc", ".", "communicate", "(", ")", "header2", "=", "out", "print", "(", "header2", ")", "#------------------------------------", "#check the checksums", "if", "check_sum1", "!=", 
"check_sum2", ":", "print", "(", "'[matchfils] Booo! Checksum does not match between files.'", ")", "else", ":", "print", "(", "'[matchfils] Hooray! Checksum matches between files.'", ")", "#------------------------------------", "#Remove batch script", "os", ".", "remove", "(", "'tail_sum.sh'", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
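The shell round-trip through tail_sum.sh can be reproduced in pure Python, which avoids creating and removing the batch script entirely; a hedged sketch of the same header-stripped md5 idea (function name is illustrative):

import hashlib

def md5_after_header(path, headersize, chunk=1 << 20):
    # md5 of a file's contents, skipping the first headersize bytes
    h = hashlib.md5()
    with open(path, 'rb') as f:
        f.seek(headersize)
        for block in iter(lambda: f.read(chunk), b''):
            h.update(block)
    return h.hexdigest()

# Usage (hypothetical paths): compare the data portions of two .fil files
# md5_after_header('a.fil', find_header_size('a.fil')) == md5_after_header('b.fil', find_header_size('b.fil'))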
partition: test
func_name: make_h5_file
docstring: Converts file to HDF5 (.h5) format. Default saves output in current dir.
path: blimpy/fil2h5.py
code:
def make_h5_file(filename, out_dir='./', new_filename=None, max_load=None):
    ''' Converts file to HDF5 (.h5) format. Default saves output in current dir. '''
    fil_file = Waterfall(filename, max_load=max_load)
    if not new_filename:
        # Keep the original basename, swapping the .fil extension for .h5
        new_filename = out_dir + filename.replace('.fil', '.h5').split('/')[-1]
    if '.h5' not in new_filename:
        new_filename = new_filename + '.h5'
    fil_file.write_to_hdf5(new_filename)
[ "Converts", "file", "to", "HDF5", "(", ".", "h5", ")", "format", ".", "Default", "saves", "output", "in", "current", "dir", "." ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/fil2h5.py#L37-L48
[ "def", "make_h5_file", "(", "filename", ",", "out_dir", "=", "'./'", ",", "new_filename", "=", "None", ",", "max_load", "=", "None", ")", ":", "fil_file", "=", "Waterfall", "(", "filename", ",", "max_load", "=", "max_load", ")", "if", "not", "new_filename", ":", "new_filename", "=", "out_dir", "+", "filename", ".", "replace", "(", "'.fil'", ",", "'.h5'", ")", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "if", "'.h5'", "not", "in", "new_filename", ":", "new_filename", "=", "new_filename", "+", "'.h5'", "fil_file", ".", "write_to_hdf5", "(", "new_filename", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
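A usage sketch, assuming the module is importable as blimpy.fil2h5; paths are hypothetical:

from blimpy.fil2h5 import make_h5_file

# Writes observation.h5 into the given directory (hypothetical paths)
make_h5_file('observation.fil', out_dir='/datax/')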
partition: test
func_name: cmd_tool
docstring: Command line tool for converting GUPPI raw files into HDF5 versions of GUPPI raw.
path: blimpy/gup2hdf.py
code:
def cmd_tool(args=None):
    """ Command line tool for converting guppi raw into HDF5 versions of guppi raw """
    from argparse import ArgumentParser

    if not HAS_BITSHUFFLE:
        print("Error: the bitshuffle library is required to run this script.")
        exit()

    parser = ArgumentParser(description="Command line utility for creating HDF5 Raw files.")
    parser.add_argument('filename', type=str, help='Name of filename to read')
    args = parser.parse_args()

    fileroot = args.filename.split('.0000.raw')[0]
    filelist = glob.glob(fileroot + '*.raw')
    filelist = sorted(filelist)

    # Read first file to determine the per-block data shape
    r = GuppiRaw(filelist[0])
    header, data = r.read_next_data_block()
    dshape = data.shape
    print(dshape)

    # Count blocks across all files
    n_blocks_total = 0
    for filename in filelist:
        print(filename)
        r = GuppiRaw(filename)
        n_blocks_total += r.n_blocks
    print(n_blocks_total)

    full_dshape = np.concatenate(((n_blocks_total,), dshape))

    # Create h5py file
    h5 = h5py.File(fileroot + '.h5', 'w')
    h5.attrs['CLASS'] = 'GUPPIRAW'
    block_size = 0   # This is chunk block size

    dset = h5.create_dataset('data',
                             shape=full_dshape,
                             #compression=bitshuffle.h5.H5FILTER,
                             #compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4),
                             dtype=data.dtype)

    h5_idx = 0
    for filename in filelist:
        print("\nReading %s header..." % filename)
        r = GuppiRaw(filename)

        for ii in range(0, r.n_blocks):
            t0 = time.time()
            print("Reading block %i of %i" % (h5_idx + 1, full_dshape[0]))
            header, data = r.read_next_data_block()
            t1 = time.time()

            t2 = time.time()
            print("Writing block %i of %i" % (h5_idx + 1, full_dshape[0]))
            dset[h5_idx, :] = data
            t3 = time.time()

            print("Read: %2.2fs, Write %2.2fs" % ((t1 - t0), (t3 - t2)))
            h5_idx += 1

    # Copy over header information as attributes
    for key, value in header.items():
        dset.attrs[key] = value

    h5.close()

    t1 = time.time()
    print("Conversion time: %2.2fs" % (t1 - t0))
[ "Command", "line", "tool", "for", "converting", "guppi", "raw", "into", "HDF5", "versions", "of", "guppi", "raw" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/gup2hdf.py#L16-L89
[ "def", "cmd_tool", "(", "args", "=", "None", ")", ":", "from", "argparse", "import", "ArgumentParser", "if", "not", "HAS_BITSHUFFLE", ":", "print", "(", "\"Error: the bitshuffle library is required to run this script.\"", ")", "exit", "(", ")", "parser", "=", "ArgumentParser", "(", "description", "=", "\"Command line utility for creating HDF5 Raw files.\"", ")", "parser", ".", "add_argument", "(", "'filename'", ",", "type", "=", "str", ",", "help", "=", "'Name of filename to read'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "fileroot", "=", "args", ".", "filename", ".", "split", "(", "'.0000.raw'", ")", "[", "0", "]", "filelist", "=", "glob", ".", "glob", "(", "fileroot", "+", "'*.raw'", ")", "filelist", "=", "sorted", "(", "filelist", ")", "# Read first file", "r", "=", "GuppiRaw", "(", "filelist", "[", "0", "]", ")", "header", ",", "data", "=", "r", ".", "read_next_data_block", "(", ")", "dshape", "=", "data", ".", "shape", "#r.read_next_data_block_shape()", "print", "(", "dshape", ")", "n_blocks_total", "=", "0", "for", "filename", "in", "filelist", ":", "print", "(", "filename", ")", "r", "=", "GuppiRaw", "(", "filename", ")", "n_blocks_total", "+=", "r", ".", "n_blocks", "print", "(", "n_blocks_total", ")", "full_dshape", "=", "np", ".", "concatenate", "(", "(", "(", "n_blocks_total", ",", ")", ",", "dshape", ")", ")", "# Create h5py file", "h5", "=", "h5py", ".", "File", "(", "fileroot", "+", "'.h5'", ",", "'w'", ")", "h5", ".", "attrs", "[", "'CLASS'", "]", "=", "'GUPPIRAW'", "block_size", "=", "0", "# This is chunk block size", "dset", "=", "h5", ".", "create_dataset", "(", "'data'", ",", "shape", "=", "full_dshape", ",", "#compression=bitshuffle.h5.H5FILTER,", "#compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4),", "dtype", "=", "data", ".", "dtype", ")", "h5_idx", "=", "0", "for", "filename", "in", "filelist", ":", "print", "(", "\"\\nReading %s header...\"", "%", "filename", ")", "r", "=", "GuppiRaw", "(", "filename", ")", "h5", "=", "h5py", ".", "File", "(", "filename", "+", "'.h5'", ",", "'w'", ")", "header", ",", "data", "=", "r", ".", "read_next_data_block", "(", ")", "for", "ii", "in", "range", "(", "0", ",", "r", ".", "n_blocks", ")", ":", "t0", "=", "time", ".", "time", "(", ")", "print", "(", "\"Reading block %i of %i\"", "%", "(", "h5_idx", "+", "1", ",", "full_dshape", "[", "0", "]", ")", ")", "header", ",", "data", "=", "r", ".", "read_next_data_block", "(", ")", "t1", "=", "time", ".", "time", "(", ")", "t2", "=", "time", ".", "time", "(", ")", "print", "(", "\"Writing block %i of %i\"", "%", "(", "h5_idx", "+", "1", ",", "full_dshape", "[", "0", "]", ")", ")", "dset", "[", "h5_idx", ",", ":", "]", "=", "data", "t3", "=", "time", ".", "time", "(", ")", "print", "(", "\"Read: %2.2fs, Write %2.2fs\"", "%", "(", "(", "t1", "-", "t0", ")", ",", "(", "t3", "-", "t2", ")", ")", ")", "h5_idx", "+=", "1", "# Copy over header information as attributes", "for", "key", ",", "value", "in", "header", ".", "items", "(", ")", ":", "dset", ".", "attrs", "[", "key", "]", "=", "value", "h5", ".", "close", "(", ")", "t1", "=", "time", ".", "time", "(", ")", "print", "(", "\"Conversion time: %2.2fs\"", "%", "(", "t1", "-", "t0", ")", ")" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
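The header-to-attributes copy at the end is a handy pattern on its own; a standalone sketch with synthetic values (file name and header values are illustrative):

import h5py
import numpy as np

header = {'OBSNCHAN': 64, 'NPOL': 4, 'NBITS': 8}   # illustrative header values

with h5py.File('example.h5', 'w') as h5:
    h5.attrs['CLASS'] = 'GUPPIRAW'
    dset = h5.create_dataset('data', data=np.zeros((2, 64, 16, 2), dtype='float32'))
    # Copy over header information as attributes
    for key, value in header.items():
        dset.attrs[key] = value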
partition: test
func_name: foldcal
docstring:
    Returns time-averaged spectra of the ON and OFF measurements in a
    calibrator measurement with flickering noise diode

    Parameters
    ----------
    data : 2D Array object (float)
        2D dynamic spectrum for data (any Stokes parameter) with flickering noise diode.
    tsamp : float
        Sampling time of data in seconds
    diode_p : float
        Period of the flickering noise diode in seconds
    numsamps : int
        Number of samples over which to average noise diode ON and OFF
    switch : boolean
        Use switch=True if the noise diode "skips" turning from OFF to ON once or vice versa
    inds : boolean
        Use inds=True to also return the indexes of the time series where the ND is ON and OFF
path: blimpy/calib_utils/fluxcal.py
code:
def foldcal(data, tsamp, diode_p=0.04, numsamps=1000, switch=False, inds=False):
    '''
    Returns time-averaged spectra of the ON and OFF measurements in a
    calibrator measurement with flickering noise diode

    Parameters
    ----------
    data : 2D Array object (float)
        2D dynamic spectrum for data (any Stokes parameter) with flickering noise diode.
    tsamp : float
        Sampling time of data in seconds
    diode_p : float
        Period of the flickering noise diode in seconds
    numsamps : int
        Number of samples over which to average noise diode ON and OFF
    switch : boolean
        Use switch=True if the noise diode "skips" turning from OFF to ON once or vice versa
    inds : boolean
        Use inds=True to also return the indexes of the time series where the ND is ON and OFF
    '''
    halfper = diode_p / 2.0
    foldt = halfper / tsamp   # number of time samples per diode switch
    onesec = 1 / tsamp        # number of time samples in the first second

    # Find diode switches in units of time samples and round down to the nearest int
    ints = np.arange(0, numsamps)
    t_switch = (onesec + ints * foldt)
    t_switch = t_switch.astype('int')

    # Integer division keeps the reshape dimensions integral under Python 3
    ONints = np.array(np.reshape(t_switch[:], (numsamps // 2, 2)))
    ONints[:, 0] = ONints[:, 0] + 1    # Find index ranges of ON time samples
    OFFints = np.array(np.reshape(t_switch[1:-1], (numsamps // 2 - 1, 2)))
    OFFints[:, 0] = OFFints[:, 0] + 1  # Find index ranges of OFF time samples

    av_ON = []
    av_OFF = []

    # Average ON and OFF spectra separately with respect to time
    for i in ONints:
        if i[1] != i[0]:
            av_ON.append(np.sum(data[i[0]:i[1], :, :], axis=0) / (i[1] - i[0]))
    for i in OFFints:
        if i[1] != i[0]:
            av_OFF.append(np.sum(data[i[0]:i[1], :, :], axis=0) / (i[1] - i[0]))

    # If switch=True, flip the return statement since ON is actually OFF
    if switch == False:
        if inds == False:
            return np.squeeze(np.mean(av_ON, axis=0)), np.squeeze(np.mean(av_OFF, axis=0))
        else:
            return np.squeeze(np.mean(av_ON, axis=0)), np.squeeze(np.mean(av_OFF, axis=0)), ONints, OFFints
    if switch == True:
        if inds == False:
            return np.squeeze(np.mean(av_OFF, axis=0)), np.squeeze(np.mean(av_ON, axis=0))
        else:
            return np.squeeze(np.mean(av_OFF, axis=0)), np.squeeze(np.mean(av_ON, axis=0)), OFFints, ONints
[ "Returns", "time", "-", "averaged", "spectra", "of", "the", "ON", "and", "OFF", "measurements", "in", "a", "calibrator", "measurement", "with", "flickering", "noise", "diode" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L6-L66
[ "def", "foldcal", "(", "data", ",", "tsamp", ",", "diode_p", "=", "0.04", ",", "numsamps", "=", "1000", ",", "switch", "=", "False", ",", "inds", "=", "False", ")", ":", "halfper", "=", "diode_p", "/", "2.0", "foldt", "=", "halfper", "/", "tsamp", "#number of time samples per diode switch", "onesec", "=", "1", "/", "tsamp", "#number of time samples in the first second", "#Find diode switches in units of time samples and round down to the nearest int", "ints", "=", "np", ".", "arange", "(", "0", ",", "numsamps", ")", "t_switch", "=", "(", "onesec", "+", "ints", "*", "foldt", ")", "t_switch", "=", "t_switch", ".", "astype", "(", "'int'", ")", "ONints", "=", "np", ".", "array", "(", "np", ".", "reshape", "(", "t_switch", "[", ":", "]", ",", "(", "numsamps", "/", "2", ",", "2", ")", ")", ")", "ONints", "[", ":", ",", "0", "]", "=", "ONints", "[", ":", ",", "0", "]", "+", "1", "#Find index ranges of ON time samples", "OFFints", "=", "np", ".", "array", "(", "np", ".", "reshape", "(", "t_switch", "[", "1", ":", "-", "1", "]", ",", "(", "numsamps", "/", "2", "-", "1", ",", "2", ")", ")", ")", "OFFints", "[", ":", ",", "0", "]", "=", "OFFints", "[", ":", ",", "0", "]", "+", "1", "#Find index ranges of OFF time samples", "av_ON", "=", "[", "]", "av_OFF", "=", "[", "]", "#Average ON and OFF spectra separately with respect to time", "for", "i", "in", "ONints", ":", "if", "i", "[", "1", "]", "!=", "i", "[", "0", "]", ":", "av_ON", ".", "append", "(", "np", ".", "sum", "(", "data", "[", "i", "[", "0", "]", ":", "i", "[", "1", "]", ",", ":", ",", ":", "]", ",", "axis", "=", "0", ")", "/", "(", "i", "[", "1", "]", "-", "i", "[", "0", "]", ")", ")", "for", "i", "in", "OFFints", ":", "if", "i", "[", "1", "]", "!=", "i", "[", "0", "]", ":", "av_OFF", ".", "append", "(", "np", ".", "sum", "(", "data", "[", "i", "[", "0", "]", ":", "i", "[", "1", "]", ",", ":", ",", ":", "]", ",", "axis", "=", "0", ")", "/", "(", "i", "[", "1", "]", "-", "i", "[", "0", "]", ")", ")", "#If switch=True, flip the return statement since ON is actually OFF", "if", "switch", "==", "False", ":", "if", "inds", "==", "False", ":", "return", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_ON", ",", "axis", "=", "0", ")", ")", ",", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_OFF", ",", "axis", "=", "0", ")", ")", "else", ":", "return", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_ON", ",", "axis", "=", "0", ")", ")", ",", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_OFF", ",", "axis", "=", "0", ")", ")", ",", "ONints", ",", "OFFints", "if", "switch", "==", "True", ":", "if", "inds", "==", "False", ":", "return", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_OFF", ",", "axis", "=", "0", ")", ")", ",", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_ON", ",", "axis", "=", "0", ")", ")", "else", ":", "return", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_OFF", ",", "axis", "=", "0", ")", ")", ",", "np", ".", "squeeze", "(", "np", ".", "mean", "(", "av_ON", ",", "axis", "=", "0", ")", ")", ",", "OFFints", ",", "ONints" ]
sha: b8822d3e3e911944370d84371a91fa0c29e9772e
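A worked example of the switch-index arithmetic, assuming tsamp = 1 ms with the default 0.04 s diode period: each half-period spans foldt = 0.02/0.001 = 20 samples, and the first switch lands one second (1000 samples) into the data:

import numpy as np

tsamp, diode_p, numsamps = 0.001, 0.04, 8   # illustrative values
foldt = (diode_p / 2.0) / tsamp             # 20 samples per diode half-period
onesec = 1 / tsamp                          # 1000 samples in the first second

t_switch = (onesec + np.arange(numsamps) * foldt).astype('int')
print(t_switch)                              # [1000 1020 1040 1060 1080 1100 1120 1140]
print(t_switch.reshape(numsamps // 2, 2))    # paired into ON index ranges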
partition: test
func_name: integrate_chans
docstring:
    Integrates over each coarse channel of a given spectrum. Important for
    calibrating data with frequency/time resolution different from noise diode data

    Parameters
    ----------
    spec : 1D Array (float)
        Spectrum (any Stokes parameter) to be integrated
    freqs : 1D Array (float)
        Frequency values for each bin of the spectrum
    chan_per_coarse : int
        Number of frequency bins per coarse channel
path: blimpy/calib_utils/fluxcal.py
code:
def integrate_chans(spec, freqs, chan_per_coarse):
    '''
    Integrates over each coarse channel of a given spectrum. Important for
    calibrating data with frequency/time resolution different from noise diode data

    Parameters
    ----------
    spec : 1D Array (float)
        Spectrum (any Stokes parameter) to be integrated
    freqs : 1D Array (float)
        Frequency values for each bin of the spectrum
    chan_per_coarse : int
        Number of frequency bins per coarse channel
    '''
    # Calculate total number of coarse channels (integer division for the reshape)
    num_coarse = spec.size // chan_per_coarse

    # Rearrange spectrum by coarse channel
    spec_shaped = np.array(np.reshape(spec, (num_coarse, chan_per_coarse)))
    freqs_shaped = np.array(np.reshape(freqs, (num_coarse, chan_per_coarse)))

    # Average over each coarse channel, excluding the edge bins
    return np.mean(spec_shaped[:, 1:-1], axis=1)
[ "Integrates", "over", "each", "core", "channel", "of", "a", "given", "spectrum", ".", "Important", "for", "calibrating", "data", "with", "frequency", "/", "time", "resolution", "different", "from", "noise", "diode", "data" ]
repo: UCBerkeleySETI/blimpy
language: python
url: https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L68-L90
[ "def", "integrate_chans", "(", "spec", ",", "freqs", ",", "chan_per_coarse", ")", ":", "num_coarse", "=", "spec", ".", "size", "/", "chan_per_coarse", "#Calculate total number of coarse channels", "#Rearrange spectrum by coarse channel", "spec_shaped", "=", "np", ".", "array", "(", "np", ".", "reshape", "(", "spec", ",", "(", "num_coarse", ",", "chan_per_coarse", ")", ")", ")", "freqs_shaped", "=", "np", ".", "array", "(", "np", ".", "reshape", "(", "freqs", ",", "(", "num_coarse", ",", "chan_per_coarse", ")", ")", ")", "#Average over coarse channels", "return", "np", ".", "mean", "(", "spec_shaped", "[", ":", ",", "1", ":", "-", "1", "]", ",", "axis", "=", "1", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
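A minimal usage sketch for integrate_chans with synthetic inputs (the band, bin counts, and values below are invented for illustration, not taken from the source):

import numpy as np
spec = np.random.rand(1024)                   # synthetic spectrum with 1024 fine channels
freqs = np.linspace(1100.0, 1900.0, 1024)     # hypothetical band in MHz
coarse_spec = integrate_chans(spec, freqs, chan_per_coarse=64)
print(coarse_spec.shape)                      # (16,) -- one averaged value per coarse channel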
integrate_calib
Folds Stokes I noise diode data and integrates along coarse channels

Parameters
----------
name : str
    Path to noise diode filterbank file
chan_per_coarse : int
    Number of frequency bins per coarse channel
fullstokes : boolean
    Use fullstokes=True if data is in IQUV format or just Stokes I,
    use fullstokes=False if it is in cross_pols format
blimpy/calib_utils/fluxcal.py
def integrate_calib(name, chan_per_coarse, fullstokes=False, **kwargs):
    '''
    Folds Stokes I noise diode data and integrates along coarse channels
    Parameters
    ----------
    name : str
        Path to noise diode filterbank file
    chan_per_coarse : int
        Number of frequency bins per coarse channel
    fullstokes : boolean
        Use fullstokes=True if data is in IQUV format or just Stokes I,
        use fullstokes=False if it is in cross_pols format
    '''
    # Load data
    obs = Waterfall(name, max_load=150)
    data = obs.data

    # If the data has cross_pols format calculate Stokes I
    if fullstokes == False and data.shape[1] > 1:
        data = data[:, 0, :] + data[:, 1, :]
        data = np.expand_dims(data, axis=1)
    # If the data has IQUV format get Stokes I
    if fullstokes == True:
        data = data[:, 0, :]
        data = np.expand_dims(data, axis=1)

    tsamp = obs.header['tsamp']

    # Calculate ON and OFF values
    OFF, ON = foldcal(data, tsamp, **kwargs)

    freqs = obs.populate_freqs()

    # Find ON and OFF spectra by coarse channel
    ON_int = integrate_chans(ON, freqs, chan_per_coarse)
    OFF_int = integrate_chans(OFF, freqs, chan_per_coarse)

    # If "ON" is actually "OFF" switch them
    if np.sum(ON_int) < np.sum(OFF_int):
        temp = ON_int
        ON_int = OFF_int
        OFF_int = temp

    # Return coarse channel spectrum of OFF and ON
    return OFF_int, ON_int
[ "Folds", "Stokes", "I", "noise", "diode", "data", "and", "integrates", "along", "coarse", "channels" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L92-L137
[ "def", "integrate_calib", "(", "name", ",", "chan_per_coarse", ",", "fullstokes", "=", "False", ",", "*", "*", "kwargs", ")", ":", "#Load data", "obs", "=", "Waterfall", "(", "name", ",", "max_load", "=", "150", ")", "data", "=", "obs", ".", "data", "#If the data has cross_pols format calculate Stokes I", "if", "fullstokes", "==", "False", "and", "data", ".", "shape", "[", "1", "]", ">", "1", ":", "data", "=", "data", "[", ":", ",", "0", ",", ":", "]", "+", "data", "[", ":", ",", "1", ",", ":", "]", "data", "=", "np", ".", "expand_dims", "(", "data", ",", "axis", "=", "1", ")", "#If the data has IQUV format get Stokes I", "if", "fullstokes", "==", "True", ":", "data", "=", "data", "[", ":", ",", "0", ",", ":", "]", "data", "=", "np", ".", "expand_dims", "(", "data", ",", "axis", "=", "1", ")", "tsamp", "=", "obs", ".", "header", "[", "'tsamp'", "]", "#Calculate ON and OFF values", "OFF", ",", "ON", "=", "foldcal", "(", "data", ",", "tsamp", ",", "*", "*", "kwargs", ")", "freqs", "=", "obs", ".", "populate_freqs", "(", ")", "#Find ON and OFF spectra by coarse channel", "ON_int", "=", "integrate_chans", "(", "ON", ",", "freqs", ",", "chan_per_coarse", ")", "OFF_int", "=", "integrate_chans", "(", "OFF", ",", "freqs", ",", "chan_per_coarse", ")", "#If \"ON\" is actually \"OFF\" switch them", "if", "np", ".", "sum", "(", "ON_int", ")", "<", "np", ".", "sum", "(", "OFF_int", ")", ":", "temp", "=", "ON_int", "ON_int", "=", "OFF_int", "OFF_int", "=", "temp", "#Return coarse channel spectrum of OFF and ON", "return", "OFF_int", ",", "ON_int" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
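A hedged sketch of calling integrate_calib; the file name is hypothetical, and extra keyword arguments (here diode_p) are forwarded to foldcal():

OFF, ON = integrate_calib('diode_on_source.fil', chan_per_coarse=64,
                          fullstokes=False, diode_p=0.04)
# OFF and ON are per-coarse-channel spectra with the noise diode low and high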
get_calfluxes
Given properties of the calibrator source, calculate fluxes of the source in a particular frequency range

Parameters
----------
calflux : float
    Known flux of calibrator source at a particular frequency
calfreq : float
    Frequency where calibrator source has flux calflux (see above)
spec_in : float
    Known power-law spectral index of calibrator source.
    Use convention flux(frequency) = constant * frequency^(spec_in)
centerfreqs : 1D Array (float)
    Central frequency values of each coarse channel
oneflux : boolean
    Use oneflux to choose between calculating the flux for each coarse channel (False)
    or using one value for the entire frequency range (True)
blimpy/calib_utils/fluxcal.py
def get_calfluxes(calflux, calfreq, spec_in, centerfreqs, oneflux):
    '''
    Given properties of the calibrator source, calculate fluxes of the source
    in a particular frequency range
    Parameters
    ----------
    calflux : float
        Known flux of calibrator source at a particular frequency
    calfreq : float
        Frequency where calibrator source has flux calflux (see above)
    spec_in : float
        Known power-law spectral index of calibrator source.
        Use convention flux(frequency) = constant * frequency^(spec_in)
    centerfreqs : 1D Array (float)
        Central frequency values of each coarse channel
    oneflux : boolean
        Use oneflux to choose between calculating the flux for each coarse
        channel (False) or using one value for the entire frequency range (True)
    '''
    const = calflux / np.power(calfreq, spec_in)
    if oneflux == False:
        return const * np.power(centerfreqs, spec_in)
    else:
        return const * np.power(np.mean(centerfreqs), spec_in)
[ "Given", "properties", "of", "the", "calibrator", "source", "calculate", "fluxes", "of", "the", "source", "in", "a", "particular", "frequency", "range" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L139-L163
[ "def", "get_calfluxes", "(", "calflux", ",", "calfreq", ",", "spec_in", ",", "centerfreqs", ",", "oneflux", ")", ":", "const", "=", "calflux", "/", "np", ".", "power", "(", "calfreq", ",", "spec_in", ")", "if", "oneflux", "==", "False", ":", "return", "const", "*", "np", ".", "power", "(", "centerfreqs", ",", "spec_in", ")", "else", ":", "return", "const", "*", "np", ".", "power", "(", "np", ".", "mean", "(", "centerfreqs", ")", ",", "spec_in", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
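A short worked example of the power law implemented by get_calfluxes; the flux, reference frequency, and spectral index are placeholder values:

import numpy as np
centerfreqs = np.array([1200.0, 1400.0, 1600.0])   # hypothetical coarse-channel centres in MHz
fluxes = get_calfluxes(calflux=15.0, calfreq=1400.0, spec_in=-0.7,
                       centerfreqs=centerfreqs, oneflux=False)
# each value equals 15.0 * (f / 1400.0)**-0.7, i.e. exactly 15 Jy at the reference frequency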
get_centerfreqs
Returns central frequency of each coarse channel

Parameters
----------
freqs : 1D Array (float)
    Frequency values for each bin of the spectrum
chan_per_coarse : int
    Number of frequency bins per coarse channel
blimpy/calib_utils/fluxcal.py
def get_centerfreqs(freqs, chan_per_coarse):
    '''
    Returns central frequency of each coarse channel
    Parameters
    ----------
    freqs : 1D Array (float)
        Frequency values for each bin of the spectrum
    chan_per_coarse : int
        Number of frequency bins per coarse channel
    '''
    # Integer division so the reshape receives integer dimensions under Python 3
    num_coarse = freqs.size // chan_per_coarse
    freqs = np.reshape(freqs, (num_coarse, chan_per_coarse))
    return np.mean(freqs, axis=1)
[ "Returns", "central", "frequency", "of", "each", "coarse", "channel" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L165-L179
[ "def", "get_centerfreqs", "(", "freqs", ",", "chan_per_coarse", ")", ":", "num_coarse", "=", "freqs", ".", "size", "/", "chan_per_coarse", "freqs", "=", "np", ".", "reshape", "(", "freqs", ",", "(", "num_coarse", ",", "chan_per_coarse", ")", ")", "return", "np", ".", "mean", "(", "freqs", ",", "axis", "=", "1", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
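For example, get_centerfreqs reduces the frequency axis the same way integrate_chans reduces the spectrum (values hypothetical):

import numpy as np
freqs = np.linspace(1100.0, 1900.0, 1024)
centers = get_centerfreqs(freqs, 64)   # 16 centre frequencies, one per coarse channel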
f_ratios
Calculate f_ON, and f_OFF as defined in van Straten et al. 2012 equations 2 and 3

Parameters
----------
calON_obs : str
    Path to filterbank file (any format) for observation ON the calibrator source
calOFF_obs : str
    Path to filterbank file (any format) for observation OFF the calibrator source
chan_per_coarse : int
    Number of frequency bins per coarse channel
blimpy/calib_utils/fluxcal.py
def f_ratios(calON_obs, calOFF_obs, chan_per_coarse, **kwargs):
    '''
    Calculate f_ON, and f_OFF as defined in van Straten et al. 2012
    equations 2 and 3
    Parameters
    ----------
    calON_obs : str
        Path to filterbank file (any format) for observation ON the calibrator source
    calOFF_obs : str
        Path to filterbank file (any format) for observation OFF the calibrator source
    chan_per_coarse : int
        Number of frequency bins per coarse channel
    '''
    # Calculate noise diode ON and noise diode OFF spectra (H and L) for both observations
    L_ON, H_ON = integrate_calib(calON_obs, chan_per_coarse, **kwargs)
    L_OFF, H_OFF = integrate_calib(calOFF_obs, chan_per_coarse, **kwargs)

    f_ON = H_ON / L_ON - 1
    f_OFF = H_OFF / L_OFF - 1

    return f_ON, f_OFF
[ "Calculate", "f_ON", "and", "f_OFF", "as", "defined", "in", "van", "Straten", "et", "al", ".", "2012", "equations", "2", "and", "3" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L181-L199
[ "def", "f_ratios", "(", "calON_obs", ",", "calOFF_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", ":", "#Calculate noise diode ON and noise diode OFF spectra (H and L) for both observations", "L_ON", ",", "H_ON", "=", "integrate_calib", "(", "calON_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", "L_OFF", ",", "H_OFF", "=", "integrate_calib", "(", "calOFF_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", "f_ON", "=", "H_ON", "/", "L_ON", "-", "1", "f_OFF", "=", "H_OFF", "/", "L_OFF", "-", "1", "return", "f_ON", ",", "f_OFF" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
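In the notation of van Straten et al. (2012) used here, H and L are the diode-high and diode-low spectra, so each ratio is f = H/L - 1. A sketch with invented file names:

f_ON, f_OFF = f_ratios('diode_on_calibrator.fil', 'diode_off_calibrator.fil',
                       chan_per_coarse=64)
# one f value per coarse channel for each pointing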
diode_spec
Calculate the coarse channel spectrum and system temperature of the noise diode in Jy given two noise diode measurements ON and OFF the calibrator source with the same frequency and time resolution

Parameters
----------
calON_obs : str
    (see f_ratios() above)
calOFF_obs : str
    (see f_ratios() above)
calflux : float
    Known flux of calibrator source at a particular frequency
calfreq : float
    Frequency where calibrator source has flux calflux (see above)
spec_in : float
    Known power-law spectral index of calibrator source.
    Use convention flux(frequency) = constant * frequency^(spec_in)
average : boolean
    Use average=True to return noise diode and Tsys spectra averaged over frequencies
blimpy/calib_utils/fluxcal.py
def diode_spec(calON_obs, calOFF_obs, calflux, calfreq, spec_in,
               average=True, oneflux=False, **kwargs):
    '''
    Calculate the coarse channel spectrum and system temperature of the
    noise diode in Jy given two noise diode measurements ON and OFF the
    calibrator source with the same frequency and time resolution
    Parameters
    ----------
    calON_obs : str
        (see f_ratios() above)
    calOFF_obs : str
        (see f_ratios() above)
    calflux : float
        Known flux of calibrator source at a particular frequency
    calfreq : float
        Frequency where calibrator source has flux calflux (see above)
    spec_in : float
        Known power-law spectral index of calibrator source.
        Use convention flux(frequency) = constant * frequency^(spec_in)
    average : boolean
        Use average=True to return noise diode and Tsys spectra averaged over frequencies
    '''
    # Load frequencies and calculate number of channels per coarse channel
    obs = Waterfall(calON_obs, max_load=150)
    freqs = obs.populate_freqs()
    ncoarse = obs.calc_n_coarse_chan()
    nchans = obs.header['nchans']
    chan_per_coarse = nchans // ncoarse   # integer division for Python 3

    f_ON, f_OFF = f_ratios(calON_obs, calOFF_obs, chan_per_coarse, **kwargs)

    # Obtain spectrum of the calibrator source for the given frequency range
    centerfreqs = get_centerfreqs(freqs, chan_per_coarse)
    calfluxes = get_calfluxes(calflux, calfreq, spec_in, centerfreqs, oneflux)

    # C_o and Tsys as defined in van Straten et al. 2012
    C_o = calfluxes / (1 / f_ON - 1 / f_OFF)
    Tsys = C_o / f_OFF

    # Return coarse channel diode spectrum
    if average == True:
        return np.mean(C_o), np.mean(Tsys)
    else:
        return C_o, Tsys
[ "Calculate", "the", "coarse", "channel", "spectrum", "and", "system", "temperature", "of", "the", "noise", "diode", "in", "Jy", "given", "two", "noise", "diode", "measurements", "ON", "and", "OFF", "the", "calibrator", "source", "with", "the", "same", "frequency", "and", "time", "resolution" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L202-L243
[ "def", "diode_spec", "(", "calON_obs", ",", "calOFF_obs", ",", "calflux", ",", "calfreq", ",", "spec_in", ",", "average", "=", "True", ",", "oneflux", "=", "False", ",", "*", "*", "kwargs", ")", ":", "#Load frequencies and calculate number of channels per coarse channel", "obs", "=", "Waterfall", "(", "calON_obs", ",", "max_load", "=", "150", ")", "freqs", "=", "obs", ".", "populate_freqs", "(", ")", "ncoarse", "=", "obs", ".", "calc_n_coarse_chan", "(", ")", "nchans", "=", "obs", ".", "header", "[", "'nchans'", "]", "chan_per_coarse", "=", "nchans", "/", "ncoarse", "f_ON", ",", "f_OFF", "=", "f_ratios", "(", "calON_obs", ",", "calOFF_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", "#Obtain spectrum of the calibrator source for the given frequency range", "centerfreqs", "=", "get_centerfreqs", "(", "freqs", ",", "chan_per_coarse", ")", "calfluxes", "=", "get_calfluxes", "(", "calflux", ",", "calfreq", ",", "spec_in", ",", "centerfreqs", ",", "oneflux", ")", "#C_o and Tsys as defined in van Straten et al. 2012", "C_o", "=", "calfluxes", "/", "(", "1", "/", "f_ON", "-", "1", "/", "f_OFF", ")", "Tsys", "=", "C_o", "/", "f_OFF", "#return coarse channel diode spectrum", "if", "average", "==", "True", ":", "return", "np", ".", "mean", "(", "C_o", ")", ",", "np", ".", "mean", "(", "Tsys", ")", "else", ":", "return", "C_o", ",", "Tsys" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
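A hedged end-to-end sketch of diode_spec; the calibrator numbers are placeholders, not values recommended by the source:

C_o, Tsys = diode_spec('diode_on_calibrator.fil', 'diode_off_calibrator.fil',
                       calflux=15.0, calfreq=1400.0, spec_in=-0.7,
                       average=False)
# per-coarse-channel noise diode spectrum and system temperature, both in Jy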
get_Tsys
Returns frequency dependent system temperature given observations on and off a calibrator source

Parameters
----------
(See diode_spec())
blimpy/calib_utils/fluxcal.py
def get_Tsys(calON_obs, calOFF_obs, calflux, calfreq, spec_in, oneflux=False, **kwargs):
    '''
    Returns frequency dependent system temperature given observations
    on and off a calibrator source
    Parameters
    ----------
    (See diode_spec())
    '''
    # Forward the caller's oneflux; the original hard-coded oneflux=False here,
    # silently ignoring the parameter
    return diode_spec(calON_obs, calOFF_obs, calflux, calfreq, spec_in,
                      average=False, oneflux=oneflux, **kwargs)[1]
[ "Returns", "frequency", "dependent", "system", "temperature", "given", "observations", "on", "and", "off", "a", "calibrator", "source" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L245-L253
[ "def", "get_Tsys", "(", "calON_obs", ",", "calOFF_obs", ",", "calflux", ",", "calfreq", ",", "spec_in", ",", "oneflux", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "diode_spec", "(", "calON_obs", ",", "calOFF_obs", ",", "calflux", ",", "calfreq", ",", "spec_in", ",", "average", "=", "False", ",", "oneflux", "=", "False", ",", "*", "*", "kwargs", ")", "[", "1", "]" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
calibrate_fluxes
Produce calibrated Stokes I for an observation given a noise diode measurement on the source and a diode spectrum with the same number of coarse channels

Parameters
----------
main_obs_name : str
    Path to filterbank file containing final data to be calibrated
dio_name : str
    Path to filterbank file for observation on the target source with flickering noise diode
dspec : 1D Array (float) or float
    Coarse channel spectrum (or average) of the noise diode in Jy (obtained from diode_spec())
Tsys : 1D Array (float) or float
    Coarse channel spectrum (or average) of the system temperature in Jy
fullstokes : boolean
    Use fullstokes=True if data is in IQUV format or just Stokes I,
    use fullstokes=False if it is in cross_pols format
blimpy/calib_utils/fluxcal.py
def calibrate_fluxes(main_obs_name, dio_name, dspec, Tsys, fullstokes=False, **kwargs):
    '''
    Produce calibrated Stokes I for an observation given a noise diode
    measurement on the source and a diode spectrum with the same number
    of coarse channels
    Parameters
    ----------
    main_obs_name : str
        Path to filterbank file containing final data to be calibrated
    dio_name : str
        Path to filterbank file for observation on the target source with flickering noise diode
    dspec : 1D Array (float) or float
        Coarse channel spectrum (or average) of the noise diode in Jy (obtained from diode_spec())
    Tsys : 1D Array (float) or float
        Coarse channel spectrum (or average) of the system temperature in Jy
    fullstokes : boolean
        Use fullstokes=True if data is in IQUV format or just Stokes I,
        use fullstokes=False if it is in cross_pols format
    '''
    # Find folded spectra of the target source with the noise diode ON and OFF
    main_obs = Waterfall(main_obs_name, max_load=150)
    ncoarse = main_obs.calc_n_coarse_chan()
    dio_obs = Waterfall(dio_name, max_load=150)
    dio_chan_per_coarse = dio_obs.header['nchans'] // ncoarse   # integer division for Python 3
    dOFF, dON = integrate_calib(dio_name, dio_chan_per_coarse, fullstokes, **kwargs)

    # Find Jy/count for each coarse channel using the diode spectrum
    main_dat = main_obs.data
    scale_facs = dspec / (dON - dOFF)
    print(scale_facs)

    nchans = main_obs.header['nchans']
    obs_chan_per_coarse = nchans // ncoarse
    ax0_size = np.size(main_dat, 0)
    ax1_size = np.size(main_dat, 1)

    # Reshape data array of target observation and multiply coarse channels by the scale factors
    main_dat = np.reshape(main_dat, (ax0_size, ax1_size, ncoarse, obs_chan_per_coarse))
    main_dat = np.swapaxes(main_dat, 2, 3)
    main_dat = main_dat * scale_facs
    main_dat = main_dat - Tsys
    main_dat = np.swapaxes(main_dat, 2, 3)
    main_dat = np.reshape(main_dat, (ax0_size, ax1_size, nchans))

    # Write calibrated data to a new filterbank file with ".fluxcal" extension
    main_obs.data = main_dat
    main_obs.write_to_filterbank(main_obs_name[:-4] + '.fluxcal.fil')
    print('Finished: calibrated product written to ' + main_obs_name[:-4] + '.fluxcal.fil')
[ "Produce", "calibrated", "Stokes", "I", "for", "an", "observation", "given", "a", "noise", "diode", "measurement", "on", "the", "source", "and", "a", "diode", "spectrum", "with", "the", "same", "number", "of", "coarse", "channels" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/calib_utils/fluxcal.py#L255-L307
[ "def", "calibrate_fluxes", "(", "main_obs_name", ",", "dio_name", ",", "dspec", ",", "Tsys", ",", "fullstokes", "=", "False", ",", "*", "*", "kwargs", ")", ":", "#Find folded spectra of the target source with the noise diode ON and OFF", "main_obs", "=", "Waterfall", "(", "main_obs_name", ",", "max_load", "=", "150", ")", "ncoarse", "=", "main_obs", ".", "calc_n_coarse_chan", "(", ")", "dio_obs", "=", "Waterfall", "(", "dio_name", ",", "max_load", "=", "150", ")", "dio_chan_per_coarse", "=", "dio_obs", ".", "header", "[", "'nchans'", "]", "/", "ncoarse", "dOFF", ",", "dON", "=", "integrate_calib", "(", "dio_name", ",", "dio_chan_per_coarse", ",", "fullstokes", ",", "*", "*", "kwargs", ")", "#Find Jy/count for each coarse channel using the diode spectrum", "main_dat", "=", "main_obs", ".", "data", "scale_facs", "=", "dspec", "/", "(", "dON", "-", "dOFF", ")", "print", "(", "scale_facs", ")", "nchans", "=", "main_obs", ".", "header", "[", "'nchans'", "]", "obs_chan_per_coarse", "=", "nchans", "/", "ncoarse", "ax0_size", "=", "np", ".", "size", "(", "main_dat", ",", "0", ")", "ax1_size", "=", "np", ".", "size", "(", "main_dat", ",", "1", ")", "#Reshape data array of target observation and multiply coarse channels by the scale factors", "main_dat", "=", "np", ".", "reshape", "(", "main_dat", ",", "(", "ax0_size", ",", "ax1_size", ",", "ncoarse", ",", "obs_chan_per_coarse", ")", ")", "main_dat", "=", "np", ".", "swapaxes", "(", "main_dat", ",", "2", ",", "3", ")", "main_dat", "=", "main_dat", "*", "scale_facs", "main_dat", "=", "main_dat", "-", "Tsys", "main_dat", "=", "np", ".", "swapaxes", "(", "main_dat", ",", "2", ",", "3", ")", "main_dat", "=", "np", ".", "reshape", "(", "main_dat", ",", "(", "ax0_size", ",", "ax1_size", ",", "nchans", ")", ")", "#Write calibrated data to a new filterbank file with \".fluxcal\" extension", "main_obs", ".", "data", "=", "main_dat", "main_obs", ".", "write_to_filterbank", "(", "main_obs_name", "[", ":", "-", "4", "]", "+", "'.fluxcal.fil'", ")", "print", "(", "'Finished: calibrated product written to '", "+", "main_obs_name", "[", ":", "-", "4", "]", "+", "'.fluxcal.fil'", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
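Putting the pieces together, a hypothetical calibration run might look like this (all file names invented):

dspec, Tsys = diode_spec('diode_on_cal.fil', 'diode_off_cal.fil',
                         calflux=15.0, calfreq=1400.0, spec_in=-0.7, average=False)
calibrate_fluxes('target_0001.fil', 'target_diode.fil', dspec, Tsys)
# writes target_0001.fluxcal.fil with the data scaled to Jy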
len_header
Return the length of the blimpy header, in bytes

Args:
    filename (str): name of file to open

Returns:
    idx_end (int): length of header, in bytes
blimpy/sigproc.py
def len_header(filename):
    """ Return the length of the blimpy header, in bytes
    Args:
        filename (str): name of file to open
    Returns:
        idx_end (int): length of header, in bytes
    """
    with open(filename, 'rb') as f:
        header_sub_count = 0
        eoh_found = False
        while not eoh_found:
            header_sub = f.read(512)
            header_sub_count += 1
            if b'HEADER_END' in header_sub:
                idx_end = header_sub.index(b'HEADER_END') + len(b'HEADER_END')
                eoh_found = True
                break

        idx_end = (header_sub_count - 1) * 512 + idx_end
    return idx_end
[ "Return", "the", "length", "of", "the", "blimpy", "header", "in", "bytes" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L78-L99
[ "def", "len_header", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "header_sub_count", "=", "0", "eoh_found", "=", "False", "while", "not", "eoh_found", ":", "header_sub", "=", "f", ".", "read", "(", "512", ")", "header_sub_count", "+=", "1", "if", "b'HEADER_END'", "in", "header_sub", ":", "idx_end", "=", "header_sub", ".", "index", "(", "b'HEADER_END'", ")", "+", "len", "(", "b'HEADER_END'", ")", "eoh_found", "=", "True", "break", "idx_end", "=", "(", "header_sub_count", "-", "1", ")", "*", "512", "+", "idx_end", "return", "idx_end" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
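A quick sketch pairing len_header with the file size to get the data-section length (file path hypothetical):

import os
hdr_len = len_header('observation.fil')
data_bytes = os.path.getsize('observation.fil') - hdr_len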
is_filterbank
Open file and confirm if it is a filterbank file or not.
blimpy/sigproc.py
def is_filterbank(filename):
    """ Open file and confirm if it is a filterbank file or not. """
    with open(filename, 'rb') as fh:
        is_fil = True

        # Check this is a blimpy file
        try:
            keyword, value, idx = read_next_header_keyword(fh)
            try:
                assert keyword == b'HEADER_START'
            except AssertionError:
                is_fil = False
        except KeyError:
            is_fil = False
    return is_fil
[ "Open", "file", "and", "confirm", "if", "it", "is", "a", "filterbank", "file", "or", "not", "." ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L143-L157
[ "def", "is_filterbank", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fh", ":", "is_fil", "=", "True", "# Check this is a blimpy file", "try", ":", "keyword", ",", "value", ",", "idx", "=", "read_next_header_keyword", "(", "fh", ")", "try", ":", "assert", "keyword", "==", "b'HEADER_START'", "except", "AssertionError", ":", "is_fil", "=", "False", "except", "KeyError", ":", "is_fil", "=", "False", "return", "is_fil" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
read_header
Read blimpy header and return a Python dictionary of key:value pairs

Args:
    filename (str): name of file to open

Optional args:
    return_idxs (bool): Default False. If true, returns the file offset indexes
                        for values
blimpy/sigproc.py
def read_header(filename, return_idxs=False):
    """ Read blimpy header and return a Python dictionary of key:value pairs
    Args:
        filename (str): name of file to open
    Optional args:
        return_idxs (bool): Default False. If true, returns the file offset
                            indexes for values
    """
    with open(filename, 'rb') as fh:
        header_dict = {}
        header_idxs = {}

        # Check this is a blimpy file
        keyword, value, idx = read_next_header_keyword(fh)

        try:
            assert keyword == b'HEADER_START'
        except AssertionError:
            raise RuntimeError("Not a valid blimpy file.")

        while True:
            keyword, value, idx = read_next_header_keyword(fh)

            if keyword == b'HEADER_END':
                break
            else:
                header_dict[keyword] = value
                header_idxs[keyword] = idx

    if return_idxs:
        return header_idxs
    else:
        return header_dict
[ "Read", "blimpy", "header", "and", "return", "a", "Python", "dictionary", "of", "key", ":", "value", "pairs" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L160-L196
[ "def", "read_header", "(", "filename", ",", "return_idxs", "=", "False", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fh", ":", "header_dict", "=", "{", "}", "header_idxs", "=", "{", "}", "# Check this is a blimpy file", "keyword", ",", "value", ",", "idx", "=", "read_next_header_keyword", "(", "fh", ")", "try", ":", "assert", "keyword", "==", "b'HEADER_START'", "except", "AssertionError", ":", "raise", "RuntimeError", "(", "\"Not a valid blimpy file.\"", ")", "while", "True", ":", "keyword", ",", "value", ",", "idx", "=", "read_next_header_keyword", "(", "fh", ")", "if", "keyword", "==", "b'HEADER_END'", ":", "break", "else", ":", "header_dict", "[", "keyword", "]", "=", "value", "header_idxs", "[", "keyword", "]", "=", "idx", "if", "return_idxs", ":", "return", "header_idxs", "else", ":", "return", "header_dict" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
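A usage sketch; note that the returned keys are bytes objects (file name hypothetical):

header = read_header('observation.fil')
print(header[b'nchans'], header[b'nbits'], header[b'nifs'])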
fix_header
Apply a quick patch-up to a Filterbank header by overwriting a header value

Args:
    filename (str): name of file to open and fix. WILL BE MODIFIED.
    keyword (str): header keyword to update
    new_value (long, double, angle or string): New value to write.

Notes:
    This will overwrite the current value of the blimpy with a desired 'fixed'
    version. Note that this has limited support for patching string-type values -
    if the length of the string changes, all hell will break loose.
blimpy/sigproc.py
def fix_header(filename, keyword, new_value):
    """ Apply a quick patch-up to a Filterbank header by overwriting a header value
    Args:
        filename (str): name of file to open and fix. WILL BE MODIFIED.
        keyword (str): header keyword to update
        new_value (long, double, angle or string): New value to write.
    Notes:
        This will overwrite the current value of the blimpy with a desired
        'fixed' version. Note that this has limited support for patching
        string-type values - if the length of the string changes, all hell
        will break loose.
    """
    # Read header data and return indexes of data offsets in file
    hd = read_header(filename)
    hi = read_header(filename, return_idxs=True)
    idx = hi[keyword]

    # Find out the datatype for the given keyword
    dtype = header_keyword_types[keyword]
    dtype_to_type = {b'<l': np.int32,
                     b'str': bytes,
                     b'<d': np.float64,
                     b'angle': to_sigproc_angle}
    value_dtype = dtype_to_type[dtype]

    # Generate the new string. Compare the type object itself: the original's
    # isinstance(value_dtype, bytes) was always False, so the string branch
    # could never be reached
    if value_dtype is bytes:
        if len(hd[keyword]) == len(new_value):
            val_str = np.int32(len(new_value)).tostring() + new_value
        else:
            raise RuntimeError("String size mismatch. Cannot update without rewriting entire file.")
    else:
        val_str = value_dtype(new_value).tostring()

    # Write the new string to file
    with open(filename, 'rb+') as fh:
        fh.seek(idx)
        fh.write(val_str)
[ "Apply", "a", "quick", "patch", "-", "up", "to", "a", "Filterbank", "header", "by", "overwriting", "a", "header", "value" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L198-L240
[ "def", "fix_header", "(", "filename", ",", "keyword", ",", "new_value", ")", ":", "# Read header data and return indexes of data offsets in file", "hd", "=", "read_header", "(", "filename", ")", "hi", "=", "read_header", "(", "filename", ",", "return_idxs", "=", "True", ")", "idx", "=", "hi", "[", "keyword", "]", "# Find out the datatype for the given keyword", "dtype", "=", "header_keyword_types", "[", "keyword", "]", "dtype_to_type", "=", "{", "b'<l'", ":", "np", ".", "int32", ",", "b'str'", ":", "bytes", ",", "b'<d'", ":", "np", ".", "float64", ",", "b'angle'", ":", "to_sigproc_angle", "}", "value_dtype", "=", "dtype_to_type", "[", "dtype", "]", "# Generate the new string", "if", "isinstance", "(", "value_dtype", ",", "bytes", ")", ":", "if", "len", "(", "hd", "[", "keyword", "]", ")", "==", "len", "(", "new_value", ")", ":", "val_str", "=", "np", ".", "int32", "(", "len", "(", "new_value", ")", ")", ".", "tostring", "(", ")", "+", "new_value", "else", ":", "raise", "RuntimeError", "(", "\"String size mismatch. Cannot update without rewriting entire file.\"", ")", "else", ":", "val_str", "=", "value_dtype", "(", "new_value", ")", ".", "tostring", "(", ")", "# Write the new string to file", "with", "open", "(", "filename", ",", "'rb+'", ")", "as", "fh", ":", "fh", ".", "seek", "(", "idx", ")", "fh", ".", "write", "(", "val_str", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
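A hedged sketch of an in-place header patch; the keyword and value are examples only, and the file is modified on disk:

fix_header('observation.fil', b'tsamp', 0.000349)   # overwrite a double-typed keyword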
fil_double_to_angle
Reads a little-endian double in ddmmss.s (or hhmmss.s) format and then converts to Float degrees (or hours). This is primarily used to read src_raj and src_dej header values.
blimpy/sigproc.py
def fil_double_to_angle(angle):
    """ Reads a little-endian double in ddmmss.s (or hhmmss.s) format and then
    converts to Float degrees (or hours). This is primarily used to read
    src_raj and src_dej header values. """
    negative = (angle < 0.0)
    angle = np.abs(angle)

    dd = np.floor((angle / 10000))
    angle -= 10000 * dd
    mm = np.floor((angle / 100))
    ss = angle - 100 * mm
    dd += mm / 60.0 + ss / 3600.0

    if negative:
        dd *= -1

    return dd
[ "Reads", "a", "little", "-", "endian", "double", "in", "ddmmss", ".", "s", "(", "or", "hhmmss", ".", "s", ")", "format", "and", "then", "converts", "to", "Float", "degrees", "(", "or", "hours", ")", ".", "This", "is", "primarily", "used", "to", "read", "src_raj", "and", "src_dej", "header", "values", "." ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L242-L259
[ "def", "fil_double_to_angle", "(", "angle", ")", ":", "negative", "=", "(", "angle", "<", "0.0", ")", "angle", "=", "np", ".", "abs", "(", "angle", ")", "dd", "=", "np", ".", "floor", "(", "(", "angle", "/", "10000", ")", ")", "angle", "-=", "10000", "*", "dd", "mm", "=", "np", ".", "floor", "(", "(", "angle", "/", "100", ")", ")", "ss", "=", "angle", "-", "100", "*", "mm", "dd", "+=", "mm", "/", "60.0", "+", "ss", "/", "3600.0", "if", "negative", ":", "dd", "*=", "-", "1", "return", "dd" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
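A worked example of the ddmmss.s unpacking: 123456.5 splits into dd=12, mm=34, ss=56.5, giving 12 + 34/60 + 56.5/3600:

val = fil_double_to_angle(123456.5)
# val == 12 + 34/60.0 + 56.5/3600.0, approximately 12.58236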
to_sigproc_keyword
Generate a serialized string for a sigproc keyword:value pair

If value=None, just the keyword will be written with no payload.
Data type is inferred by keyword name (via a lookup table)

Args:
    keyword (str): Keyword to write
    value (None, float, str, double or angle): value to write to file

Returns:
    value_str (str): serialized string to write to file.
blimpy/sigproc.py
def to_sigproc_keyword(keyword, value=None):
    """ Generate a serialized string for a sigproc keyword:value pair
    If value=None, just the keyword will be written with no payload.
    Data type is inferred by keyword name (via a lookup table)
    Args:
        keyword (str): Keyword to write
        value (None, float, str, double or angle): value to write to file
    Returns:
        value_str (str): serialized string to write to file.
    """
    keyword = bytes(keyword)

    if value is None:
        return np.int32(len(keyword)).tostring() + keyword
    else:
        dtype = header_keyword_types[keyword]

        dtype_to_type = {b'<l': np.int32,
                         b'str': str,
                         b'<d': np.float64,
                         b'angle': to_sigproc_angle}

        value_dtype = dtype_to_type[dtype]

        if value_dtype is str:
            return np.int32(len(keyword)).tostring() + keyword + np.int32(len(value)).tostring() + value
        else:
            return np.int32(len(keyword)).tostring() + keyword + value_dtype(value).tostring()
[ "Generate", "a", "serialized", "string", "for", "a", "sigproc", "keyword", ":", "value", "pair" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L265-L296
[ "def", "to_sigproc_keyword", "(", "keyword", ",", "value", "=", "None", ")", ":", "keyword", "=", "bytes", "(", "keyword", ")", "if", "value", "is", "None", ":", "return", "np", ".", "int32", "(", "len", "(", "keyword", ")", ")", ".", "tostring", "(", ")", "+", "keyword", "else", ":", "dtype", "=", "header_keyword_types", "[", "keyword", "]", "dtype_to_type", "=", "{", "b'<l'", ":", "np", ".", "int32", ",", "b'str'", ":", "str", ",", "b'<d'", ":", "np", ".", "float64", ",", "b'angle'", ":", "to_sigproc_angle", "}", "value_dtype", "=", "dtype_to_type", "[", "dtype", "]", "if", "value_dtype", "is", "str", ":", "return", "np", ".", "int32", "(", "len", "(", "keyword", ")", ")", ".", "tostring", "(", ")", "+", "keyword", "+", "np", ".", "int32", "(", "len", "(", "value", ")", ")", ".", "tostring", "(", ")", "+", "value", "else", ":", "return", "np", ".", "int32", "(", "len", "(", "keyword", ")", ")", ".", "tostring", "(", ")", "+", "keyword", "+", "value_dtype", "(", "value", ")", ".", "tostring", "(", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
generate_sigproc_header
Generate a serialized sigproc header which can be written to disk.

Args:
    f (Filterbank object): Filterbank object for which to generate header

Returns:
    header_str (str): Serialized string corresponding to header
blimpy/sigproc.py
def generate_sigproc_header(f):
    """ Generate a serialized sigproc header which can be written to disk.
    Args:
        f (Filterbank object): Filterbank object for which to generate header
    Returns:
        header_str (str): Serialized string corresponding to header
    """
    header_string = b''
    header_string += to_sigproc_keyword(b'HEADER_START')

    for keyword in f.header.keys():
        if keyword == b'src_raj':
            header_string += to_sigproc_keyword(b'src_raj') + to_sigproc_angle(f.header[b'src_raj'])
        elif keyword == b'src_dej':
            header_string += to_sigproc_keyword(b'src_dej') + to_sigproc_angle(f.header[b'src_dej'])
        elif keyword == b'az_start' or keyword == b'za_start':
            header_string += to_sigproc_keyword(keyword) + np.float64(f.header[keyword]).tostring()
        elif keyword not in header_keyword_types.keys():
            pass
        else:
            header_string += to_sigproc_keyword(keyword, f.header[keyword])

    header_string += to_sigproc_keyword(b'HEADER_END')
    return header_string
[ "Generate", "a", "serialzed", "sigproc", "header", "which", "can", "be", "written", "to", "disk", "." ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L298-L324
[ "def", "generate_sigproc_header", "(", "f", ")", ":", "header_string", "=", "b''", "header_string", "+=", "to_sigproc_keyword", "(", "b'HEADER_START'", ")", "for", "keyword", "in", "f", ".", "header", ".", "keys", "(", ")", ":", "if", "keyword", "==", "b'src_raj'", ":", "header_string", "+=", "to_sigproc_keyword", "(", "b'src_raj'", ")", "+", "to_sigproc_angle", "(", "f", ".", "header", "[", "b'src_raj'", "]", ")", "elif", "keyword", "==", "b'src_dej'", ":", "header_string", "+=", "to_sigproc_keyword", "(", "b'src_dej'", ")", "+", "to_sigproc_angle", "(", "f", ".", "header", "[", "b'src_dej'", "]", ")", "elif", "keyword", "==", "b'az_start'", "or", "keyword", "==", "b'za_start'", ":", "header_string", "+=", "to_sigproc_keyword", "(", "keyword", ")", "+", "np", ".", "float64", "(", "f", ".", "header", "[", "keyword", "]", ")", ".", "tostring", "(", ")", "elif", "keyword", "not", "in", "header_keyword_types", ".", "keys", "(", ")", ":", "pass", "else", ":", "header_string", "+=", "to_sigproc_keyword", "(", "keyword", ",", "f", ".", "header", "[", "keyword", "]", ")", "header_string", "+=", "to_sigproc_keyword", "(", "b'HEADER_END'", ")", "return", "header_string" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
to_sigproc_angle
Convert an astropy.Angle to the ridiculous sigproc angle format string.
blimpy/sigproc.py
def to_sigproc_angle(angle_val):
    """ Convert an astropy.Angle to the ridiculous sigproc angle format string. """
    x = str(angle_val)

    if '.' in x:
        if 'h' in x:
            d, m, s, ss = int(x[0:x.index('h')]), int(x[x.index('h') + 1:x.index('m')]), \
                          int(x[x.index('m') + 1:x.index('.')]), float(x[x.index('.'):x.index('s')])
        if 'd' in x:
            d, m, s, ss = int(x[0:x.index('d')]), int(x[x.index('d') + 1:x.index('m')]), \
                          int(x[x.index('m') + 1:x.index('.')]), float(x[x.index('.'):x.index('s')])
    else:
        if 'h' in x:
            d, m, s = int(x[0:x.index('h')]), int(x[x.index('h') + 1:x.index('m')]), \
                      int(x[x.index('m') + 1:x.index('s')])
        if 'd' in x:
            d, m, s = int(x[0:x.index('d')]), int(x[x.index('d') + 1:x.index('m')]), \
                      int(x[x.index('m') + 1:x.index('s')])
        ss = 0

    num = str(d).zfill(2) + str(m).zfill(2) + str(s).zfill(2) + '.' + str(ss).split(".")[-1]
    return np.float64(num).tostring()
[ "Convert", "an", "astropy", ".", "Angle", "to", "the", "ridiculous", "sigproc", "angle", "format", "string", "." ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L327-L347
[ "def", "to_sigproc_angle", "(", "angle_val", ")", ":", "x", "=", "str", "(", "angle_val", ")", "if", "'.'", "in", "x", ":", "if", "'h'", "in", "x", ":", "d", ",", "m", ",", "s", ",", "ss", "=", "int", "(", "x", "[", "0", ":", "x", ".", "index", "(", "'h'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'h'", ")", "+", "1", ":", "x", ".", "index", "(", "'m'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'m'", ")", "+", "1", ":", "x", ".", "index", "(", "'.'", ")", "]", ")", ",", "float", "(", "x", "[", "x", ".", "index", "(", "'.'", ")", ":", "x", ".", "index", "(", "'s'", ")", "]", ")", "if", "'d'", "in", "x", ":", "d", ",", "m", ",", "s", ",", "ss", "=", "int", "(", "x", "[", "0", ":", "x", ".", "index", "(", "'d'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'d'", ")", "+", "1", ":", "x", ".", "index", "(", "'m'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'m'", ")", "+", "1", ":", "x", ".", "index", "(", "'.'", ")", "]", ")", ",", "float", "(", "x", "[", "x", ".", "index", "(", "'.'", ")", ":", "x", ".", "index", "(", "'s'", ")", "]", ")", "else", ":", "if", "'h'", "in", "x", ":", "d", ",", "m", ",", "s", "=", "int", "(", "x", "[", "0", ":", "x", ".", "index", "(", "'h'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'h'", ")", "+", "1", ":", "x", ".", "index", "(", "'m'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'m'", ")", "+", "1", ":", "x", ".", "index", "(", "'s'", ")", "]", ")", "if", "'d'", "in", "x", ":", "d", ",", "m", ",", "s", "=", "int", "(", "x", "[", "0", ":", "x", ".", "index", "(", "'d'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'d'", ")", "+", "1", ":", "x", ".", "index", "(", "'m'", ")", "]", ")", ",", "int", "(", "x", "[", "x", ".", "index", "(", "'m'", ")", "+", "1", ":", "x", ".", "index", "(", "'s'", ")", "]", ")", "ss", "=", "0", "num", "=", "str", "(", "d", ")", ".", "zfill", "(", "2", ")", "+", "str", "(", "m", ")", ".", "zfill", "(", "2", ")", "+", "str", "(", "s", ")", ".", "zfill", "(", "2", ")", "+", "'.'", "+", "str", "(", "ss", ")", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "return", "np", ".", "float64", "(", "num", ")", ".", "tostring", "(", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
calc_n_ints_in_file
Calculate number of integrations in a given file
blimpy/sigproc.py
def calc_n_ints_in_file(filename):
    """ Calculate number of integrations in a given file """
    # Load binary data
    h = read_header(filename)
    n_bytes = int(h[b'nbits'] / 8)
    n_chans = h[b'nchans']
    n_ifs = h[b'nifs']
    idx_data = len_header(filename)

    # The original opened the file and seeked to idx_data here without ever
    # reading from or closing the handle; that leaked file object is removed
    filesize = os.path.getsize(filename)
    n_bytes_data = filesize - idx_data

    if h[b'nbits'] == 2:
        n_ints = int(4 * n_bytes_data / (n_chans * n_ifs))
    else:
        n_ints = int(n_bytes_data / (n_bytes * n_chans * n_ifs))

    return n_ints
[ "Calculate", "number", "of", "integrations", "in", "a", "given", "file" ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/sigproc.py#L350-L369
[ "def", "calc_n_ints_in_file", "(", "filename", ")", ":", "# Load binary data", "h", "=", "read_header", "(", "filename", ")", "n_bytes", "=", "int", "(", "h", "[", "b'nbits'", "]", "/", "8", ")", "n_chans", "=", "h", "[", "b'nchans'", "]", "n_ifs", "=", "h", "[", "b'nifs'", "]", "idx_data", "=", "len_header", "(", "filename", ")", "f", "=", "open", "(", "filename", ",", "'rb'", ")", "f", ".", "seek", "(", "idx_data", ")", "filesize", "=", "os", ".", "path", ".", "getsize", "(", "filename", ")", "n_bytes_data", "=", "filesize", "-", "idx_data", "if", "h", "[", "b'nbits'", "]", "==", "2", ":", "n_ints", "=", "int", "(", "4", "*", "n_bytes_data", "/", "(", "n_chans", "*", "n_ifs", ")", ")", "else", ":", "n_ints", "=", "int", "(", "n_bytes_data", "/", "(", "n_bytes", "*", "n_chans", "*", "n_ifs", ")", ")", "return", "n_ints" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
make_fil_file
Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir.
blimpy/h52fil.py
def make_fil_file(filename, out_dir='./', new_filename=None, max_load=None):
    ''' Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir. '''
    fil_file = Waterfall(filename, max_load=max_load)
    if not new_filename:
        new_filename = out_dir + filename.replace('.h5', '.fil').split('/')[-1]
    if '.fil' not in new_filename:
        new_filename = new_filename + '.fil'
    fil_file.write_to_fil(new_filename)
[ "Converts", "file", "to", "Sigproc", "filterbank", "(", ".", "fil", ")", "format", ".", "Default", "saves", "output", "in", "current", "dir", "." ]
UCBerkeleySETI/blimpy
python
https://github.com/UCBerkeleySETI/blimpy/blob/b8822d3e3e911944370d84371a91fa0c29e9772e/blimpy/h52fil.py#L37-L48
[ "def", "make_fil_file", "(", "filename", ",", "out_dir", "=", "'./'", ",", "new_filename", "=", "None", ",", "max_load", "=", "None", ")", ":", "fil_file", "=", "Waterfall", "(", "filename", ",", "max_load", "=", "max_load", ")", "if", "not", "new_filename", ":", "new_filename", "=", "out_dir", "+", "filename", ".", "replace", "(", "'.h5'", ",", "'.fil'", ")", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "if", "'.fil'", "not", "in", "new_filename", ":", "new_filename", "=", "new_filename", "+", "'.fil'", "fil_file", ".", "write_to_fil", "(", "new_filename", ")" ]
b8822d3e3e911944370d84371a91fa0c29e9772e
test
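A minimal conversion sketch (input path hypothetical):

make_fil_file('observation.h5', out_dir='/tmp/')   # writes /tmp/observation.fil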
Traceback.to_dict
Convert a Traceback into a dictionary representation
src/tblib/__init__.py
def to_dict(self):
    """Convert a Traceback into a dictionary representation"""
    if self.tb_next is None:
        tb_next = None
    else:
        tb_next = self.tb_next.to_dict()

    code = {
        'co_filename': self.tb_frame.f_code.co_filename,
        'co_name': self.tb_frame.f_code.co_name,
    }
    frame = {
        'f_globals': self.tb_frame.f_globals,
        'f_code': code,
    }
    return {
        'tb_frame': frame,
        'tb_lineno': self.tb_lineno,
        'tb_next': tb_next,
    }
[ "Convert", "a", "Traceback", "into", "a", "dictionary", "representation" ]
ionelmc/python-tblib
python
https://github.com/ionelmc/python-tblib/blob/00be69aa97e1eb1c09282b1cdb72539c947d4515/src/tblib/__init__.py#L141-L160
[ "def", "to_dict", "(", "self", ")", ":", "if", "self", ".", "tb_next", "is", "None", ":", "tb_next", "=", "None", "else", ":", "tb_next", "=", "self", ".", "tb_next", ".", "to_dict", "(", ")", "code", "=", "{", "'co_filename'", ":", "self", ".", "tb_frame", ".", "f_code", ".", "co_filename", ",", "'co_name'", ":", "self", ".", "tb_frame", ".", "f_code", ".", "co_name", ",", "}", "frame", "=", "{", "'f_globals'", ":", "self", ".", "tb_frame", ".", "f_globals", ",", "'f_code'", ":", "code", ",", "}", "return", "{", "'tb_frame'", ":", "frame", ",", "'tb_lineno'", ":", "self", ".", "tb_lineno", ",", "'tb_next'", ":", "tb_next", ",", "}" ]
00be69aa97e1eb1c09282b1cdb72539c947d4515
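A sketch of how to_dict above might be exercised, assuming tblib is installed; the dictionary mirrors the tb_frame / tb_lineno / tb_next chain built in the record:

import sys
from tblib import Traceback

try:
    1 / 0
except ZeroDivisionError:
    tb = Traceback(sys.exc_info()[2])  # wrap the live traceback object
    d = tb.to_dict()                   # one nested dict per frame in the chain
    print(d['tb_lineno'], d['tb_frame']['f_code']['co_name'])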
test
make_rr_subparser
Make a subparser for a given type of DNS record
blockstack_zones/parse_zone_file.py
def make_rr_subparser(subparsers, rec_type, args_and_types): """ Make a subparser for a given type of DNS record """ sp = subparsers.add_parser(rec_type) sp.add_argument("name", type=str) sp.add_argument("ttl", type=int, nargs='?') sp.add_argument(rec_type, type=str) for my_spec in args_and_types: (argname, argtype) = my_spec[:2] if len(my_spec) > 2: nargs = my_spec[2] sp.add_argument(argname, type=argtype, nargs=nargs) else: sp.add_argument(argname, type=argtype) return sp
def make_rr_subparser(subparsers, rec_type, args_and_types): """ Make a subparser for a given type of DNS record """ sp = subparsers.add_parser(rec_type) sp.add_argument("name", type=str) sp.add_argument("ttl", type=int, nargs='?') sp.add_argument(rec_type, type=str) for my_spec in args_and_types: (argname, argtype) = my_spec[:2] if len(my_spec) > 2: nargs = my_spec[2] sp.add_argument(argname, type=argtype, nargs=nargs) else: sp.add_argument(argname, type=argtype) return sp
[ "Make", "a", "subparser", "for", "a", "given", "type", "of", "DNS", "record" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L32-L49
[ "def", "make_rr_subparser", "(", "subparsers", ",", "rec_type", ",", "args_and_types", ")", ":", "sp", "=", "subparsers", ".", "add_parser", "(", "rec_type", ")", "sp", ".", "add_argument", "(", "\"name\"", ",", "type", "=", "str", ")", "sp", ".", "add_argument", "(", "\"ttl\"", ",", "type", "=", "int", ",", "nargs", "=", "'?'", ")", "sp", ".", "add_argument", "(", "rec_type", ",", "type", "=", "str", ")", "for", "my_spec", "in", "args_and_types", ":", "(", "argname", ",", "argtype", ")", "=", "my_spec", "[", ":", "2", "]", "if", "len", "(", "my_spec", ")", ">", "2", ":", "nargs", "=", "my_spec", "[", "2", "]", "sp", ".", "add_argument", "(", "argname", ",", "type", "=", "argtype", ",", "nargs", "=", "nargs", ")", "else", ":", "sp", ".", "add_argument", "(", "argname", ",", "type", "=", "argtype", ")", "return", "sp" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
make_parser
Make an ArgumentParser that accepts DNS RRs
blockstack_zones/parse_zone_file.py
def make_parser(): """ Make an ArgumentParser that accepts DNS RRs """ line_parser = ZonefileLineParser() subparsers = line_parser.add_subparsers() # parse $ORIGIN sp = subparsers.add_parser("$ORIGIN") sp.add_argument("$ORIGIN", type=str) # parse $TTL sp = subparsers.add_parser("$TTL") sp.add_argument("$TTL", type=int) # parse each RR args_and_types = [ ("mname", str), ("rname", str), ("serial", int), ("refresh", int), ("retry", int), ("expire", int), ("minimum", int) ] make_rr_subparser(subparsers, "SOA", args_and_types) make_rr_subparser(subparsers, "NS", [("host", str)]) make_rr_subparser(subparsers, "A", [("ip", str)]) make_rr_subparser(subparsers, "AAAA", [("ip", str)]) make_rr_subparser(subparsers, "CNAME", [("alias", str)]) make_rr_subparser(subparsers, "ALIAS", [("host", str)]) make_rr_subparser(subparsers, "MX", [("preference", str), ("host", str)]) make_txt_subparser(subparsers) make_rr_subparser(subparsers, "PTR", [("host", str)]) make_rr_subparser(subparsers, "SRV", [("priority", int), ("weight", int), ("port", int), ("target", str)]) make_rr_subparser(subparsers, "SPF", [("data", str)]) make_rr_subparser(subparsers, "URI", [("priority", int), ("weight", int), ("target", str)]) return line_parser
def make_parser(): """ Make an ArgumentParser that accepts DNS RRs """ line_parser = ZonefileLineParser() subparsers = line_parser.add_subparsers() # parse $ORIGIN sp = subparsers.add_parser("$ORIGIN") sp.add_argument("$ORIGIN", type=str) # parse $TTL sp = subparsers.add_parser("$TTL") sp.add_argument("$TTL", type=int) # parse each RR args_and_types = [ ("mname", str), ("rname", str), ("serial", int), ("refresh", int), ("retry", int), ("expire", int), ("minimum", int) ] make_rr_subparser(subparsers, "SOA", args_and_types) make_rr_subparser(subparsers, "NS", [("host", str)]) make_rr_subparser(subparsers, "A", [("ip", str)]) make_rr_subparser(subparsers, "AAAA", [("ip", str)]) make_rr_subparser(subparsers, "CNAME", [("alias", str)]) make_rr_subparser(subparsers, "ALIAS", [("host", str)]) make_rr_subparser(subparsers, "MX", [("preference", str), ("host", str)]) make_txt_subparser(subparsers) make_rr_subparser(subparsers, "PTR", [("host", str)]) make_rr_subparser(subparsers, "SRV", [("priority", int), ("weight", int), ("port", int), ("target", str)]) make_rr_subparser(subparsers, "SPF", [("data", str)]) make_rr_subparser(subparsers, "URI", [("priority", int), ("weight", int), ("target", str)]) return line_parser
[ "Make", "an", "ArgumentParser", "that", "accepts", "DNS", "RRs" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L60-L94
[ "def", "make_parser", "(", ")", ":", "line_parser", "=", "ZonefileLineParser", "(", ")", "subparsers", "=", "line_parser", ".", "add_subparsers", "(", ")", "# parse $ORIGIN", "sp", "=", "subparsers", ".", "add_parser", "(", "\"$ORIGIN\"", ")", "sp", ".", "add_argument", "(", "\"$ORIGIN\"", ",", "type", "=", "str", ")", "# parse $TTL", "sp", "=", "subparsers", ".", "add_parser", "(", "\"$TTL\"", ")", "sp", ".", "add_argument", "(", "\"$TTL\"", ",", "type", "=", "int", ")", "# parse each RR", "args_and_types", "=", "[", "(", "\"mname\"", ",", "str", ")", ",", "(", "\"rname\"", ",", "str", ")", ",", "(", "\"serial\"", ",", "int", ")", ",", "(", "\"refresh\"", ",", "int", ")", ",", "(", "\"retry\"", ",", "int", ")", ",", "(", "\"expire\"", ",", "int", ")", ",", "(", "\"minimum\"", ",", "int", ")", "]", "make_rr_subparser", "(", "subparsers", ",", "\"SOA\"", ",", "args_and_types", ")", "make_rr_subparser", "(", "subparsers", ",", "\"NS\"", ",", "[", "(", "\"host\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"A\"", ",", "[", "(", "\"ip\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"AAAA\"", ",", "[", "(", "\"ip\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"CNAME\"", ",", "[", "(", "\"alias\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"ALIAS\"", ",", "[", "(", "\"host\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"MX\"", ",", "[", "(", "\"preference\"", ",", "str", ")", ",", "(", "\"host\"", ",", "str", ")", "]", ")", "make_txt_subparser", "(", "subparsers", ")", "make_rr_subparser", "(", "subparsers", ",", "\"PTR\"", ",", "[", "(", "\"host\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"SRV\"", ",", "[", "(", "\"priority\"", ",", "int", ")", ",", "(", "\"weight\"", ",", "int", ")", ",", "(", "\"port\"", ",", "int", ")", ",", "(", "\"target\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"SPF\"", ",", "[", "(", "\"data\"", ",", "str", ")", "]", ")", "make_rr_subparser", "(", "subparsers", ",", "\"URI\"", ",", "[", "(", "\"priority\"", ",", "int", ")", ",", "(", "\"weight\"", ",", "int", ")", ",", "(", "\"target\"", ",", "str", ")", "]", ")", "return", "line_parser" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
tokenize_line
Tokenize a line: * split tokens on whitespace * treat quoted strings as a single token * drop comments * handle escaped spaces and comment delimiters
blockstack_zones/parse_zone_file.py
def tokenize_line(line): """ Tokenize a line: * split tokens on whitespace * treat quoted strings as a single token * drop comments * handle escaped spaces and comment delimiters """ ret = [] escape = False quote = False tokbuf = "" ll = list(line) while len(ll) > 0: c = ll.pop(0) if c.isspace(): if not quote and not escape: # end of token if len(tokbuf) > 0: ret.append(tokbuf) tokbuf = "" elif quote: # in quotes tokbuf += c elif escape: # escaped space tokbuf += c escape = False else: tokbuf = "" continue if c == '\\': escape = True continue elif c == '"': if not escape: if quote: # end of quote ret.append(tokbuf) tokbuf = "" quote = False continue else: # beginning of quote quote = True continue elif c == ';': if not escape: # comment ret.append(tokbuf) tokbuf = "" break # normal character tokbuf += c escape = False if len(tokbuf.strip(" ").strip("\n")) > 0: ret.append(tokbuf) return ret
def tokenize_line(line): """ Tokenize a line: * split tokens on whitespace * treat quoted strings as a single token * drop comments * handle escaped spaces and comment delimiters """ ret = [] escape = False quote = False tokbuf = "" ll = list(line) while len(ll) > 0: c = ll.pop(0) if c.isspace(): if not quote and not escape: # end of token if len(tokbuf) > 0: ret.append(tokbuf) tokbuf = "" elif quote: # in quotes tokbuf += c elif escape: # escaped space tokbuf += c escape = False else: tokbuf = "" continue if c == '\\': escape = True continue elif c == '"': if not escape: if quote: # end of quote ret.append(tokbuf) tokbuf = "" quote = False continue else: # beginning of quote quote = True continue elif c == ';': if not escape: # comment ret.append(tokbuf) tokbuf = "" break # normal character tokbuf += c escape = False if len(tokbuf.strip(" ").strip("\n")) > 0: ret.append(tokbuf) return ret
[ "Tokenize", "a", "line", ":", "*", "split", "tokens", "on", "whitespace", "*", "treat", "quoted", "strings", "as", "a", "single", "token", "*", "drop", "comments", "*", "handle", "escaped", "spaces", "and", "comment", "delimiters" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L97-L160
[ "def", "tokenize_line", "(", "line", ")", ":", "ret", "=", "[", "]", "escape", "=", "False", "quote", "=", "False", "tokbuf", "=", "\"\"", "ll", "=", "list", "(", "line", ")", "while", "len", "(", "ll", ")", ">", "0", ":", "c", "=", "ll", ".", "pop", "(", "0", ")", "if", "c", ".", "isspace", "(", ")", ":", "if", "not", "quote", "and", "not", "escape", ":", "# end of token", "if", "len", "(", "tokbuf", ")", ">", "0", ":", "ret", ".", "append", "(", "tokbuf", ")", "tokbuf", "=", "\"\"", "elif", "quote", ":", "# in quotes", "tokbuf", "+=", "c", "elif", "escape", ":", "# escaped space", "tokbuf", "+=", "c", "escape", "=", "False", "else", ":", "tokbuf", "=", "\"\"", "continue", "if", "c", "==", "'\\\\'", ":", "escape", "=", "True", "continue", "elif", "c", "==", "'\"'", ":", "if", "not", "escape", ":", "if", "quote", ":", "# end of quote", "ret", ".", "append", "(", "tokbuf", ")", "tokbuf", "=", "\"\"", "quote", "=", "False", "continue", "else", ":", "# beginning of quote", "quote", "=", "True", "continue", "elif", "c", "==", "';'", ":", "if", "not", "escape", ":", "# comment ", "ret", ".", "append", "(", "tokbuf", ")", "tokbuf", "=", "\"\"", "break", "# normal character", "tokbuf", "+=", "c", "escape", "=", "False", "if", "len", "(", "tokbuf", ".", "strip", "(", "\" \"", ")", ".", "strip", "(", "\"\\n\"", ")", ")", ">", "0", ":", "ret", ".", "append", "(", "tokbuf", ")", "return", "ret" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
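tokenize_line can be exercised directly; a sketch assuming blockstack_zones is importable:

from blockstack_zones.parse_zone_file import tokenize_line

# Quoted strings survive as single tokens:
print(tokenize_line('@ 3600 TXT "hello world"'))
# -> ['@', '3600', 'TXT', 'hello world']

# The comment branch flushes the token buffer before breaking, which can
# leave a trailing empty token:
print(tokenize_line('name A 127.0.0.1 ;comment'))
# -> ['name', 'A', '127.0.0.1', '']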
test
serialize
Serialize tokens: * quote whitespace-containing tokens * escape semicolons
blockstack_zones/parse_zone_file.py
def serialize(tokens): """ Serialize tokens: * quote whitespace-containing tokens * escape semicolons """ ret = [] for tok in tokens: if " " in tok: tok = '"%s"' % tok if ";" in tok: tok = tok.replace(";", "\;") ret.append(tok) return " ".join(ret)
def serialize(tokens): """ Serialize tokens: * quote whitespace-containing tokens * escape semicolons """ ret = [] for tok in tokens: if " " in tok: tok = '"%s"' % tok if ";" in tok: tok = tok.replace(";", "\;") ret.append(tok) return " ".join(ret)
[ "Serialize", "tokens", ":", "*", "quote", "whitespace", "-", "containing", "tokens", "*", "escape", "semicolons" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L163-L179
[ "def", "serialize", "(", "tokens", ")", ":", "ret", "=", "[", "]", "for", "tok", "in", "tokens", ":", "if", "\" \"", "in", "tok", ":", "tok", "=", "'\"%s\"'", "%", "tok", "if", "\";\"", "in", "tok", ":", "tok", "=", "tok", ".", "replace", "(", "\";\"", ",", "\"\\;\"", ")", "ret", ".", "append", "(", "tok", ")", "return", "\" \"", ".", "join", "(", "ret", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
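serialize is roughly the inverse of tokenize_line, so the two round-trip; a sketch with the same assumed import path:

from blockstack_zones.parse_zone_file import serialize, tokenize_line

line = serialize(['@', '3600', 'TXT', 'hello world'])
print(line)                 # @ 3600 TXT "hello world"
print(tokenize_line(line))  # ['@', '3600', 'TXT', 'hello world']
# Tokens containing ';' come back escaped as \; so they survive the
# comment-stripping pass.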
test
remove_comments
Remove comments from a zonefile
blockstack_zones/parse_zone_file.py
def remove_comments(text): """ Remove comments from a zonefile """ ret = [] lines = text.split("\n") for line in lines: if len(line) == 0: continue line = serialize(tokenize_line(line)) ret.append(line) return "\n".join(ret)
def remove_comments(text): """ Remove comments from a zonefile """ ret = [] lines = text.split("\n") for line in lines: if len(line) == 0: continue line = serialize(tokenize_line(line)) ret.append(line) return "\n".join(ret)
[ "Remove", "comments", "from", "a", "zonefile" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L182-L195
[ "def", "remove_comments", "(", "text", ")", ":", "ret", "=", "[", "]", "lines", "=", "text", ".", "split", "(", "\"\\n\"", ")", "for", "line", "in", "lines", ":", "if", "len", "(", "line", ")", "==", "0", ":", "continue", "line", "=", "serialize", "(", "tokenize_line", "(", "line", ")", ")", "ret", ".", "append", "(", "line", ")", "return", "\"\\n\"", ".", "join", "(", "ret", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
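remove_comments composes the two helpers above over whole files; a sketch:

from blockstack_zones.parse_zone_file import remove_comments

text = '$ORIGIN example.com. ; the zone root\n@ A 127.0.0.1'
print(remove_comments(text))
# The '; the zone root' comment is dropped; each surviving line is
# re-serialized through tokenize_line/serialize.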
test
flatten
Flatten the text: * make sure each record is on one line. * remove parentheses
blockstack_zones/parse_zone_file.py
def flatten(text): """ Flatten the text: * make sure each record is on one line. * remove parenthesis """ lines = text.split("\n") # tokens: sequence of non-whitespace separated by '' where a newline was tokens = [] for l in lines: if len(l) == 0: continue l = l.replace("\t", " ") tokens += filter(lambda x: len(x) > 0, l.split(" ")) + [''] # find (...) and turn it into a single line ("capture" it) capturing = False captured = [] flattened = [] while len(tokens) > 0: tok = tokens.pop(0) if not capturing and len(tok) == 0: # normal end-of-line if len(captured) > 0: flattened.append(" ".join(captured)) captured = [] continue if tok.startswith("("): # begin grouping tok = tok.lstrip("(") capturing = True if capturing and tok.endswith(")"): # end grouping. next end-of-line will turn this sequence into a flat line tok = tok.rstrip(")") capturing = False captured.append(tok) return "\n".join(flattened)
def flatten(text): """ Flatten the text: * make sure each record is on one line. * remove parenthesis """ lines = text.split("\n") # tokens: sequence of non-whitespace separated by '' where a newline was tokens = [] for l in lines: if len(l) == 0: continue l = l.replace("\t", " ") tokens += filter(lambda x: len(x) > 0, l.split(" ")) + [''] # find (...) and turn it into a single line ("capture" it) capturing = False captured = [] flattened = [] while len(tokens) > 0: tok = tokens.pop(0) if not capturing and len(tok) == 0: # normal end-of-line if len(captured) > 0: flattened.append(" ".join(captured)) captured = [] continue if tok.startswith("("): # begin grouping tok = tok.lstrip("(") capturing = True if capturing and tok.endswith(")"): # end grouping. next end-of-line will turn this sequence into a flat line tok = tok.rstrip(")") capturing = False captured.append(tok) return "\n".join(flattened)
[ "Flatten", "the", "text", ":", "*", "make", "sure", "each", "record", "is", "on", "one", "line", ".", "*", "remove", "parenthesis" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L198-L241
[ "def", "flatten", "(", "text", ")", ":", "lines", "=", "text", ".", "split", "(", "\"\\n\"", ")", "# tokens: sequence of non-whitespace separated by '' where a newline was", "tokens", "=", "[", "]", "for", "l", "in", "lines", ":", "if", "len", "(", "l", ")", "==", "0", ":", "continue", "l", "=", "l", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", "tokens", "+=", "filter", "(", "lambda", "x", ":", "len", "(", "x", ")", ">", "0", ",", "l", ".", "split", "(", "\" \"", ")", ")", "+", "[", "''", "]", "# find (...) and turn it into a single line (\"capture\" it)", "capturing", "=", "False", "captured", "=", "[", "]", "flattened", "=", "[", "]", "while", "len", "(", "tokens", ")", ">", "0", ":", "tok", "=", "tokens", ".", "pop", "(", "0", ")", "if", "not", "capturing", "and", "len", "(", "tok", ")", "==", "0", ":", "# normal end-of-line", "if", "len", "(", "captured", ")", ">", "0", ":", "flattened", ".", "append", "(", "\" \"", ".", "join", "(", "captured", ")", ")", "captured", "=", "[", "]", "continue", "if", "tok", ".", "startswith", "(", "\"(\"", ")", ":", "# begin grouping", "tok", "=", "tok", ".", "lstrip", "(", "\"(\"", ")", "capturing", "=", "True", "if", "capturing", "and", "tok", ".", "endswith", "(", "\")\"", ")", ":", "# end grouping. next end-of-line will turn this sequence into a flat line", "tok", "=", "tok", ".", "rstrip", "(", "\")\"", ")", "capturing", "=", "False", "captured", ".", "append", "(", "tok", ")", "return", "\"\\n\"", ".", "join", "(", "flattened", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
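flatten folds parenthesized multi-line records (typically SOA) onto one line. Note that the `filter(...) + ['']` expression only works on Python 2, where filter returns a list; this is consistent with the xrange/iteritems usage elsewhere in these records. A sketch:

from blockstack_zones.parse_zone_file import flatten

text = "@ SOA ns1.example.com. admin.example.com. (\n2024010101\n3600 600 86400 3600 )"
print(flatten(text))
# The SOA record comes back on one line; stray spaces are left where the
# parentheses were, which the later tokenize pass collapses.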
test
remove_class
Remove the CLASS from each DNS record, if present. The only class that gets used today (for all intents and purposes) is 'IN'.
blockstack_zones/parse_zone_file.py
def remove_class(text): """ Remove the CLASS from each DNS record, if present. The only class that gets used today (for all intents and purposes) is 'IN'. """ # see RFC 1035 for list of classes lines = text.split("\n") ret = [] for line in lines: tokens = tokenize_line(line) tokens_upper = [t.upper() for t in tokens] if "IN" in tokens_upper: tokens.remove("IN") elif "CS" in tokens_upper: tokens.remove("CS") elif "CH" in tokens_upper: tokens.remove("CH") elif "HS" in tokens_upper: tokens.remove("HS") ret.append(serialize(tokens)) return "\n".join(ret)
def remove_class(text): """ Remove the CLASS from each DNS record, if present. The only class that gets used today (for all intents and purposes) is 'IN'. """ # see RFC 1035 for list of classes lines = text.split("\n") ret = [] for line in lines: tokens = tokenize_line(line) tokens_upper = [t.upper() for t in tokens] if "IN" in tokens_upper: tokens.remove("IN") elif "CS" in tokens_upper: tokens.remove("CS") elif "CH" in tokens_upper: tokens.remove("CH") elif "HS" in tokens_upper: tokens.remove("HS") ret.append(serialize(tokens)) return "\n".join(ret)
[ "Remove", "the", "CLASS", "from", "each", "DNS", "record", "if", "present", ".", "The", "only", "class", "that", "gets", "used", "today", "(", "for", "all", "intents", "and", "purposes", ")", "is", "IN", "." ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L244-L269
[ "def", "remove_class", "(", "text", ")", ":", "# see RFC 1035 for list of classes", "lines", "=", "text", ".", "split", "(", "\"\\n\"", ")", "ret", "=", "[", "]", "for", "line", "in", "lines", ":", "tokens", "=", "tokenize_line", "(", "line", ")", "tokens_upper", "=", "[", "t", ".", "upper", "(", ")", "for", "t", "in", "tokens", "]", "if", "\"IN\"", "in", "tokens_upper", ":", "tokens", ".", "remove", "(", "\"IN\"", ")", "elif", "\"CS\"", "in", "tokens_upper", ":", "tokens", ".", "remove", "(", "\"CS\"", ")", "elif", "\"CH\"", "in", "tokens_upper", ":", "tokens", ".", "remove", "(", "\"CH\"", ")", "elif", "\"HS\"", "in", "tokens_upper", ":", "tokens", ".", "remove", "(", "\"HS\"", ")", "ret", ".", "append", "(", "serialize", "(", "tokens", ")", ")", "return", "\"\\n\"", ".", "join", "(", "ret", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
add_default_name
Go through each line of the text and ensure that a name is defined. Use '@' if there is none.
blockstack_zones/parse_zone_file.py
def add_default_name(text): """ Go through each line of the text and ensure that a name is defined. Use '@' if there is none. """ global SUPPORTED_RECORDS lines = text.split("\n") ret = [] for line in lines: tokens = tokenize_line(line) if len(tokens) == 0: continue if tokens[0] in SUPPORTED_RECORDS and not tokens[0].startswith("$"): # add back the name tokens = ['@'] + tokens ret.append(serialize(tokens)) return "\n".join(ret)
def add_default_name(text): """ Go through each line of the text and ensure that a name is defined. Use '@' if there is none. """ global SUPPORTED_RECORDS lines = text.split("\n") ret = [] for line in lines: tokens = tokenize_line(line) if len(tokens) == 0: continue if tokens[0] in SUPPORTED_RECORDS and not tokens[0].startswith("$"): # add back the name tokens = ['@'] + tokens ret.append(serialize(tokens)) return "\n".join(ret)
[ "Go", "through", "each", "line", "of", "the", "text", "and", "ensure", "that", "a", "name", "is", "defined", ".", "Use" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L272-L292
[ "def", "add_default_name", "(", "text", ")", ":", "global", "SUPPORTED_RECORDS", "lines", "=", "text", ".", "split", "(", "\"\\n\"", ")", "ret", "=", "[", "]", "for", "line", "in", "lines", ":", "tokens", "=", "tokenize_line", "(", "line", ")", "if", "len", "(", "tokens", ")", "==", "0", ":", "continue", "if", "tokens", "[", "0", "]", "in", "SUPPORTED_RECORDS", "and", "not", "tokens", "[", "0", "]", ".", "startswith", "(", "\"$\"", ")", ":", "# add back the name", "tokens", "=", "[", "'@'", "]", "+", "tokens", "ret", ".", "append", "(", "serialize", "(", "tokens", ")", ")", "return", "\"\\n\"", ".", "join", "(", "ret", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
parse_line
Given the parser, a capitalized list of a line's tokens, and the current set of records parsed so far, parse the line into a dictionary. Return the new set of parsed records. Raise an exception on error.
blockstack_zones/parse_zone_file.py
def parse_line(parser, record_token, parsed_records): """ Given the parser, capitalized list of a line's tokens, and the current set of records parsed so far, parse it into a dictionary. Return the new set of parsed records. Raise an exception on error. """ global SUPPORTED_RECORDS line = " ".join(record_token) # match parser to record type if len(record_token) >= 2 and record_token[1] in SUPPORTED_RECORDS: # with no ttl record_token = [record_token[1]] + record_token elif len(record_token) >= 3 and record_token[2] in SUPPORTED_RECORDS: # with ttl record_token = [record_token[2]] + record_token if record_token[0] == "TXT": record_token = record_token[:2] + ["--ttl"] + record_token[2:] try: rr, unmatched = parser.parse_known_args(record_token) assert len(unmatched) == 0, "Unmatched fields: %s" % unmatched except (SystemExit, AssertionError, InvalidLineException): # invalid argument raise InvalidLineException(line) record_dict = rr.__dict__ if record_token[0] == "TXT" and len(record_dict['txt']) == 1: record_dict['txt'] = record_dict['txt'][0] # what kind of record? including origin and ttl record_type = None for key in record_dict.keys(): if key in SUPPORTED_RECORDS and (key.startswith("$") or record_dict[key] == key): record_type = key if record_dict[key] == key: del record_dict[key] break assert record_type is not None, "Unknown record type in %s" % rr # clean fields for field in record_dict.keys(): if record_dict[field] is None: del record_dict[field] current_origin = record_dict.get('$ORIGIN', parsed_records.get('$ORIGIN', None)) # special record-specific fix-ups if record_type == 'PTR': record_dict['fullname'] = record_dict['name'] + '.' + current_origin if len(record_dict) > 0: if record_type.startswith("$"): # put the value directly record_dict_key = record_type.lower() parsed_records[record_dict_key] = record_dict[record_type] else: record_dict_key = record_type.lower() parsed_records[record_dict_key].append(record_dict) return parsed_records
def parse_line(parser, record_token, parsed_records): """ Given the parser, capitalized list of a line's tokens, and the current set of records parsed so far, parse it into a dictionary. Return the new set of parsed records. Raise an exception on error. """ global SUPPORTED_RECORDS line = " ".join(record_token) # match parser to record type if len(record_token) >= 2 and record_token[1] in SUPPORTED_RECORDS: # with no ttl record_token = [record_token[1]] + record_token elif len(record_token) >= 3 and record_token[2] in SUPPORTED_RECORDS: # with ttl record_token = [record_token[2]] + record_token if record_token[0] == "TXT": record_token = record_token[:2] + ["--ttl"] + record_token[2:] try: rr, unmatched = parser.parse_known_args(record_token) assert len(unmatched) == 0, "Unmatched fields: %s" % unmatched except (SystemExit, AssertionError, InvalidLineException): # invalid argument raise InvalidLineException(line) record_dict = rr.__dict__ if record_token[0] == "TXT" and len(record_dict['txt']) == 1: record_dict['txt'] = record_dict['txt'][0] # what kind of record? including origin and ttl record_type = None for key in record_dict.keys(): if key in SUPPORTED_RECORDS and (key.startswith("$") or record_dict[key] == key): record_type = key if record_dict[key] == key: del record_dict[key] break assert record_type is not None, "Unknown record type in %s" % rr # clean fields for field in record_dict.keys(): if record_dict[field] is None: del record_dict[field] current_origin = record_dict.get('$ORIGIN', parsed_records.get('$ORIGIN', None)) # special record-specific fix-ups if record_type == 'PTR': record_dict['fullname'] = record_dict['name'] + '.' + current_origin if len(record_dict) > 0: if record_type.startswith("$"): # put the value directly record_dict_key = record_type.lower() parsed_records[record_dict_key] = record_dict[record_type] else: record_dict_key = record_type.lower() parsed_records[record_dict_key].append(record_dict) return parsed_records
[ "Given", "the", "parser", "capitalized", "list", "of", "a", "line", "s", "tokens", "and", "the", "current", "set", "of", "records", "parsed", "so", "far", "parse", "it", "into", "a", "dictionary", "." ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L295-L359
[ "def", "parse_line", "(", "parser", ",", "record_token", ",", "parsed_records", ")", ":", "global", "SUPPORTED_RECORDS", "line", "=", "\" \"", ".", "join", "(", "record_token", ")", "# match parser to record type", "if", "len", "(", "record_token", ")", ">=", "2", "and", "record_token", "[", "1", "]", "in", "SUPPORTED_RECORDS", ":", "# with no ttl", "record_token", "=", "[", "record_token", "[", "1", "]", "]", "+", "record_token", "elif", "len", "(", "record_token", ")", ">=", "3", "and", "record_token", "[", "2", "]", "in", "SUPPORTED_RECORDS", ":", "# with ttl", "record_token", "=", "[", "record_token", "[", "2", "]", "]", "+", "record_token", "if", "record_token", "[", "0", "]", "==", "\"TXT\"", ":", "record_token", "=", "record_token", "[", ":", "2", "]", "+", "[", "\"--ttl\"", "]", "+", "record_token", "[", "2", ":", "]", "try", ":", "rr", ",", "unmatched", "=", "parser", ".", "parse_known_args", "(", "record_token", ")", "assert", "len", "(", "unmatched", ")", "==", "0", ",", "\"Unmatched fields: %s\"", "%", "unmatched", "except", "(", "SystemExit", ",", "AssertionError", ",", "InvalidLineException", ")", ":", "# invalid argument ", "raise", "InvalidLineException", "(", "line", ")", "record_dict", "=", "rr", ".", "__dict__", "if", "record_token", "[", "0", "]", "==", "\"TXT\"", "and", "len", "(", "record_dict", "[", "'txt'", "]", ")", "==", "1", ":", "record_dict", "[", "'txt'", "]", "=", "record_dict", "[", "'txt'", "]", "[", "0", "]", "# what kind of record? including origin and ttl", "record_type", "=", "None", "for", "key", "in", "record_dict", ".", "keys", "(", ")", ":", "if", "key", "in", "SUPPORTED_RECORDS", "and", "(", "key", ".", "startswith", "(", "\"$\"", ")", "or", "record_dict", "[", "key", "]", "==", "key", ")", ":", "record_type", "=", "key", "if", "record_dict", "[", "key", "]", "==", "key", ":", "del", "record_dict", "[", "key", "]", "break", "assert", "record_type", "is", "not", "None", ",", "\"Unknown record type in %s\"", "%", "rr", "# clean fields", "for", "field", "in", "record_dict", ".", "keys", "(", ")", ":", "if", "record_dict", "[", "field", "]", "is", "None", ":", "del", "record_dict", "[", "field", "]", "current_origin", "=", "record_dict", ".", "get", "(", "'$ORIGIN'", ",", "parsed_records", ".", "get", "(", "'$ORIGIN'", ",", "None", ")", ")", "# special record-specific fix-ups", "if", "record_type", "==", "'PTR'", ":", "record_dict", "[", "'fullname'", "]", "=", "record_dict", "[", "'name'", "]", "+", "'.'", "+", "current_origin", "if", "len", "(", "record_dict", ")", ">", "0", ":", "if", "record_type", ".", "startswith", "(", "\"$\"", ")", ":", "# put the value directly", "record_dict_key", "=", "record_type", ".", "lower", "(", ")", "parsed_records", "[", "record_dict_key", "]", "=", "record_dict", "[", "record_type", "]", "else", ":", "record_dict_key", "=", "record_type", ".", "lower", "(", ")", "parsed_records", "[", "record_dict_key", "]", ".", "append", "(", "record_dict", ")", "return", "parsed_records" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
parse_lines
Parse a zonefile into a dict. @text must be flattened--each record must be on one line. Also, all comments must be removed.
blockstack_zones/parse_zone_file.py
def parse_lines(text, ignore_invalid=False): """ Parse a zonefile into a dict. @text must be flattened--each record must be on one line. Also, all comments must be removed. """ json_zone_file = defaultdict(list) record_lines = text.split("\n") parser = make_parser() for record_line in record_lines: record_token = tokenize_line(record_line) try: json_zone_file = parse_line(parser, record_token, json_zone_file) except InvalidLineException: if ignore_invalid: continue else: raise return json_zone_file
def parse_lines(text, ignore_invalid=False): """ Parse a zonefile into a dict. @text must be flattened--each record must be on one line. Also, all comments must be removed. """ json_zone_file = defaultdict(list) record_lines = text.split("\n") parser = make_parser() for record_line in record_lines: record_token = tokenize_line(record_line) try: json_zone_file = parse_line(parser, record_token, json_zone_file) except InvalidLineException: if ignore_invalid: continue else: raise return json_zone_file
[ "Parse", "a", "zonefile", "into", "a", "dict", "." ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L362-L382
[ "def", "parse_lines", "(", "text", ",", "ignore_invalid", "=", "False", ")", ":", "json_zone_file", "=", "defaultdict", "(", "list", ")", "record_lines", "=", "text", ".", "split", "(", "\"\\n\"", ")", "parser", "=", "make_parser", "(", ")", "for", "record_line", "in", "record_lines", ":", "record_token", "=", "tokenize_line", "(", "record_line", ")", "try", ":", "json_zone_file", "=", "parse_line", "(", "parser", ",", "record_token", ",", "json_zone_file", ")", "except", "InvalidLineException", ":", "if", "ignore_invalid", ":", "continue", "else", ":", "raise", "return", "json_zone_file" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
parse_zone_file
Parse a zonefile into a dict
blockstack_zones/parse_zone_file.py
def parse_zone_file(text, ignore_invalid=False): """ Parse a zonefile into a dict """ text = remove_comments(text) text = flatten(text) text = remove_class(text) text = add_default_name(text) json_zone_file = parse_lines(text, ignore_invalid=ignore_invalid) return json_zone_file
def parse_zone_file(text, ignore_invalid=False): """ Parse a zonefile into a dict """ text = remove_comments(text) text = flatten(text) text = remove_class(text) text = add_default_name(text) json_zone_file = parse_lines(text, ignore_invalid=ignore_invalid) return json_zone_file
[ "Parse", "a", "zonefile", "into", "a", "dict" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/parse_zone_file.py#L385-L394
[ "def", "parse_zone_file", "(", "text", ",", "ignore_invalid", "=", "False", ")", ":", "text", "=", "remove_comments", "(", "text", ")", "text", "=", "flatten", "(", "text", ")", "text", "=", "remove_class", "(", "text", ")", "text", "=", "add_default_name", "(", "text", ")", "json_zone_file", "=", "parse_lines", "(", "text", ",", "ignore_invalid", "=", "ignore_invalid", ")", "return", "json_zone_file" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
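An end-to-end sketch for parse_zone_file, assuming blockstack_zones is installed under Python 2 (see the xrange/iteritems usage in these records):

from blockstack_zones.parse_zone_file import parse_zone_file

zone_text = """
$ORIGIN example.com.
$TTL 3600
@ IN A 127.0.0.1
www IN CNAME @
"""
data = parse_zone_file(zone_text)
print(data['$origin'])  # 'example.com.'
print(data['$ttl'])     # 3600
print(data['a'])        # [{'name': '@', 'ip': '127.0.0.1'}]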
test
make_zone_file
Generate the DNS zonefile, given a json-encoded description of the zone file (@json_zone_file) and the template to fill in (@template) json_zone_file = { "$origin": origin server, "$ttl": default time-to-live, "soa": [ soa records ], "ns": [ ns records ], "a": [ a records ], "aaaa": [ aaaa records ] "cname": [ cname records ] "alias": [ alias records ] "mx": [ mx records ] "ptr": [ ptr records ] "txt": [ txt records ] "srv": [ srv records ] "spf": [ spf records ] "uri": [ uri records ] }
blockstack_zones/make_zone_file.py
def make_zone_file(json_zone_file_input, origin=None, ttl=None, template=None): """ Generate the DNS zonefile, given a json-encoded description of the zone file (@json_zone_file) and the template to fill in (@template) json_zone_file = { "$origin": origin server, "$ttl": default time-to-live, "soa": [ soa records ], "ns": [ ns records ], "a": [ a records ], "aaaa": [ aaaa records ] "cname": [ cname records ] "alias": [ alias records ] "mx": [ mx records ] "ptr": [ ptr records ] "txt": [ txt records ] "srv": [ srv records ] "spf": [ spf records ] "uri": [ uri records ] } """ if template is None: template = DEFAULT_TEMPLATE[:] # careful... json_zone_file = copy.deepcopy(json_zone_file_input) if origin is not None: json_zone_file['$origin'] = origin if ttl is not None: json_zone_file['$ttl'] = ttl soa_records = [json_zone_file.get('soa')] if json_zone_file.get('soa') else None zone_file = template zone_file = process_origin(json_zone_file.get('$origin', None), zone_file) zone_file = process_ttl(json_zone_file.get('$ttl', None), zone_file) zone_file = process_soa(soa_records, zone_file) zone_file = process_ns(json_zone_file.get('ns', None), zone_file) zone_file = process_a(json_zone_file.get('a', None), zone_file) zone_file = process_aaaa(json_zone_file.get('aaaa', None), zone_file) zone_file = process_cname(json_zone_file.get('cname', None), zone_file) zone_file = process_alias(json_zone_file.get('alias', None), zone_file) zone_file = process_mx(json_zone_file.get('mx', None), zone_file) zone_file = process_ptr(json_zone_file.get('ptr', None), zone_file) zone_file = process_txt(json_zone_file.get('txt', None), zone_file) zone_file = process_srv(json_zone_file.get('srv', None), zone_file) zone_file = process_spf(json_zone_file.get('spf', None), zone_file) zone_file = process_uri(json_zone_file.get('uri', None), zone_file) # remove newlines, but terminate with one zone_file = "\n".join( filter( lambda l: len(l.strip()) > 0, [tl.strip() for tl in zone_file.split("\n")] ) ) + "\n" return zone_file
def make_zone_file(json_zone_file_input, origin=None, ttl=None, template=None): """ Generate the DNS zonefile, given a json-encoded description of the zone file (@json_zone_file) and the template to fill in (@template) json_zone_file = { "$origin": origin server, "$ttl": default time-to-live, "soa": [ soa records ], "ns": [ ns records ], "a": [ a records ], "aaaa": [ aaaa records ] "cname": [ cname records ] "alias": [ alias records ] "mx": [ mx records ] "ptr": [ ptr records ] "txt": [ txt records ] "srv": [ srv records ] "spf": [ spf records ] "uri": [ uri records ] } """ if template is None: template = DEFAULT_TEMPLATE[:] # careful... json_zone_file = copy.deepcopy(json_zone_file_input) if origin is not None: json_zone_file['$origin'] = origin if ttl is not None: json_zone_file['$ttl'] = ttl soa_records = [json_zone_file.get('soa')] if json_zone_file.get('soa') else None zone_file = template zone_file = process_origin(json_zone_file.get('$origin', None), zone_file) zone_file = process_ttl(json_zone_file.get('$ttl', None), zone_file) zone_file = process_soa(soa_records, zone_file) zone_file = process_ns(json_zone_file.get('ns', None), zone_file) zone_file = process_a(json_zone_file.get('a', None), zone_file) zone_file = process_aaaa(json_zone_file.get('aaaa', None), zone_file) zone_file = process_cname(json_zone_file.get('cname', None), zone_file) zone_file = process_alias(json_zone_file.get('alias', None), zone_file) zone_file = process_mx(json_zone_file.get('mx', None), zone_file) zone_file = process_ptr(json_zone_file.get('ptr', None), zone_file) zone_file = process_txt(json_zone_file.get('txt', None), zone_file) zone_file = process_srv(json_zone_file.get('srv', None), zone_file) zone_file = process_spf(json_zone_file.get('spf', None), zone_file) zone_file = process_uri(json_zone_file.get('uri', None), zone_file) # remove newlines, but terminate with one zone_file = "\n".join( filter( lambda l: len(l.strip()) > 0, [tl.strip() for tl in zone_file.split("\n")] ) ) + "\n" return zone_file
[ "Generate", "the", "DNS", "zonefile", "given", "a", "json", "-", "encoded", "description", "of", "the", "zone", "file", "(", "@json_zone_file", ")", "and", "the", "template", "to", "fill", "in", "(", "@template", ")" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/make_zone_file.py#L10-L69
[ "def", "make_zone_file", "(", "json_zone_file_input", ",", "origin", "=", "None", ",", "ttl", "=", "None", ",", "template", "=", "None", ")", ":", "if", "template", "is", "None", ":", "template", "=", "DEFAULT_TEMPLATE", "[", ":", "]", "# careful...", "json_zone_file", "=", "copy", ".", "deepcopy", "(", "json_zone_file_input", ")", "if", "origin", "is", "not", "None", ":", "json_zone_file", "[", "'$origin'", "]", "=", "origin", "if", "ttl", "is", "not", "None", ":", "json_zone_file", "[", "'$ttl'", "]", "=", "ttl", "soa_records", "=", "[", "json_zone_file", ".", "get", "(", "'soa'", ")", "]", "if", "json_zone_file", ".", "get", "(", "'soa'", ")", "else", "None", "zone_file", "=", "template", "zone_file", "=", "process_origin", "(", "json_zone_file", ".", "get", "(", "'$origin'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_ttl", "(", "json_zone_file", ".", "get", "(", "'$ttl'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_soa", "(", "soa_records", ",", "zone_file", ")", "zone_file", "=", "process_ns", "(", "json_zone_file", ".", "get", "(", "'ns'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_a", "(", "json_zone_file", ".", "get", "(", "'a'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_aaaa", "(", "json_zone_file", ".", "get", "(", "'aaaa'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_cname", "(", "json_zone_file", ".", "get", "(", "'cname'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_alias", "(", "json_zone_file", ".", "get", "(", "'alias'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_mx", "(", "json_zone_file", ".", "get", "(", "'mx'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_ptr", "(", "json_zone_file", ".", "get", "(", "'ptr'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_txt", "(", "json_zone_file", ".", "get", "(", "'txt'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_srv", "(", "json_zone_file", ".", "get", "(", "'srv'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_spf", "(", "json_zone_file", ".", "get", "(", "'spf'", ",", "None", ")", ",", "zone_file", ")", "zone_file", "=", "process_uri", "(", "json_zone_file", ".", "get", "(", "'uri'", ",", "None", ")", ",", "zone_file", ")", "# remove newlines, but terminate with one", "zone_file", "=", "\"\\n\"", ".", "join", "(", "filter", "(", "lambda", "l", ":", "len", "(", "l", ".", "strip", "(", ")", ")", ">", "0", ",", "[", "tl", ".", "strip", "(", ")", "for", "tl", "in", "zone_file", ".", "split", "(", "\"\\n\"", ")", "]", ")", ")", "+", "\"\\n\"", "return", "zone_file" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
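And the inverse direction; a sketch feeding a parsed-style structure back through make_zone_file (DEFAULT_TEMPLATE ships with the same package):

from blockstack_zones.make_zone_file import make_zone_file

zone = make_zone_file(
    {"a": [{"name": "@", "ip": "127.0.0.1"}]},
    origin="example.com.",
    ttl=3600,
)
print(zone)
# Roughly:
# $ORIGIN example.com.
# $TTL 3600
# @ A 127.0.0.1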
test
process_origin
Replace {$origin} in template with a serialized $ORIGIN record
blockstack_zones/record_processors.py
def process_origin(data, template): """ Replace {$origin} in template with a serialized $ORIGIN record """ record = "" if data is not None: record += "$ORIGIN %s" % data return template.replace("{$origin}", record)
def process_origin(data, template): """ Replace {$origin} in template with a serialized $ORIGIN record """ record = "" if data is not None: record += "$ORIGIN %s" % data return template.replace("{$origin}", record)
[ "Replace", "{", "$origin", "}", "in", "template", "with", "a", "serialized", "$ORIGIN", "record" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L4-L12
[ "def", "process_origin", "(", "data", ",", "template", ")", ":", "record", "=", "\"\"", "if", "data", "is", "not", "None", ":", "record", "+=", "\"$ORIGIN %s\"", "%", "data", "return", "template", ".", "replace", "(", "\"{$origin}\"", ",", "record", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
process_ttl
Replace {$ttl} in template with a serialized $TTL record
blockstack_zones/record_processors.py
def process_ttl(data, template): """ Replace {$ttl} in template with a serialized $TTL record """ record = "" if data is not None: record += "$TTL %s" % data return template.replace("{$ttl}", record)
def process_ttl(data, template): """ Replace {$ttl} in template with a serialized $TTL record """ record = "" if data is not None: record += "$TTL %s" % data return template.replace("{$ttl}", record)
[ "Replace", "{", "$ttl", "}", "in", "template", "with", "a", "serialized", "$TTL", "record" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L15-L23
[ "def", "process_ttl", "(", "data", ",", "template", ")", ":", "record", "=", "\"\"", "if", "data", "is", "not", "None", ":", "record", "+=", "\"$TTL %s\"", "%", "data", "return", "template", ".", "replace", "(", "\"{$ttl}\"", ",", "record", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
process_soa
Replace {SOA} in template with a set of serialized SOA records
blockstack_zones/record_processors.py
def process_soa(data, template): """ Replace {SOA} in template with a set of serialized SOA records """ record = template[:] if data is not None: assert len(data) == 1, "Only support one SOA RR at this time" data = data[0] soadat = [] domain_fields = ['mname', 'rname'] param_fields = ['serial', 'refresh', 'retry', 'expire', 'minimum'] for f in domain_fields + param_fields: assert f in data.keys(), "Missing '%s' (%s)" % (f, data) data_name = str(data.get('name', '@')) soadat.append(data_name) if data.get('ttl') is not None: soadat.append( str(data['ttl']) ) soadat.append("IN") soadat.append("SOA") for key in domain_fields: value = str(data[key]) soadat.append(value) soadat.append("(") for key in param_fields: value = str(data[key]) soadat.append(value) soadat.append(")") soa_txt = " ".join(soadat) record = record.replace("{soa}", soa_txt) else: # clear all SOA fields record = record.replace("{soa}", "") return record
def process_soa(data, template): """ Replace {SOA} in template with a set of serialized SOA records """ record = template[:] if data is not None: assert len(data) == 1, "Only support one SOA RR at this time" data = data[0] soadat = [] domain_fields = ['mname', 'rname'] param_fields = ['serial', 'refresh', 'retry', 'expire', 'minimum'] for f in domain_fields + param_fields: assert f in data.keys(), "Missing '%s' (%s)" % (f, data) data_name = str(data.get('name', '@')) soadat.append(data_name) if data.get('ttl') is not None: soadat.append( str(data['ttl']) ) soadat.append("IN") soadat.append("SOA") for key in domain_fields: value = str(data[key]) soadat.append(value) soadat.append("(") for key in param_fields: value = str(data[key]) soadat.append(value) soadat.append(")") soa_txt = " ".join(soadat) record = record.replace("{soa}", soa_txt) else: # clear all SOA fields record = record.replace("{soa}", "") return record
[ "Replace", "{", "SOA", "}", "in", "template", "with", "a", "set", "of", "serialized", "SOA", "records" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L26-L72
[ "def", "process_soa", "(", "data", ",", "template", ")", ":", "record", "=", "template", "[", ":", "]", "if", "data", "is", "not", "None", ":", "assert", "len", "(", "data", ")", "==", "1", ",", "\"Only support one SOA RR at this time\"", "data", "=", "data", "[", "0", "]", "soadat", "=", "[", "]", "domain_fields", "=", "[", "'mname'", ",", "'rname'", "]", "param_fields", "=", "[", "'serial'", ",", "'refresh'", ",", "'retry'", ",", "'expire'", ",", "'minimum'", "]", "for", "f", "in", "domain_fields", "+", "param_fields", ":", "assert", "f", "in", "data", ".", "keys", "(", ")", ",", "\"Missing '%s' (%s)\"", "%", "(", "f", ",", "data", ")", "data_name", "=", "str", "(", "data", ".", "get", "(", "'name'", ",", "'@'", ")", ")", "soadat", ".", "append", "(", "data_name", ")", "if", "data", ".", "get", "(", "'ttl'", ")", "is", "not", "None", ":", "soadat", ".", "append", "(", "str", "(", "data", "[", "'ttl'", "]", ")", ")", "soadat", ".", "append", "(", "\"IN\"", ")", "soadat", ".", "append", "(", "\"SOA\"", ")", "for", "key", "in", "domain_fields", ":", "value", "=", "str", "(", "data", "[", "key", "]", ")", "soadat", ".", "append", "(", "value", ")", "soadat", ".", "append", "(", "\"(\"", ")", "for", "key", "in", "param_fields", ":", "value", "=", "str", "(", "data", "[", "key", "]", ")", "soadat", ".", "append", "(", "value", ")", "soadat", ".", "append", "(", "\")\"", ")", "soa_txt", "=", "\" \"", ".", "join", "(", "soadat", ")", "record", "=", "record", ".", "replace", "(", "\"{soa}\"", ",", "soa_txt", ")", "else", ":", "# clear all SOA fields ", "record", "=", "record", ".", "replace", "(", "\"{soa}\"", ",", "\"\"", ")", "return", "record" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
quote_field
Quote a field in a list of DNS records. Return the new data records.
blockstack_zones/record_processors.py
def quote_field(data, field): """ Quote a field in a list of DNS records. Return the new data records. """ if data is None: return None data_dup = copy.deepcopy(data) for i in xrange(0, len(data_dup)): data_dup[i][field] = '"%s"' % data_dup[i][field] data_dup[i][field] = data_dup[i][field].replace(";", "\;") return data_dup
def quote_field(data, field): """ Quote a field in a list of DNS records. Return the new data records. """ if data is None: return None data_dup = copy.deepcopy(data) for i in xrange(0, len(data_dup)): data_dup[i][field] = '"%s"' % data_dup[i][field] data_dup[i][field] = data_dup[i][field].replace(";", "\;") return data_dup
[ "Quote", "a", "field", "in", "a", "list", "of", "DNS", "records", ".", "Return", "the", "new", "data", "records", "." ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L75-L88
[ "def", "quote_field", "(", "data", ",", "field", ")", ":", "if", "data", "is", "None", ":", "return", "None", "data_dup", "=", "copy", ".", "deepcopy", "(", "data", ")", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "data_dup", ")", ")", ":", "data_dup", "[", "i", "]", "[", "field", "]", "=", "'\"%s\"'", "%", "data_dup", "[", "i", "]", "[", "field", "]", "data_dup", "[", "i", "]", "[", "field", "]", "=", "data_dup", "[", "i", "]", "[", "field", "]", ".", "replace", "(", "\";\"", ",", "\"\\;\"", ")", "return", "data_dup" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
process_rr
Meta method: Replace $field in template with the serialized $record_type records, using @record_key from each datum.
blockstack_zones/record_processors.py
def process_rr(data, record_type, record_keys, field, template): """ Meta method: Replace $field in template with the serialized $record_type records, using @record_key from each datum. """ if data is None: return template.replace(field, "") if type(record_keys) == list: pass elif type(record_keys) == str: record_keys = [record_keys] else: raise ValueError("Invalid record keys") assert type(data) == list, "Data must be a list" record = "" for i in xrange(0, len(data)): for record_key in record_keys: assert record_key in data[i].keys(), "Missing '%s'" % record_key record_data = [] record_data.append( str(data[i].get('name', '@')) ) if data[i].get('ttl') is not None: record_data.append( str(data[i]['ttl']) ) record_data.append(record_type) record_data += [str(data[i][record_key]) for record_key in record_keys] record += " ".join(record_data) + "\n" return template.replace(field, record)
def process_rr(data, record_type, record_keys, field, template): """ Meta method: Replace $field in template with the serialized $record_type records, using @record_key from each datum. """ if data is None: return template.replace(field, "") if type(record_keys) == list: pass elif type(record_keys) == str: record_keys = [record_keys] else: raise ValueError("Invalid record keys") assert type(data) == list, "Data must be a list" record = "" for i in xrange(0, len(data)): for record_key in record_keys: assert record_key in data[i].keys(), "Missing '%s'" % record_key record_data = [] record_data.append( str(data[i].get('name', '@')) ) if data[i].get('ttl') is not None: record_data.append( str(data[i]['ttl']) ) record_data.append(record_type) record_data += [str(data[i][record_key]) for record_key in record_keys] record += " ".join(record_data) + "\n" return template.replace(field, record)
[ "Meta", "method", ":", "Replace", "$field", "in", "template", "with", "the", "serialized", "$record_type", "records", "using" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L91-L124
[ "def", "process_rr", "(", "data", ",", "record_type", ",", "record_keys", ",", "field", ",", "template", ")", ":", "if", "data", "is", "None", ":", "return", "template", ".", "replace", "(", "field", ",", "\"\"", ")", "if", "type", "(", "record_keys", ")", "==", "list", ":", "pass", "elif", "type", "(", "record_keys", ")", "==", "str", ":", "record_keys", "=", "[", "record_keys", "]", "else", ":", "raise", "ValueError", "(", "\"Invalid record keys\"", ")", "assert", "type", "(", "data", ")", "==", "list", ",", "\"Data must be a list\"", "record", "=", "\"\"", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "data", ")", ")", ":", "for", "record_key", "in", "record_keys", ":", "assert", "record_key", "in", "data", "[", "i", "]", ".", "keys", "(", ")", ",", "\"Missing '%s'\"", "%", "record_key", "record_data", "=", "[", "]", "record_data", ".", "append", "(", "str", "(", "data", "[", "i", "]", ".", "get", "(", "'name'", ",", "'@'", ")", ")", ")", "if", "data", "[", "i", "]", ".", "get", "(", "'ttl'", ")", "is", "not", "None", ":", "record_data", ".", "append", "(", "str", "(", "data", "[", "i", "]", "[", "'ttl'", "]", ")", ")", "record_data", ".", "append", "(", "record_type", ")", "record_data", "+=", "[", "str", "(", "data", "[", "i", "]", "[", "record_key", "]", ")", "for", "record_key", "in", "record_keys", "]", "record", "+=", "\" \"", ".", "join", "(", "record_data", ")", "+", "\"\\n\"", "return", "template", ".", "replace", "(", "field", ",", "record", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
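process_rr is the generic serializer behind most of the process_* helpers; a sketch of its contract (Python 2, given xrange):

from blockstack_zones.record_processors import process_rr

records = [{"name": "@", "ttl": 300, "preference": 10, "host": "mail.example.com."}]
print(process_rr(records, "MX", ["preference", "host"], "{mx}", "{mx}"))
# -> '@ 300 MX 10 mail.example.com.\n'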
test
process_txt
Replace {txt} in template with the serialized TXT records
blockstack_zones/record_processors.py
def process_txt(data, template): """ Replace {txt} in template with the serialized TXT records """ if data is None: to_process = None else: # quote txt to_process = copy.deepcopy(data) for datum in to_process: if isinstance(datum["txt"], list): datum["txt"] = " ".join(['"%s"' % entry.replace(";", "\;") for entry in datum["txt"]]) else: datum["txt"] = '"%s"' % datum["txt"].replace(";", "\;") return process_rr(to_process, "TXT", "txt", "{txt}", template)
def process_txt(data, template): """ Replace {txt} in template with the serialized TXT records """ if data is None: to_process = None else: # quote txt to_process = copy.deepcopy(data) for datum in to_process: if isinstance(datum["txt"], list): datum["txt"] = " ".join(['"%s"' % entry.replace(";", "\;") for entry in datum["txt"]]) else: datum["txt"] = '"%s"' % datum["txt"].replace(";", "\;") return process_rr(to_process, "TXT", "txt", "{txt}", template)
[ "Replace", "{", "txt", "}", "in", "template", "with", "the", "serialized", "TXT", "records" ]
blockstack/zone-file-py
python
https://github.com/blockstack/zone-file-py/blob/c1078c8c3c28f0881bc9a3af53d4972c4a6862d0/blockstack_zones/record_processors.py#L176-L191
[ "def", "process_txt", "(", "data", ",", "template", ")", ":", "if", "data", "is", "None", ":", "to_process", "=", "None", "else", ":", "# quote txt", "to_process", "=", "copy", ".", "deepcopy", "(", "data", ")", "for", "datum", "in", "to_process", ":", "if", "isinstance", "(", "datum", "[", "\"txt\"", "]", ",", "list", ")", ":", "datum", "[", "\"txt\"", "]", "=", "\" \"", ".", "join", "(", "[", "'\"%s\"'", "%", "entry", ".", "replace", "(", "\";\"", ",", "\"\\;\"", ")", "for", "entry", "in", "datum", "[", "\"txt\"", "]", "]", ")", "else", ":", "datum", "[", "\"txt\"", "]", "=", "'\"%s\"'", "%", "datum", "[", "\"txt\"", "]", ".", "replace", "(", "\";\"", ",", "\"\\;\"", ")", "return", "process_rr", "(", "to_process", ",", "\"TXT\"", ",", "\"txt\"", ",", "\"{txt}\"", ",", "template", ")" ]
c1078c8c3c28f0881bc9a3af53d4972c4a6862d0
test
parse_schema_string
Load and return a PySchema class from an avsc string
pyschema_extensions/avro_schema_parser.py
def parse_schema_string(schema_string): """ Load and return a PySchema class from an avsc string """ if isinstance(schema_string, str): schema_string = schema_string.decode("utf8") schema_struct = json.loads(schema_string) return AvroSchemaParser().parse_schema_struct(schema_struct)
def parse_schema_string(schema_string): """ Load and return a PySchema class from an avsc string """ if isinstance(schema_string, str): schema_string = schema_string.decode("utf8") schema_struct = json.loads(schema_string) return AvroSchemaParser().parse_schema_struct(schema_struct)
[ "Load", "and", "return", "a", "PySchema", "class", "from", "an", "avsc", "string" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/avro_schema_parser.py#L49-L57
[ "def", "parse_schema_string", "(", "schema_string", ")", ":", "if", "isinstance", "(", "schema_string", ",", "str", ")", ":", "schema_string", "=", "schema_string", ".", "decode", "(", "\"utf8\"", ")", "schema_struct", "=", "json", ".", "loads", "(", "schema_string", ")", "return", "AvroSchemaParser", "(", ")", ".", "parse_schema_struct", "(", "schema_struct", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
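A sketch for parse_schema_string, assuming pyschema is installed (Python 2, given the str.decode call in the record) and that this minimal avsc is representative:

from pyschema_extensions.avro_schema_parser import parse_schema_string

avsc = '{"type": "record", "name": "User", "fields": [{"name": "name", "type": "string"}]}'
User = parse_schema_string(avsc)
print(User._schema_name)  # 'User', per the _schema_name attribute used in _class_source below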
test
to_python_package
This function can be used to build a python package representation of pyschema classes. One module is created per namespace in a package matching the namespace hierarchy. Args: classes: A collection of classes to build the package from target_folder: Root folder of the package parent_package: Prepended on all import statements in order to support absolute imports. parent_package is not used when building the package file structure indent: Indent level. Defaults to 4 spaces
pyschema/source_generation.py
def to_python_package(classes, target_folder, parent_package=None, indent=DEFAULT_INDENT): ''' This function can be used to build a python package representation of pyschema classes. One module is created per namespace in a package matching the namespace hierarchy. Args: classes: A collection of classes to build the package from target_folder: Root folder of the package parent_package: Prepended on all import statements in order to support absolute imports. parent_package is not used when building the package file structure indent: Indent level. Defaults to 4 spaces ''' PackageBuilder(target_folder, parent_package, indent).from_classes_with_refs(classes)
def to_python_package(classes, target_folder, parent_package=None, indent=DEFAULT_INDENT): ''' This function can be used to build a python package representation of pyschema classes. One module is created per namespace in a package matching the namespace hierarchy. Args: classes: A collection of classes to build the package from target_folder: Root folder of the package parent_package: Prepended on all import statements in order to support absolute imports. parent_package is not used when building the package file structure indent: Indent level. Defaults to 4 spaces ''' PackageBuilder(target_folder, parent_package, indent).from_classes_with_refs(classes)
[ "This", "function", "can", "be", "used", "to", "build", "a", "python", "package", "representation", "of", "pyschema", "classes", ".", "One", "module", "is", "created", "per", "namespace", "in", "a", "package", "matching", "the", "namespace", "hierarchy", "." ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/source_generation.py#L158-L170
[ "def", "to_python_package", "(", "classes", ",", "target_folder", ",", "parent_package", "=", "None", ",", "indent", "=", "DEFAULT_INDENT", ")", ":", "PackageBuilder", "(", "target_folder", ",", "parent_package", ",", "indent", ")", ".", "from_classes_with_refs", "(", "classes", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
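A hedged sketch of calling to_python_package; the record class and the resulting file layout are assumptions based on the docstring, not verified output:

import pyschema
from pyschema.source_generation import to_python_package

class Song(pyschema.Record):
    _namespace = "music.catalogue"  # the namespace decides the module path inside the package
    title = pyschema.Text()

# expected to write generated_pkg/music/catalogue.py, with imports prefixed by the parent package
to_python_package([Song], "generated_pkg", parent_package="myproject.schemas")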
test
_class_source
Generate Python source code for one specific class Doesn't include or take into account any dependencies between record types
pyschema/source_generation.py
def _class_source(schema, indent): """Generate Python source code for one specific class Doesn't include or take into account any dependencies between record types """ def_pattern = ( "class {class_name}(pyschema.Record):\n" "{indent}# WARNING: This class was generated by pyschema.to_python_source\n" "{indent}# there is a risk that any modification made to this class will be overwritten\n" "{optional_namespace_def}" "{field_defs}\n" ) if hasattr(schema, '_namespace'): optional_namespace_def = "{indent}_namespace = {namespace!r}\n".format( namespace=schema._namespace, indent=indent) else: optional_namespace_def = "" field_defs = [ "{indent}{field_name} = {field!r}".format(field_name=field_name, field=field, indent=indent) for field_name, field in schema._fields.iteritems() ] if not field_defs: field_defs = ["{indent}pass".format(indent=indent)] return def_pattern.format( class_name=schema._schema_name, optional_namespace_def=optional_namespace_def, field_defs="\n".join(field_defs), indent=indent )
def _class_source(schema, indent): """Generate Python source code for one specific class Doesn't include or take into account any dependencies between record types """ def_pattern = ( "class {class_name}(pyschema.Record):\n" "{indent}# WARNING: This class was generated by pyschema.to_python_source\n" "{indent}# there is a risk that any modification made to this class will be overwritten\n" "{optional_namespace_def}" "{field_defs}\n" ) if hasattr(schema, '_namespace'): optional_namespace_def = "{indent}_namespace = {namespace!r}\n".format( namespace=schema._namespace, indent=indent) else: optional_namespace_def = "" field_defs = [ "{indent}{field_name} = {field!r}".format(field_name=field_name, field=field, indent=indent) for field_name, field in schema._fields.iteritems() ] if not field_defs: field_defs = ["{indent}pass".format(indent=indent)] return def_pattern.format( class_name=schema._schema_name, optional_namespace_def=optional_namespace_def, field_defs="\n".join(field_defs), indent=indent )
[ "Generate", "Python", "source", "code", "for", "one", "specific", "class" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/source_generation.py#L193-L224
[ "def", "_class_source", "(", "schema", ",", "indent", ")", ":", "def_pattern", "=", "(", "\"class {class_name}(pyschema.Record):\\n\"", "\"{indent}# WARNING: This class was generated by pyschema.to_python_source\\n\"", "\"{indent}# there is a risk that any modification made to this class will be overwritten\\n\"", "\"{optional_namespace_def}\"", "\"{field_defs}\\n\"", ")", "if", "hasattr", "(", "schema", ",", "'_namespace'", ")", ":", "optional_namespace_def", "=", "\"{indent}_namespace = {namespace!r}\\n\"", ".", "format", "(", "namespace", "=", "schema", ".", "_namespace", ",", "indent", "=", "indent", ")", "else", ":", "optional_namespace_def", "=", "\"\"", "field_defs", "=", "[", "\"{indent}{field_name} = {field!r}\"", ".", "format", "(", "field_name", "=", "field_name", ",", "field", "=", "field", ",", "indent", "=", "indent", ")", "for", "field_name", ",", "field", "in", "schema", ".", "_fields", ".", "iteritems", "(", ")", "]", "if", "not", "field_defs", ":", "field_defs", "=", "[", "\"{indent}pass\"", ".", "format", "(", "indent", "=", "indent", ")", "]", "return", "def_pattern", ".", "format", "(", "class_name", "=", "schema", ".", "_schema_name", ",", "optional_namespace_def", "=", "optional_namespace_def", ",", "field_defs", "=", "\"\\n\"", ".", "join", "(", "field_defs", ")", ",", "indent", "=", "indent", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
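To illustrate the shape of the output, a sketch of calling _class_source on the hypothetical Song record from the previous sketch; note the leading underscore marks it as internal API:

from pyschema.source_generation import _class_source

src = _class_source(Song, indent="    ")
# src starts with "class Song(pyschema.Record):", then the WARNING comment header,
# the optional _namespace line, and one "name = Field(...)" line per field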
test
no_auto_store
Temporarily disable automatic registration of records in the auto_store Decorator factory. This is _NOT_ thread safe >>> @no_auto_store() ... class BarRecord(Record): ... pass >>> BarRecord in auto_store False
pyschema/core.py
def no_auto_store(): """ Temporarily disable automatic registration of records in the auto_store Decorator factory. This is _NOT_ thread safe >>> @no_auto_store() ... class BarRecord(Record): ... pass >>> BarRecord in auto_store False """ original_auto_register_value = PySchema.auto_register disable_auto_register() def decorator(cls): PySchema.auto_register = original_auto_register_value return cls return decorator
def no_auto_store(): """ Temporarily disable automatic registration of records in the auto_store Decorator factory. This is _NOT_ thread safe >>> @no_auto_store() ... class BarRecord(Record): ... pass >>> BarRecord in auto_store False """ original_auto_register_value = PySchema.auto_register disable_auto_register() def decorator(cls): PySchema.auto_register = original_auto_register_value return cls return decorator
[ "Temporarily", "disable", "automatic", "registration", "of", "records", "in", "the", "auto_store" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L416-L435
[ "def", "no_auto_store", "(", ")", ":", "original_auto_register_value", "=", "PySchema", ".", "auto_register", "disable_auto_register", "(", ")", "def", "decorator", "(", "cls", ")", ":", "PySchema", ".", "auto_register", "=", "original_auto_register_value", "return", "cls", "return", "decorator" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
test
to_json_compatible
Dump record in json-encodable object format
pyschema/core.py
def to_json_compatible(record): "Dump record in json-encodable object format" d = {} for fname, f in record._fields.iteritems(): val = getattr(record, fname) if val is not None: d[fname] = f.dump(val) return d
def to_json_compatible(record): "Dump record in json-encodable object format" d = {} for fname, f in record._fields.iteritems(): val = getattr(record, fname) if val is not None: d[fname] = f.dump(val) return d
[ "Dump", "record", "in", "json", "-", "encodable", "object", "format" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L502-L509
[ "def", "to_json_compatible", "(", "record", ")", ":", "d", "=", "{", "}", "for", "fname", ",", "f", "in", "record", ".", "_fields", ".", "iteritems", "(", ")", ":", "val", "=", "getattr", "(", "record", ",", "fname", ")", "if", "val", "is", "not", "None", ":", "d", "[", "fname", "]", "=", "f", ".", "dump", "(", "val", ")", "return", "d" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
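A small sketch of to_json_compatible with a hypothetical two-field record:

import pyschema
from pyschema.core import to_json_compatible

class Point(pyschema.Record):
    x = pyschema.Integer()
    y = pyschema.Integer()

d = to_json_compatible(Point(x=1, y=None))  # fields left as None are omitted
# d == {'x': 1}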
test
from_json_compatible
Load from json-encodable
pyschema/core.py
def from_json_compatible(schema, dct): "Load from json-encodable" kwargs = {} for key in dct: field_type = schema._fields.get(key) if field_type is None: raise ParseError("Unexpected field encountered in line for record %s: %s" % (schema.__name__, key)) kwargs[key] = field_type.load(dct[key]) return schema(**kwargs)
def from_json_compatible(schema, dct): "Load from json-encodable" kwargs = {} for key in dct: field_type = schema._fields.get(key) if field_type is None: raise ParseError("Unexpected field encountered in line for record %s: %s" % (schema.__name__, key)) kwargs[key] = field_type.load(dct[key]) return schema(**kwargs)
[ "Load", "from", "json", "-", "encodable" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L512-L522
[ "def", "from_json_compatible", "(", "schema", ",", "dct", ")", ":", "kwargs", "=", "{", "}", "for", "key", "in", "dct", ":", "field_type", "=", "schema", ".", "_fields", ".", "get", "(", "key", ")", "if", "field_type", "is", "None", ":", "raise", "ParseError", "(", "\"Unexpected field encountered in line for record %s: %s\"", "%", "(", "schema", ".", "__name__", ",", "key", ")", ")", "kwargs", "[", "key", "]", "=", "field_type", ".", "load", "(", "dct", "[", "key", "]", ")", "return", "schema", "(", "*", "*", "kwargs", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
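And the inverse direction with from_json_compatible, reusing the hypothetical Point record sketched above:

from pyschema.core import from_json_compatible

p = from_json_compatible(Point, {"x": 1, "y": 2})
from_json_compatible(Point, {"z": 0})  # raises ParseError: unexpected field, per the code above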
test
load_json_dct
Create a Record instance from a json-compatible dictionary The dictionary values should have types that are json compatible, as if just loaded from a json serialized record string. :param dct: Python dictionary with key/value pairs for the record :param record_store: Record store to use for schema lookups (when $schema field is present) :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `dct`
pyschema/core.py
def load_json_dct( dct, record_store=None, schema=None, loader=from_json_compatible ): """ Create a Record instance from a json-compatible dictionary The dictionary values should have types that are json compatible, as if just loaded from a json serialized record string. :param dct: Python dictionary with key/value pairs for the record :param record_store: Record store to use for schema lookups (when $schema field is present) :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `dct` """ if schema is None: if record_store is None: record_store = auto_store try: schema_name = dct.pop(SCHEMA_FIELD_NAME) except KeyError: raise ParseError(( "Serialized record missing '{0}' " "record identifier and no schema supplied") .format(SCHEMA_FIELD_NAME) ) try: schema = record_store.get(schema_name) except KeyError: raise ParseError( "Can't recognize record type %r" % (schema_name,), schema_name) # if schema is explicit, use that instead of SCHEMA_FIELD_NAME elif SCHEMA_FIELD_NAME in dct: dct.pop(SCHEMA_FIELD_NAME) record = loader(schema, dct) return record
def load_json_dct( dct, record_store=None, schema=None, loader=from_json_compatible ): """ Create a Record instance from a json-compatible dictionary The dictionary values should have types that are json compatible, as if just loaded from a json serialized record string. :param dct: Python dictionary with key/value pairs for the record :param record_store: Record store to use for schema lookups (when $schema field is present) :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `dct` """ if schema is None: if record_store is None: record_store = auto_store try: schema_name = dct.pop(SCHEMA_FIELD_NAME) except KeyError: raise ParseError(( "Serialized record missing '{0}' " "record identifier and no schema supplied") .format(SCHEMA_FIELD_NAME) ) try: schema = record_store.get(schema_name) except KeyError: raise ParseError( "Can't recognize record type %r" % (schema_name,), schema_name) # if schema is explicit, use that instead of SCHEMA_FIELD_NAME elif SCHEMA_FIELD_NAME in dct: dct.pop(SCHEMA_FIELD_NAME) record = loader(schema, dct) return record
[ "Create", "a", "Record", "instance", "from", "a", "json", "-", "compatible", "dictionary" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L541-L586
[ "def", "load_json_dct", "(", "dct", ",", "record_store", "=", "None", ",", "schema", "=", "None", ",", "loader", "=", "from_json_compatible", ")", ":", "if", "schema", "is", "None", ":", "if", "record_store", "is", "None", ":", "record_store", "=", "auto_store", "try", ":", "schema_name", "=", "dct", ".", "pop", "(", "SCHEMA_FIELD_NAME", ")", "except", "KeyError", ":", "raise", "ParseError", "(", "(", "\"Serialized record missing '{0}' \"", "\"record identifier and no schema supplied\"", ")", ".", "format", "(", "SCHEMA_FIELD_NAME", ")", ")", "try", ":", "schema", "=", "record_store", ".", "get", "(", "schema_name", ")", "except", "KeyError", ":", "raise", "ParseError", "(", "\"Can't recognize record type %r\"", "%", "(", "schema_name", ",", ")", ",", "schema_name", ")", "# if schema is explicit, use that instead of SCHEMA_FIELD_NAME", "elif", "SCHEMA_FIELD_NAME", "in", "dct", ":", "dct", ".", "pop", "(", "SCHEMA_FIELD_NAME", ")", "record", "=", "loader", "(", "schema", ",", "dct", ")", "return", "record" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
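A sketch of load_json_dct, assuming SCHEMA_FIELD_NAME resolves to the "$schema" key (that constant's value is not shown in this record, so treat it as an assumption); Point is the hypothetical record from earlier:

from pyschema.core import load_json_dct

p = load_json_dct({"x": 1, "y": 2}, schema=Point)         # explicit schema; no identifier needed
p2 = load_json_dct({"$schema": "Point", "x": 3, "y": 4})  # schema looked up in auto_store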
test
loads
Create a Record instance from a json serialized dictionary :param s: String with a json-serialized dictionary :param record_store: Record store to use for schema lookups (when $schema field is present) :param loader: Function called to fetch attributes from json. Typically shouldn't be used by end users :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `s` :param record_class: DEPRECATED option, old name for the `schema` parameter
pyschema/core.py
def loads( s, record_store=None, schema=None, loader=from_json_compatible, record_class=None # deprecated in favor of schema ): """ Create a Record instance from a json serialized dictionary :param s: String with a json-serialized dictionary :param record_store: Record store to use for schema lookups (when $schema field is present) :param loader: Function called to fetch attributes from json. Typically shouldn't be used by end users :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `s` :param record_class: DEPRECATED option, old name for the `schema` parameter """ if record_class is not None: warnings.warn( "The record_class parameter is deprecated in favour of schema", DeprecationWarning, stacklevel=2 ) schema = record_class if not isinstance(s, unicode): s = s.decode('utf8') if s.startswith(u"{"): json_dct = json.loads(s) return load_json_dct(json_dct, record_store, schema, loader) else: raise ParseError("Not a json record")
def loads( s, record_store=None, schema=None, loader=from_json_compatible, record_class=None # deprecated in favor of schema ): """ Create a Record instance from a json serialized dictionary :param s: String with a json-serialized dictionary :param record_store: Record store to use for schema lookups (when $schema field is present) :param loader: Function called to fetch attributes from json. Typically shouldn't be used by end users :param schema: PySchema Record class for the record to load. This will override any $schema fields specified in `s` :param record_class: DEPRECATED option, old name for the `schema` parameter """ if record_class is not None: warnings.warn( "The record_class parameter is deprecated in favour of schema", DeprecationWarning, stacklevel=2 ) schema = record_class if not isinstance(s, unicode): s = s.decode('utf8') if s.startswith(u"{"): json_dct = json.loads(s) return load_json_dct(json_dct, record_store, schema, loader) else: raise ParseError("Not a json record")
[ "Create", "a", "Record", "instance", "from", "a", "json", "serialized", "dictionary" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L589-L628
[ "def", "loads", "(", "s", ",", "record_store", "=", "None", ",", "schema", "=", "None", ",", "loader", "=", "from_json_compatible", ",", "record_class", "=", "None", "# deprecated in favor of schema", ")", ":", "if", "record_class", "is", "not", "None", ":", "warnings", ".", "warn", "(", "\"The record_class parameter is deprecated in favour of schema\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "schema", "=", "record_class", "if", "not", "isinstance", "(", "s", ",", "unicode", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf8'", ")", "if", "s", ".", "startswith", "(", "u\"{\"", ")", ":", "json_dct", "=", "json", ".", "loads", "(", "s", ")", "return", "load_json_dct", "(", "json_dct", ",", "record_store", ",", "schema", ",", "loader", ")", "else", ":", "raise", "ParseError", "(", "\"Not a json record\"", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
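A corresponding sketch for loads, with the same caveat about the "$schema" identifier key:

from pyschema.core import loads

p = loads('{"$schema": "Point", "x": 1, "y": 2}')  # schema resolved via auto_store
q = loads('{"x": 1, "y": 2}', schema=Point)        # explicit schema skips the lookup
loads('not json')                                  # raises ParseError("Not a json record")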
test
SchemaStore.add_record
Add record class to record store for retrieval at record load time. Can be used as a class decorator
pyschema/core.py
def add_record(self, schema, _bump_stack_level=False): """ Add record class to record store for retrieval at record load time. Can be used as a class decorator """ full_name = get_full_name(schema) has_namespace = '.' in full_name self._force_add(full_name, schema, _bump_stack_level, _raise_on_existing=has_namespace) if has_namespace and schema.__name__ not in self._schema_map: self._force_add(schema.__name__, schema, _bump_stack_level) return schema
def add_record(self, schema, _bump_stack_level=False): """ Add record class to record store for retrieval at record load time. Can be used as a class decorator """ full_name = get_full_name(schema) has_namespace = '.' in full_name self._force_add(full_name, schema, _bump_stack_level, _raise_on_existing=has_namespace) if has_namespace and schema.__name__ not in self._schema_map: self._force_add(schema.__name__, schema, _bump_stack_level) return schema
[ "Add", "record", "class", "to", "record", "store", "for", "retrieval", "at", "record", "load", "time", "." ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L97-L107
[ "def", "add_record", "(", "self", ",", "schema", ",", "_bump_stack_level", "=", "False", ")", ":", "full_name", "=", "get_full_name", "(", "schema", ")", "has_namespace", "=", "'.'", "in", "full_name", "self", ".", "_force_add", "(", "full_name", ",", "schema", ",", "_bump_stack_level", ",", "_raise_on_existing", "=", "has_namespace", ")", "if", "has_namespace", "and", "schema", ".", "__name__", "not", "in", "self", ".", "_schema_map", ":", "self", ".", "_force_add", "(", "schema", ".", "__name__", ",", "schema", ",", "_bump_stack_level", ")", "return", "schema" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
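A sketch of using add_record as a class decorator on a standalone store; the Song record is hypothetical:

import pyschema
from pyschema.core import SchemaStore

store = SchemaStore()

@store.add_record
class Song(pyschema.Record):
    _namespace = "music.catalogue"
    title = pyschema.Text()
# per the code above, registered under "music.catalogue.Song" and the short name "Song"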
test
SchemaStore.get
Will return a matching record or raise KeyError if no record is found. If the record name is a full name we will first check for a record matching the full name. If no such record is found, any record matching the last part of the full name (without the namespace) will be returned.
pyschema/core.py
def get(self, record_name): """ Will return a matching record or raise KeyError if no record is found. If the record name is a full name we will first check for a record matching the full name. If no such record is found, any record matching the last part of the full name (without the namespace) will be returned. """ if record_name in self._schema_map: return self._schema_map[record_name] else: last_name = record_name.split('.')[-1] return self._schema_map[last_name]
def get(self, record_name): """ Will return a matching record or raise KeyError if no record is found. If the record name is a full name we will first check for a record matching the full name. If no such record is found, any record matching the last part of the full name (without the namespace) will be returned. """ if record_name in self._schema_map: return self._schema_map[record_name] else: last_name = record_name.split('.')[-1] return self._schema_map[last_name]
[ "Will", "return", "a", "matching", "record", "or", "raise", "KeyError", "is", "no", "record", "is", "found", "." ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L152-L164
[ "def", "get", "(", "self", ",", "record_name", ")", ":", "if", "record_name", "in", "self", ".", "_schema_map", ":", "return", "self", ".", "_schema_map", "[", "record_name", "]", "else", ":", "last_name", "=", "record_name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "return", "self", ".", "_schema_map", "[", "last_name", "]" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
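Continuing the store sketch above, lookups work by full or short name:

store.get("music.catalogue.Song")  # exact full-name hit
store.get("some.other.ns.Song")    # no exact hit, falls back to the last segment "Song"
store.get("missing")               # raises KeyError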
test
Field.repr_vars
Return a dictionary of the field definition Should contain all fields that are required for the definition of this field in a pyschema class
pyschema/core.py
def repr_vars(self): """Return a dictionary of the field definition Should contain all fields that are required for the definition of this field in a pyschema class""" d = OrderedDict() d["nullable"] = repr(self.nullable) d["default"] = repr(self.default) if self.description is not None: d["description"] = repr(self.description) return d
def repr_vars(self): """Return a dictionary of the field definition Should contain all fields that are required for the definition of this field in a pyschema class""" d = OrderedDict() d["nullable"] = repr(self.nullable) d["default"] = repr(self.default) if self.description is not None: d["description"] = repr(self.description) return d
[ "Return", "a", "dictionary", "the", "field", "definition" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L243-L252
[ "def", "repr_vars", "(", "self", ")", ":", "d", "=", "OrderedDict", "(", ")", "d", "[", "\"nullable\"", "]", "=", "repr", "(", "self", ".", "nullable", ")", "d", "[", "\"default\"", "]", "=", "repr", "(", "self", ".", "default", ")", "if", "self", ".", "description", "is", "not", "None", ":", "d", "[", "\"description\"", "]", "=", "repr", "(", "self", ".", "description", ")", "return", "d" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
test
Field.mixin
Decorator for mixing in additional functionality into field type Example: >>> @Integer.mixin ... class IntegerPostgresExtensions: ... postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" Is roughly equivalent to: >>> Integer.postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" ... ... Integer.postgres_dump = postgres_dump
pyschema/core.py
def mixin(cls, mixin_cls): """Decorator for mixing in additional functionality into field type Example: >>> @Integer.mixin ... class IntegerPostgresExtensions: ... postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" Is roughly equivalent to: >>> Integer.postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" ... ... Integer.postgres_dump = postgres_dump """ for item_name in dir(mixin_cls): if item_name.startswith("__"): # don't copy magic properties continue item = getattr(mixin_cls, item_name) if isinstance(item, types.MethodType): # unbound method will cause problems # so get the underlying function instead item = item.im_func setattr(cls, item_name, item) return mixin_cls
def mixin(cls, mixin_cls): """Decorator for mixing in additional functionality into field type Example: >>> @Integer.mixin ... class IntegerPostgresExtensions: ... postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" Is roughly equivalent to: >>> Integer.postgres_type = 'INT' ... ... def postgres_dump(self, obj): ... self.dump(obj) + "::integer" ... ... Integer.postgres_dump = postgres_dump """ for item_name in dir(mixin_cls): if item_name.startswith("__"): # don't copy magic properties continue item = getattr(mixin_cls, item_name) if isinstance(item, types.MethodType): # unbound method will cause problems # so get the underlying function instead item = item.im_func setattr(cls, item_name, item) return mixin_cls
[ "Decorator", "for", "mixing", "in", "additional", "functionality", "into", "field", "type" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L272-L306
[ "def", "mixin", "(", "cls", ",", "mixin_cls", ")", ":", "for", "item_name", "in", "dir", "(", "mixin_cls", ")", ":", "if", "item_name", ".", "startswith", "(", "\"__\"", ")", ":", "# don't copy magic properties", "continue", "item", "=", "getattr", "(", "mixin_cls", ",", "item_name", ")", "if", "isinstance", "(", "item", ",", "types", ".", "MethodType", ")", ":", "# unbound method will cause problems", "# so get the underlying function instead", "item", "=", "item", ".", "im_func", "setattr", "(", "cls", ",", "item_name", ",", "item", ")", "return", "mixin_cls" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
test
PySchema.from_class
Create proper PySchema class from cls Any methods and attributes will be transferred to the new object
pyschema/core.py
def from_class(metacls, cls, auto_store=True): """Create proper PySchema class from cls Any methods and attributes will be transferred to the new object """ if auto_store: def wrap(cls): return cls else: wrap = no_auto_store() return wrap(metacls.__new__( metacls, cls.__name__, (Record,), dict(cls.__dict__) ))
def from_class(metacls, cls, auto_store=True): """Create proper PySchema class from cls Any methods and attributes will be transferred to the new object """ if auto_store: def wrap(cls): return cls else: wrap = no_auto_store() return wrap(metacls.__new__( metacls, cls.__name__, (Record,), dict(cls.__dict__) ))
[ "Create", "proper", "PySchema", "class", "from", "cls" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/core.py#L388-L405
[ "def", "from_class", "(", "metacls", ",", "cls", ",", "auto_store", "=", "True", ")", ":", "if", "auto_store", ":", "def", "wrap", "(", "cls", ")", ":", "return", "cls", "else", ":", "wrap", "=", "no_auto_store", "(", ")", "return", "wrap", "(", "metacls", ".", "__new__", "(", "metacls", ",", "cls", ".", "__name__", ",", "(", "Record", ",", ")", ",", "dict", "(", "cls", ".", "__dict__", ")", ")", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
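A hedged sketch of from_class; this assumes it is exposed as a classmethod on the PySchema metaclass, which the record itself does not show:

import pyschema
from pyschema.core import PySchema

class PlainPoint(object):
    x = pyschema.Integer()

Point2 = PySchema.from_class(PlainPoint)  # a proper Record subclass with the same members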
test
get_schema_dict
Return a python dict representing the jsonschema of a record Any references to sub-schemas will be URI fragments that won't be resolvable without a root schema, available from get_root_schema_dict.
pyschema_extensions/jsonschema.py
def get_schema_dict(record, state=None): """Return a python dict representing the jsonschema of a record Any references to sub-schemas will be URI fragments that won't be resolvable without a root schema, available from get_root_schema_dict. """ state = state or SchemaGeneratorState() schema = OrderedDict([ ('type', 'object'), ('id', record._schema_name), ]) fields = dict() for field_name, field_type in record._fields.iteritems(): fields[field_name] = field_type.jsonschema_type_schema(state) required = set(fields.keys()) schema['properties'] = fields schema['required'] = sorted(list(required)) schema['additionalProperties'] = False state.record_schemas[record._schema_name] = schema return schema
def get_schema_dict(record, state=None): """Return a python dict representing the jsonschema of a record Any references to sub-schemas will be URI fragments that won't be resolvable without a root schema, available from get_root_schema_dict. """ state = state or SchemaGeneratorState() schema = OrderedDict([ ('type', 'object'), ('id', record._schema_name), ]) fields = dict() for field_name, field_type in record._fields.iteritems(): fields[field_name] = field_type.jsonschema_type_schema(state) required = set(fields.keys()) schema['properties'] = fields schema['required'] = sorted(list(required)) schema['additionalProperties'] = False state.record_schemas[record._schema_name] = schema return schema
[ "Return", "a", "python", "dict", "representing", "the", "jsonschema", "of", "a", "record" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/jsonschema.py#L116-L137
[ "def", "get_schema_dict", "(", "record", ",", "state", "=", "None", ")", ":", "state", "=", "state", "or", "SchemaGeneratorState", "(", ")", "schema", "=", "OrderedDict", "(", "[", "(", "'type'", ",", "'object'", ")", ",", "(", "'id'", ",", "record", ".", "_schema_name", ")", ",", "]", ")", "fields", "=", "dict", "(", ")", "for", "field_name", ",", "field_type", "in", "record", ".", "_fields", ".", "iteritems", "(", ")", ":", "fields", "[", "field_name", "]", "=", "field_type", ".", "jsonschema_type_schema", "(", "state", ")", "required", "=", "set", "(", "fields", ".", "keys", "(", ")", ")", "schema", "[", "'properties'", "]", "=", "fields", "schema", "[", "'required'", "]", "=", "sorted", "(", "list", "(", "required", ")", ")", "schema", "[", "'additionalProperties'", "]", "=", "False", "state", ".", "record_schemas", "[", "record", ".", "_schema_name", "]", "=", "schema", "return", "schema" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
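A sketch of get_schema_dict on the hypothetical Point record from the earlier sketches:

from pyschema_extensions.jsonschema import get_schema_dict

schema = get_schema_dict(Point)
# an OrderedDict with 'type': 'object', 'id': 'Point', per-field 'properties',
# a sorted 'required' list, and 'additionalProperties': False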
test
get_root_schema_dict
Return a root jsonschema for a given record A root schema includes the $schema attribute and all sub-record schemas and definitions.
pyschema_extensions/jsonschema.py
def get_root_schema_dict(record): """Return a root jsonschema for a given record A root schema includes the $schema attribute and all sub-record schemas and definitions. """ state = SchemaGeneratorState() schema = get_schema_dict(record, state) del state.record_schemas[record._schema_name] if state.record_schemas: schema['definitions'] = dict() for name, sub_schema in state.record_schemas.iteritems(): schema['definitions'][name] = sub_schema return schema
def get_root_schema_dict(record): """Return a root jsonschema for a given record A root schema includes the $schema attribute and all sub-record schemas and definitions. """ state = SchemaGeneratorState() schema = get_schema_dict(record, state) del state.record_schemas[record._schema_name] if state.record_schemas: schema['definitions'] = dict() for name, sub_schema in state.record_schemas.iteritems(): schema['definitions'][name] = sub_schema return schema
[ "Return", "a", "root", "jsonschema", "for", "a", "given", "record" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/jsonschema.py#L140-L153
[ "def", "get_root_schema_dict", "(", "record", ")", ":", "state", "=", "SchemaGeneratorState", "(", ")", "schema", "=", "get_schema_dict", "(", "record", ",", "state", ")", "del", "state", ".", "record_schemas", "[", "record", ".", "_schema_name", "]", "if", "state", ".", "record_schemas", ":", "schema", "[", "'definitions'", "]", "=", "dict", "(", ")", "for", "name", ",", "sub_schema", "in", "state", ".", "record_schemas", ".", "iteritems", "(", ")", ":", "schema", "[", "'definitions'", "]", "[", "name", "]", "=", "sub_schema", "return", "schema" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
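And the root-schema variant, which pulls any sub-record schemas into "definitions":

import json
from pyschema_extensions.jsonschema import get_root_schema_dict

root = get_root_schema_dict(Point)
print json.dumps(root, indent=2)  # Python 2 print statement, matching the library's era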
test
from_json_compatible
Load from json-encodable
pyschema_extensions/avro.py
def from_json_compatible(schema, dct): "Load from json-encodable" kwargs = {} for key in dct: field_type = schema._fields.get(key) if field_type is None: warnings.warn("Unexpected field encountered in line for record %s: %r" % (schema.__name__, key)) continue kwargs[key] = field_type.avro_load(dct[key]) return schema(**kwargs)
def from_json_compatible(schema, dct): "Load from json-encodable" kwargs = {} for key in dct: field_type = schema._fields.get(key) if field_type is None: warnings.warn("Unexpected field encountered in line for record %s: %r" % (schema.__name__, key)) continue kwargs[key] = field_type.avro_load(dct[key]) return schema(**kwargs)
[ "Load", "from", "json", "-", "encodable" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/avro.py#L310-L321
[ "def", "from_json_compatible", "(", "schema", ",", "dct", ")", ":", "kwargs", "=", "{", "}", "for", "key", "in", "dct", ":", "field_type", "=", "schema", ".", "_fields", ".", "get", "(", "key", ")", "if", "field_type", "is", "None", ":", "warnings", ".", "warn", "(", "\"Unexpected field encountered in line for record %s: %r\"", "%", "(", "schema", ".", "__name__", ",", "key", ")", ")", "continue", "kwargs", "[", "key", "]", "=", "field_type", ".", "avro_load", "(", "dct", "[", "key", "]", ")", "return", "schema", "(", "*", "*", "kwargs", ")" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
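Unlike the core loader above, this Avro variant tolerates unknown keys; a sketch reusing the hypothetical Point record (field values are assumed to already be in the field's avro_load encoding):

from pyschema_extensions.avro import from_json_compatible

p = from_json_compatible(Point, {"x": 1, "unknown": 0})  # warns about "unknown" and skips it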
test
mr_reader
Converts a file object with json serialised pyschema records to a stream of pyschema objects Can be used as job.reader in luigi.hadoop.JobTask
pyschema_extensions/luigi.py
def mr_reader(job, input_stream, loads=core.loads): """ Converts a file object with json serialised pyschema records to a stream of pyschema objects Can be used as job.reader in luigi.hadoop.JobTask """ for line in input_stream: yield loads(line),
def mr_reader(job, input_stream, loads=core.loads): """ Converts a file object with json serialised pyschema records to a stream of pyschema objects Can be used as job.reader in luigi.hadoop.JobTask """ for line in input_stream: yield loads(line),
[ "Converts", "a", "file", "object", "with", "json", "serialised", "pyschema", "records", "to", "a", "stream", "of", "pyschema", "objects" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/luigi.py#L21-L28
[ "def", "mr_reader", "(", "job", ",", "input_stream", ",", "loads", "=", "core", ".", "loads", ")", ":", "for", "line", "in", "input_stream", ":", "yield", "loads", "(", "line", ")", "," ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
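A wiring sketch for mr_reader; the job class is hypothetical and the exact luigi hook name is assumed from the docstring:

import luigi.hadoop
from pyschema_extensions.luigi import mr_reader

class MyJob(luigi.hadoop.JobTask):
    reader = mr_reader  # each input line yields a 1-tuple holding the loaded Record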
test
mr_writer
Writes a stream of json serialised pyschema Records to a file object Can be used as job.writer in luigi.hadoop.JobTask
pyschema_extensions/luigi.py
def mr_writer(job, outputs, output_stream, stderr=sys.stderr, dumps=core.dumps): """ Writes a stream of json serialised pyschema Records to a file object Can be used as job.writer in luigi.hadoop.JobTask """ for output in outputs: try: print >> output_stream, dumps(output) except core.ParseError, e: print >> stderr, e raise
def mr_writer(job, outputs, output_stream, stderr=sys.stderr, dumps=core.dumps): """ Writes a stream of json serialised pyschema Records to a file object Can be used as job.writer in luigi.hadoop.JobTask """ for output in outputs: try: print >> output_stream, dumps(output) except core.ParseError, e: print >> stderr, e raise
[ "Writes", "a", "stream", "of", "json", "serialised", "pyschema", "Records", "to", "a", "file", "object" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema_extensions/luigi.py#L31-L42
[ "def", "mr_writer", "(", "job", ",", "outputs", ",", "output_stream", ",", "stderr", "=", "sys", ".", "stderr", ",", "dumps", "=", "core", ".", "dumps", ")", ":", "for", "output", "in", "outputs", ":", "try", ":", "print", ">>", "output_stream", ",", "dumps", "(", "output", ")", "except", "core", ".", "ParseError", ",", "e", ":", "print", ">>", "stderr", ",", "e", "raise" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
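The matching writer hook, with the same caveats as the reader sketch above:

from pyschema_extensions.luigi import mr_writer

class MyJob(luigi.hadoop.JobTask):
    reader = mr_reader
    writer = mr_writer  # serialisation failures are echoed to stderr, then re-raised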
test
ordereddict_push_front
Set a value at the front of an OrderedDict The original dict isn't modified; instead a copy is returned
pyschema/types.py
def ordereddict_push_front(dct, key, value): """Set a value at the front of an OrderedDict The original dict isn't modified; instead a copy is returned """ d = OrderedDict() d[key] = value d.update(dct) return d
def ordereddict_push_front(dct, key, value): """Set a value at the front of an OrderedDict The original dict isn't modified; instead a copy is returned """ d = OrderedDict() d[key] = value d.update(dct) return d
[ "Set", "a", "value", "at", "the", "front", "of", "an", "OrderedDict" ]
spotify/pyschema
python
https://github.com/spotify/pyschema/blob/7e6c3934150bcb040c628d74ace6caf5fcf867df/pyschema/types.py#L26-L34
[ "def", "ordereddict_push_front", "(", "dct", ",", "key", ",", "value", ")", ":", "d", "=", "OrderedDict", "(", ")", "d", "[", "key", "]", "=", "value", "d", ".", "update", "(", "dct", ")", "return", "d" ]
7e6c3934150bcb040c628d74ace6caf5fcf867df
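A small runnable sketch of ordereddict_push_front:

from collections import OrderedDict
from pyschema.types import ordereddict_push_front

d = OrderedDict([("b", 2), ("c", 3)])
d2 = ordereddict_push_front(d, "a", 1)
# list(d2) == ["a", "b", "c"]; the original d is left untouched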
test
gen_filter
Generates a single filter expression for ``filter[]``.
src/manageiq_client/filters.py
def gen_filter(name, op, value, is_or=False): """Generates a single filter expression for ``filter[]``.""" if op not in OPERATORS: raise ValueError('Unknown operator {}'.format(op)) result = u'{} {} {}'.format(name, op, escape_filter(value)) if is_or: result = u'or ' + result return result
def gen_filter(name, op, value, is_or=False): """Generates a single filter expression for ``filter[]``.""" if op not in OPERATORS: raise ValueError('Unknown operator {}'.format(op)) result = u'{} {} {}'.format(name, op, escape_filter(value)) if is_or: result = u'or ' + result return result
[ "Generates", "a", "single", "filter", "expression", "for", "filter", "[]", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/filters.py#L7-L14
[ "def", "gen_filter", "(", "name", ",", "op", ",", "value", ",", "is_or", "=", "False", ")", ":", "if", "op", "not", "in", "OPERATORS", ":", "raise", "ValueError", "(", "'Unknown operator {}'", ".", "format", "(", "op", ")", ")", "result", "=", "u'{} {} {}'", ".", "format", "(", "name", ",", "op", ",", "escape_filter", "(", "value", ")", ")", "if", "is_or", ":", "result", "=", "u'or '", "+", "result", "return", "result" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
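A sketch of gen_filter; the operator strings are assumed members of OPERATORS, and the quoting follows escape_filter (shown later in this section):

from manageiq_client.filters import gen_filter

gen_filter("name", "=", "foo")          # => u'name = "foo"'
gen_filter("age", ">", 30, is_or=True)  # => u'or age > 30'
gen_filter("name", "~~", "x")           # ValueError if "~~" is not in OPERATORS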
test
Q.from_dict
Creates a query (AND and =) from a dictionary.
src/manageiq_client/filters.py
def from_dict(cls, d): """Creates a query (AND and =) from a dictionary.""" if not d: raise ValueError('Empty dictionary!') items = list(d.items()) key, value = items.pop(0) q = cls(key, u'=', value) for key, value in items: q = q & cls(key, u'=', value) return q
def from_dict(cls, d): """Creates a query (AND and =) from a dictionary.""" if not d: raise ValueError('Empty dictionary!') items = list(d.items()) key, value = items.pop(0) q = cls(key, u'=', value) for key, value in items: q = q & cls(key, u'=', value) return q
[ "Creates", "a", "query", "(", "AND", "and", "=", ")", "from", "a", "dictionary", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/filters.py#L37-L46
[ "def", "from_dict", "(", "cls", ",", "d", ")", ":", "if", "not", "d", ":", "raise", "ValueError", "(", "'Empty dictionary!'", ")", "items", "=", "list", "(", "d", ".", "items", "(", ")", ")", "key", ",", "value", "=", "items", ".", "pop", "(", "0", ")", "q", "=", "cls", "(", "key", ",", "u'='", ",", "value", ")", "for", "key", ",", "value", "in", "items", ":", "q", "=", "q", "&", "cls", "(", "key", ",", "u'='", ",", "value", ")", "return", "q" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
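A sketch of Q.from_dict; the AND-of-equalities result follows the docstring:

from manageiq_client.filters import Q

q = Q.from_dict({"name": "foo", "type": "bar"})  # name = "foo" AND type = "bar"
Q.from_dict({})                                  # raises ValueError('Empty dictionary!')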
test
Collection.query_string
Specify query string to use with the collection. Returns: :py:class:`SearchResult`
src/manageiq_client/api.py
def query_string(self, **params): """Specify query string to use with the collection. Returns: :py:class:`SearchResult` """ return SearchResult(self, self._api.get(self._href, **params))
def query_string(self, **params): """Specify query string to use with the collection. Returns: :py:class:`SearchResult` """ return SearchResult(self, self._api.get(self._href, **params))
[ "Specify", "query", "string", "to", "use", "with", "the", "collection", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/api.py#L332-L337
[ "def", "query_string", "(", "self", ",", "*", "*", "params", ")", ":", "return", "SearchResult", "(", "self", ",", "self", ".", "_api", ".", "get", "(", "self", ".", "_href", ",", "*", "*", "params", ")", ")" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
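A usage sketch; here `api` stands for an already-constructed client and `vms` for one of its collections, both assumptions about a typical setup:

result = api.collections.vms.query_string(expand="resources", attributes="name")
# a SearchResult built from a GET on the collection's href with those query params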
test
Collection.raw_filter
Sends all filters to the API. Nothing fancy, just a wrapper. Any advanced functionality shall be implemented as another method. Args: filters: List of filters (strings) Returns: :py:class:`SearchResult`
src/manageiq_client/api.py
def raw_filter(self, filters): """Sends all filters to the API. Nothing fancy, just a wrapper. Any advanced functionality shall be implemented as another method. Args: filters: List of filters (strings) Returns: :py:class:`SearchResult` """ return SearchResult(self, self._api.get(self._href, **{"filter[]": filters}))
def raw_filter(self, filters): """Sends all filters to the API. Nothing fancy, just a wrapper. Any advanced functionality shall be implemented as another method. Args: filters: List of filters (strings) Returns: :py:class:`SearchResult` """ return SearchResult(self, self._api.get(self._href, **{"filter[]": filters}))
[ "Sends", "all", "filters", "to", "the", "API", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/api.py#L339-L349
[ "def", "raw_filter", "(", "self", ",", "filters", ")", ":", "return", "SearchResult", "(", "self", ",", "self", ".", "_api", ".", "get", "(", "self", ".", "_href", ",", "*", "*", "{", "\"filter[]\"", ":", "filters", "}", ")", ")" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
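The raw_filter counterpart passes pre-built filter strings straight through, continuing the hypothetical client from the previous sketch:

result = api.collections.vms.raw_filter(['name = "foo"', 'or name = "bar"'])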
test
Collection.all_include_attributes
Returns all entities present in the collection with ``attributes`` included.
src/manageiq_client/api.py
def all_include_attributes(self, attributes): """Returns all entities present in the collection with ``attributes`` included.""" self.reload(expand=True, attributes=attributes) entities = [Entity(self, r, attributes=attributes) for r in self._resources] self.reload() return entities
def all_include_attributes(self, attributes): """Returns all entities present in the collection with ``attributes`` included.""" self.reload(expand=True, attributes=attributes) entities = [Entity(self, r, attributes=attributes) for r in self._resources] self.reload() return entities
[ "Returns", "all", "entities", "present", "in", "the", "collection", "with", "attributes", "included", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/api.py#L393-L398
[ "def", "all_include_attributes", "(", "self", ",", "attributes", ")", ":", "self", ".", "reload", "(", "expand", "=", "True", ",", "attributes", "=", "attributes", ")", "entities", "=", "[", "Entity", "(", "self", ",", "r", ",", "attributes", "=", "attributes", ")", "for", "r", "in", "self", ".", "_resources", "]", "self", ".", "reload", "(", ")", "return", "entities" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
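A usage sketch, again assuming the hypothetical client handle from the earlier collection sketches:

vms = api.collections.vms.all_include_attributes(["ipaddresses"])
# each returned Entity was built with the requested attributes expanded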
test
Action._get_entity_from_href
Returns entity in correct collection. If the "href" value in result doesn't match the current collection, try to find the collection that the "href" refers to.
src/manageiq_client/api.py
def _get_entity_from_href(self, result): """Returns entity in correct collection. If the "href" value in result doesn't match the current collection, try to find the collection that the "href" refers to. """ href_result = result['href'] if self.collection._href.startswith(href_result): return Entity(self.collection, result, incomplete=True) href_match = re.match(r"(https?://.+/api[^?]*)/([a-z_-]+)", href_result) if not href_match: raise ValueError("Malformed href: {}".format(href_result)) collection_name = href_match.group(2) entry_point = href_match.group(1) new_collection = Collection( self.collection.api, "{}/{}".format(entry_point, collection_name), collection_name ) return Entity(new_collection, result, incomplete=True)
def _get_entity_from_href(self, result): """Returns entity in correct collection. If the "href" value in result doesn't match the current collection, try to find the collection that the "href" refers to. """ href_result = result['href'] if self.collection._href.startswith(href_result): return Entity(self.collection, result, incomplete=True) href_match = re.match(r"(https?://.+/api[^?]*)/([a-z_-]+)", href_result) if not href_match: raise ValueError("Malformed href: {}".format(href_result)) collection_name = href_match.group(2) entry_point = href_match.group(1) new_collection = Collection( self.collection.api, "{}/{}".format(entry_point, collection_name), collection_name ) return Entity(new_collection, result, incomplete=True)
[ "Returns", "entity", "in", "correct", "collection", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/api.py#L703-L724
[ "def", "_get_entity_from_href", "(", "self", ",", "result", ")", ":", "href_result", "=", "result", "[", "'href'", "]", "if", "self", ".", "collection", ".", "_href", ".", "startswith", "(", "href_result", ")", ":", "return", "Entity", "(", "self", ".", "collection", ",", "result", ",", "incomplete", "=", "True", ")", "href_match", "=", "re", ".", "match", "(", "r\"(https?://.+/api[^?]*)/([a-z_-]+)\"", ",", "href_result", ")", "if", "not", "href_match", ":", "raise", "ValueError", "(", "\"Malformed href: {}\"", ".", "format", "(", "href_result", ")", ")", "collection_name", "=", "href_match", ".", "group", "(", "2", ")", "entry_point", "=", "href_match", ".", "group", "(", "1", ")", "new_collection", "=", "Collection", "(", "self", ".", "collection", ".", "api", ",", "\"{}/{}\"", ".", "format", "(", "entry_point", ",", "collection_name", ")", ",", "collection_name", ")", "return", "Entity", "(", "new_collection", ",", "result", ",", "incomplete", "=", "True", ")" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
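To illustrate the href parsing this method relies on, the same regex applied to a hypothetical result href:

import re

m = re.match(r"(https?://.+/api[^?]*)/([a-z_-]+)", "https://miq.example.com/api/providers/42")
m.group(1), m.group(2)  # ('https://miq.example.com/api', 'providers')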
test
give_another_quote
When you pass a quote character, returns another one if possible
src/manageiq_client/utils.py
def give_another_quote(q): """When you pass a quote character, returns another one if possible""" for qc in QUOTES: if qc != q: return qc else: raise ValueError(u'Could not find a different quote for {}'.format(q))
def give_another_quote(q): """When you pass a quote character, returns another one if possible""" for qc in QUOTES: if qc != q: return qc else: raise ValueError(u'Could not find a different quote for {}'.format(q))
[ "When", "you", "pass", "a", "quote", "character", "returns", "you", "an", "another", "one", "if", "possible" ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/utils.py#L7-L13
[ "def", "give_another_quote", "(", "q", ")", ":", "for", "qc", "in", "QUOTES", ":", "if", "qc", "!=", "q", ":", "return", "qc", "else", ":", "raise", "ValueError", "(", "u'Could not find a different quote for {}'", ".", "format", "(", "q", ")", ")" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
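Concretely, assuming QUOTES holds the two standard quote characters:

give_another_quote('"')  # => "'"
give_another_quote("'")  # => '"'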
test
escape_filter
Tries to escape the values that are passed to filter as correctly as possible. No standard way is followed, but at least it is simple.
src/manageiq_client/utils.py
def escape_filter(o): """Tries to escape the values that are passed to filter as correctly as possible. No standard way is followed, but at least it is simple. """ if o is None: return u'NULL' if isinstance(o, int): return str(o) if not isinstance(o, six.string_types): raise ValueError('Filters take only None, int or a string type') if not o: # Empty string return u"''" # Now enforce unicode o = unicode_process(o) if u'"' not in o: # Simple case, just put the quote that does not exist in the string return u'"' + o + u'"' elif u"'" not in o: # Simple case, just put the quote that does not exist in the string return u"'" + o + u"'" else: # Both are there, so start guessing # Empty strings are sorted out, so the string must contain something. # Strings with length == 1 are sorted out because if they have a quote, they would be quoted # with the other quote in the preceding branch. Therefore the string is at least 2 chars long # here which allows us to NOT check the length here. first_char = o[0] last_char = o[-1] if first_char in QUOTES and last_char in QUOTES: # The first and last chars definitely are quotes if first_char == last_char: # Simple, just put other ones around them quote = give_another_quote(first_char) return quote + o + quote else: # I don't like this but the nature of the escape is like that ... # Since now it uses both of the quotes, just pick the simple ones and surround it return u"'" + o + u"'" elif first_char not in QUOTES and last_char not in QUOTES: # First and last chars are not quotes, so a simple solution return u"'" + o + u"'" else: # One of the first or last chars is not a quote if first_char in QUOTES: quote = give_another_quote(first_char) else: # last_char quote = give_another_quote(last_char) return quote + o + quote
def escape_filter(o): """Tries to escape the values that are passed to filter as correctly as possible. No standard way is followed, but at least it is simple. """ if o is None: return u'NULL' if isinstance(o, int): return str(o) if not isinstance(o, six.string_types): raise ValueError('Filters take only None, int or a string type') if not o: # Empty string return u"''" # Now enforce unicode o = unicode_process(o) if u'"' not in o: # Simple case, just put the quote that does not exist in the string return u'"' + o + u'"' elif u"'" not in o: # Simple case, just put the quote that does not exist in the string return u"'" + o + u"'" else: # Both are there, so start guessing # Empty strings are sorted out, so the string must contain something. # Strings with length == 1 are sorted out because if they have a quote, they would be quoted # with the other quote in the preceding branch. Therefore the string is at least 2 chars long # here which allows us to NOT check the length here. first_char = o[0] last_char = o[-1] if first_char in QUOTES and last_char in QUOTES: # The first and last chars definitely are quotes if first_char == last_char: # Simple, just put other ones around them quote = give_another_quote(first_char) return quote + o + quote else: # I don't like this but the nature of the escape is like that ... # Since now it uses both of the quotes, just pick the simple ones and surround it return u"'" + o + u"'" elif first_char not in QUOTES and last_char not in QUOTES: # First and last chars are not quotes, so a simple solution return u"'" + o + u"'" else: # One of the first or last chars is not a quote if first_char in QUOTES: quote = give_another_quote(first_char) else: # last_char quote = give_another_quote(last_char) return quote + o + quote
[ "Tries", "to", "escape", "the", "values", "that", "are", "passed", "to", "filter", "as", "correctly", "as", "possible", "." ]
ManageIQ/manageiq-api-client-python
python
https://github.com/ManageIQ/manageiq-api-client-python/blob/e0c8884929e45766c2835bc7dcf4e78b0794248f/src/manageiq_client/utils.py#L16-L66
[ "def", "escape_filter", "(", "o", ")", ":", "if", "o", "is", "None", ":", "return", "u'NULL'", "if", "isinstance", "(", "o", ",", "int", ")", ":", "return", "str", "(", "o", ")", "if", "not", "isinstance", "(", "o", ",", "six", ".", "string_types", ")", ":", "raise", "ValueError", "(", "'Filters take only None, int or a string type'", ")", "if", "not", "o", ":", "# Empty string", "return", "u\"''\"", "# Now enforce unicode", "o", "=", "unicode_process", "(", "o", ")", "if", "u'\"'", "not", "in", "o", ":", "# Simple case, just put the quote that does not exist in the string", "return", "u'\"'", "+", "o", "+", "u'\"'", "elif", "u\"'\"", "not", "in", "o", ":", "# Simple case, just put the quote that does not exist in the string", "return", "u\"'\"", "+", "o", "+", "u\"'\"", "else", ":", "# Both are there, so start guessing", "# Empty strings are sorted out, so the string must contain something.", "# String with length == 1 are sorted out because if they have a quote, they would be quoted", "# with the another quote in preceeding branch. Therefore the string is at least 2 chars long", "# here which allows us to NOT check the length here.", "first_char", "=", "o", "[", "0", "]", "last_char", "=", "o", "[", "-", "1", "]", "if", "first_char", "in", "QUOTES", "and", "last_char", "in", "QUOTES", ":", "# The first and last chars definitely are quotes", "if", "first_char", "==", "last_char", ":", "# Simple, just put another ones around them", "quote", "=", "give_another_quote", "(", "first_char", ")", "return", "quote", "+", "o", "+", "quote", "else", ":", "# I don't like this but the nature of the escape is like that ...", "# Since now it uses both of the quotes, just pick the simple ones and surround it", "return", "u\"'\"", "+", "o", "+", "u\"'\"", "elif", "first_char", "not", "in", "QUOTES", "and", "last_char", "not", "in", "QUOTES", ":", "# First and last chars are not quotes, so a simple solution", "return", "u\"'\"", "+", "o", "+", "u\"'\"", "else", ":", "# One of the first or last chars is not a quote", "if", "first_char", "in", "QUOTES", ":", "quote", "=", "give_another_quote", "(", "first_char", ")", "else", ":", "# last_char", "quote", "=", "give_another_quote", "(", "last_char", ")", "return", "quote", "+", "o", "+", "quote" ]
e0c8884929e45766c2835bc7dcf4e78b0794248f
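A few concrete escape_filter outcomes, following the branches above:

from manageiq_client.utils import escape_filter

escape_filter(None)         # => u'NULL'
escape_filter(5)            # => '5'
escape_filter("foo")        # => u'"foo"' (no double quote inside, so double quotes wrap it)
escape_filter('say "hi"')   # => u'\'say "hi"\'' (contains ", so single quotes wrap it)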
test
makePlot
Make the plot with parallax performance predictions. :argument args: command line arguments
examples/plotParallaxErrorsSkyAvg.py
def makePlot(args): """ Make the plot with parallax performance predictions. :argument args: command line arguments """ gmag=np.linspace(5.7,20.0,101) vminiB1V=vminiFromSpt('B1V') vminiG2V=vminiFromSpt('G2V') vminiM6V=vminiFromSpt('M6V') vmagB1V=gmag-gminvFromVmini(vminiB1V) vmagG2V=gmag-gminvFromVmini(vminiG2V) vmagM6V=gmag-gminvFromVmini(vminiM6V) sigparB1V=parallaxErrorSkyAvg(gmag,vminiB1V) sigparB1Vmin=parallaxMinError(gmag,vminiB1V) sigparB1Vmax=parallaxMaxError(gmag,vminiB1V) sigparG2V=parallaxErrorSkyAvg(gmag,vminiG2V) sigparG2Vmin=parallaxMinError(gmag,vminiG2V) sigparG2Vmax=parallaxMaxError(gmag,vminiG2V) sigparM6V=parallaxErrorSkyAvg(gmag,vminiM6V) sigparM6Vmin=parallaxMinError(gmag,vminiM6V) sigparM6Vmax=parallaxMaxError(gmag,vminiM6V) fig=plt.figure(figsize=(10,6.5)) if (args['gmagAbscissa']): plt.semilogy(gmag, sigparB1V, 'b', label='B1V') plt.semilogy(gmag, sigparG2V, 'g', label='G2V') plt.semilogy(gmag, sigparM6V, 'r', label='M6V') plt.xlim((5,20)) plt.ylim((4,1000)) plt.legend(loc=4) plt.xlabel('$G$ [mag]') else: ax=fig.add_subplot(111) plt.semilogy(vmagB1V, sigparB1V, 'b', label='B1V') #plt.semilogy(vmagG2V, sigparG2V, 'g', label='G2V') plt.semilogy(vmagM6V, sigparM6V, 'r', label='M6V') plt.fill_between(vmagB1V, sigparB1Vmin, sigparB1Vmax, color='b', alpha=0.3) plt.fill_between(vmagM6V, sigparM6Vmin, sigparM6Vmax, color='r', alpha=0.3) plt.xlim((5,22.5)) plt.ylim((4,1000)) plt.text(17.2,190,'B1V',color='b') plt.text(18,20,'M6V',color='r') plt.xlabel('$V$ [mag]') plt.text(7,17,'calibration noise floor', size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) plt.text(14.75,80,'photon noise', rotation=45, size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) ax.annotate('non-uniformity\nover the sky', xy=(21.5, 320), xycoords='data', xytext=(21.5,80), textcoords='data', ha='center', size='12', bbox=dict(boxstyle="round,pad=0.3",ec=(0,0,0),fc=(1,1,1)), arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='top', ) ax.annotate('', xy=(21.5, 500), xycoords='data', xytext=(21.5,950), textcoords='data', ha='center', size='12', arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='bottom', ) plt.xticks(np.arange(6,24,2)) ax = plt.gca().yaxis ax.set_major_formatter(matplotlib.ticker.ScalarFormatter()) plt.ticklabel_format(axis='y',style='plain') plt.grid(which='both') plt.ylabel('End-of-mission parallax standard error [$\mu$as]') if (args['pdfOutput']): plt.savefig('ParallaxErrors.pdf') elif (args['pngOutput']): plt.savefig('ParallaxErrors.png') else: plt.show()
def makePlot(args): """ Make the plot with parallax performance predictions. :argument args: command line arguments """ gmag=np.linspace(5.7,20.0,101) vminiB1V=vminiFromSpt('B1V') vminiG2V=vminiFromSpt('G2V') vminiM6V=vminiFromSpt('M6V') vmagB1V=gmag-gminvFromVmini(vminiB1V) vmagG2V=gmag-gminvFromVmini(vminiG2V) vmagM6V=gmag-gminvFromVmini(vminiM6V) sigparB1V=parallaxErrorSkyAvg(gmag,vminiB1V) sigparB1Vmin=parallaxMinError(gmag,vminiB1V) sigparB1Vmax=parallaxMaxError(gmag,vminiB1V) sigparG2V=parallaxErrorSkyAvg(gmag,vminiG2V) sigparG2Vmin=parallaxMinError(gmag,vminiG2V) sigparG2Vmax=parallaxMaxError(gmag,vminiG2V) sigparM6V=parallaxErrorSkyAvg(gmag,vminiM6V) sigparM6Vmin=parallaxMinError(gmag,vminiM6V) sigparM6Vmax=parallaxMaxError(gmag,vminiM6V) fig=plt.figure(figsize=(10,6.5)) if (args['gmagAbscissa']): plt.semilogy(gmag, sigparB1V, 'b', label='B1V') plt.semilogy(gmag, sigparG2V, 'g', label='G2V') plt.semilogy(gmag, sigparM6V, 'r', label='M6V') plt.xlim((5,20)) plt.ylim((4,1000)) plt.legend(loc=4) plt.xlabel('$G$ [mag]') else: ax=fig.add_subplot(111) plt.semilogy(vmagB1V, sigparB1V, 'b', label='B1V') #plt.semilogy(vmagG2V, sigparG2V, 'g', label='G2V') plt.semilogy(vmagM6V, sigparM6V, 'r', label='M6V') plt.fill_between(vmagB1V, sigparB1Vmin, sigparB1Vmax, color='b', alpha=0.3) plt.fill_between(vmagM6V, sigparM6Vmin, sigparM6Vmax, color='r', alpha=0.3) plt.xlim((5,22.5)) plt.ylim((4,1000)) plt.text(17.2,190,'B1V',color='b') plt.text(18,20,'M6V',color='r') plt.xlabel('$V$ [mag]') plt.text(7,17,'calibration noise floor', size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) plt.text(14.75,80,'photon noise', rotation=45, size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) ax.annotate('non-uniformity\nover the sky', xy=(21.5, 320), xycoords='data', xytext=(21.5,80), textcoords='data', ha='center', size='12', bbox=dict(boxstyle="round,pad=0.3",ec=(0,0,0),fc=(1,1,1)), arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='top', ) ax.annotate('', xy=(21.5, 500), xycoords='data', xytext=(21.5,950), textcoords='data', ha='center', size='12', arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='bottom', ) plt.xticks(np.arange(6,24,2)) ax = plt.gca().yaxis ax.set_major_formatter(matplotlib.ticker.ScalarFormatter()) plt.ticklabel_format(axis='y',style='plain') plt.grid(which='both') plt.ylabel('End-of-mission parallax standard error [$\mu$as]') if (args['pdfOutput']): plt.savefig('ParallaxErrors.pdf') elif (args['pngOutput']): plt.savefig('ParallaxErrors.png') else: plt.show()
[ "Make", "the", "plot", "with", "parallax", "performance", "predictions", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/plotParallaxErrorsSkyAvg.py#L32-L116
[ "def", "makePlot", "(", "args", ")", ":", "gmag", "=", "np", ".", "linspace", "(", "5.7", ",", "20.0", ",", "101", ")", "vminiB1V", "=", "vminiFromSpt", "(", "'B1V'", ")", "vminiG2V", "=", "vminiFromSpt", "(", "'G2V'", ")", "vminiM6V", "=", "vminiFromSpt", "(", "'M6V'", ")", "vmagB1V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiB1V", ")", "vmagG2V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiG2V", ")", "vmagM6V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiM6V", ")", "sigparB1V", "=", "parallaxErrorSkyAvg", "(", "gmag", ",", "vminiB1V", ")", "sigparB1Vmin", "=", "parallaxMinError", "(", "gmag", ",", "vminiB1V", ")", "sigparB1Vmax", "=", "parallaxMaxError", "(", "gmag", ",", "vminiB1V", ")", "sigparG2V", "=", "parallaxErrorSkyAvg", "(", "gmag", ",", "vminiG2V", ")", "sigparG2Vmin", "=", "parallaxMinError", "(", "gmag", ",", "vminiG2V", ")", "sigparG2Vmax", "=", "parallaxMaxError", "(", "gmag", ",", "vminiG2V", ")", "sigparM6V", "=", "parallaxErrorSkyAvg", "(", "gmag", ",", "vminiM6V", ")", "sigparM6Vmin", "=", "parallaxMinError", "(", "gmag", ",", "vminiM6V", ")", "sigparM6Vmax", "=", "parallaxMaxError", "(", "gmag", ",", "vminiM6V", ")", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6.5", ")", ")", "if", "(", "args", "[", "'gmagAbscissa'", "]", ")", ":", "plt", ".", "semilogy", "(", "gmag", ",", "sigparB1V", ",", "'b'", ",", "label", "=", "'B1V'", ")", "plt", ".", "semilogy", "(", "gmag", ",", "sigparG2V", ",", "'g'", ",", "label", "=", "'G2V'", ")", "plt", ".", "semilogy", "(", "gmag", ",", "sigparM6V", ",", "'r'", ",", "label", "=", "'M6V'", ")", "plt", ".", "xlim", "(", "(", "5", ",", "20", ")", ")", "plt", ".", "ylim", "(", "(", "4", ",", "1000", ")", ")", "plt", ".", "legend", "(", "loc", "=", "4", ")", "plt", ".", "xlabel", "(", "'$G$ [mag]'", ")", "else", ":", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "plt", ".", "semilogy", "(", "vmagB1V", ",", "sigparB1V", ",", "'b'", ",", "label", "=", "'B1V'", ")", "#plt.semilogy(vmagG2V, sigparG2V, 'g', label='G2V')", "plt", ".", "semilogy", "(", "vmagM6V", ",", "sigparM6V", ",", "'r'", ",", "label", "=", "'M6V'", ")", "plt", ".", "fill_between", "(", "vmagB1V", ",", "sigparB1Vmin", ",", "sigparB1Vmax", ",", "color", "=", "'b'", ",", "alpha", "=", "0.3", ")", "plt", ".", "fill_between", "(", "vmagM6V", ",", "sigparM6Vmin", ",", "sigparM6Vmax", ",", "color", "=", "'r'", ",", "alpha", "=", "0.3", ")", "plt", ".", "xlim", "(", "(", "5", ",", "22.5", ")", ")", "plt", ".", "ylim", "(", "(", "4", ",", "1000", ")", ")", "plt", ".", "text", "(", "17.2", ",", "190", ",", "'B1V'", ",", "color", "=", "'b'", ")", "plt", ".", "text", "(", "18", ",", "20", ",", "'M6V'", ",", "color", "=", "'r'", ")", "plt", ".", "xlabel", "(", "'$V$ [mag]'", ")", "plt", ".", "text", "(", "7", ",", "17", ",", "'calibration noise floor'", ",", "size", "=", "12", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0.0", ",", "0.0", ",", "0.0", ")", ",", "fc", "=", "(", "1.0", ",", "1.0", ",", "1.0", ")", ",", ")", ")", "plt", ".", "text", "(", "14.75", ",", "80", ",", "'photon noise'", ",", "rotation", "=", "45", ",", "size", "=", "12", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0.0", ",", "0.0", ",", "0.0", ")", ",", "fc", "=", "(", "1.0", ",", "1.0", ",", "1.0", ")", ",", ")", ")", "ax", ".", "annotate", "(", "'non-uniformity\\nover the sky'", ",", "xy", "=", "(", "21.5", ",", "320", ")", ",", "xycoords", "=", "'data'", ",", "xytext", "=", 
"(", "21.5", ",", "80", ")", ",", "textcoords", "=", "'data'", ",", "ha", "=", "'center'", ",", "size", "=", "'12'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0", ",", "0", ",", "0", ")", ",", "fc", "=", "(", "1", ",", "1", ",", "1", ")", ")", ",", "arrowprops", "=", "dict", "(", "facecolor", "=", "'black'", ",", "shrink", "=", "0.15", ",", "width", "=", "1", ",", "headwidth", "=", "6", ")", ",", "horizontalalignment", "=", "'right'", ",", "verticalalignment", "=", "'top'", ",", ")", "ax", ".", "annotate", "(", "''", ",", "xy", "=", "(", "21.5", ",", "500", ")", ",", "xycoords", "=", "'data'", ",", "xytext", "=", "(", "21.5", ",", "950", ")", ",", "textcoords", "=", "'data'", ",", "ha", "=", "'center'", ",", "size", "=", "'12'", ",", "arrowprops", "=", "dict", "(", "facecolor", "=", "'black'", ",", "shrink", "=", "0.15", ",", "width", "=", "1", ",", "headwidth", "=", "6", ")", ",", "horizontalalignment", "=", "'right'", ",", "verticalalignment", "=", "'bottom'", ",", ")", "plt", ".", "xticks", "(", "np", ".", "arange", "(", "6", ",", "24", ",", "2", ")", ")", "ax", "=", "plt", ".", "gca", "(", ")", ".", "yaxis", "ax", ".", "set_major_formatter", "(", "matplotlib", ".", "ticker", ".", "ScalarFormatter", "(", ")", ")", "plt", ".", "ticklabel_format", "(", "axis", "=", "'y'", ",", "style", "=", "'plain'", ")", "plt", ".", "grid", "(", "which", "=", "'both'", ")", "plt", ".", "ylabel", "(", "'End-of-mission parallax standard error [$\\mu$as]'", ")", "if", "(", "args", "[", "'pdfOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "'ParallaxErrors.pdf'", ")", "elif", "(", "args", "[", "'pngOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "'ParallaxErrors.png'", ")", "else", ":", "plt", ".", "show", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
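A minimal driver sketch for the makePlot record above. The import paths for the PyGaia helpers (parallaxErrorSkyAvg and friends, vminiFromSpt, gminvFromVmini) are assumptions about the package layout at this sha and do not appear in the record itself.

# Sketch of a driver for makePlot; the import paths below are assumed, not taken from the record.
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from pygaia.errors.astrometric import parallaxErrorSkyAvg, parallaxMinError, parallaxMaxError  # assumed path
from pygaia.photometry.utils import vminiFromSpt                # assumed path
from pygaia.photometry.transformations import gminvFromVmini    # assumed path

# Mimic the argparse output the function expects: plot against V and show on screen.
args = {'gmagAbscissa': False, 'pdfOutput': False, 'pngOutput': False}
makePlot(args)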
test
plotBrightLimitInV
Plot the bright limit of Gaia in V as a function of (V-I). Parameters ---------- gBright - The bright limit of Gaia in G pdf - If true the plot is saved to a PDF file (VBandBrightLimit.pdf) instead of being shown on screen png - If true the plot is saved to a PNG file (VBandBrightLimit.png) instead of being shown on screen
examples/brightLimitInVband.py
def plotBrightLimitInV(gBright, pdf=False, png=False): """ Plot the bright limit of Gaia in V as a function of (V-I). Parameters ---------- gBright - The bright limit of Gaia in G pdf - If true the plot is saved to a PDF file (VBandBrightLimit.pdf) instead of being shown on screen png - If true the plot is saved to a PNG file (VBandBrightLimit.png) instead of being shown on screen """ vmini=np.linspace(0.0,6.0,1001) gminv=gminvFromVmini(vmini) vBright=gBright-gminv fig=plt.figure(figsize=(10,6.5)) plt.plot(vmini,vBright,'b-') plt.xlabel('$(V-I)$') plt.ylabel('Bright limit of Gaia in $V$') plt.xlim(0,6) plt.ylim(5,11) plt.grid(which='both') plt.title("Bright limit in $G$: {0}".format(gBright)) if (pdf): plt.savefig('VBandBrightLimit.pdf') elif (png): plt.savefig('VBandBrightLimit.png') else: plt.show()
def plotBrightLimitInV(gBright, pdf=False, png=False): """ Plot the bright limit of Gaia in V as a function of (V-I). Parameters ---------- gBright - The bright limit of Gaia in G pdf - If true the plot is saved to a PDF file (VBandBrightLimit.pdf) instead of being shown on screen png - If true the plot is saved to a PNG file (VBandBrightLimit.png) instead of being shown on screen """ vmini=np.linspace(0.0,6.0,1001) gminv=gminvFromVmini(vmini) vBright=gBright-gminv fig=plt.figure(figsize=(10,6.5)) plt.plot(vmini,vBright,'b-') plt.xlabel('$(V-I)$') plt.ylabel('Bright limit of Gaia in $V$') plt.xlim(0,6) plt.ylim(5,11) plt.grid(which='both') plt.title("Bright limit in $G$: {0}".format(gBright)) if (pdf): plt.savefig('VBandBrightLimit.pdf') elif (png): plt.savefig('VBandBrightLimit.png') else: plt.show()
[ "Plot", "the", "bright", "limit", "of", "Gaia", "in", "V", "as", "a", "function", "of", "(", "V", "-", "I", ")", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/brightLimitInVband.py#L26-L53
[ "def", "plotBrightLimitInV", "(", "gBright", ",", "pdf", "=", "False", ",", "png", "=", "False", ")", ":", "vmini", "=", "np", ".", "linspace", "(", "0.0", ",", "6.0", ",", "1001", ")", "gminv", "=", "gminvFromVmini", "(", "vmini", ")", "vBright", "=", "gBright", "-", "gminv", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6.5", ")", ")", "plt", ".", "plot", "(", "vmini", ",", "vBright", ",", "'b-'", ")", "plt", ".", "xlabel", "(", "'$(V-I)$'", ")", "plt", ".", "ylabel", "(", "'Bright limit of Gaia in $V$'", ")", "plt", ".", "xlim", "(", "0", ",", "6", ")", "plt", ".", "ylim", "(", "5", ",", "11", ")", "plt", ".", "grid", "(", "which", "=", "'both'", ")", "plt", ".", "title", "(", "\"Bright limit in $G$: {0}\"", ".", "format", "(", "gBright", ")", ")", "if", "(", "pdf", ")", ":", "plt", ".", "savefig", "(", "'VBandBrightLimit.pdf'", ")", "elif", "(", "png", ")", ":", "plt", ".", "savefig", "(", "'VBandBrightLimit.png'", ")", "else", ":", "plt", ".", "show", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
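A short usage sketch for plotBrightLimitInV; the value 5.7 mag for the bright limit in G is an assumption borrowed from the gmag grids used in the neighbouring plotting scripts.

# Assuming plotBrightLimitInV and its dependencies are in scope:
plotBrightLimitInV(5.7)            # show interactively, for a G bright limit of 5.7 mag
plotBrightLimitInV(5.7, pdf=True)  # or write VBandBrightLimit.pdf instead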
test
sphericalToCartesian
Convert spherical to Cartesian coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), where the latter is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- r - radial coordinate (length of the corresponding Cartesian vector). phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The Cartesian vector components x, y, z
pygaia/astrometry/vectorastrometry.py
def sphericalToCartesian(r, phi, theta): """ Convert spherical to Cartesian coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), where the latter is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- r - radial coordinate (length of the corresponding Cartesian vector). phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The Cartesian vector components x, y, z """ ctheta=cos(theta) x=r*cos(phi)*ctheta y=r*sin(phi)*ctheta z=r*sin(theta) return x, y, z
def sphericalToCartesian(r, phi, theta): """ Convert spherical to Cartesian coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), where the latter is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- r - radial coordinate (length of the corresponding Cartesian vector). phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The Cartesian vector components x, y, z """ ctheta=cos(theta) x=r*cos(phi)*ctheta y=r*sin(phi)*ctheta z=r*sin(theta) return x, y, z
[ "Convert", "spherical", "to", "Cartesian", "coordinates", ".", "The", "input", "can", "be", "scalars", "or", "1", "-", "dimensional", "numpy", "arrays", ".", "Note", "that", "the", "angle", "coordinates", "follow", "the", "astronomical", "convention", "of", "using", "elevation", "(", "declination", "latitude", ")", "rather", "than", "its", "complement", "(", "pi", "/", "2", "-", "elevation", ")", "where", "the", "latter", "is", "commonly", "used", "in", "the", "mathematical", "treatment", "of", "spherical", "coordinates", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L19-L42
[ "def", "sphericalToCartesian", "(", "r", ",", "phi", ",", "theta", ")", ":", "ctheta", "=", "cos", "(", "theta", ")", "x", "=", "r", "*", "cos", "(", "phi", ")", "*", "ctheta", "y", "=", "r", "*", "sin", "(", "phi", ")", "*", "ctheta", "z", "=", "r", "*", "sin", "(", "theta", ")", "return", "x", ",", "y", ",", "z" ]
ae972b0622a15f713ffae471f925eac25ccdae47
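A small numerical check of sphericalToCartesian under the elevation convention described in its docstring; a sketch assuming the function and numpy are in scope.

import numpy as np

# Elevation convention: theta = pi/2 is the pole, so (r=2, phi=0, theta=pi/2)
# maps to a point on the +z axis.
x, y, z = sphericalToCartesian(2.0, 0.0, np.pi/2)
print(np.allclose([x, y, z], [0.0, 0.0, 2.0]))  # True

# The function broadcasts over 1-dimensional arrays of angles, as advertised.
phi = np.array([0.0, np.pi/2])
theta = np.zeros(2)
print(sphericalToCartesian(1.0, phi, theta))    # unit vectors along x and y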
test
cartesianToSpherical
Convert Cartesian to spherical coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), which is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- x - Cartesian vector component along the X-axis y - Cartesian vector component along the Y-axis z - Cartesian vector component along the Z-axis Returns ------- The spherical coordinates r=sqrt(x*x+y*y+z*z), longitude phi, latitude theta. NOTE THAT THE LONGITUDE ANGLE IS BETWEEN 0 AND +2PI. FOR r=0 AN EXCEPTION IS RAISED.
pygaia/astrometry/vectorastrometry.py
def cartesianToSpherical(x, y, z): """ Convert Cartesian to spherical coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), which is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- x - Cartesian vector component along the X-axis y - Cartesian vector component along the Y-axis z - Cartesian vector component along the Z-axis Returns ------- The spherical coordinates r=sqrt(x*x+y*y+z*z), longitude phi, latitude theta. NOTE THAT THE LONGITUDE ANGLE IS BETWEEN 0 AND +2PI. FOR r=0 AN EXCEPTION IS RAISED. """ rCylSq=x*x+y*y r=sqrt(rCylSq+z*z) if any(r==0.0): raise Exception("Error: one or more of the points is at distance zero.") phi = arctan2(y,x) phi = where(phi<0.0, phi+2*pi, phi) return r, phi, arctan2(z,sqrt(rCylSq))
def cartesianToSpherical(x, y, z): """ Convert Cartesian to spherical coordinates. The input can be scalars or 1-dimensional numpy arrays. Note that the angle coordinates follow the astronomical convention of using elevation (declination, latitude) rather than its complement (pi/2-elevation), which is commonly used in the mathematical treatment of spherical coordinates. Parameters ---------- x - Cartesian vector component along the X-axis y - Cartesian vector component along the Y-axis z - Cartesian vector component along the Z-axis Returns ------- The spherical coordinates r=sqrt(x*x+y*y+z*z), longitude phi, latitude theta. NOTE THAT THE LONGITUDE ANGLE IS BETWEEN 0 AND +2PI. FOR r=0 AN EXCEPTION IS RAISED. """ rCylSq=x*x+y*y r=sqrt(rCylSq+z*z) if any(r==0.0): raise Exception("Error: one or more of the points is at distance zero.") phi = arctan2(y,x) phi = where(phi<0.0, phi+2*pi, phi) return r, phi, arctan2(z,sqrt(rCylSq))
[ "Convert", "Cartesian", "to", "spherical", "coordinates", ".", "The", "input", "can", "be", "scalars", "or", "1", "-", "dimensional", "numpy", "arrays", ".", "Note", "that", "the", "angle", "coordinates", "follow", "the", "astronomical", "convention", "of", "using", "elevation", "(", "declination", "latitude", ")", "rather", "than", "its", "complement", "(", "pi", "/", "2", "-", "elevation", ")", "which", "is", "commonly", "used", "in", "the", "mathematical", "treatment", "of", "spherical", "coordinates", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L44-L71
[ "def", "cartesianToSpherical", "(", "x", ",", "y", ",", "z", ")", ":", "rCylSq", "=", "x", "*", "x", "+", "y", "*", "y", "r", "=", "sqrt", "(", "rCylSq", "+", "z", "*", "z", ")", "if", "any", "(", "r", "==", "0.0", ")", ":", "raise", "Exception", "(", "\"Error: one or more of the points is at distance zero.\"", ")", "phi", "=", "arctan2", "(", "y", ",", "x", ")", "phi", "=", "where", "(", "phi", "<", "0.0", ",", "phi", "+", "2", "*", "pi", ",", "phi", ")", "return", "r", ",", "phi", ",", "arctan2", "(", "z", ",", "sqrt", "(", "rCylSq", ")", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
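A sketch illustrating the two conventions called out (in capitals) in the cartesianToSpherical docstring: longitudes folded into [0, 2*pi) and the exception raised at the origin.

import numpy as np

# Negative-y points get a longitude in (pi, 2*pi) rather than a negative angle.
r, phi, theta = cartesianToSpherical(1.0, -1.0, 0.0)
print(np.degrees(phi))  # ~315.0, not -45.0

# The origin has no defined direction, so the function raises.
try:
    cartesianToSpherical(0.0, 0.0, 0.0)
except Exception as err:
    print(err)  # Error: one or more of the points is at distance zero.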
test
normalTriad
Calculate the so-called normal triad [p, q, r] which is associated with a spherical coordinate system. The three vectors are: p - The unit tangent vector in the direction of increasing longitudinal angle phi. q - The unit tangent vector in the direction of increasing latitudinal angle theta. r - The unit vector toward the point (phi, theta). Parameters ---------- phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The normal triad as the vectors p, q, r
pygaia/astrometry/vectorastrometry.py
def normalTriad(phi, theta): """ Calculate the so-called normal triad [p, q, r] which is associated with a spherical coordinate system. The three vectors are: p - The unit tangent vector in the direction of increasing longitudinal angle phi. q - The unit tangent vector in the direction of increasing latitudinal angle theta. r - The unit vector toward the point (phi, theta). Parameters ---------- phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The normal triad as the vectors p, q, r """ sphi=sin(phi) stheta=sin(theta) cphi=cos(phi) ctheta=cos(theta) p=array([-sphi, cphi, zeros_like(phi)]) q=array([-stheta*cphi, -stheta*sphi, ctheta]) r=array([ctheta*cphi, ctheta*sphi, stheta]) return p, q, r
def normalTriad(phi, theta): """ Calculate the so-called normal triad [p, q, r] which is associated with a spherical coordinate system. The three vectors are: p - The unit tangent vector in the direction of increasing longitudinal angle phi. q - The unit tangent vector in the direction of increasing latitudinal angle theta. r - The unit vector toward the point (phi, theta). Parameters ---------- phi - longitude-like angle (e.g., right ascension, ecliptic longitude) in radians theta - latitude-like angle (e.g., declination, ecliptic latitude) in radians Returns ------- The normal triad as the vectors p, q, r """ sphi=sin(phi) stheta=sin(theta) cphi=cos(phi) ctheta=cos(theta) p=array([-sphi, cphi, zeros_like(phi)]) q=array([-stheta*cphi, -stheta*sphi, ctheta]) r=array([ctheta*cphi, ctheta*sphi, stheta]) return p, q, r
[ "Calculate", "the", "so", "-", "called", "normal", "triad", "[", "p", "q", "r", "]", "which", "is", "associated", "with", "a", "spherical", "coordinate", "system", ".", "The", "three", "vectors", "are", ":" ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L73-L100
[ "def", "normalTriad", "(", "phi", ",", "theta", ")", ":", "sphi", "=", "sin", "(", "phi", ")", "stheta", "=", "sin", "(", "theta", ")", "cphi", "=", "cos", "(", "phi", ")", "ctheta", "=", "cos", "(", "theta", ")", "p", "=", "array", "(", "[", "-", "sphi", ",", "cphi", ",", "zeros_like", "(", "phi", ")", "]", ")", "q", "=", "array", "(", "[", "-", "stheta", "*", "cphi", ",", "-", "stheta", "*", "sphi", ",", "ctheta", "]", ")", "r", "=", "array", "(", "[", "ctheta", "*", "cphi", ",", "ctheta", "*", "sphi", ",", "stheta", "]", ")", "return", "p", ",", "q", ",", "r" ]
ae972b0622a15f713ffae471f925eac25ccdae47
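The triad returned by normalTriad is orthonormal and right-handed; a quick verification sketch, assuming the function is in scope.

import numpy as np

phi, theta = 0.3, -0.7                       # arbitrary angles in radians
p, q, r = normalTriad(phi, theta)

print(np.allclose(np.dot(p, q), 0.0))        # p and q are orthogonal
print(np.allclose(np.linalg.norm(r), 1.0))   # r is a unit vector
print(np.allclose(np.cross(p, q), r))        # right-handed: p x q = r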
test
elementaryRotationMatrix
Construct an elementary rotation matrix describing a rotation around the x, y, or z-axis. Parameters ---------- axis - Axis around which to rotate ("x", "y", or "z") rotationAngle - the rotation angle in radians Returns ------- The rotation matrix Example usage ------------- rotmat = elementaryRotationMatrix("y", pi/6.0)
pygaia/astrometry/vectorastrometry.py
def elementaryRotationMatrix(axis, rotationAngle): """ Construct an elementary rotation matrix describing a rotation around the x, y, or z-axis. Parameters ---------- axis - Axis around which to rotate ("x", "y", or "z") rotationAngle - the rotation angle in radians Returns ------- The rotation matrix Example usage ------------- rotmat = elementaryRotationMatrix("y", pi/6.0) """ if (axis=="x" or axis=="X"): return array([[1.0, 0.0, 0.0], [0.0, cos(rotationAngle), sin(rotationAngle)], [0.0, -sin(rotationAngle), cos(rotationAngle)]]) elif (axis=="y" or axis=="Y"): return array([[cos(rotationAngle), 0.0, -sin(rotationAngle)], [0.0, 1.0, 0.0], [sin(rotationAngle), 0.0, cos(rotationAngle)]]) elif (axis=="z" or axis=="Z"): return array([[cos(rotationAngle), sin(rotationAngle), 0.0], [-sin(rotationAngle), cos(rotationAngle), 0.0], [0.0, 0.0, 1.0]]) else: raise Exception("Unknown rotation axis "+axis+"!")
def elementaryRotationMatrix(axis, rotationAngle): """ Construct an elementary rotation matrix describing a rotation around the x, y, or z-axis. Parameters ---------- axis - Axis around which to rotate ("x", "y", or "z") rotationAngle - the rotation angle in radians Returns ------- The rotation matrix Example usage ------------- rotmat = elementaryRotationMatrix("y", pi/6.0) """ if (axis=="x" or axis=="X"): return array([[1.0, 0.0, 0.0], [0.0, cos(rotationAngle), sin(rotationAngle)], [0.0, -sin(rotationAngle), cos(rotationAngle)]]) elif (axis=="y" or axis=="Y"): return array([[cos(rotationAngle), 0.0, -sin(rotationAngle)], [0.0, 1.0, 0.0], [sin(rotationAngle), 0.0, cos(rotationAngle)]]) elif (axis=="z" or axis=="Z"): return array([[cos(rotationAngle), sin(rotationAngle), 0.0], [-sin(rotationAngle), cos(rotationAngle), 0.0], [0.0, 0.0, 1.0]]) else: raise Exception("Unknown rotation axis "+axis+"!")
[ "Construct", "an", "elementary", "rotation", "matrix", "describing", "a", "rotation", "around", "the", "x", "y", "or", "z", "-", "axis", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L102-L132
[ "def", "elementaryRotationMatrix", "(", "axis", ",", "rotationAngle", ")", ":", "if", "(", "axis", "==", "\"x\"", "or", "axis", "==", "\"X\"", ")", ":", "return", "array", "(", "[", "[", "1.0", ",", "0.0", ",", "0.0", "]", ",", "[", "0.0", ",", "cos", "(", "rotationAngle", ")", ",", "sin", "(", "rotationAngle", ")", "]", ",", "[", "0.0", ",", "-", "sin", "(", "rotationAngle", ")", ",", "cos", "(", "rotationAngle", ")", "]", "]", ")", "elif", "(", "axis", "==", "\"y\"", "or", "axis", "==", "\"Y\"", ")", ":", "return", "array", "(", "[", "[", "cos", "(", "rotationAngle", ")", ",", "0.0", ",", "-", "sin", "(", "rotationAngle", ")", "]", ",", "[", "0.0", ",", "1.0", ",", "0.0", "]", ",", "[", "sin", "(", "rotationAngle", ")", ",", "0.0", ",", "cos", "(", "rotationAngle", ")", "]", "]", ")", "elif", "(", "axis", "==", "\"z\"", "or", "axis", "==", "\"Z\"", ")", ":", "return", "array", "(", "[", "[", "cos", "(", "rotationAngle", ")", ",", "sin", "(", "rotationAngle", ")", ",", "0.0", "]", ",", "[", "-", "sin", "(", "rotationAngle", ")", ",", "cos", "(", "rotationAngle", ")", ",", "0.0", "]", ",", "[", "0.0", ",", "0.0", ",", "1.0", "]", "]", ")", "else", ":", "raise", "Exception", "(", "\"Unknown rotation axis \"", "+", "axis", "+", "\"!\"", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
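Note the sign convention of elementaryRotationMatrix: these matrices describe a rotation of the coordinate frame (a passive rotation), so applying one to a fixed vector yields that vector's components in the rotated frame. A short demonstration, assuming the function is in scope:

import numpy as np

# Rotate the frame by +90 degrees about z; the fixed x-axis vector then has
# components (0, -1, 0) in the new frame.
R = elementaryRotationMatrix("z", np.pi/2)
print(np.round(R @ np.array([1.0, 0.0, 0.0])))  # [ 0. -1.  0.]

# Elementary rotations compose by matrix multiplication, applied right to left.
Rtot = elementaryRotationMatrix("x", 0.1) @ elementaryRotationMatrix("z", 0.2)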
test
phaseSpaceToAstrometry
From the given phase space coordinates calculate the astrometric observables, including the radial velocity, which here is seen as the sixth astrometric parameter. The phase space coordinates are assumed to represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The velocity units are always assumed to be km/s, and the code is set up such that for positions in pc, the return units for the astrometry are radians, milliarcsec, milliarcsec/year and km/s. For positions in kpc the return units are: radians, microarcsec, microarcsec/year, and km/s. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. Parameters ---------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s). Returns ------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s)
pygaia/astrometry/vectorastrometry.py
def phaseSpaceToAstrometry(x, y, z, vx, vy, vz): """ From the given phase space coordinates calculate the astrometric observables, including the radial velocity, which here is seen as the sixth astrometric parameter. The phase space coordinates are assumed to represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The velocity units are always assumed to be km/s, and the code is set up such that for positions in pc, the return units for the astrometry are radians, milliarcsec, milliarcsec/year and km/s. For positions in kpc the return units are: radians, microarcsec, microarcsec/year, and km/s. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. Parameters ---------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s). Returns ------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s) """ u, phi, theta = cartesianToSpherical(x, y, z) parallax = _auMasParsec/u p, q, r = normalTriad(phi, theta) velocitiesArray=array([vx,vy,vz]) if isscalar(u): muphistar=dot(p,velocitiesArray)*parallax/_auKmYearPerSec mutheta=dot(q,velocitiesArray)*parallax/_auKmYearPerSec vrad=dot(r,velocitiesArray) else: muphistar=zeros_like(parallax) mutheta=zeros_like(parallax) vrad=zeros_like(parallax) for i in range(parallax.size): muphistar[i]=dot(p[:,i],velocitiesArray[:,i])*parallax[i]/_auKmYearPerSec mutheta[i]=dot(q[:,i],velocitiesArray[:,i])*parallax[i]/_auKmYearPerSec vrad[i]=dot(r[:,i],velocitiesArray[:,i]) return phi, theta, parallax, muphistar, mutheta, vrad
def phaseSpaceToAstrometry(x, y, z, vx, vy, vz): """ From the given phase space coordinates calculate the astrometric observables, including the radial velocity, which here is seen as the sixth astrometric parameter. The phase space coordinates are assumed to represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The velocity units are always assumed to be km/s, and the code is set up such that for positions in pc, the return units for the astrometry are radians, milliarcsec, milliarcsec/year and km/s. For positions in kpc the return units are: radians, microarcsec, microarcsec/year, and km/s. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. Parameters ---------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s). Returns ------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s) """ u, phi, theta = cartesianToSpherical(x, y, z) parallax = _auMasParsec/u p, q, r = normalTriad(phi, theta) velocitiesArray=array([vx,vy,vz]) if isscalar(u): muphistar=dot(p,velocitiesArray)*parallax/_auKmYearPerSec mutheta=dot(q,velocitiesArray)*parallax/_auKmYearPerSec vrad=dot(r,velocitiesArray) else: muphistar=zeros_like(parallax) mutheta=zeros_like(parallax) vrad=zeros_like(parallax) for i in range(parallax.size): muphistar[i]=dot(p[:,i],velocitiesArray[:,i])*parallax[i]/_auKmYearPerSec mutheta[i]=dot(q[:,i],velocitiesArray[:,i])*parallax[i]/_auKmYearPerSec vrad[i]=dot(r[:,i],velocitiesArray[:,i]) return phi, theta, parallax, muphistar, mutheta, vrad
[ "From", "the", "given", "phase", "space", "coordinates", "calculate", "the", "astrometric", "observables", "including", "the", "radial", "velocity", "which", "here", "is", "seen", "as", "the", "sixth", "astrometric", "parameter", ".", "The", "phase", "space", "coordinates", "are", "assumed", "to", "represent", "barycentric", "(", "i", ".", "e", ".", "centred", "on", "the", "Sun", ")", "positions", "and", "velocities", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L134-L186
[ "def", "phaseSpaceToAstrometry", "(", "x", ",", "y", ",", "z", ",", "vx", ",", "vy", ",", "vz", ")", ":", "u", ",", "phi", ",", "theta", "=", "cartesianToSpherical", "(", "x", ",", "y", ",", "z", ")", "parallax", "=", "_auMasParsec", "/", "u", "p", ",", "q", ",", "r", "=", "normalTriad", "(", "phi", ",", "theta", ")", "velocitiesArray", "=", "array", "(", "[", "vx", ",", "vy", ",", "vz", "]", ")", "if", "isscalar", "(", "u", ")", ":", "muphistar", "=", "dot", "(", "p", ",", "velocitiesArray", ")", "*", "parallax", "/", "_auKmYearPerSec", "mutheta", "=", "dot", "(", "q", ",", "velocitiesArray", ")", "*", "parallax", "/", "_auKmYearPerSec", "vrad", "=", "dot", "(", "r", ",", "velocitiesArray", ")", "else", ":", "muphistar", "=", "zeros_like", "(", "parallax", ")", "mutheta", "=", "zeros_like", "(", "parallax", ")", "vrad", "=", "zeros_like", "(", "parallax", ")", "for", "i", "in", "range", "(", "parallax", ".", "size", ")", ":", "muphistar", "[", "i", "]", "=", "dot", "(", "p", "[", ":", ",", "i", "]", ",", "velocitiesArray", "[", ":", ",", "i", "]", ")", "*", "parallax", "[", "i", "]", "/", "_auKmYearPerSec", "mutheta", "[", "i", "]", "=", "dot", "(", "q", "[", ":", ",", "i", "]", ",", "velocitiesArray", "[", ":", ",", "i", "]", ")", "*", "parallax", "[", "i", "]", "/", "_auKmYearPerSec", "vrad", "[", "i", "]", "=", "dot", "(", "r", "[", ":", ",", "i", "]", ",", "velocitiesArray", "[", ":", ",", "i", "]", ")", "return", "phi", ",", "theta", ",", "parallax", ",", "muphistar", ",", "mutheta", ",", "vrad" ]
ae972b0622a15f713ffae471f925eac25ccdae47
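A worked example for phaseSpaceToAstrometry with positions in pc, so the outputs come back in mas and mas/yr. The expected numbers follow from parallax = 1000/d and mu = v_t * parallax / 4.74047, the factors encoded in the module constants _auMasParsec and _auKmYearPerSec.

# Assuming phaseSpaceToAstrometry is in scope: a star 10 pc away on the x-axis,
# moving at 10 km/s along +y, i.e. a purely tangential velocity.
phi, theta, plx, muphistar, mutheta, vrad = phaseSpaceToAstrometry(
    10.0, 0.0, 0.0,    # position in pc
    0.0, 10.0, 0.0)    # velocity in km/s

print(plx)             # 100.0 mas       (1000/10)
print(muphistar)       # ~210.95 mas/yr  (10 * 100 / 4.74047)
print(mutheta, vrad)   # 0.0 mas/yr and 0.0 km/s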
test
astrometryToPhaseSpace
From the input astrometric parameters calculate the phase space coordinates. The output phase space coordinates represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The code is set up such that for input astrometry with parallaxes and proper motions in mas and mas/yr, and radial velocities in km/s, the phase space coordinates are in pc and km/s. For input astrometry with parallaxes and proper motions in muas and muas/yr, and radial velocities in km/s, the phase space coordinates are in kpc and km/s. Only positive parallaxes are accepted, an exception is thrown if this condition is not met. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. THIS FUNCTION SHOULD NOT BE USED WHEN THE PARALLAXES HAVE RELATIVE ERRORS LARGER THAN ABOUT 20 PER CENT (see http://arxiv.org/abs/1507.02105 for example). For astrometric data with relatively large parallax errors you should consider doing your analysis in the data space and use forward modelling of some kind. Parameters ---------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s) Returns ------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s).
pygaia/astrometry/vectorastrometry.py
def astrometryToPhaseSpace(phi, theta, parallax, muphistar, mutheta, vrad): """ From the input astrometric parameters calculate the phase space coordinates. The output phase space coordinates represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The code is set up such that for input astrometry with parallaxes and proper motions in mas and mas/yr, and radial velocities in km/s, the phase space coordinates are in pc and km/s. For input astrometry with parallaxes and proper motions in muas and muas/yr, and radial velocities in km/s, the phase space coordinates are in kpc and km/s. Only positive parallaxes are accepted, an exception is thrown if this condition is not met. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. THIS FUNCTION SHOULD NOT BE USED WHEN THE PARALLAXES HAVE RELATIVE ERRORS LARGER THAN ABOUT 20 PER CENT (see http://arxiv.org/abs/1507.02105 for example). For astrometric data with relatively large parallax errors you should consider doing your analysis in the data space and use forward modelling of some kind. Parameters ---------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s) Returns ------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s). """ if any(parallax<=0.0): raise Exception("One or more of the input parallaxes is non-positive") x, y, z = sphericalToCartesian(_auMasParsec/parallax, phi, theta) p, q, r = normalTriad(phi, theta) transverseMotionArray = array([muphistar*_auKmYearPerSec/parallax, mutheta*_auKmYearPerSec/parallax, vrad]) if isscalar(parallax): velocityArray=dot(transpose(array([p, q, r])),transverseMotionArray) vx = velocityArray[0] vy = velocityArray[1] vz = velocityArray[2] else: vx = zeros_like(parallax) vy = zeros_like(parallax) vz = zeros_like(parallax) for i in range(parallax.size): velocityArray = dot(transpose(array([p[:,i], q[:,i], r[:,i]])), transverseMotionArray[:,i]) vx[i] = velocityArray[0] vy[i] = velocityArray[1] vz[i] = velocityArray[2] return x, y, z, vx, vy, vz
def astrometryToPhaseSpace(phi, theta, parallax, muphistar, mutheta, vrad): """ From the input astrometric parameters calculate the phase space coordinates. The output phase space coordinates represent barycentric (i.e. centred on the Sun) positions and velocities. This function has no mechanism to deal with units. The code is set up such that for input astrometry with parallaxes and proper motions in mas and mas/yr, and radial velocities in km/s, the phase space coordinates are in pc and km/s. For input astrometry with parallaxes and proper motions in muas and muas/yr, and radial velocities in km/s, the phase space coordinates are in kpc and km/s. Only positive parallaxes are accepted, an exception is thrown if this condition is not met. NOTE that the doppler factor k=1/(1-vrad/c) is NOT used in the calculations. This is not a problem for sources moving at typical velocities of Galactic stars. THIS FUNCTION SHOULD NOT BE USED WHEN THE PARALLAXES HAVE RELATIVE ERRORS LARGER THAN ABOUT 20 PER CENT (see http://arxiv.org/abs/1507.02105 for example). For astrometric data with relatively large parallax errors you should consider doing your analysis in the data space and use forward modelling of some kind. Parameters ---------- phi - The longitude-like angle of the position of the source (radians). theta - The latitude-like angle of the position of the source (radians). parallax - The parallax of the source (in mas or muas, see above) muphistar - The proper motion in the longitude-like angle, multiplied by cos(theta) (mas/yr or muas/yr, see above) mutheta - The proper motion in the latitude-like angle (mas/yr or muas/yr, see above) vrad - The radial velocity (km/s) Returns ------- x - The x component of the barycentric position vector (in pc or kpc). y - The y component of the barycentric position vector (in pc or kpc). z - The z component of the barycentric position vector (in pc or kpc). vx - The x component of the barycentric velocity vector (in km/s). vy - The y component of the barycentric velocity vector (in km/s). vz - The z component of the barycentric velocity vector (in km/s). """ if any(parallax<=0.0): raise Exception("One or more of the input parallaxes is non-positive") x, y, z = sphericalToCartesian(_auMasParsec/parallax, phi, theta) p, q, r = normalTriad(phi, theta) transverseMotionArray = array([muphistar*_auKmYearPerSec/parallax, mutheta*_auKmYearPerSec/parallax, vrad]) if isscalar(parallax): velocityArray=dot(transpose(array([p, q, r])),transverseMotionArray) vx = velocityArray[0] vy = velocityArray[1] vz = velocityArray[2] else: vx = zeros_like(parallax) vy = zeros_like(parallax) vz = zeros_like(parallax) for i in range(parallax.size): velocityArray = dot(transpose(array([p[:,i], q[:,i], r[:,i]])), transverseMotionArray[:,i]) vx[i] = velocityArray[0] vy[i] = velocityArray[1] vz[i] = velocityArray[2] return x, y, z, vx, vy, vz
[ "From", "the", "input", "astrometric", "parameters", "calculate", "the", "phase", "space", "coordinates", ".", "The", "output", "phase", "space", "coordinates", "represent", "barycentric", "(", "i", ".", "e", ".", "centred", "on", "the", "Sun", ")", "positions", "and", "velocities", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/astrometry/vectorastrometry.py#L188-L248
[ "def", "astrometryToPhaseSpace", "(", "phi", ",", "theta", ",", "parallax", ",", "muphistar", ",", "mutheta", ",", "vrad", ")", ":", "if", "any", "(", "parallax", "<=", "0.0", ")", ":", "raise", "Exception", "(", "\"One or more of the input parallaxes is non-positive\"", ")", "x", ",", "y", ",", "z", "=", "sphericalToCartesian", "(", "_auMasParsec", "/", "parallax", ",", "phi", ",", "theta", ")", "p", ",", "q", ",", "r", "=", "normalTriad", "(", "phi", ",", "theta", ")", "transverseMotionArray", "=", "array", "(", "[", "muphistar", "*", "_auKmYearPerSec", "/", "parallax", ",", "mutheta", "*", "_auKmYearPerSec", "/", "parallax", ",", "vrad", "]", ")", "if", "isscalar", "(", "parallax", ")", ":", "velocityArray", "=", "dot", "(", "transpose", "(", "array", "(", "[", "p", ",", "q", ",", "r", "]", ")", ")", ",", "transverseMotionArray", ")", "vx", "=", "velocityArray", "[", "0", "]", "vy", "=", "velocityArray", "[", "1", "]", "vz", "=", "velocityArray", "[", "2", "]", "else", ":", "vx", "=", "zeros_like", "(", "parallax", ")", "vy", "=", "zeros_like", "(", "parallax", ")", "vz", "=", "zeros_like", "(", "parallax", ")", "for", "i", "in", "range", "(", "parallax", ".", "size", ")", ":", "velocityArray", "=", "dot", "(", "transpose", "(", "array", "(", "[", "p", "[", ":", ",", "i", "]", ",", "q", "[", ":", ",", "i", "]", ",", "r", "[", ":", ",", "i", "]", "]", ")", ")", ",", "transverseMotionArray", "[", ":", ",", "i", "]", ")", "vx", "[", "i", "]", "=", "velocityArray", "[", "0", "]", "vy", "[", "i", "]", "=", "velocityArray", "[", "1", "]", "vz", "[", "i", "]", "=", "velocityArray", "[", "2", "]", "return", "x", ",", "y", ",", "z", ",", "vx", ",", "vy", ",", "vz" ]
ae972b0622a15f713ffae471f925eac25ccdae47
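Since astrometryToPhaseSpace inverts phaseSpaceToAstrometry, a round trip should recover the inputs (for positive parallaxes); a minimal consistency sketch:

import numpy as np

xyzuvw_in = (10.0, -5.0, 3.0, 20.0, -7.0, 4.0)   # pc and km/s
astrom = phaseSpaceToAstrometry(*xyzuvw_in)      # radians, mas, mas/yr, km/s
xyzuvw_out = astrometryToPhaseSpace(*astrom)
print(np.allclose(xyzuvw_in, xyzuvw_out))        # True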
test
makePlot
Make the plot with proper motion performance predictions. The predictions are for the TOTAL proper motion under the assumption of equal components mu_alpha* and mu_delta. :argument args: command line arguments
examples/plotProperMotionErrorsSkyAvg.py
def makePlot(args): """ Make the plot with proper motion performance predictions. The predictions are for the TOTAL proper motion under the assumption of equal components mu_alpha* and mu_delta. :argument args: command line arguments """ gmag=np.linspace(5.7,20.0,101) vminiB1V=vminiFromSpt('B1V') vminiG2V=vminiFromSpt('G2V') vminiM6V=vminiFromSpt('M6V') vmagB1V=gmag-gminvFromVmini(vminiB1V) vmagG2V=gmag-gminvFromVmini(vminiG2V) vmagM6V=gmag-gminvFromVmini(vminiM6V) sigmualphaB1V, sigmudeltaB1V = properMotionErrorSkyAvg(gmag,vminiB1V) sigmuB1V = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaB1V, sigmudeltaB1V = properMotionMinError(gmag,vminiB1V) sigmuB1Vmin = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaB1V, sigmudeltaB1V = properMotionMaxError(gmag,vminiB1V) sigmuB1Vmax = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaG2V, sigmudeltaG2V = properMotionErrorSkyAvg(gmag,vminiG2V) sigmuG2V = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaG2V, sigmudeltaG2V = properMotionMinError(gmag,vminiG2V) sigmuG2Vmin = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaG2V, sigmudeltaG2V = properMotionMaxError(gmag,vminiG2V) sigmuG2Vmax = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaM6V, sigmudeltaM6V = properMotionErrorSkyAvg(gmag,vminiM6V) sigmuM6V = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) sigmualphaM6V, sigmudeltaM6V = properMotionMinError(gmag,vminiM6V) sigmuM6Vmin = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) sigmualphaM6V, sigmudeltaM6V = properMotionMaxError(gmag,vminiM6V) sigmuM6Vmax = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) fig=plt.figure(figsize=(10,6.5)) if (args['gmagAbscissa']): plt.semilogy(gmag, sigmuB1V, 'b', label='B1V') plt.semilogy(gmag, sigmuG2V, 'g', label='G2V') plt.semilogy(gmag, sigmuM6V, 'r', label='M6V') plt.xlim((5,20)) plt.ylim((1,500)) plt.legend(loc=4) else: ax=fig.add_subplot(111) plt.semilogy(vmagB1V, sigmuB1V, 'b', label='B1V') #plt.semilogy(vmagG2V, sigmuG2V, 'g', label='G2V') plt.semilogy(vmagM6V, sigmuM6V, 'r', label='M6V') plt.fill_between(vmagB1V, sigmuB1Vmin, sigmuB1Vmax, color='b', alpha=0.3) plt.fill_between(vmagM6V, sigmuM6Vmin, sigmuM6Vmax, color='r', alpha=0.3) plt.xlim((5,22.5)) plt.ylim((1,500)) plt.text(17.5,100,'B1V',color='b') plt.text(18,10,'M6V',color='r') plt.text(7,11,'calibration noise floor', size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) plt.text(14.75,50,'photon noise', rotation=45, size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) ax.annotate('non-uniformity\nover the sky', xy=(21.5, 80), xycoords='data', xytext=(21.5,30), textcoords='data', ha='center', size='12', bbox=dict(boxstyle="round,pad=0.3",ec=(0,0,0),fc=(1,1,1)), arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='top', ) ax.annotate('', xy=(21.5, 170), xycoords='data', xytext=(21.5,380), textcoords='data', ha='center', size='12', arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='bottom', ) plt.xticks(np.arange(6,24,2)) ax = plt.gca().yaxis ax.set_major_formatter(matplotlib.ticker.ScalarFormatter()) plt.ticklabel_format(axis='y',style='plain') plt.grid(which='both') plt.xlabel('$V$ [mag]') plt.ylabel('End-of-mission $\\sigma_\\mu$ [$\mu$as/yr]') basename = 'ProperMotionErrors' if (args['pdfOutput']): plt.savefig(basename+'.pdf') elif (args['pngOutput']): plt.savefig(basename+'.png') else: plt.show()
def makePlot(args): """ Make the plot with proper motion performance predictions. The predictions are for the TOTAL proper motion under the assumption of equal components mu_alpha* and mu_delta. :argument args: command line arguments """ gmag=np.linspace(5.7,20.0,101) vminiB1V=vminiFromSpt('B1V') vminiG2V=vminiFromSpt('G2V') vminiM6V=vminiFromSpt('M6V') vmagB1V=gmag-gminvFromVmini(vminiB1V) vmagG2V=gmag-gminvFromVmini(vminiG2V) vmagM6V=gmag-gminvFromVmini(vminiM6V) sigmualphaB1V, sigmudeltaB1V = properMotionErrorSkyAvg(gmag,vminiB1V) sigmuB1V = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaB1V, sigmudeltaB1V = properMotionMinError(gmag,vminiB1V) sigmuB1Vmin = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaB1V, sigmudeltaB1V = properMotionMaxError(gmag,vminiB1V) sigmuB1Vmax = np.sqrt(0.5*sigmualphaB1V**2+0.5*sigmudeltaB1V**2) sigmualphaG2V, sigmudeltaG2V = properMotionErrorSkyAvg(gmag,vminiG2V) sigmuG2V = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaG2V, sigmudeltaG2V = properMotionMinError(gmag,vminiG2V) sigmuG2Vmin = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaG2V, sigmudeltaG2V = properMotionMaxError(gmag,vminiG2V) sigmuG2Vmax = np.sqrt(0.5*sigmualphaG2V**2+0.5*sigmudeltaG2V**2) sigmualphaM6V, sigmudeltaM6V = properMotionErrorSkyAvg(gmag,vminiM6V) sigmuM6V = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) sigmualphaM6V, sigmudeltaM6V = properMotionMinError(gmag,vminiM6V) sigmuM6Vmin = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) sigmualphaM6V, sigmudeltaM6V = properMotionMaxError(gmag,vminiM6V) sigmuM6Vmax = np.sqrt(0.5*sigmualphaM6V**2+0.5*sigmudeltaM6V**2) fig=plt.figure(figsize=(10,6.5)) if (args['gmagAbscissa']): plt.semilogy(gmag, sigmuB1V, 'b', label='B1V') plt.semilogy(gmag, sigmuG2V, 'g', label='G2V') plt.semilogy(gmag, sigmuM6V, 'r', label='M6V') plt.xlim((5,20)) plt.ylim((1,500)) plt.legend(loc=4) else: ax=fig.add_subplot(111) plt.semilogy(vmagB1V, sigmuB1V, 'b', label='B1V') #plt.semilogy(vmagG2V, sigmuG2V, 'g', label='G2V') plt.semilogy(vmagM6V, sigmuM6V, 'r', label='M6V') plt.fill_between(vmagB1V, sigmuB1Vmin, sigmuB1Vmax, color='b', alpha=0.3) plt.fill_between(vmagM6V, sigmuM6Vmin, sigmuM6Vmax, color='r', alpha=0.3) plt.xlim((5,22.5)) plt.ylim((1,500)) plt.text(17.5,100,'B1V',color='b') plt.text(18,10,'M6V',color='r') plt.text(7,11,'calibration noise floor', size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) plt.text(14.75,50,'photon noise', rotation=45, size=12, bbox=dict(boxstyle="round,pad=0.3", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0), )) ax.annotate('non-uniformity\nover the sky', xy=(21.5, 80), xycoords='data', xytext=(21.5,30), textcoords='data', ha='center', size='12', bbox=dict(boxstyle="round,pad=0.3",ec=(0,0,0),fc=(1,1,1)), arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='top', ) ax.annotate('', xy=(21.5, 170), xycoords='data', xytext=(21.5,380), textcoords='data', ha='center', size='12', arrowprops=dict(facecolor='black', shrink=0.15, width=1, headwidth=6), horizontalalignment='right', verticalalignment='bottom', ) plt.xticks(np.arange(6,24,2)) ax = plt.gca().yaxis ax.set_major_formatter(matplotlib.ticker.ScalarFormatter()) plt.ticklabel_format(axis='y',style='plain') plt.grid(which='both') plt.xlabel('$V$ [mag]') plt.ylabel('End-of-mission $\\sigma_\\mu$ [$\mu$as/yr]') basename = 'ProperMotionErrors' if (args['pdfOutput']): plt.savefig(basename+'.pdf') elif (args['pngOutput']): plt.savefig(basename+'.png') else: plt.show()
[ "Make", "the", "plot", "with", "proper", "motion", "performance", "predictions", ".", "The", "predictions", "are", "for", "the", "TOTAL", "proper", "motion", "under", "the", "assumption", "of", "equal", "components", "mu_alpha", "*", "and", "mu_delta", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/plotProperMotionErrorsSkyAvg.py#L31-L125
[ "def", "makePlot", "(", "args", ")", ":", "gmag", "=", "np", ".", "linspace", "(", "5.7", ",", "20.0", ",", "101", ")", "vminiB1V", "=", "vminiFromSpt", "(", "'B1V'", ")", "vminiG2V", "=", "vminiFromSpt", "(", "'G2V'", ")", "vminiM6V", "=", "vminiFromSpt", "(", "'M6V'", ")", "vmagB1V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiB1V", ")", "vmagG2V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiG2V", ")", "vmagM6V", "=", "gmag", "-", "gminvFromVmini", "(", "vminiM6V", ")", "sigmualphaB1V", ",", "sigmudeltaB1V", "=", "properMotionErrorSkyAvg", "(", "gmag", ",", "vminiB1V", ")", "sigmuB1V", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaB1V", "**", "2", "+", "0.5", "*", "sigmudeltaB1V", "**", "2", ")", "sigmualphaB1V", ",", "sigmudeltaB1V", "=", "properMotionMinError", "(", "gmag", ",", "vminiB1V", ")", "sigmuB1Vmin", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaB1V", "**", "2", "+", "0.5", "*", "sigmudeltaB1V", "**", "2", ")", "sigmualphaB1V", ",", "sigmudeltaB1V", "=", "properMotionMaxError", "(", "gmag", ",", "vminiB1V", ")", "sigmuB1Vmax", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaB1V", "**", "2", "+", "0.5", "*", "sigmudeltaB1V", "**", "2", ")", "sigmualphaG2V", ",", "sigmudeltaG2V", "=", "properMotionErrorSkyAvg", "(", "gmag", ",", "vminiG2V", ")", "sigmuG2V", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaG2V", "**", "2", "+", "0.5", "*", "sigmudeltaG2V", "**", "2", ")", "sigmualphaG2V", ",", "sigmudeltaG2V", "=", "properMotionMinError", "(", "gmag", ",", "vminiG2V", ")", "sigmuG2Vmin", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaG2V", "**", "2", "+", "0.5", "*", "sigmudeltaG2V", "**", "2", ")", "sigmualphaG2V", ",", "sigmudeltaG2V", "=", "properMotionMaxError", "(", "gmag", ",", "vminiG2V", ")", "sigmuG2Vmax", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaG2V", "**", "2", "+", "0.5", "*", "sigmudeltaG2V", "**", "2", ")", "sigmualphaM6V", ",", "sigmudeltaM6V", "=", "properMotionErrorSkyAvg", "(", "gmag", ",", "vminiM6V", ")", "sigmuM6V", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaM6V", "**", "2", "+", "0.5", "*", "sigmudeltaM6V", "**", "2", ")", "sigmualphaM6V", ",", "sigmudeltaM6V", "=", "properMotionMinError", "(", "gmag", ",", "vminiM6V", ")", "sigmuM6Vmin", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaM6V", "**", "2", "+", "0.5", "*", "sigmudeltaM6V", "**", "2", ")", "sigmualphaM6V", ",", "sigmudeltaM6V", "=", "properMotionMaxError", "(", "gmag", ",", "vminiM6V", ")", "sigmuM6Vmax", "=", "np", ".", "sqrt", "(", "0.5", "*", "sigmualphaM6V", "**", "2", "+", "0.5", "*", "sigmudeltaM6V", "**", "2", ")", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6.5", ")", ")", "if", "(", "args", "[", "'gmagAbscissa'", "]", ")", ":", "plt", ".", "semilogy", "(", "gmag", ",", "sigmuB1V", ",", "'b'", ",", "label", "=", "'B1V'", ")", "plt", ".", "semilogy", "(", "gmag", ",", "sigmuG2V", ",", "'g'", ",", "label", "=", "'G2V'", ")", "plt", ".", "semilogy", "(", "gmag", ",", "sigmuM6V", ",", "'r'", ",", "label", "=", "'M6V'", ")", "plt", ".", "xlim", "(", "(", "5", ",", "20", ")", ")", "plt", ".", "ylim", "(", "(", "1", ",", "500", ")", ")", "plt", ".", "legend", "(", "loc", "=", "4", ")", "else", ":", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "plt", ".", "semilogy", "(", "vmagB1V", ",", "sigmuB1V", ",", "'b'", ",", "label", "=", "'B1V'", ")", "#plt.semilogy(vmagG2V, sigmuG2V, 'g', label='G2V')", "plt", ".", "semilogy", "(", "vmagM6V", ",", "sigmuM6V", ",", "'r'", ",", "label", "=", "'M6V'", ")", "plt", 
".", "fill_between", "(", "vmagB1V", ",", "sigmuB1Vmin", ",", "sigmuB1Vmax", ",", "color", "=", "'b'", ",", "alpha", "=", "0.3", ")", "plt", ".", "fill_between", "(", "vmagM6V", ",", "sigmuM6Vmin", ",", "sigmuM6Vmax", ",", "color", "=", "'r'", ",", "alpha", "=", "0.3", ")", "plt", ".", "xlim", "(", "(", "5", ",", "22.5", ")", ")", "plt", ".", "ylim", "(", "(", "1", ",", "500", ")", ")", "plt", ".", "text", "(", "17.5", ",", "100", ",", "'B1V'", ",", "color", "=", "'b'", ")", "plt", ".", "text", "(", "18", ",", "10", ",", "'M6V'", ",", "color", "=", "'r'", ")", "plt", ".", "text", "(", "7", ",", "11", ",", "'calibration noise floor'", ",", "size", "=", "12", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0.0", ",", "0.0", ",", "0.0", ")", ",", "fc", "=", "(", "1.0", ",", "1.0", ",", "1.0", ")", ",", ")", ")", "plt", ".", "text", "(", "14.75", ",", "50", ",", "'photon noise'", ",", "rotation", "=", "45", ",", "size", "=", "12", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0.0", ",", "0.0", ",", "0.0", ")", ",", "fc", "=", "(", "1.0", ",", "1.0", ",", "1.0", ")", ",", ")", ")", "ax", ".", "annotate", "(", "'non-uniformity\\nover the sky'", ",", "xy", "=", "(", "21.5", ",", "80", ")", ",", "xycoords", "=", "'data'", ",", "xytext", "=", "(", "21.5", ",", "30", ")", ",", "textcoords", "=", "'data'", ",", "ha", "=", "'center'", ",", "size", "=", "'12'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.3\"", ",", "ec", "=", "(", "0", ",", "0", ",", "0", ")", ",", "fc", "=", "(", "1", ",", "1", ",", "1", ")", ")", ",", "arrowprops", "=", "dict", "(", "facecolor", "=", "'black'", ",", "shrink", "=", "0.15", ",", "width", "=", "1", ",", "headwidth", "=", "6", ")", ",", "horizontalalignment", "=", "'right'", ",", "verticalalignment", "=", "'top'", ",", ")", "ax", ".", "annotate", "(", "''", ",", "xy", "=", "(", "21.5", ",", "170", ")", ",", "xycoords", "=", "'data'", ",", "xytext", "=", "(", "21.5", ",", "380", ")", ",", "textcoords", "=", "'data'", ",", "ha", "=", "'center'", ",", "size", "=", "'12'", ",", "arrowprops", "=", "dict", "(", "facecolor", "=", "'black'", ",", "shrink", "=", "0.15", ",", "width", "=", "1", ",", "headwidth", "=", "6", ")", ",", "horizontalalignment", "=", "'right'", ",", "verticalalignment", "=", "'bottom'", ",", ")", "plt", ".", "xticks", "(", "np", ".", "arange", "(", "6", ",", "24", ",", "2", ")", ")", "ax", "=", "plt", ".", "gca", "(", ")", ".", "yaxis", "ax", ".", "set_major_formatter", "(", "matplotlib", ".", "ticker", ".", "ScalarFormatter", "(", ")", ")", "plt", ".", "ticklabel_format", "(", "axis", "=", "'y'", ",", "style", "=", "'plain'", ")", "plt", ".", "grid", "(", "which", "=", "'both'", ")", "plt", ".", "xlabel", "(", "'$V$ [mag]'", ")", "plt", ".", "ylabel", "(", "'End-of-mission $\\\\sigma_\\\\mu$ [$\\mu$as/yr]'", ")", "basename", "=", "'ProperMotionErrors'", "if", "(", "args", "[", "'pdfOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "basename", "+", "'.pdf'", ")", "elif", "(", "args", "[", "'pngOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "basename", "+", "'.png'", ")", "else", ":", "plt", ".", "show", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
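The sigmu* curves above reduce the two proper motion components to a single total-proper-motion error via an rms, which under the equal-components assumption stated in the docstring equals either component; written out:

import numpy as np

sigmualpha, sigmudelta = 3.0, 3.0                        # illustrative values, muas/yr
sigmu = np.sqrt(0.5*sigmualpha**2 + 0.5*sigmudelta**2)   # rms of the two components
print(sigmu)                                             # 3.0, equal to each component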
test
parseCommandLineArguments
Set up command line parsing.
examples/plotProperMotionErrorsSkyAvg.py
def parseCommandLineArguments(): """ Set up command line parsing. """ parser = argparse.ArgumentParser(description="Plot predicted Gaia sky averaged proper motion errors as a function of V") parser.add_argument("-p", action="store_true", dest="pdfOutput", help="Make PDF plot") parser.add_argument("-b", action="store_true", dest="pngOutput", help="Make PNG plot") parser.add_argument("-g", action="store_true", dest="gmagAbscissa", help="Plot performance vs G instead of V") args=vars(parser.parse_args()) return args
def parseCommandLineArguments(): """ Set up command line parsing. """ parser = argparse.ArgumentParser(description="Plot predicted Gaia sky averaged proper motion errors as a function of V") parser.add_argument("-p", action="store_true", dest="pdfOutput", help="Make PDF plot") parser.add_argument("-b", action="store_true", dest="pngOutput", help="Make PNG plot") parser.add_argument("-g", action="store_true", dest="gmagAbscissa", help="Plot performance vs G instead of V") args=vars(parser.parse_args()) return args
[ "Set", "up", "command", "line", "parsing", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/plotProperMotionErrorsSkyAvg.py#L127-L136
[ "def", "parseCommandLineArguments", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Plot predicted Gaia sky averaged proper motion errors as a function of V\"", ")", "parser", ".", "add_argument", "(", "\"-p\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"pdfOutput\"", ",", "help", "=", "\"Make PDF plot\"", ")", "parser", ".", "add_argument", "(", "\"-b\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"pngOutput\"", ",", "help", "=", "\"Make PNG plot\"", ")", "parser", ".", "add_argument", "(", "\"-g\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"gmagAbscissa\"", ",", "help", "=", "\"Plot performance vs G instead of V\"", ")", "args", "=", "vars", "(", "parser", ".", "parse_args", "(", ")", ")", "return", "args" ]
ae972b0622a15f713ffae471f925eac25ccdae47
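A minimal usage sketch of the parser above, re-declared locally and fed a canned argument list so it runs without a real command line; the simulated flags are hypothetical.

import argparse

# Rebuild the parser exactly as in the record and feed it a test
# argument list instead of sys.argv.
parser = argparse.ArgumentParser(description="Plot predicted Gaia sky averaged proper motion errors as a function of V")
parser.add_argument("-p", action="store_true", dest="pdfOutput", help="Make PDF plot")
parser.add_argument("-b", action="store_true", dest="pngOutput", help="Make PNG plot")
parser.add_argument("-g", action="store_true", dest="gmagAbscissa", help="Plot performance vs G instead of V")

args = vars(parser.parse_args(["-p"]))  # simulates "plotProperMotionErrorsSkyAvg.py -p"
print(args)  # {'pdfOutput': True, 'pngOutput': False, 'gmagAbscissa': False}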
test
enum
Create a new enumeration type. Code is copyright (c) Gabriel Genellina, 2010, MIT License. Parameters ---------- typename - Name of the enumerated type field_names - Names of the fields of the enumerated type
pygaia/utils.py
def enum(typename, field_names): """ Create a new enumeration type. Code is copyright (c) Gabriel Genellina, 2010, MIT License. Parameters ---------- typename - Name of the enumerated type field_names - Names of the fields of the enumerated type """ if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() d = dict((reversed(nv) for nv in enumerate(field_names)), __slots__ = ()) return type(typename, (object,), d)()
def enum(typename, field_names): """ Create a new enumeration type. Code is copyright (c) Gabriel Genellina, 2010, MIT License. Parameters ---------- typename - Name of the enumerated type field_names - Names of the fields of the enumerated type """ if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() d = dict((reversed(nv) for nv in enumerate(field_names)), __slots__ = ()) return type(typename, (object,), d)()
[ "Create", "a", "new", "enumeration", "type", ".", "Code", "is", "copyright", "(", "c", ")", "Gabriel", "Genellina", "2010", "MIT", "License", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/utils.py#L7-L23
[ "def", "enum", "(", "typename", ",", "field_names", ")", ":", "if", "isinstance", "(", "field_names", ",", "str", ")", ":", "field_names", "=", "field_names", ".", "replace", "(", "','", ",", "' '", ")", ".", "split", "(", ")", "d", "=", "dict", "(", "(", "reversed", "(", "nv", ")", "for", "nv", "in", "enumerate", "(", "field_names", ")", ")", ",", "__slots__", "=", "(", ")", ")", "return", "type", "(", "typename", ",", "(", "object", ",", ")", ",", "d", ")", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
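A usage sketch for the enum factory above; the type name and field names are hypothetical. Each field maps to its index in the list, and the empty __slots__ keeps instances attribute-free.

from pygaia.utils import enum

# Field names may be given as a list or as a comma/space separated string.
Colour = enum('Colour', 'RED, GREEN, BLUE')
print(Colour.RED, Colour.GREEN, Colour.BLUE)  # 0 1 2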
test
construct_covariance_matrix
Take the astrometric parameter standard uncertainties and the uncertainty correlations as quoted in the Gaia catalogue and construct the covariance matrix. Parameters ---------- cvec : array_like Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error, parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr, ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr, parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2). parallax : array_like (n elements) Source parallax (mas). radial_velocity : array_like (n elements) Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not known it can be set to zero. radial_velocity_error : array_like (n elements) Source radial velocity uncertainty (km/s). If the radial velocity is not known this can be set to the radial velocity dispersion for the population the source was drawn from. Returns ------- Covariance matrix as a 6x6 array (or an (n,6,6) array for n sources).
pygaia/utils.py
def construct_covariance_matrix(cvec, parallax, radial_velocity, radial_velocity_error): """ Take the astrometric parameter standard uncertainties and the uncertainty correlations as quoted in the Gaia catalogue and construct the covariance matrix. Parameters ---------- cvec : array_like Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error, parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr, ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr, parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2). parallax : array_like (n elements) Source parallax (mas). radial_velocity : array_like (n elements) Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not known it can be set to zero. radial_velocity_error : array_like (n elements) Source radial velocity uncertainty (km/s). If the radial velocity is not known this can be set to the radial velocity dispersion for the population the source was drawn from. Returns ------- Covariance matrix as a 6x6 array (or an (n,6,6) array for n sources). """ if np.ndim(cvec)==1: cmat = np.zeros((1,6,6)) nsources = 1 cv = np.atleast_2d(cvec) else: nsources = cvec.shape[0] cmat = np.zeros((nsources,6,6)) cv = cvec for k in range(nsources): cmat[k,0:5,0:5] = cv[k,0:5]**2 iu = np.triu_indices(5,k=1) for k in range(10): i = iu[0][k] j = iu[1][k] cmat[:,i,j] = cv[:,i]*cv[:,j]*cv[:,k+5] cmat[:,j,i] = cmat[:,i,j] for k in range(nsources): cmat[k,0:5,5] = cmat[k,0:5,2]*np.atleast_1d(radial_velocity)[k]/auKmYearPerSec cmat[:,5,0:5] = cmat[:,0:5,5] cmat[:,5,5] = cmat[:,2,2]*(radial_velocity**2 + radial_velocity_error**2)/auKmYearPerSec**2 + \ (parallax*radial_velocity_error/auKmYearPerSec)**2 return np.squeeze(cmat)
def construct_covariance_matrix(cvec, parallax, radial_velocity, radial_velocity_error): """ Take the astrometric parameter standard uncertainties and the uncertainty correlations as quoted in the Gaia catalogue and construct the covariance matrix. Parameters ---------- cvec : array_like Array of shape (15,) (1 source) or (n,15) (n sources) for the astrometric parameter standard uncertainties and their correlations, as listed in the Gaia catalogue [ra_error, dec_error, parallax_error, pmra_error, pmdec_error, ra_dec_corr, ra_parallax_corr, ra_pmra_corr, ra_pmdec_corr, dec_parallax_corr, dec_pmra_corr, dec_pmdec_corr, parallax_pmra_corr, parallax_pmdec_corr, pmra_pmdec_corr]. Units are (mas^2, mas^2/yr, mas^2/yr^2). parallax : array_like (n elements) Source parallax (mas). radial_velocity : array_like (n elements) Source radial velocity (km/s, does not have to be from Gaia RVS!). If the radial velocity is not known it can be set to zero. radial_velocity_error : array_like (n elements) Source radial velocity uncertainty (km/s). If the radial velocity is not known this can be set to the radial velocity dispersion for the population the source was drawn from. Returns ------- Covariance matrix as a 6x6 array (or an (n,6,6) array for n sources). """ if np.ndim(cvec)==1: cmat = np.zeros((1,6,6)) nsources = 1 cv = np.atleast_2d(cvec) else: nsources = cvec.shape[0] cmat = np.zeros((nsources,6,6)) cv = cvec for k in range(nsources): cmat[k,0:5,0:5] = cv[k,0:5]**2 iu = np.triu_indices(5,k=1) for k in range(10): i = iu[0][k] j = iu[1][k] cmat[:,i,j] = cv[:,i]*cv[:,j]*cv[:,k+5] cmat[:,j,i] = cmat[:,i,j] for k in range(nsources): cmat[k,0:5,5] = cmat[k,0:5,2]*np.atleast_1d(radial_velocity)[k]/auKmYearPerSec cmat[:,5,0:5] = cmat[:,0:5,5] cmat[:,5,5] = cmat[:,2,2]*(radial_velocity**2 + radial_velocity_error**2)/auKmYearPerSec**2 + \ (parallax*radial_velocity_error/auKmYearPerSec)**2 return np.squeeze(cmat)
[ "Take", "the", "astrometric", "parameter", "standard", "uncertainties", "and", "the", "uncertainty", "correlations", "as", "quoted", "in", "the", "Gaia", "catalogue", "and", "construct", "the", "covariance", "matrix", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/utils.py#L57-L113
[ "def", "construct_covariance_matrix", "(", "cvec", ",", "parallax", ",", "radial_velocity", ",", "radial_velocity_error", ")", ":", "if", "np", ".", "ndim", "(", "cvec", ")", "==", "1", ":", "cmat", "=", "np", ".", "zeros", "(", "(", "1", ",", "6", ",", "6", ")", ")", "nsources", "=", "1", "cv", "=", "np", ".", "atleast_2d", "(", "cvec", ")", "else", ":", "nsources", "=", "cvec", ".", "shape", "[", "0", "]", "cmat", "=", "np", ".", "zeros", "(", "(", "nsources", ",", "6", ",", "6", ")", ")", "cv", "=", "cvec", "for", "k", "in", "range", "(", "nsources", ")", ":", "cmat", "[", "k", ",", "0", ":", "5", ",", "0", ":", "5", "]", "=", "cv", "[", "k", ",", "0", ":", "5", "]", "**", "2", "iu", "=", "np", ".", "triu_indices", "(", "5", ",", "k", "=", "1", ")", "for", "k", "in", "range", "(", "10", ")", ":", "i", "=", "iu", "[", "0", "]", "[", "k", "]", "j", "=", "iu", "[", "1", "]", "[", "k", "]", "cmat", "[", ":", ",", "i", ",", "j", "]", "=", "cv", "[", ":", ",", "i", "]", "*", "cv", "[", ":", ",", "j", "]", "*", "cv", "[", ":", ",", "k", "+", "5", "]", "cmat", "[", ":", ",", "j", ",", "i", "]", "=", "cmat", "[", ":", ",", "i", ",", "j", "]", "for", "k", "in", "range", "(", "nsources", ")", ":", "cmat", "[", "k", ",", "0", ":", "5", ",", "5", "]", "=", "cmat", "[", "k", ",", "0", ":", "5", ",", "2", "]", "*", "np", ".", "atleast_1d", "(", "radial_velocity", ")", "[", "k", "]", "/", "auKmYearPerSec", "cmat", "[", ":", ",", "5", ",", "0", ":", "5", "]", "=", "cmat", "[", ":", ",", "0", ":", "5", ",", "5", "]", "cmat", "[", ":", ",", "5", ",", "5", "]", "=", "cmat", "[", ":", ",", "2", ",", "2", "]", "*", "(", "radial_velocity", "**", "2", "+", "radial_velocity_error", "**", "2", ")", "/", "auKmYearPerSec", "**", "2", "+", "(", "parallax", "*", "radial_velocity_error", "/", "auKmYearPerSec", ")", "**", "2", "return", "np", ".", "squeeze", "(", "cmat", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
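A single-source usage sketch for the function above; all numbers are invented for illustration. cvec carries the 5 standard uncertainties followed by the 10 correlation coefficients in the catalogue order listed in the docstring.

import numpy as np
from pygaia.utils import construct_covariance_matrix

cvec = np.array([0.3, 0.3, 0.4, 0.5, 0.5,   # sigma_ra*, sigma_dec, sigma_plx (mas), sigma_pmra*, sigma_pmdec (mas/yr)
                 0.1, 0.0, 0.0, 0.0, 0.0,   # correlation coefficients in catalogue order
                 0.0, 0.0, 0.0, 0.0, 0.2])
cov = construct_covariance_matrix(cvec, parallax=2.0, radial_velocity=15.0,
                                  radial_velocity_error=1.5)
print(cov.shape)                   # (6, 6) after np.squeeze for a single source
print(np.sqrt(np.diag(cov))[:5])   # recovers the five input standard uncertainties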
test
makePlot
Make a plot of an Mv vs (V-I) colour magnitude diagram containing lines of constant distance for stars at the survey limiting magnitude gmag (e.g., G=20). This will give an idea of the reach of Gaia. Parameters ---------- gmag - Value of the survey limiting magnitude (G, or G_RVS if rvs is True). Keywords -------- pdf - If true save the plot as a PDF file. png - If true save the plot as a PNG file. rvs - If true compute the limits for G_RVS instead of G.
examples/plotDistanceLimitsInMvVmini.py
def makePlot(gmag, pdf=False, png=False, rvs=False): """ Make a plot of an Mv vs (V-I) colour magnitude diagram containing lines of constant distance for stars at the survey limiting magnitude gmag (e.g., G=20). This will give an idea of the reach of Gaia. Parameters ---------- gmag - Value of the survey limiting magnitude (G, or G_RVS if rvs is True). Keywords -------- pdf - If true save the plot as a PDF file. png - If true save the plot as a PNG file. rvs - If true compute the limits for G_RVS instead of G. """ vmini = np.linspace(-0.5,4.0,100) if (rvs): gminv = -vminGrvsFromVmini(vmini) else: gminv = gminvFromVmini(vmini) mvlimit100pc = gmag-5.0*np.log10(100.0)+5.0-gminv mvlimit1kpc = gmag-5.0*np.log10(1000.0)+5.0-gminv mvlimit10kpc = gmag-5.0*np.log10(10000.0)+5.0-gminv fig=plt.figure(figsize=(8,8)) plt.plot(vmini,mvlimit100pc,'b') plt.text(vmini[50]-0.4,mvlimit100pc[50],"$d=100$ pc", horizontalalignment='right', va='top') plt.plot(vmini,mvlimit1kpc,'r') plt.text(vmini[50]-0.4,mvlimit1kpc[50],"$d=1000$ pc", horizontalalignment='right', va='top') plt.plot(vmini,mvlimit10kpc,'g') plt.text(vmini[50]-0.4,mvlimit10kpc[50],"$d=10000$ pc", horizontalalignment='right', va='top') ax=plt.gca() ax.set_ylim(ax.get_ylim()[::-1]) plt.xlabel("$(V-I)$") plt.ylabel("$M_V$") if (rvs): plt.title("Distance limits for $G_\\mathrm{RVS}"+"={0}$".format(gmag)) else: plt.title("Distance limits for $G={0}$".format(gmag)) if (pdf): plt.savefig('GaiaSurveyLimits.pdf') elif (png): plt.savefig('GaiaSurveyLimits.png') else: plt.show()
def makePlot(gmag, pdf=False, png=False, rvs=False): """ Make a plot of an Mv vs (V-I) colour magnitude diagram containing lines of constant distance for stars at the survey limiting magnitude gmag (e.g., G=20). This will give an idea of the reach of Gaia. Parameters ---------- gmag - Value of the survey limiting magnitude (G, or G_RVS if rvs is True). Keywords -------- pdf - If true save the plot as a PDF file. png - If true save the plot as a PNG file. rvs - If true compute the limits for G_RVS instead of G. """ vmini = np.linspace(-0.5,4.0,100) if (rvs): gminv = -vminGrvsFromVmini(vmini) else: gminv = gminvFromVmini(vmini) mvlimit100pc = gmag-5.0*np.log10(100.0)+5.0-gminv mvlimit1kpc = gmag-5.0*np.log10(1000.0)+5.0-gminv mvlimit10kpc = gmag-5.0*np.log10(10000.0)+5.0-gminv fig=plt.figure(figsize=(8,8)) plt.plot(vmini,mvlimit100pc,'b') plt.text(vmini[50]-0.4,mvlimit100pc[50],"$d=100$ pc", horizontalalignment='right', va='top') plt.plot(vmini,mvlimit1kpc,'r') plt.text(vmini[50]-0.4,mvlimit1kpc[50],"$d=1000$ pc", horizontalalignment='right', va='top') plt.plot(vmini,mvlimit10kpc,'g') plt.text(vmini[50]-0.4,mvlimit10kpc[50],"$d=10000$ pc", horizontalalignment='right', va='top') ax=plt.gca() ax.set_ylim(ax.get_ylim()[::-1]) plt.xlabel("$(V-I)$") plt.ylabel("$M_V$") if (rvs): plt.title("Distance limits for $G_\\mathrm{RVS}"+"={0}$".format(gmag)) else: plt.title("Distance limits for $G={0}$".format(gmag)) if (pdf): plt.savefig('GaiaSurveyLimits.pdf') elif (png): plt.savefig('GaiaSurveyLimits.png') else: plt.show()
[ "Make", "a", "plot", "of", "a", "Mv", "vs", "(", "V", "-", "I", ")", "colour", "magnitude", "diagram", "containing", "lines", "of", "constant", "distance", "for", "stars", "at", "G", "=", "20", ".", "This", "will", "give", "an", "idea", "of", "the", "reach", "of", "Gaia", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/plotDistanceLimitsInMvVmini.py#L27-L67
[ "def", "makePlot", "(", "gmag", ",", "pdf", "=", "False", ",", "png", "=", "False", ",", "rvs", "=", "False", ")", ":", "vmini", "=", "np", ".", "linspace", "(", "-", "0.5", ",", "4.0", ",", "100", ")", "if", "(", "rvs", ")", ":", "gminv", "=", "-", "vminGrvsFromVmini", "(", "vmini", ")", "else", ":", "gminv", "=", "gminvFromVmini", "(", "vmini", ")", "mvlimit100pc", "=", "gmag", "-", "5.0", "*", "np", ".", "log10", "(", "100.0", ")", "+", "5.0", "-", "gminv", "mvlimit1kpc", "=", "gmag", "-", "5.0", "*", "np", ".", "log10", "(", "1000.0", ")", "+", "5.0", "-", "gminv", "mvlimit10kpc", "=", "gmag", "-", "5.0", "*", "np", ".", "log10", "(", "10000.0", ")", "+", "5.0", "-", "gminv", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "8", ",", "8", ")", ")", "plt", ".", "plot", "(", "vmini", ",", "mvlimit100pc", ",", "'b'", ")", "plt", ".", "text", "(", "vmini", "[", "50", "]", "-", "0.4", ",", "mvlimit100pc", "[", "50", "]", ",", "\"$d=100$ pc\"", ",", "horizontalalignment", "=", "'right'", ",", "va", "=", "'top'", ")", "plt", ".", "plot", "(", "vmini", ",", "mvlimit1kpc", ",", "'r'", ")", "plt", ".", "text", "(", "vmini", "[", "50", "]", "-", "0.4", ",", "mvlimit1kpc", "[", "50", "]", ",", "\"$d=1000$ pc\"", ",", "horizontalalignment", "=", "'right'", ",", "va", "=", "'top'", ")", "plt", ".", "plot", "(", "vmini", ",", "mvlimit10kpc", ",", "'g'", ")", "plt", ".", "text", "(", "vmini", "[", "50", "]", "-", "0.4", ",", "mvlimit10kpc", "[", "50", "]", ",", "\"$d=10000$ pc\"", ",", "horizontalalignment", "=", "'right'", ",", "va", "=", "'top'", ")", "ax", "=", "plt", ".", "gca", "(", ")", "ax", ".", "set_ylim", "(", "ax", ".", "get_ylim", "(", ")", "[", ":", ":", "-", "1", "]", ")", "plt", ".", "xlabel", "(", "\"$(V-I)$\"", ")", "plt", ".", "ylabel", "(", "\"$M_V$\"", ")", "if", "(", "rvs", ")", ":", "plt", ".", "title", "(", "\"Distance limits for $G_\\\\mathrm{RVS}\"", "+", "\"={0}$\"", ".", "format", "(", "gmag", ")", ")", "else", ":", "plt", ".", "title", "(", "\"Distance limits for $G={0}$\"", ".", "format", "(", "gmag", ")", ")", "if", "(", "args", "[", "'pdfOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "'GaiaSurveyLimits.pdf'", ")", "elif", "(", "args", "[", "'pngOutput'", "]", ")", ":", "plt", ".", "savefig", "(", "'GaiaSurveyLimits.png'", ")", "else", ":", "plt", ".", "show", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
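A standalone sketch of the relation the function plots, built only on the distance modulus; the G-V value is a made-up placeholder for what gminvFromVmini would return.

import numpy as np

gmag = 20.0    # survey limiting magnitude
gminv = -0.2   # hypothetical G-V colour term
# M_V = G - 5*log10(d) + 5 - (G-V): the faintest absolute magnitude
# still visible at distance d (in pc) for a source at the survey limit.
for d in (100.0, 1000.0, 10000.0):
    mvlimit = gmag - 5.0*np.log10(d) + 5.0 - gminv
    print(d, mvlimit)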
test
vradErrorSkyAvg
Calculate radial velocity error from V and the spectral type. The value of the error is an average over the sky. Parameters ---------- vmag - Value of V-band magnitude. spt - String representing the spectral type of the star. Returns ------- The radial velocity error in km/s.
pygaia/errors/spectroscopic.py
def vradErrorSkyAvg(vmag, spt): """ Calculate radial velocity error from V and the spectral type. The value of the error is an average over the sky. Parameters ---------- vmag - Value of V-band magnitude. spt - String representing the spectral type of the star. Returns ------- The radial velocity error in km/s. """ return _vradCalibrationFloor + _vradErrorBCoeff[spt]*exp(_vradErrorACoeff[spt]*(vmag-_vradMagnitudeZeroPoint))
def vradErrorSkyAvg(vmag, spt): """ Calculate radial velocity error from V and the spectral type. The value of the error is an average over the sky. Parameters ---------- vmag - Value of V-band magnitude. spt - String representing the spectral type of the star. Returns ------- The radial velocity error in km/s. """ return _vradCalibrationFloor + _vradErrorBCoeff[spt]*exp(_vradErrorACoeff[spt]*(vmag-_vradMagnitudeZeroPoint))
[ "Calculate", "radial", "velocity", "error", "from", "V", "and", "the", "spectral", "type", ".", "The", "value", "of", "the", "error", "is", "an", "average", "over", "the", "sky", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/spectroscopic.py#L12-L28
[ "def", "vradErrorSkyAvg", "(", "vmag", ",", "spt", ")", ":", "return", "_vradCalibrationFloor", "+", "_vradErrorBCoeff", "[", "spt", "]", "*", "exp", "(", "_vradErrorACoeff", "[", "spt", "]", "*", "(", "vmag", "-", "_vradMagnitudeZeroPoint", ")", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
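A standalone sketch of the error model the function evaluates, sigma_vrad = floor + b*exp(a*(V - V0)); the per-spectral-type coefficient dictionaries and the zero point are module-level constants not shown in the record, so the numbers below are invented.

from math import exp

floor, a, b, v0 = 0.5, 0.9, 1.0, 12.7   # hypothetical calibration values
for vmag in (8.0, 12.0, 16.0):
    # The error grows exponentially with magnitude above the noise floor.
    print(vmag, floor + b*exp(a*(vmag - v0)))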
test
_orderGridlinePoints
This code takes care of ordering the points (x,y), calculated for a sky map parallel or meridian, such that the drawing code can start at one end of the curve and end at the other (so no artifacts due to connecting the disjoint ends occur). Parameters ---------- x - Set of x coordinates y - Set of y coordinates Returns ------- x, y: Ordered set of (x,y) points
pygaia/plot/sky.py
def _orderGridlinePoints(x, y): """ This code takes care of ordering the points (x,y), calculated for a sky map parallel or meridian, such that the drawing code can start at one end of the curve and end at the other (so no artifacts due to connecting the disjoint ends occur). Parameters ---------- x - Set of x coordinates y - Set of y coordinates Returns ------- x, y: Ordered set of (x,y) points """ xroll=roll(x,1) yroll=roll(y,1) distance=(xroll-x)**2+(yroll-y)**2 indexmax=argmax(distance) return roll(x,-indexmax), roll(y,-indexmax)
def _orderGridlinePoints(x, y): """ This code takes care of ordering the points (x,y), calculated for a sky map parallel or meridian, such that the drawing code can start at one end of the curve and end at the other (so no artifacts due to connecting the disjoint ends occur). Parameters ---------- x - Set of x coordinates y - Set of y coordinates Returns ------- x, y: Ordered set of (x,y) points """ xroll=roll(x,1) yroll=roll(y,1) distance=(xroll-x)**2+(yroll-y)**2 indexmax=argmax(distance) return roll(x,-indexmax), roll(y,-indexmax)
[ "This", "code", "takes", "care", "of", "ordering", "the", "points", "(", "x", "y", ")", "calculated", "for", "a", "sky", "map", "parallel", "or", "merdian", "such", "that", "the", "drawing", "code", "can", "start", "at", "one", "end", "of", "the", "curve", "and", "end", "at", "the", "other", "(", "so", "no", "artifacts", "due", "to", "connecting", "the", "disjoint", "ends", "occur", ")", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/plot/sky.py#L10-L31
[ "def", "_orderGridlinePoints", "(", "x", ",", "y", ")", ":", "xroll", "=", "roll", "(", "x", ",", "1", ")", "yroll", "=", "roll", "(", "y", ",", "1", ")", "distance", "=", "(", "xroll", "-", "x", ")", "**", "2", "+", "(", "yroll", "-", "y", ")", "**", "2", "indexmax", "=", "argmax", "(", "distance", ")", "return", "roll", "(", "x", ",", "-", "indexmax", ")", ",", "roll", "(", "y", ",", "-", "indexmax", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
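A standalone sketch of the reordering trick above: measure each point against its cyclic predecessor with np.roll, locate the largest jump, and roll the arrays so the curve starts just after it. The points are invented, a short arc stored in scrambled cyclic order.

import numpy as np

x = np.array([0.0, 1.0, 2.0, -2.0, -1.0])
y = np.array([0.0, 0.1, 0.4, 0.4, 0.1])
# Squared distance between each point and its cyclic predecessor.
dist = (np.roll(x, 1) - x)**2 + (np.roll(y, 1) - y)**2
start = np.argmax(dist)   # index right after the biggest gap
print(np.roll(x, -start), np.roll(y, -start))   # [-2. -1.  0.  1.  2.] and the matching y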
test
plotCoordinateTransformationOnSky
Produce a sky-plot in a given coordinate system with the meridians and parallels for another coordinate system overlaid. The coordinate systems are specified through the pygaia.coordinates.Transformations enum. For example for Transformations.GAL2ECL the sky plot will be in Ecliptic coordinates with the Galactic coordinate grid overlaid. Parameters ---------- transformation - The coordinate transformation for which to make the plot (e.g., Transformations.GAL2ECL) Keywords -------- outfile - Save plot to this output file (default is to plot on screen). Make sure an extension (.pdf, .png, etc) is included. myProjection - Use this map projection (default is 'hammer', see basemap documentation) noTitle - If true do not include the plot title. noLabels - If true do not include plot labels. returnPlotObject - If true return the matplotlib object used for plotting. Further plot elements can then be added.
pygaia/plot/sky.py
def plotCoordinateTransformationOnSky(transformation, outfile=None, myProjection='hammer', noTitle=False, noLabels=False, returnPlotObject=False): """ Produce a sky-plot in a given coordinate system with the meridians and paralles for another coordinate system overlayed. The coordinate systems are specified through the pygaia.coordinates.Transformations enum. For example for Transformations.GAL2ECL the sky plot will be in Ecliptic coordinates with the Galactic coordinate grid overlayed. Keywords -------- transformation - The coordinate transformation for which to make the plot (e.g., Transformations.GAL2ECL) outfile - Save plot to this output file (default is to plot on screen). Make sure an extension (.pdf, .png, etc) is included. myProjection - Use this map projection (default is 'hammer', see basemap documentation) noTitle - If true do not include the plot title. noLabels - If true do not include plot labels. returnPlotObject - If true return the matplotlib object used for plotting. Further plot elements can then be added. """ ct = CoordinateTransformation(transformation) parallels=arange(-80.0,90.0,10.0) meridians=arange(0.0,375.0,15.0) meridianMax=degreesToRadians(85.0) parallelsMax=degreesToRadians(179.0) fig=plt.figure(figsize=(12,6)) basemapInstance=Basemap(projection=myProjection,lon_0=0, celestial=True) basemapInstance.drawmapboundary() for thetaDeg in parallels: phi=linspace(-pi,pi,1001) theta=zeros_like(phi)+degreesToRadians(thetaDeg) phirot, thetarot = ct.transformSkyCoordinates(phi, theta) x ,y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) indices=(phirot>=0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') indices=(phirot<0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') for phiDeg in meridians: theta=linspace(-meridianMax,meridianMax,1001) phi=zeros_like(theta)+degreesToRadians(phiDeg) phirot, thetarot = ct.transformSkyCoordinates(phi, theta) x ,y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) indices=(phirot>=0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') indices=(phirot<0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') if (not noTitle): plt.title("Sky projection in " + ct.transformationStrings[1] + " coordinates with the corresponding " + ct.transformationStrings[0] + " grid overlayed") if (not noLabels): for theta in arange(-60,90,30): phirot, thetarot=ct.transformSkyCoordinates(0.0,degreesToRadians(theta)) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(theta),fontsize=16,va='bottom',ha='center',color='r') for phi in arange(-180,0,30): phirot, thetarot=ct.transformSkyCoordinates(degreesToRadians(phi),0.0) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(phi),fontsize=16,va='bottom',ha='center',color='r') for phi in arange(30,180,30): phirot, thetarot=ct.transformSkyCoordinates(degreesToRadians(phi),0.0) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(phi),fontsize=16,va='bottom',ha='center',color='r') if (outfile != None): plt.savefig(outfile) elif (returnPlotObject): return plt.gca(), basemapInstance else: plt.show()
def plotCoordinateTransformationOnSky(transformation, outfile=None, myProjection='hammer', noTitle=False, noLabels=False, returnPlotObject=False): """ Produce a sky-plot in a given coordinate system with the meridians and paralles for another coordinate system overlayed. The coordinate systems are specified through the pygaia.coordinates.Transformations enum. For example for Transformations.GAL2ECL the sky plot will be in Ecliptic coordinates with the Galactic coordinate grid overlayed. Keywords -------- transformation - The coordinate transformation for which to make the plot (e.g., Transformations.GAL2ECL) outfile - Save plot to this output file (default is to plot on screen). Make sure an extension (.pdf, .png, etc) is included. myProjection - Use this map projection (default is 'hammer', see basemap documentation) noTitle - If true do not include the plot title. noLabels - If true do not include plot labels. returnPlotObject - If true return the matplotlib object used for plotting. Further plot elements can then be added. """ ct = CoordinateTransformation(transformation) parallels=arange(-80.0,90.0,10.0) meridians=arange(0.0,375.0,15.0) meridianMax=degreesToRadians(85.0) parallelsMax=degreesToRadians(179.0) fig=plt.figure(figsize=(12,6)) basemapInstance=Basemap(projection=myProjection,lon_0=0, celestial=True) basemapInstance.drawmapboundary() for thetaDeg in parallels: phi=linspace(-pi,pi,1001) theta=zeros_like(phi)+degreesToRadians(thetaDeg) phirot, thetarot = ct.transformSkyCoordinates(phi, theta) x ,y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) indices=(phirot>=0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') indices=(phirot<0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') for phiDeg in meridians: theta=linspace(-meridianMax,meridianMax,1001) phi=zeros_like(theta)+degreesToRadians(phiDeg) phirot, thetarot = ct.transformSkyCoordinates(phi, theta) x ,y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) indices=(phirot>=0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') indices=(phirot<0.0) xplot=x[indices] yplot=y[indices] if any(indices): xplot, yplot = _orderGridlinePoints(xplot, yplot) plt.plot(xplot,yplot,'b-') if (not noTitle): plt.title("Sky projection in " + ct.transformationStrings[1] + " coordinates with the corresponding " + ct.transformationStrings[0] + " grid overlayed") if (not noLabels): for theta in arange(-60,90,30): phirot, thetarot=ct.transformSkyCoordinates(0.0,degreesToRadians(theta)) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(theta),fontsize=16,va='bottom',ha='center',color='r') for phi in arange(-180,0,30): phirot, thetarot=ct.transformSkyCoordinates(degreesToRadians(phi),0.0) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(phi),fontsize=16,va='bottom',ha='center',color='r') for phi in arange(30,180,30): phirot, thetarot=ct.transformSkyCoordinates(degreesToRadians(phi),0.0) x, y = basemapInstance(radiansToDegrees(phirot), radiansToDegrees(thetarot)) plt.text(x,y,"${0}$".format(phi),fontsize=16,va='bottom',ha='center',color='r') if (outfile != None): plt.savefig(outfile) elif (returnPlotObject): return plt.gca(), basemapInstance else: plt.show()
[ "Produce", "a", "sky", "-", "plot", "in", "a", "given", "coordinate", "system", "with", "the", "meridians", "and", "paralles", "for", "another", "coordinate", "system", "overlayed", ".", "The", "coordinate", "systems", "are", "specified", "through", "the", "pygaia", ".", "coordinates", ".", "Transformations", "enum", ".", "For", "example", "for", "Transformations", ".", "GAL2ECL", "the", "sky", "plot", "will", "be", "in", "Ecliptic", "coordinates", "with", "the", "Galactic", "coordinate", "grid", "overlayed", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/plot/sky.py#L33-L127
[ "def", "plotCoordinateTransformationOnSky", "(", "transformation", ",", "outfile", "=", "None", ",", "myProjection", "=", "'hammer'", ",", "noTitle", "=", "False", ",", "noLabels", "=", "False", ",", "returnPlotObject", "=", "False", ")", ":", "ct", "=", "CoordinateTransformation", "(", "transformation", ")", "parallels", "=", "arange", "(", "-", "80.0", ",", "90.0", ",", "10.0", ")", "meridians", "=", "arange", "(", "0.0", ",", "375.0", ",", "15.0", ")", "meridianMax", "=", "degreesToRadians", "(", "85.0", ")", "parallelsMax", "=", "degreesToRadians", "(", "179.0", ")", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "12", ",", "6", ")", ")", "basemapInstance", "=", "Basemap", "(", "projection", "=", "myProjection", ",", "lon_0", "=", "0", ",", "celestial", "=", "True", ")", "basemapInstance", ".", "drawmapboundary", "(", ")", "for", "thetaDeg", "in", "parallels", ":", "phi", "=", "linspace", "(", "-", "pi", ",", "pi", ",", "1001", ")", "theta", "=", "zeros_like", "(", "phi", ")", "+", "degreesToRadians", "(", "thetaDeg", ")", "phirot", ",", "thetarot", "=", "ct", ".", "transformSkyCoordinates", "(", "phi", ",", "theta", ")", "x", ",", "y", "=", "basemapInstance", "(", "radiansToDegrees", "(", "phirot", ")", ",", "radiansToDegrees", "(", "thetarot", ")", ")", "indices", "=", "(", "phirot", ">=", "0.0", ")", "xplot", "=", "x", "[", "indices", "]", "yplot", "=", "y", "[", "indices", "]", "if", "any", "(", "indices", ")", ":", "xplot", ",", "yplot", "=", "_orderGridlinePoints", "(", "xplot", ",", "yplot", ")", "plt", ".", "plot", "(", "xplot", ",", "yplot", ",", "'b-'", ")", "indices", "=", "(", "phirot", "<", "0.0", ")", "xplot", "=", "x", "[", "indices", "]", "yplot", "=", "y", "[", "indices", "]", "if", "any", "(", "indices", ")", ":", "xplot", ",", "yplot", "=", "_orderGridlinePoints", "(", "xplot", ",", "yplot", ")", "plt", ".", "plot", "(", "xplot", ",", "yplot", ",", "'b-'", ")", "for", "phiDeg", "in", "meridians", ":", "theta", "=", "linspace", "(", "-", "meridianMax", ",", "meridianMax", ",", "1001", ")", "phi", "=", "zeros_like", "(", "theta", ")", "+", "degreesToRadians", "(", "phiDeg", ")", "phirot", ",", "thetarot", "=", "ct", ".", "transformSkyCoordinates", "(", "phi", ",", "theta", ")", "x", ",", "y", "=", "basemapInstance", "(", "radiansToDegrees", "(", "phirot", ")", ",", "radiansToDegrees", "(", "thetarot", ")", ")", "indices", "=", "(", "phirot", ">=", "0.0", ")", "xplot", "=", "x", "[", "indices", "]", "yplot", "=", "y", "[", "indices", "]", "if", "any", "(", "indices", ")", ":", "xplot", ",", "yplot", "=", "_orderGridlinePoints", "(", "xplot", ",", "yplot", ")", "plt", ".", "plot", "(", "xplot", ",", "yplot", ",", "'b-'", ")", "indices", "=", "(", "phirot", "<", "0.0", ")", "xplot", "=", "x", "[", "indices", "]", "yplot", "=", "y", "[", "indices", "]", "if", "any", "(", "indices", ")", ":", "xplot", ",", "yplot", "=", "_orderGridlinePoints", "(", "xplot", ",", "yplot", ")", "plt", ".", "plot", "(", "xplot", ",", "yplot", ",", "'b-'", ")", "if", "(", "not", "noTitle", ")", ":", "plt", ".", "title", "(", "\"Sky projection in \"", "+", "ct", ".", "transformationStrings", "[", "1", "]", "+", "\" coordinates with the corresponding \"", "+", "ct", ".", "transformationStrings", "[", "0", "]", "+", "\" grid overlayed\"", ")", "if", "(", "not", "noLabels", ")", ":", "for", "theta", "in", "arange", "(", "-", "60", ",", "90", ",", "30", ")", ":", "phirot", ",", "thetarot", "=", "ct", ".", "transformSkyCoordinates", "(", "0.0", ",", "degreesToRadians", "(", "theta", 
")", ")", "x", ",", "y", "=", "basemapInstance", "(", "radiansToDegrees", "(", "phirot", ")", ",", "radiansToDegrees", "(", "thetarot", ")", ")", "plt", ".", "text", "(", "x", ",", "y", ",", "\"${0}$\"", ".", "format", "(", "theta", ")", ",", "fontsize", "=", "16", ",", "va", "=", "'bottom'", ",", "ha", "=", "'center'", ",", "color", "=", "'r'", ")", "for", "phi", "in", "arange", "(", "-", "180", ",", "0", ",", "30", ")", ":", "phirot", ",", "thetarot", "=", "ct", ".", "transformSkyCoordinates", "(", "degreesToRadians", "(", "phi", ")", ",", "0.0", ")", "x", ",", "y", "=", "basemapInstance", "(", "radiansToDegrees", "(", "phirot", ")", ",", "radiansToDegrees", "(", "thetarot", ")", ")", "plt", ".", "text", "(", "x", ",", "y", ",", "\"${0}$\"", ".", "format", "(", "phi", ")", ",", "fontsize", "=", "16", ",", "va", "=", "'bottom'", ",", "ha", "=", "'center'", ",", "color", "=", "'r'", ")", "for", "phi", "in", "arange", "(", "30", ",", "180", ",", "30", ")", ":", "phirot", ",", "thetarot", "=", "ct", ".", "transformSkyCoordinates", "(", "degreesToRadians", "(", "phi", ")", ",", "0.0", ")", "x", ",", "y", "=", "basemapInstance", "(", "radiansToDegrees", "(", "phirot", ")", ",", "radiansToDegrees", "(", "thetarot", ")", ")", "plt", ".", "text", "(", "x", ",", "y", ",", "\"${0}$\"", ".", "format", "(", "phi", ")", ",", "fontsize", "=", "16", ",", "va", "=", "'bottom'", ",", "ha", "=", "'center'", ",", "color", "=", "'r'", ")", "if", "(", "outfile", "!=", "None", ")", ":", "plt", ".", "savefig", "(", "outfile", ")", "elif", "(", "returnPlotObject", ")", ":", "return", "plt", ".", "gca", "(", ")", ",", "basemapInstance", "else", ":", "plt", ".", "show", "(", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
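A usage sketch based on the docstring above; it assumes the Transformations enum is importable from pygaia.coordinates as the docstring states, and that matplotlib's basemap toolkit is installed.

from pygaia.coordinates import Transformations
from pygaia.plot.sky import plotCoordinateTransformationOnSky

# Ecliptic sky map with the Galactic grid overlaid, saved to file.
plotCoordinateTransformationOnSky(Transformations.GAL2ECL, outfile='gal2ecl.png')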
test
calcParallaxError
Calculate the parallax error for the given input source magnitude and colour. :argument args: command line arguments
examples/parallax_errors.py
def calcParallaxError(args): """ Calculate the parallax error for the given input source magnitude and colour. :argument args: command line arguments """ gmag=float(args['gmag']) vmini=float(args['vmini']) sigmaPar=parallaxErrorSkyAvg(gmag, vmini) gminv=gminvFromVmini(vmini) print("G = {0}".format(gmag)) print("V = {0}".format(gmag-gminv)) print("(V-I) = {0}".format(vmini)) print("(G-V) = {0}".format(gminv)) print("standard error = {0} muas".format(sigmaPar))
def calcParallaxError(args): """ Calculate the parallax error for the given input source magnitude and colour. :argument args: command line arguments """ gmag=float(args['gmag']) vmini=float(args['vmini']) sigmaPar=parallaxErrorSkyAvg(gmag, vmini) gminv=gminvFromVmini(vmini) print("G = {0}".format(gmag)) print("V = {0}".format(gmag-gminv)) print("(V-I) = {0}".format(vmini)) print("(G-V) = {0}".format(gminv)) print("standard error = {0} muas".format(sigmaPar))
[ "Calculate", "the", "parallax", "error", "for", "the", "given", "input", "source", "magnitude", "and", "colour", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/parallax_errors.py#L17-L31
[ "def", "calcParallaxError", "(", "args", ")", ":", "gmag", "=", "float", "(", "args", "[", "'gmag'", "]", ")", "vmini", "=", "float", "(", "args", "[", "'vmini'", "]", ")", "sigmaPar", "=", "parallaxErrorSkyAvg", "(", "gmag", ",", "vmini", ")", "gminv", "=", "gminvFromVmini", "(", "vmini", ")", "print", "(", "\"G = {0}\"", ".", "format", "(", "gmag", ")", ")", "print", "(", "\"V = {0}\"", ".", "format", "(", "gmag", "-", "gminv", ")", ")", "print", "(", "\"(V-I) = {0}\"", ".", "format", "(", "vmini", ")", ")", "print", "(", "\"(G-V) = {0}\"", ".", "format", "(", "gminv", ")", ")", "print", "(", "\"standard error = {0} muas\"", ".", "format", "(", "sigmaPar", ")", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
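A sketch of what the script computes, calling the library directly; the two import paths are assumptions based on the PyGaia package layout and do not appear in this record.

from pygaia.errors.astrometric import parallaxErrorSkyAvg      # assumed path
from pygaia.photometry.transformations import gminvFromVmini   # assumed path

gmag, vmini = 15.0, 0.75   # hypothetical source
gminv = gminvFromVmini(vmini)
print("V =", gmag - gminv)
print("parallax standard error =", parallaxErrorSkyAvg(gmag, vmini), "muas")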
test
parseCommandLineArguments
Set up command line parsing.
examples/parallax_errors.py
def parseCommandLineArguments(): """ Set up command line parsing. """ parser = argparse.ArgumentParser(description="Calculate parallax error for given G and (V-I)") parser.add_argument("gmag", help="G-band magnitude of source", type=float) parser.add_argument("vmini", help="(V-I) colour of source", type=float) args=vars(parser.parse_args()) return args
def parseCommandLineArguments(): """ Set up command line parsing. """ parser = argparse.ArgumentParser(description="Calculate parallax error for given G and (V-I)") parser.add_argument("gmag", help="G-band magnitude of source", type=float) parser.add_argument("vmini", help="(V-I) colour of source", type=float) args=vars(parser.parse_args()) return args
[ "Set", "up", "command", "line", "parsing", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/examples/parallax_errors.py#L33-L42
[ "def", "parseCommandLineArguments", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Calculate parallax error for given G and (V-I)\"", ")", "parser", ".", "add_argument", "(", "\"gmag\"", ",", "help", "=", "\"G-band magnitude of source\"", ",", "type", "=", "float", ")", "parser", ".", "add_argument", "(", "\"vmini\"", ",", "help", "=", "\"(V-I) colour of source\"", ",", "type", "=", "float", ")", "args", "=", "vars", "(", "parser", ".", "parse_args", "(", ")", ")", "return", "args" ]
ae972b0622a15f713ffae471f925eac25ccdae47
test
gMagnitudeError
Calculate the single-field-of-view-transit photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Returns ------- The G band photometric standard error in units of magnitude.
pygaia/errors/photometric.py
def gMagnitudeError(G): """ Calculate the single-field-of-view-transit photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Returns ------- The G band photometric standard error in units of magnitude. """ z=calcZ(G) return 1.0e-3*sqrt(0.04895*z*z + 1.8633*z + 0.0001985) * _scienceMargin
def gMagnitudeError(G): """ Calculate the single-field-of-view-transit photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Returns ------- The G band photometric standard error in units of magnitude. """ z=calcZ(G) return 1.0e-3*sqrt(0.04895*z*z + 1.8633*z + 0.0001985) * _scienceMargin
[ "Calculate", "the", "single", "-", "field", "-", "of", "-", "view", "-", "transit", "photometric", "standard", "error", "in", "the", "G", "band", "as", "a", "function", "of", "G", ".", "A", "20%", "margin", "is", "included", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/photometric.py#L20-L36
[ "def", "gMagnitudeError", "(", "G", ")", ":", "z", "=", "calcZ", "(", "G", ")", "return", "1.0e-3", "*", "sqrt", "(", "0.04895", "*", "z", "*", "z", "+", "1.8633", "*", "z", "+", "0.0001985", ")", "*", "_scienceMargin" ]
ae972b0622a15f713ffae471f925eac25ccdae47
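A standalone sketch of the single-transit error above. calcZ and the margin constant are not shown in the record; the z definition used here follows the published Gaia science-performance convention and is an assumption.

import numpy as np

def calc_z(gmag):
    # Assumed form: z = 10**(0.4*(max(G, 12) - 15)), i.e. bright stars
    # saturate to the G = 12 value.
    return 10.0**(0.4*(np.maximum(gmag, 12.0) - 15.0))

science_margin = 1.2   # the "20% margin" named in the docstring
for gmag in (13.0, 17.0, 20.0):
    z = calc_z(gmag)
    sigma_g = 1.0e-3*np.sqrt(0.04895*z*z + 1.8633*z + 0.0001985)*science_margin
    print(gmag, sigma_g)   # single-transit error in magnitudes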
test
gMagnitudeErrorEoM
Calculate the end-of-mission photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The end-of-mission G band photometric standard error in units of magnitude.
pygaia/errors/photometric.py
def gMagnitudeErrorEoM(G, nobs=70): """ Calculate the end of mission photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The G band photometric standard error in units of magnitude. """ return sqrt( (power(gMagnitudeError(G)/_scienceMargin,2) + _eomCalibrationFloorG*_eomCalibrationFloorG)/nobs ) * _scienceMargin
def gMagnitudeErrorEoM(G, nobs=70): """ Calculate the end of mission photometric standard error in the G band as a function of G. A 20% margin is included. Parameters ---------- G - Value(s) of G-band magnitude. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The G band photometric standard error in units of magnitude. """ return sqrt( (power(gMagnitudeError(G)/_scienceMargin,2) + _eomCalibrationFloorG*_eomCalibrationFloorG)/nobs ) * _scienceMargin
[ "Calculate", "the", "end", "of", "mission", "photometric", "standard", "error", "in", "the", "G", "band", "as", "a", "function", "of", "G", ".", "A", "20%", "margin", "is", "included", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/photometric.py#L38-L59
[ "def", "gMagnitudeErrorEoM", "(", "G", ",", "nobs", "=", "70", ")", ":", "return", "sqrt", "(", "(", "power", "(", "gMagnitudeError", "(", "G", ")", "/", "_scienceMargin", ",", "2", ")", "+", "_eomCalibrationFloorG", "*", "_eomCalibrationFloorG", ")", "/", "nobs", ")", "*", "_scienceMargin" ]
ae972b0622a15f713ffae471f925eac25ccdae47
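A standalone sketch of the end-of-mission combination above: strip the margin from the single-transit error, add the calibration floor in quadrature, average down over the transits, and re-apply the margin. The floor value is a made-up placeholder for _eomCalibrationFloorG.

import numpy as np

def eom_error(sigma_single, nobs=70, floor=1.0e-3, margin=1.2):
    # sqrt(((sigma/margin)**2 + floor**2)/nobs) * margin, as in the record.
    return np.sqrt(((sigma_single/margin)**2 + floor**2)/nobs)*margin

print(eom_error(3.0e-3))   # ~3.9e-4 mag for the default 70 transits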
test
bpMagnitudeError
Calculate the single-field-of-view-transit photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Returns ------- The BP band photometric standard error in units of magnitude.
pygaia/errors/photometric.py
def bpMagnitudeError(G, vmini): """ Calculate the single-field-of-view-transit photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Returns ------- The BP band photometric standard error in units of magnitude. """ z=calcZBpRp(G) a = -0.000562*power(vmini,3) + 0.044390*vmini*vmini + 0.355123*vmini + 1.043270 b = -0.000400*power(vmini,3) + 0.018878*vmini*vmini + 0.195768*vmini + 1.465592 c = +0.000262*power(vmini,3) + 0.060769*vmini*vmini - 0.205807*vmini - 1.866968 return 1.0e-3*sqrt(power(10.0,a)*z*z+power(10.0,b)*z+power(10.0,c))
def bpMagnitudeError(G, vmini): """ Calculate the single-field-of-view-transit photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Returns ------- The BP band photometric standard error in units of magnitude. """ z=calcZBpRp(G) a = -0.000562*power(vmini,3) + 0.044390*vmini*vmini + 0.355123*vmini + 1.043270 b = -0.000400*power(vmini,3) + 0.018878*vmini*vmini + 0.195768*vmini + 1.465592 c = +0.000262*power(vmini,3) + 0.060769*vmini*vmini - 0.205807*vmini - 1.866968 return 1.0e-3*sqrt(power(10.0,a)*z*z+power(10.0,b)*z+power(10.0,c))
[ "Calculate", "the", "single", "-", "field", "-", "of", "-", "view", "-", "transit", "photometric", "standard", "error", "in", "the", "BP", "band", "as", "a", "function", "of", "G", "and", "(", "V", "-", "I", ")", ".", "Note", ":", "this", "refers", "to", "the", "integrated", "flux", "from", "the", "BP", "spectrophotometer", ".", "A", "margin", "of", "20%", "is", "included", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/photometric.py#L61-L82
[ "def", "bpMagnitudeError", "(", "G", ",", "vmini", ")", ":", "z", "=", "calcZBpRp", "(", "G", ")", "a", "=", "-", "0.000562", "*", "power", "(", "vmini", ",", "3", ")", "+", "0.044390", "*", "vmini", "*", "vmini", "+", "0.355123", "*", "vmini", "+", "1.043270", "b", "=", "-", "0.000400", "*", "power", "(", "vmini", ",", "3", ")", "+", "0.018878", "*", "vmini", "*", "vmini", "+", "0.195768", "*", "vmini", "+", "1.465592", "c", "=", "+", "0.000262", "*", "power", "(", "vmini", ",", "3", ")", "+", "0.060769", "*", "vmini", "*", "vmini", "-", "0.205807", "*", "vmini", "-", "1.866968", "return", "1.0e-3", "*", "sqrt", "(", "power", "(", "10.0", ",", "a", ")", "*", "z", "*", "z", "+", "power", "(", "10.0", ",", "b", ")", "*", "z", "+", "power", "(", "10.0", ",", "c", ")", ")" ]
ae972b0622a15f713ffae471f925eac25ccdae47
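A usage sketch with the import grounded in the record's path field (pygaia/errors/photometric.py); the magnitude and colours are hypothetical.

from pygaia.errors.photometric import bpMagnitudeError

# Single-transit BP error for a G = 15 source across a range of colours.
for vmini in (0.0, 1.0, 2.0):
    print(vmini, bpMagnitudeError(15.0, vmini))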
test
bpMagnitudeErrorEoM
Calculate the end-of-mission photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The BP band photometric standard error in units of magnitude.
pygaia/errors/photometric.py
def bpMagnitudeErrorEoM(G, vmini, nobs=70): """ Calculate the end-of-mission photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The BP band photometric standard error in units of magnitude. """ return sqrt( (power(bpMagnitudeError(G, vmini)/_scienceMargin,2) + _eomCalibrationFloorBP*_eomCalibrationFloorBP)/nobs ) * _scienceMargin
def bpMagnitudeErrorEoM(G, vmini, nobs=70): """ Calculate the end-of-mission photometric standard error in the BP band as a function of G and (V-I). Note: this refers to the integrated flux from the BP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The BP band photometric standard error in units of magnitude. """ return sqrt( (power(bpMagnitudeError(G, vmini)/_scienceMargin,2) + _eomCalibrationFloorBP*_eomCalibrationFloorBP)/nobs ) * _scienceMargin
[ "Calculate", "the", "end", "-", "of", "-", "mission", "photometric", "standard", "error", "in", "the", "BP", "band", "as", "a", "function", "of", "G", "and", "(", "V", "-", "I", ")", ".", "Note", ":", "this", "refers", "to", "the", "integrated", "flux", "from", "the", "BP", "spectrophotometer", ".", "A", "margin", "of", "20%", "is", "included", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/photometric.py#L84-L106
[ "def", "bpMagnitudeErrorEoM", "(", "G", ",", "vmini", ",", "nobs", "=", "70", ")", ":", "return", "sqrt", "(", "(", "power", "(", "bpMagnitudeError", "(", "G", ",", "vmini", ")", "/", "_scienceMargin", ",", "2", ")", "+", "_eomCalibrationFloorBP", "*", "_eomCalibrationFloorBP", ")", "/", "nobs", ")", "*", "_scienceMargin" ]
ae972b0622a15f713ffae471f925eac25ccdae47
test
rpMagnitudeErrorEoM
Calculate the end-of-mission photometric standard error in the RP band as a function of G and (V-I). Note: this refers to the integrated flux from the RP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The RP band photometric standard error in units of magnitude.
pygaia/errors/photometric.py
def rpMagnitudeErrorEoM(G, vmini, nobs=70): """ Calculate the end-of-mission photometric standard error in the RP band as a function of G and (V-I). Note: this refers to the integrated flux from the RP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The RP band photometric standard error in units of magnitude. """ return sqrt( (power(rpMagnitudeError(G, vmini)/_scienceMargin,2) + _eomCalibrationFloorRP*_eomCalibrationFloorRP)/nobs ) * _scienceMargin
def rpMagnitudeErrorEoM(G, vmini, nobs=70): """ Calculate the end-of-mission photometric standard error in the RP band as a function of G and (V-I). Note: this refers to the integrated flux from the RP spectrophotometer. A margin of 20% is included. Parameters ---------- G - Value(s) of G-band magnitude. vmini - Value(s) of (V-I) colour. Keywords -------- nobs - Number of observations collected (default 70). Returns ------- The RP band photometric standard error in units of magnitude. """ return sqrt( (power(rpMagnitudeError(G, vmini)/_scienceMargin,2) + _eomCalibrationFloorRP*_eomCalibrationFloorRP)/nobs ) * _scienceMargin
[ "Calculate", "the", "end", "-", "of", "-", "mission", "photometric", "standard", "error", "in", "the", "RP", "band", "as", "a", "function", "of", "G", "and", "(", "V", "-", "I", ")", ".", "Note", ":", "this", "refers", "to", "the", "integrated", "flux", "from", "the", "RP", "spectrophotometer", ".", "A", "margin", "of", "20%", "is", "included", "." ]
agabrown/PyGaia
python
https://github.com/agabrown/PyGaia/blob/ae972b0622a15f713ffae471f925eac25ccdae47/pygaia/errors/photometric.py#L131-L153
[ "def", "rpMagnitudeErrorEoM", "(", "G", ",", "vmini", ",", "nobs", "=", "70", ")", ":", "return", "sqrt", "(", "(", "power", "(", "rpMagnitudeError", "(", "G", ",", "vmini", ")", "/", "_scienceMargin", ",", "2", ")", "+", "_eomCalibrationFloorRP", "*", "_eomCalibrationFloorRP", ")", "/", "nobs", ")", "*", "_scienceMargin" ]
ae972b0622a15f713ffae471f925eac25ccdae47
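A closing usage sketch for the two end-of-mission functions above; the source parameters and transit counts are hypothetical.

from pygaia.errors.photometric import bpMagnitudeErrorEoM, rpMagnitudeErrorEoM

# End-of-mission BP/RP errors for a G = 15, (V-I) = 1.0 source; more
# transits shrink the error roughly as 1/sqrt(nobs).
for nobs in (70, 100):
    print(nobs, bpMagnitudeErrorEoM(15.0, 1.0, nobs=nobs),
          rpMagnitudeErrorEoM(15.0, 1.0, nobs=nobs))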